1 /* 2 * PowerPC emulation for qemu: main translation routines. 3 * 4 * Copyright (c) 2003-2007 Jocelyn Mayer 5 * Copyright (C) 2011 Freescale Semiconductor, Inc. 6 * 7 * This library is free software; you can redistribute it and/or 8 * modify it under the terms of the GNU Lesser General Public 9 * License as published by the Free Software Foundation; either 10 * version 2.1 of the License, or (at your option) any later version. 11 * 12 * This library is distributed in the hope that it will be useful, 13 * but WITHOUT ANY WARRANTY; without even the implied warranty of 14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 15 * Lesser General Public License for more details. 16 * 17 * You should have received a copy of the GNU Lesser General Public 18 * License along with this library; if not, see <http://www.gnu.org/licenses/>. 19 */ 20 21 #include "qemu/osdep.h" 22 #include "cpu.h" 23 #include "internal.h" 24 #include "disas/disas.h" 25 #include "exec/exec-all.h" 26 #include "tcg/tcg-op.h" 27 #include "tcg/tcg-op-gvec.h" 28 #include "qemu/host-utils.h" 29 #include "qemu/main-loop.h" 30 #include "exec/cpu_ldst.h" 31 32 #include "exec/helper-proto.h" 33 #include "exec/helper-gen.h" 34 35 #include "trace-tcg.h" 36 #include "exec/translator.h" 37 #include "exec/log.h" 38 #include "qemu/atomic128.h" 39 #include "spr_tcg.h" 40 41 #include "qemu/qemu-print.h" 42 #include "qapi/error.h" 43 44 #define CPU_SINGLE_STEP 0x1 45 #define CPU_BRANCH_STEP 0x2 46 #define GDBSTUB_SINGLE_STEP 0x4 47 48 /* Include definitions for instructions classes and implementations flags */ 49 /* #define PPC_DEBUG_DISAS */ 50 51 #ifdef PPC_DEBUG_DISAS 52 # define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__) 53 #else 54 # define LOG_DISAS(...) do { } while (0) 55 #endif 56 /*****************************************************************************/ 57 /* Code translation helpers */ 58 59 /* global register indexes */ 60 static char cpu_reg_names[10 * 3 + 22 * 4 /* GPR */ 61 + 10 * 4 + 22 * 5 /* SPE GPRh */ 62 + 8 * 5 /* CRF */]; 63 static TCGv cpu_gpr[32]; 64 static TCGv cpu_gprh[32]; 65 static TCGv_i32 cpu_crf[8]; 66 static TCGv cpu_nip; 67 static TCGv cpu_msr; 68 static TCGv cpu_ctr; 69 static TCGv cpu_lr; 70 #if defined(TARGET_PPC64) 71 static TCGv cpu_cfar; 72 #endif 73 static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca, cpu_ov32, cpu_ca32; 74 static TCGv cpu_reserve; 75 static TCGv cpu_reserve_val; 76 static TCGv cpu_fpscr; 77 static TCGv_i32 cpu_access_type; 78 79 #include "exec/gen-icount.h" 80 81 void ppc_translate_init(void) 82 { 83 int i; 84 char *p; 85 size_t cpu_reg_names_size; 86 87 p = cpu_reg_names; 88 cpu_reg_names_size = sizeof(cpu_reg_names); 89 90 for (i = 0; i < 8; i++) { 91 snprintf(p, cpu_reg_names_size, "crf%d", i); 92 cpu_crf[i] = tcg_global_mem_new_i32(cpu_env, 93 offsetof(CPUPPCState, crf[i]), p); 94 p += 5; 95 cpu_reg_names_size -= 5; 96 } 97 98 for (i = 0; i < 32; i++) { 99 snprintf(p, cpu_reg_names_size, "r%d", i); 100 cpu_gpr[i] = tcg_global_mem_new(cpu_env, 101 offsetof(CPUPPCState, gpr[i]), p); 102 p += (i < 10) ? 3 : 4; 103 cpu_reg_names_size -= (i < 10) ? 3 : 4; 104 snprintf(p, cpu_reg_names_size, "r%dH", i); 105 cpu_gprh[i] = tcg_global_mem_new(cpu_env, 106 offsetof(CPUPPCState, gprh[i]), p); 107 p += (i < 10) ? 4 : 5; 108 cpu_reg_names_size -= (i < 10) ? 4 : 5; 109 } 110 111 cpu_nip = tcg_global_mem_new(cpu_env, 112 offsetof(CPUPPCState, nip), "nip"); 113 114 cpu_msr = tcg_global_mem_new(cpu_env, 115 offsetof(CPUPPCState, msr), "msr"); 116 117 cpu_ctr = tcg_global_mem_new(cpu_env, 118 offsetof(CPUPPCState, ctr), "ctr"); 119 120 cpu_lr = tcg_global_mem_new(cpu_env, 121 offsetof(CPUPPCState, lr), "lr"); 122 123 #if defined(TARGET_PPC64) 124 cpu_cfar = tcg_global_mem_new(cpu_env, 125 offsetof(CPUPPCState, cfar), "cfar"); 126 #endif 127 128 cpu_xer = tcg_global_mem_new(cpu_env, 129 offsetof(CPUPPCState, xer), "xer"); 130 cpu_so = tcg_global_mem_new(cpu_env, 131 offsetof(CPUPPCState, so), "SO"); 132 cpu_ov = tcg_global_mem_new(cpu_env, 133 offsetof(CPUPPCState, ov), "OV"); 134 cpu_ca = tcg_global_mem_new(cpu_env, 135 offsetof(CPUPPCState, ca), "CA"); 136 cpu_ov32 = tcg_global_mem_new(cpu_env, 137 offsetof(CPUPPCState, ov32), "OV32"); 138 cpu_ca32 = tcg_global_mem_new(cpu_env, 139 offsetof(CPUPPCState, ca32), "CA32"); 140 141 cpu_reserve = tcg_global_mem_new(cpu_env, 142 offsetof(CPUPPCState, reserve_addr), 143 "reserve_addr"); 144 cpu_reserve_val = tcg_global_mem_new(cpu_env, 145 offsetof(CPUPPCState, reserve_val), 146 "reserve_val"); 147 148 cpu_fpscr = tcg_global_mem_new(cpu_env, 149 offsetof(CPUPPCState, fpscr), "fpscr"); 150 151 cpu_access_type = tcg_global_mem_new_i32(cpu_env, 152 offsetof(CPUPPCState, access_type), 153 "access_type"); 154 } 155 156 /* internal defines */ 157 struct DisasContext { 158 DisasContextBase base; 159 target_ulong cia; /* current instruction address */ 160 uint32_t opcode; 161 /* Routine used to access memory */ 162 bool pr, hv, dr, le_mode; 163 bool lazy_tlb_flush; 164 bool need_access_type; 165 int mem_idx; 166 int access_type; 167 /* Translation flags */ 168 MemOp default_tcg_memop_mask; 169 #if defined(TARGET_PPC64) 170 bool sf_mode; 171 bool has_cfar; 172 #endif 173 bool fpu_enabled; 174 bool altivec_enabled; 175 bool vsx_enabled; 176 bool spe_enabled; 177 bool tm_enabled; 178 bool gtse; 179 ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */ 180 int singlestep_enabled; 181 uint32_t flags; 182 uint64_t insns_flags; 183 uint64_t insns_flags2; 184 }; 185 186 #define DISAS_EXIT DISAS_TARGET_0 /* exit to main loop, pc updated */ 187 #define DISAS_EXIT_UPDATE DISAS_TARGET_1 /* exit to main loop, pc stale */ 188 #define DISAS_CHAIN DISAS_TARGET_2 /* lookup next tb, pc updated */ 189 #define DISAS_CHAIN_UPDATE DISAS_TARGET_3 /* lookup next tb, pc stale */ 190 191 /* Return true iff byteswap is needed in a scalar memop */ 192 static inline bool need_byteswap(const DisasContext *ctx) 193 { 194 #if defined(TARGET_WORDS_BIGENDIAN) 195 return ctx->le_mode; 196 #else 197 return !ctx->le_mode; 198 #endif 199 } 200 201 /* True when active word size < size of target_long. */ 202 #ifdef TARGET_PPC64 203 # define NARROW_MODE(C) (!(C)->sf_mode) 204 #else 205 # define NARROW_MODE(C) 0 206 #endif 207 208 struct opc_handler_t { 209 /* invalid bits for instruction 1 (Rc(opcode) == 0) */ 210 uint32_t inval1; 211 /* invalid bits for instruction 2 (Rc(opcode) == 1) */ 212 uint32_t inval2; 213 /* instruction type */ 214 uint64_t type; 215 /* extended instruction type */ 216 uint64_t type2; 217 /* handler */ 218 void (*handler)(DisasContext *ctx); 219 #if defined(PPC_DUMP_CPU) 220 const char *oname; 221 #endif 222 }; 223 224 /* SPR load/store helpers */ 225 static inline void gen_load_spr(TCGv t, int reg) 226 { 227 tcg_gen_ld_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg])); 228 } 229 230 static inline void gen_store_spr(int reg, TCGv t) 231 { 232 tcg_gen_st_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg])); 233 } 234 235 static inline void gen_set_access_type(DisasContext *ctx, int access_type) 236 { 237 if (ctx->need_access_type && ctx->access_type != access_type) { 238 tcg_gen_movi_i32(cpu_access_type, access_type); 239 ctx->access_type = access_type; 240 } 241 } 242 243 static inline void gen_update_nip(DisasContext *ctx, target_ulong nip) 244 { 245 if (NARROW_MODE(ctx)) { 246 nip = (uint32_t)nip; 247 } 248 tcg_gen_movi_tl(cpu_nip, nip); 249 } 250 251 static void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error) 252 { 253 TCGv_i32 t0, t1; 254 255 /* 256 * These are all synchronous exceptions, we set the PC back to the 257 * faulting instruction 258 */ 259 gen_update_nip(ctx, ctx->cia); 260 t0 = tcg_const_i32(excp); 261 t1 = tcg_const_i32(error); 262 gen_helper_raise_exception_err(cpu_env, t0, t1); 263 tcg_temp_free_i32(t0); 264 tcg_temp_free_i32(t1); 265 ctx->base.is_jmp = DISAS_NORETURN; 266 } 267 268 static void gen_exception(DisasContext *ctx, uint32_t excp) 269 { 270 TCGv_i32 t0; 271 272 /* 273 * These are all synchronous exceptions, we set the PC back to the 274 * faulting instruction 275 */ 276 gen_update_nip(ctx, ctx->cia); 277 t0 = tcg_const_i32(excp); 278 gen_helper_raise_exception(cpu_env, t0); 279 tcg_temp_free_i32(t0); 280 ctx->base.is_jmp = DISAS_NORETURN; 281 } 282 283 static void gen_exception_nip(DisasContext *ctx, uint32_t excp, 284 target_ulong nip) 285 { 286 TCGv_i32 t0; 287 288 gen_update_nip(ctx, nip); 289 t0 = tcg_const_i32(excp); 290 gen_helper_raise_exception(cpu_env, t0); 291 tcg_temp_free_i32(t0); 292 ctx->base.is_jmp = DISAS_NORETURN; 293 } 294 295 static void gen_icount_io_start(DisasContext *ctx) 296 { 297 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) { 298 gen_io_start(); 299 /* 300 * An I/O instruction must be last in the TB. 301 * Chain to the next TB, and let the code from gen_tb_start 302 * decide if we need to return to the main loop. 303 * Doing this first also allows this value to be overridden. 304 */ 305 ctx->base.is_jmp = DISAS_TOO_MANY; 306 } 307 } 308 309 /* 310 * Tells the caller what is the appropriate exception to generate and prepares 311 * SPR registers for this exception. 312 * 313 * The exception can be either POWERPC_EXCP_TRACE (on most PowerPCs) or 314 * POWERPC_EXCP_DEBUG (on BookE). 315 */ 316 static uint32_t gen_prep_dbgex(DisasContext *ctx) 317 { 318 if (ctx->flags & POWERPC_FLAG_DE) { 319 target_ulong dbsr = 0; 320 if (ctx->singlestep_enabled & CPU_SINGLE_STEP) { 321 dbsr = DBCR0_ICMP; 322 } else { 323 /* Must have been branch */ 324 dbsr = DBCR0_BRT; 325 } 326 TCGv t0 = tcg_temp_new(); 327 gen_load_spr(t0, SPR_BOOKE_DBSR); 328 tcg_gen_ori_tl(t0, t0, dbsr); 329 gen_store_spr(SPR_BOOKE_DBSR, t0); 330 tcg_temp_free(t0); 331 return POWERPC_EXCP_DEBUG; 332 } else { 333 return POWERPC_EXCP_TRACE; 334 } 335 } 336 337 static void gen_debug_exception(DisasContext *ctx) 338 { 339 gen_helper_raise_exception(cpu_env, tcg_constant_i32(EXCP_DEBUG)); 340 ctx->base.is_jmp = DISAS_NORETURN; 341 } 342 343 static inline void gen_inval_exception(DisasContext *ctx, uint32_t error) 344 { 345 /* Will be converted to program check if needed */ 346 gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_INVAL | error); 347 } 348 349 static inline void gen_priv_exception(DisasContext *ctx, uint32_t error) 350 { 351 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_PRIV | error); 352 } 353 354 static inline void gen_hvpriv_exception(DisasContext *ctx, uint32_t error) 355 { 356 /* Will be converted to program check if needed */ 357 gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_PRIV | error); 358 } 359 360 /*****************************************************************************/ 361 /* SPR READ/WRITE CALLBACKS */ 362 363 void spr_noaccess(DisasContext *ctx, int gprn, int sprn) 364 { 365 #if 0 366 sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5); 367 printf("ERROR: try to access SPR %d !\n", sprn); 368 #endif 369 } 370 371 /* #define PPC_DUMP_SPR_ACCESSES */ 372 373 /* 374 * Generic callbacks: 375 * do nothing but store/retrieve spr value 376 */ 377 static void spr_load_dump_spr(int sprn) 378 { 379 #ifdef PPC_DUMP_SPR_ACCESSES 380 TCGv_i32 t0 = tcg_const_i32(sprn); 381 gen_helper_load_dump_spr(cpu_env, t0); 382 tcg_temp_free_i32(t0); 383 #endif 384 } 385 386 void spr_read_generic(DisasContext *ctx, int gprn, int sprn) 387 { 388 gen_load_spr(cpu_gpr[gprn], sprn); 389 spr_load_dump_spr(sprn); 390 } 391 392 static void spr_store_dump_spr(int sprn) 393 { 394 #ifdef PPC_DUMP_SPR_ACCESSES 395 TCGv_i32 t0 = tcg_const_i32(sprn); 396 gen_helper_store_dump_spr(cpu_env, t0); 397 tcg_temp_free_i32(t0); 398 #endif 399 } 400 401 void spr_write_generic(DisasContext *ctx, int sprn, int gprn) 402 { 403 gen_store_spr(sprn, cpu_gpr[gprn]); 404 spr_store_dump_spr(sprn); 405 } 406 407 #if !defined(CONFIG_USER_ONLY) 408 void spr_write_generic32(DisasContext *ctx, int sprn, int gprn) 409 { 410 #ifdef TARGET_PPC64 411 TCGv t0 = tcg_temp_new(); 412 tcg_gen_ext32u_tl(t0, cpu_gpr[gprn]); 413 gen_store_spr(sprn, t0); 414 tcg_temp_free(t0); 415 spr_store_dump_spr(sprn); 416 #else 417 spr_write_generic(ctx, sprn, gprn); 418 #endif 419 } 420 421 void spr_write_clear(DisasContext *ctx, int sprn, int gprn) 422 { 423 TCGv t0 = tcg_temp_new(); 424 TCGv t1 = tcg_temp_new(); 425 gen_load_spr(t0, sprn); 426 tcg_gen_neg_tl(t1, cpu_gpr[gprn]); 427 tcg_gen_and_tl(t0, t0, t1); 428 gen_store_spr(sprn, t0); 429 tcg_temp_free(t0); 430 tcg_temp_free(t1); 431 } 432 433 void spr_access_nop(DisasContext *ctx, int sprn, int gprn) 434 { 435 } 436 437 #endif 438 439 /* SPR common to all PowerPC */ 440 /* XER */ 441 void spr_read_xer(DisasContext *ctx, int gprn, int sprn) 442 { 443 TCGv dst = cpu_gpr[gprn]; 444 TCGv t0 = tcg_temp_new(); 445 TCGv t1 = tcg_temp_new(); 446 TCGv t2 = tcg_temp_new(); 447 tcg_gen_mov_tl(dst, cpu_xer); 448 tcg_gen_shli_tl(t0, cpu_so, XER_SO); 449 tcg_gen_shli_tl(t1, cpu_ov, XER_OV); 450 tcg_gen_shli_tl(t2, cpu_ca, XER_CA); 451 tcg_gen_or_tl(t0, t0, t1); 452 tcg_gen_or_tl(dst, dst, t2); 453 tcg_gen_or_tl(dst, dst, t0); 454 if (is_isa300(ctx)) { 455 tcg_gen_shli_tl(t0, cpu_ov32, XER_OV32); 456 tcg_gen_or_tl(dst, dst, t0); 457 tcg_gen_shli_tl(t0, cpu_ca32, XER_CA32); 458 tcg_gen_or_tl(dst, dst, t0); 459 } 460 tcg_temp_free(t0); 461 tcg_temp_free(t1); 462 tcg_temp_free(t2); 463 } 464 465 void spr_write_xer(DisasContext *ctx, int sprn, int gprn) 466 { 467 TCGv src = cpu_gpr[gprn]; 468 /* Write all flags, while reading back check for isa300 */ 469 tcg_gen_andi_tl(cpu_xer, src, 470 ~((1u << XER_SO) | 471 (1u << XER_OV) | (1u << XER_OV32) | 472 (1u << XER_CA) | (1u << XER_CA32))); 473 tcg_gen_extract_tl(cpu_ov32, src, XER_OV32, 1); 474 tcg_gen_extract_tl(cpu_ca32, src, XER_CA32, 1); 475 tcg_gen_extract_tl(cpu_so, src, XER_SO, 1); 476 tcg_gen_extract_tl(cpu_ov, src, XER_OV, 1); 477 tcg_gen_extract_tl(cpu_ca, src, XER_CA, 1); 478 } 479 480 /* LR */ 481 void spr_read_lr(DisasContext *ctx, int gprn, int sprn) 482 { 483 tcg_gen_mov_tl(cpu_gpr[gprn], cpu_lr); 484 } 485 486 void spr_write_lr(DisasContext *ctx, int sprn, int gprn) 487 { 488 tcg_gen_mov_tl(cpu_lr, cpu_gpr[gprn]); 489 } 490 491 /* CFAR */ 492 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) 493 void spr_read_cfar(DisasContext *ctx, int gprn, int sprn) 494 { 495 tcg_gen_mov_tl(cpu_gpr[gprn], cpu_cfar); 496 } 497 498 void spr_write_cfar(DisasContext *ctx, int sprn, int gprn) 499 { 500 tcg_gen_mov_tl(cpu_cfar, cpu_gpr[gprn]); 501 } 502 #endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */ 503 504 /* CTR */ 505 void spr_read_ctr(DisasContext *ctx, int gprn, int sprn) 506 { 507 tcg_gen_mov_tl(cpu_gpr[gprn], cpu_ctr); 508 } 509 510 void spr_write_ctr(DisasContext *ctx, int sprn, int gprn) 511 { 512 tcg_gen_mov_tl(cpu_ctr, cpu_gpr[gprn]); 513 } 514 515 /* User read access to SPR */ 516 /* USPRx */ 517 /* UMMCRx */ 518 /* UPMCx */ 519 /* USIA */ 520 /* UDECR */ 521 void spr_read_ureg(DisasContext *ctx, int gprn, int sprn) 522 { 523 gen_load_spr(cpu_gpr[gprn], sprn + 0x10); 524 } 525 526 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) 527 void spr_write_ureg(DisasContext *ctx, int sprn, int gprn) 528 { 529 gen_store_spr(sprn + 0x10, cpu_gpr[gprn]); 530 } 531 #endif 532 533 /* SPR common to all non-embedded PowerPC */ 534 /* DECR */ 535 #if !defined(CONFIG_USER_ONLY) 536 void spr_read_decr(DisasContext *ctx, int gprn, int sprn) 537 { 538 gen_icount_io_start(ctx); 539 gen_helper_load_decr(cpu_gpr[gprn], cpu_env); 540 } 541 542 void spr_write_decr(DisasContext *ctx, int sprn, int gprn) 543 { 544 gen_icount_io_start(ctx); 545 gen_helper_store_decr(cpu_env, cpu_gpr[gprn]); 546 } 547 #endif 548 549 /* SPR common to all non-embedded PowerPC, except 601 */ 550 /* Time base */ 551 void spr_read_tbl(DisasContext *ctx, int gprn, int sprn) 552 { 553 gen_icount_io_start(ctx); 554 gen_helper_load_tbl(cpu_gpr[gprn], cpu_env); 555 } 556 557 void spr_read_tbu(DisasContext *ctx, int gprn, int sprn) 558 { 559 gen_icount_io_start(ctx); 560 gen_helper_load_tbu(cpu_gpr[gprn], cpu_env); 561 } 562 563 void spr_read_atbl(DisasContext *ctx, int gprn, int sprn) 564 { 565 gen_helper_load_atbl(cpu_gpr[gprn], cpu_env); 566 } 567 568 void spr_read_atbu(DisasContext *ctx, int gprn, int sprn) 569 { 570 gen_helper_load_atbu(cpu_gpr[gprn], cpu_env); 571 } 572 573 #if !defined(CONFIG_USER_ONLY) 574 void spr_write_tbl(DisasContext *ctx, int sprn, int gprn) 575 { 576 gen_icount_io_start(ctx); 577 gen_helper_store_tbl(cpu_env, cpu_gpr[gprn]); 578 } 579 580 void spr_write_tbu(DisasContext *ctx, int sprn, int gprn) 581 { 582 gen_icount_io_start(ctx); 583 gen_helper_store_tbu(cpu_env, cpu_gpr[gprn]); 584 } 585 586 void spr_write_atbl(DisasContext *ctx, int sprn, int gprn) 587 { 588 gen_helper_store_atbl(cpu_env, cpu_gpr[gprn]); 589 } 590 591 void spr_write_atbu(DisasContext *ctx, int sprn, int gprn) 592 { 593 gen_helper_store_atbu(cpu_env, cpu_gpr[gprn]); 594 } 595 596 #if defined(TARGET_PPC64) 597 void spr_read_purr(DisasContext *ctx, int gprn, int sprn) 598 { 599 gen_icount_io_start(ctx); 600 gen_helper_load_purr(cpu_gpr[gprn], cpu_env); 601 } 602 603 void spr_write_purr(DisasContext *ctx, int sprn, int gprn) 604 { 605 gen_icount_io_start(ctx); 606 gen_helper_store_purr(cpu_env, cpu_gpr[gprn]); 607 } 608 609 /* HDECR */ 610 void spr_read_hdecr(DisasContext *ctx, int gprn, int sprn) 611 { 612 gen_icount_io_start(ctx); 613 gen_helper_load_hdecr(cpu_gpr[gprn], cpu_env); 614 } 615 616 void spr_write_hdecr(DisasContext *ctx, int sprn, int gprn) 617 { 618 gen_icount_io_start(ctx); 619 gen_helper_store_hdecr(cpu_env, cpu_gpr[gprn]); 620 } 621 622 void spr_read_vtb(DisasContext *ctx, int gprn, int sprn) 623 { 624 gen_icount_io_start(ctx); 625 gen_helper_load_vtb(cpu_gpr[gprn], cpu_env); 626 } 627 628 void spr_write_vtb(DisasContext *ctx, int sprn, int gprn) 629 { 630 gen_icount_io_start(ctx); 631 gen_helper_store_vtb(cpu_env, cpu_gpr[gprn]); 632 } 633 634 void spr_write_tbu40(DisasContext *ctx, int sprn, int gprn) 635 { 636 gen_icount_io_start(ctx); 637 gen_helper_store_tbu40(cpu_env, cpu_gpr[gprn]); 638 } 639 640 #endif 641 #endif 642 643 #if !defined(CONFIG_USER_ONLY) 644 /* IBAT0U...IBAT0U */ 645 /* IBAT0L...IBAT7L */ 646 void spr_read_ibat(DisasContext *ctx, int gprn, int sprn) 647 { 648 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env, 649 offsetof(CPUPPCState, 650 IBAT[sprn & 1][(sprn - SPR_IBAT0U) / 2])); 651 } 652 653 void spr_read_ibat_h(DisasContext *ctx, int gprn, int sprn) 654 { 655 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env, 656 offsetof(CPUPPCState, 657 IBAT[sprn & 1][((sprn - SPR_IBAT4U) / 2) + 4])); 658 } 659 660 void spr_write_ibatu(DisasContext *ctx, int sprn, int gprn) 661 { 662 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2); 663 gen_helper_store_ibatu(cpu_env, t0, cpu_gpr[gprn]); 664 tcg_temp_free_i32(t0); 665 } 666 667 void spr_write_ibatu_h(DisasContext *ctx, int sprn, int gprn) 668 { 669 TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_IBAT4U) / 2) + 4); 670 gen_helper_store_ibatu(cpu_env, t0, cpu_gpr[gprn]); 671 tcg_temp_free_i32(t0); 672 } 673 674 void spr_write_ibatl(DisasContext *ctx, int sprn, int gprn) 675 { 676 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0L) / 2); 677 gen_helper_store_ibatl(cpu_env, t0, cpu_gpr[gprn]); 678 tcg_temp_free_i32(t0); 679 } 680 681 void spr_write_ibatl_h(DisasContext *ctx, int sprn, int gprn) 682 { 683 TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_IBAT4L) / 2) + 4); 684 gen_helper_store_ibatl(cpu_env, t0, cpu_gpr[gprn]); 685 tcg_temp_free_i32(t0); 686 } 687 688 /* DBAT0U...DBAT7U */ 689 /* DBAT0L...DBAT7L */ 690 void spr_read_dbat(DisasContext *ctx, int gprn, int sprn) 691 { 692 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env, 693 offsetof(CPUPPCState, 694 DBAT[sprn & 1][(sprn - SPR_DBAT0U) / 2])); 695 } 696 697 void spr_read_dbat_h(DisasContext *ctx, int gprn, int sprn) 698 { 699 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env, 700 offsetof(CPUPPCState, 701 DBAT[sprn & 1][((sprn - SPR_DBAT4U) / 2) + 4])); 702 } 703 704 void spr_write_dbatu(DisasContext *ctx, int sprn, int gprn) 705 { 706 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_DBAT0U) / 2); 707 gen_helper_store_dbatu(cpu_env, t0, cpu_gpr[gprn]); 708 tcg_temp_free_i32(t0); 709 } 710 711 void spr_write_dbatu_h(DisasContext *ctx, int sprn, int gprn) 712 { 713 TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_DBAT4U) / 2) + 4); 714 gen_helper_store_dbatu(cpu_env, t0, cpu_gpr[gprn]); 715 tcg_temp_free_i32(t0); 716 } 717 718 void spr_write_dbatl(DisasContext *ctx, int sprn, int gprn) 719 { 720 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_DBAT0L) / 2); 721 gen_helper_store_dbatl(cpu_env, t0, cpu_gpr[gprn]); 722 tcg_temp_free_i32(t0); 723 } 724 725 void spr_write_dbatl_h(DisasContext *ctx, int sprn, int gprn) 726 { 727 TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_DBAT4L) / 2) + 4); 728 gen_helper_store_dbatl(cpu_env, t0, cpu_gpr[gprn]); 729 tcg_temp_free_i32(t0); 730 } 731 732 /* SDR1 */ 733 void spr_write_sdr1(DisasContext *ctx, int sprn, int gprn) 734 { 735 gen_helper_store_sdr1(cpu_env, cpu_gpr[gprn]); 736 } 737 738 #if defined(TARGET_PPC64) 739 /* 64 bits PowerPC specific SPRs */ 740 /* PIDR */ 741 void spr_write_pidr(DisasContext *ctx, int sprn, int gprn) 742 { 743 gen_helper_store_pidr(cpu_env, cpu_gpr[gprn]); 744 } 745 746 void spr_write_lpidr(DisasContext *ctx, int sprn, int gprn) 747 { 748 gen_helper_store_lpidr(cpu_env, cpu_gpr[gprn]); 749 } 750 751 void spr_read_hior(DisasContext *ctx, int gprn, int sprn) 752 { 753 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env, offsetof(CPUPPCState, excp_prefix)); 754 } 755 756 void spr_write_hior(DisasContext *ctx, int sprn, int gprn) 757 { 758 TCGv t0 = tcg_temp_new(); 759 tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0x3FFFFF00000ULL); 760 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_prefix)); 761 tcg_temp_free(t0); 762 } 763 void spr_write_ptcr(DisasContext *ctx, int sprn, int gprn) 764 { 765 gen_helper_store_ptcr(cpu_env, cpu_gpr[gprn]); 766 } 767 768 void spr_write_pcr(DisasContext *ctx, int sprn, int gprn) 769 { 770 gen_helper_store_pcr(cpu_env, cpu_gpr[gprn]); 771 } 772 773 /* DPDES */ 774 void spr_read_dpdes(DisasContext *ctx, int gprn, int sprn) 775 { 776 gen_helper_load_dpdes(cpu_gpr[gprn], cpu_env); 777 } 778 779 void spr_write_dpdes(DisasContext *ctx, int sprn, int gprn) 780 { 781 gen_helper_store_dpdes(cpu_env, cpu_gpr[gprn]); 782 } 783 #endif 784 #endif 785 786 /* PowerPC 601 specific registers */ 787 /* RTC */ 788 void spr_read_601_rtcl(DisasContext *ctx, int gprn, int sprn) 789 { 790 gen_helper_load_601_rtcl(cpu_gpr[gprn], cpu_env); 791 } 792 793 void spr_read_601_rtcu(DisasContext *ctx, int gprn, int sprn) 794 { 795 gen_helper_load_601_rtcu(cpu_gpr[gprn], cpu_env); 796 } 797 798 #if !defined(CONFIG_USER_ONLY) 799 void spr_write_601_rtcu(DisasContext *ctx, int sprn, int gprn) 800 { 801 gen_helper_store_601_rtcu(cpu_env, cpu_gpr[gprn]); 802 } 803 804 void spr_write_601_rtcl(DisasContext *ctx, int sprn, int gprn) 805 { 806 gen_helper_store_601_rtcl(cpu_env, cpu_gpr[gprn]); 807 } 808 809 void spr_write_hid0_601(DisasContext *ctx, int sprn, int gprn) 810 { 811 gen_helper_store_hid0_601(cpu_env, cpu_gpr[gprn]); 812 /* Must stop the translation as endianness may have changed */ 813 ctx->base.is_jmp = DISAS_EXIT_UPDATE; 814 } 815 #endif 816 817 /* Unified bats */ 818 #if !defined(CONFIG_USER_ONLY) 819 void spr_read_601_ubat(DisasContext *ctx, int gprn, int sprn) 820 { 821 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env, 822 offsetof(CPUPPCState, 823 IBAT[sprn & 1][(sprn - SPR_IBAT0U) / 2])); 824 } 825 826 void spr_write_601_ubatu(DisasContext *ctx, int sprn, int gprn) 827 { 828 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2); 829 gen_helper_store_601_batl(cpu_env, t0, cpu_gpr[gprn]); 830 tcg_temp_free_i32(t0); 831 } 832 833 void spr_write_601_ubatl(DisasContext *ctx, int sprn, int gprn) 834 { 835 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2); 836 gen_helper_store_601_batu(cpu_env, t0, cpu_gpr[gprn]); 837 tcg_temp_free_i32(t0); 838 } 839 #endif 840 841 /* PowerPC 40x specific registers */ 842 #if !defined(CONFIG_USER_ONLY) 843 void spr_read_40x_pit(DisasContext *ctx, int gprn, int sprn) 844 { 845 gen_icount_io_start(ctx); 846 gen_helper_load_40x_pit(cpu_gpr[gprn], cpu_env); 847 } 848 849 void spr_write_40x_pit(DisasContext *ctx, int sprn, int gprn) 850 { 851 gen_icount_io_start(ctx); 852 gen_helper_store_40x_pit(cpu_env, cpu_gpr[gprn]); 853 } 854 855 void spr_write_40x_dbcr0(DisasContext *ctx, int sprn, int gprn) 856 { 857 gen_icount_io_start(ctx); 858 gen_store_spr(sprn, cpu_gpr[gprn]); 859 gen_helper_store_40x_dbcr0(cpu_env, cpu_gpr[gprn]); 860 /* We must stop translation as we may have rebooted */ 861 ctx->base.is_jmp = DISAS_EXIT_UPDATE; 862 } 863 864 void spr_write_40x_sler(DisasContext *ctx, int sprn, int gprn) 865 { 866 gen_icount_io_start(ctx); 867 gen_helper_store_40x_sler(cpu_env, cpu_gpr[gprn]); 868 } 869 870 void spr_write_booke_tcr(DisasContext *ctx, int sprn, int gprn) 871 { 872 gen_icount_io_start(ctx); 873 gen_helper_store_booke_tcr(cpu_env, cpu_gpr[gprn]); 874 } 875 876 void spr_write_booke_tsr(DisasContext *ctx, int sprn, int gprn) 877 { 878 gen_icount_io_start(ctx); 879 gen_helper_store_booke_tsr(cpu_env, cpu_gpr[gprn]); 880 } 881 #endif 882 883 /* PowerPC 403 specific registers */ 884 /* PBL1 / PBU1 / PBL2 / PBU2 */ 885 #if !defined(CONFIG_USER_ONLY) 886 void spr_read_403_pbr(DisasContext *ctx, int gprn, int sprn) 887 { 888 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env, 889 offsetof(CPUPPCState, pb[sprn - SPR_403_PBL1])); 890 } 891 892 void spr_write_403_pbr(DisasContext *ctx, int sprn, int gprn) 893 { 894 TCGv_i32 t0 = tcg_const_i32(sprn - SPR_403_PBL1); 895 gen_helper_store_403_pbr(cpu_env, t0, cpu_gpr[gprn]); 896 tcg_temp_free_i32(t0); 897 } 898 899 void spr_write_pir(DisasContext *ctx, int sprn, int gprn) 900 { 901 TCGv t0 = tcg_temp_new(); 902 tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0xF); 903 gen_store_spr(SPR_PIR, t0); 904 tcg_temp_free(t0); 905 } 906 #endif 907 908 /* SPE specific registers */ 909 void spr_read_spefscr(DisasContext *ctx, int gprn, int sprn) 910 { 911 TCGv_i32 t0 = tcg_temp_new_i32(); 912 tcg_gen_ld_i32(t0, cpu_env, offsetof(CPUPPCState, spe_fscr)); 913 tcg_gen_extu_i32_tl(cpu_gpr[gprn], t0); 914 tcg_temp_free_i32(t0); 915 } 916 917 void spr_write_spefscr(DisasContext *ctx, int sprn, int gprn) 918 { 919 TCGv_i32 t0 = tcg_temp_new_i32(); 920 tcg_gen_trunc_tl_i32(t0, cpu_gpr[gprn]); 921 tcg_gen_st_i32(t0, cpu_env, offsetof(CPUPPCState, spe_fscr)); 922 tcg_temp_free_i32(t0); 923 } 924 925 #if !defined(CONFIG_USER_ONLY) 926 /* Callback used to write the exception vector base */ 927 void spr_write_excp_prefix(DisasContext *ctx, int sprn, int gprn) 928 { 929 TCGv t0 = tcg_temp_new(); 930 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUPPCState, ivpr_mask)); 931 tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]); 932 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_prefix)); 933 gen_store_spr(sprn, t0); 934 tcg_temp_free(t0); 935 } 936 937 void spr_write_excp_vector(DisasContext *ctx, int sprn, int gprn) 938 { 939 int sprn_offs; 940 941 if (sprn >= SPR_BOOKE_IVOR0 && sprn <= SPR_BOOKE_IVOR15) { 942 sprn_offs = sprn - SPR_BOOKE_IVOR0; 943 } else if (sprn >= SPR_BOOKE_IVOR32 && sprn <= SPR_BOOKE_IVOR37) { 944 sprn_offs = sprn - SPR_BOOKE_IVOR32 + 32; 945 } else if (sprn >= SPR_BOOKE_IVOR38 && sprn <= SPR_BOOKE_IVOR42) { 946 sprn_offs = sprn - SPR_BOOKE_IVOR38 + 38; 947 } else { 948 printf("Trying to write an unknown exception vector %d %03x\n", 949 sprn, sprn); 950 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); 951 return; 952 } 953 954 TCGv t0 = tcg_temp_new(); 955 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUPPCState, ivor_mask)); 956 tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]); 957 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_vectors[sprn_offs])); 958 gen_store_spr(sprn, t0); 959 tcg_temp_free(t0); 960 } 961 #endif 962 963 #ifdef TARGET_PPC64 964 #ifndef CONFIG_USER_ONLY 965 void spr_write_amr(DisasContext *ctx, int sprn, int gprn) 966 { 967 TCGv t0 = tcg_temp_new(); 968 TCGv t1 = tcg_temp_new(); 969 TCGv t2 = tcg_temp_new(); 970 971 /* 972 * Note, the HV=1 PR=0 case is handled earlier by simply using 973 * spr_write_generic for HV mode in the SPR table 974 */ 975 976 /* Build insertion mask into t1 based on context */ 977 if (ctx->pr) { 978 gen_load_spr(t1, SPR_UAMOR); 979 } else { 980 gen_load_spr(t1, SPR_AMOR); 981 } 982 983 /* Mask new bits into t2 */ 984 tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]); 985 986 /* Load AMR and clear new bits in t0 */ 987 gen_load_spr(t0, SPR_AMR); 988 tcg_gen_andc_tl(t0, t0, t1); 989 990 /* Or'in new bits and write it out */ 991 tcg_gen_or_tl(t0, t0, t2); 992 gen_store_spr(SPR_AMR, t0); 993 spr_store_dump_spr(SPR_AMR); 994 995 tcg_temp_free(t0); 996 tcg_temp_free(t1); 997 tcg_temp_free(t2); 998 } 999 1000 void spr_write_uamor(DisasContext *ctx, int sprn, int gprn) 1001 { 1002 TCGv t0 = tcg_temp_new(); 1003 TCGv t1 = tcg_temp_new(); 1004 TCGv t2 = tcg_temp_new(); 1005 1006 /* 1007 * Note, the HV=1 case is handled earlier by simply using 1008 * spr_write_generic for HV mode in the SPR table 1009 */ 1010 1011 /* Build insertion mask into t1 based on context */ 1012 gen_load_spr(t1, SPR_AMOR); 1013 1014 /* Mask new bits into t2 */ 1015 tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]); 1016 1017 /* Load AMR and clear new bits in t0 */ 1018 gen_load_spr(t0, SPR_UAMOR); 1019 tcg_gen_andc_tl(t0, t0, t1); 1020 1021 /* Or'in new bits and write it out */ 1022 tcg_gen_or_tl(t0, t0, t2); 1023 gen_store_spr(SPR_UAMOR, t0); 1024 spr_store_dump_spr(SPR_UAMOR); 1025 1026 tcg_temp_free(t0); 1027 tcg_temp_free(t1); 1028 tcg_temp_free(t2); 1029 } 1030 1031 void spr_write_iamr(DisasContext *ctx, int sprn, int gprn) 1032 { 1033 TCGv t0 = tcg_temp_new(); 1034 TCGv t1 = tcg_temp_new(); 1035 TCGv t2 = tcg_temp_new(); 1036 1037 /* 1038 * Note, the HV=1 case is handled earlier by simply using 1039 * spr_write_generic for HV mode in the SPR table 1040 */ 1041 1042 /* Build insertion mask into t1 based on context */ 1043 gen_load_spr(t1, SPR_AMOR); 1044 1045 /* Mask new bits into t2 */ 1046 tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]); 1047 1048 /* Load AMR and clear new bits in t0 */ 1049 gen_load_spr(t0, SPR_IAMR); 1050 tcg_gen_andc_tl(t0, t0, t1); 1051 1052 /* Or'in new bits and write it out */ 1053 tcg_gen_or_tl(t0, t0, t2); 1054 gen_store_spr(SPR_IAMR, t0); 1055 spr_store_dump_spr(SPR_IAMR); 1056 1057 tcg_temp_free(t0); 1058 tcg_temp_free(t1); 1059 tcg_temp_free(t2); 1060 } 1061 #endif 1062 #endif 1063 1064 #ifndef CONFIG_USER_ONLY 1065 void spr_read_thrm(DisasContext *ctx, int gprn, int sprn) 1066 { 1067 gen_helper_fixup_thrm(cpu_env); 1068 gen_load_spr(cpu_gpr[gprn], sprn); 1069 spr_load_dump_spr(sprn); 1070 } 1071 #endif /* !CONFIG_USER_ONLY */ 1072 1073 #if !defined(CONFIG_USER_ONLY) 1074 void spr_write_e500_l1csr0(DisasContext *ctx, int sprn, int gprn) 1075 { 1076 TCGv t0 = tcg_temp_new(); 1077 1078 tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR0_DCE | L1CSR0_CPE); 1079 gen_store_spr(sprn, t0); 1080 tcg_temp_free(t0); 1081 } 1082 1083 void spr_write_e500_l1csr1(DisasContext *ctx, int sprn, int gprn) 1084 { 1085 TCGv t0 = tcg_temp_new(); 1086 1087 tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR1_ICE | L1CSR1_CPE); 1088 gen_store_spr(sprn, t0); 1089 tcg_temp_free(t0); 1090 } 1091 1092 void spr_write_e500_l2csr0(DisasContext *ctx, int sprn, int gprn) 1093 { 1094 TCGv t0 = tcg_temp_new(); 1095 1096 tcg_gen_andi_tl(t0, cpu_gpr[gprn], 1097 ~(E500_L2CSR0_L2FI | E500_L2CSR0_L2FL | E500_L2CSR0_L2LFC)); 1098 gen_store_spr(sprn, t0); 1099 tcg_temp_free(t0); 1100 } 1101 1102 void spr_write_booke206_mmucsr0(DisasContext *ctx, int sprn, int gprn) 1103 { 1104 gen_helper_booke206_tlbflush(cpu_env, cpu_gpr[gprn]); 1105 } 1106 1107 void spr_write_booke_pid(DisasContext *ctx, int sprn, int gprn) 1108 { 1109 TCGv_i32 t0 = tcg_const_i32(sprn); 1110 gen_helper_booke_setpid(cpu_env, t0, cpu_gpr[gprn]); 1111 tcg_temp_free_i32(t0); 1112 } 1113 void spr_write_eplc(DisasContext *ctx, int sprn, int gprn) 1114 { 1115 gen_helper_booke_set_eplc(cpu_env, cpu_gpr[gprn]); 1116 } 1117 void spr_write_epsc(DisasContext *ctx, int sprn, int gprn) 1118 { 1119 gen_helper_booke_set_epsc(cpu_env, cpu_gpr[gprn]); 1120 } 1121 1122 #endif 1123 1124 #if !defined(CONFIG_USER_ONLY) 1125 void spr_write_mas73(DisasContext *ctx, int sprn, int gprn) 1126 { 1127 TCGv val = tcg_temp_new(); 1128 tcg_gen_ext32u_tl(val, cpu_gpr[gprn]); 1129 gen_store_spr(SPR_BOOKE_MAS3, val); 1130 tcg_gen_shri_tl(val, cpu_gpr[gprn], 32); 1131 gen_store_spr(SPR_BOOKE_MAS7, val); 1132 tcg_temp_free(val); 1133 } 1134 1135 void spr_read_mas73(DisasContext *ctx, int gprn, int sprn) 1136 { 1137 TCGv mas7 = tcg_temp_new(); 1138 TCGv mas3 = tcg_temp_new(); 1139 gen_load_spr(mas7, SPR_BOOKE_MAS7); 1140 tcg_gen_shli_tl(mas7, mas7, 32); 1141 gen_load_spr(mas3, SPR_BOOKE_MAS3); 1142 tcg_gen_or_tl(cpu_gpr[gprn], mas3, mas7); 1143 tcg_temp_free(mas3); 1144 tcg_temp_free(mas7); 1145 } 1146 1147 #endif 1148 1149 #ifdef TARGET_PPC64 1150 static void gen_fscr_facility_check(DisasContext *ctx, int facility_sprn, 1151 int bit, int sprn, int cause) 1152 { 1153 TCGv_i32 t1 = tcg_const_i32(bit); 1154 TCGv_i32 t2 = tcg_const_i32(sprn); 1155 TCGv_i32 t3 = tcg_const_i32(cause); 1156 1157 gen_helper_fscr_facility_check(cpu_env, t1, t2, t3); 1158 1159 tcg_temp_free_i32(t3); 1160 tcg_temp_free_i32(t2); 1161 tcg_temp_free_i32(t1); 1162 } 1163 1164 static void gen_msr_facility_check(DisasContext *ctx, int facility_sprn, 1165 int bit, int sprn, int cause) 1166 { 1167 TCGv_i32 t1 = tcg_const_i32(bit); 1168 TCGv_i32 t2 = tcg_const_i32(sprn); 1169 TCGv_i32 t3 = tcg_const_i32(cause); 1170 1171 gen_helper_msr_facility_check(cpu_env, t1, t2, t3); 1172 1173 tcg_temp_free_i32(t3); 1174 tcg_temp_free_i32(t2); 1175 tcg_temp_free_i32(t1); 1176 } 1177 1178 void spr_read_prev_upper32(DisasContext *ctx, int gprn, int sprn) 1179 { 1180 TCGv spr_up = tcg_temp_new(); 1181 TCGv spr = tcg_temp_new(); 1182 1183 gen_load_spr(spr, sprn - 1); 1184 tcg_gen_shri_tl(spr_up, spr, 32); 1185 tcg_gen_ext32u_tl(cpu_gpr[gprn], spr_up); 1186 1187 tcg_temp_free(spr); 1188 tcg_temp_free(spr_up); 1189 } 1190 1191 void spr_write_prev_upper32(DisasContext *ctx, int sprn, int gprn) 1192 { 1193 TCGv spr = tcg_temp_new(); 1194 1195 gen_load_spr(spr, sprn - 1); 1196 tcg_gen_deposit_tl(spr, spr, cpu_gpr[gprn], 32, 32); 1197 gen_store_spr(sprn - 1, spr); 1198 1199 tcg_temp_free(spr); 1200 } 1201 1202 #if !defined(CONFIG_USER_ONLY) 1203 void spr_write_hmer(DisasContext *ctx, int sprn, int gprn) 1204 { 1205 TCGv hmer = tcg_temp_new(); 1206 1207 gen_load_spr(hmer, sprn); 1208 tcg_gen_and_tl(hmer, cpu_gpr[gprn], hmer); 1209 gen_store_spr(sprn, hmer); 1210 spr_store_dump_spr(sprn); 1211 tcg_temp_free(hmer); 1212 } 1213 1214 void spr_write_lpcr(DisasContext *ctx, int sprn, int gprn) 1215 { 1216 gen_helper_store_lpcr(cpu_env, cpu_gpr[gprn]); 1217 } 1218 #endif /* !defined(CONFIG_USER_ONLY) */ 1219 1220 void spr_read_tar(DisasContext *ctx, int gprn, int sprn) 1221 { 1222 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR); 1223 spr_read_generic(ctx, gprn, sprn); 1224 } 1225 1226 void spr_write_tar(DisasContext *ctx, int sprn, int gprn) 1227 { 1228 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR); 1229 spr_write_generic(ctx, sprn, gprn); 1230 } 1231 1232 void spr_read_tm(DisasContext *ctx, int gprn, int sprn) 1233 { 1234 gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM); 1235 spr_read_generic(ctx, gprn, sprn); 1236 } 1237 1238 void spr_write_tm(DisasContext *ctx, int sprn, int gprn) 1239 { 1240 gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM); 1241 spr_write_generic(ctx, sprn, gprn); 1242 } 1243 1244 void spr_read_tm_upper32(DisasContext *ctx, int gprn, int sprn) 1245 { 1246 gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM); 1247 spr_read_prev_upper32(ctx, gprn, sprn); 1248 } 1249 1250 void spr_write_tm_upper32(DisasContext *ctx, int sprn, int gprn) 1251 { 1252 gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM); 1253 spr_write_prev_upper32(ctx, sprn, gprn); 1254 } 1255 1256 void spr_read_ebb(DisasContext *ctx, int gprn, int sprn) 1257 { 1258 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB); 1259 spr_read_generic(ctx, gprn, sprn); 1260 } 1261 1262 void spr_write_ebb(DisasContext *ctx, int sprn, int gprn) 1263 { 1264 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB); 1265 spr_write_generic(ctx, sprn, gprn); 1266 } 1267 1268 void spr_read_ebb_upper32(DisasContext *ctx, int gprn, int sprn) 1269 { 1270 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB); 1271 spr_read_prev_upper32(ctx, gprn, sprn); 1272 } 1273 1274 void spr_write_ebb_upper32(DisasContext *ctx, int sprn, int gprn) 1275 { 1276 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB); 1277 spr_write_prev_upper32(ctx, sprn, gprn); 1278 } 1279 #endif 1280 1281 #define GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \ 1282 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE) 1283 1284 #define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2) \ 1285 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2) 1286 1287 #define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type) \ 1288 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE) 1289 1290 #define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2) \ 1291 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2) 1292 1293 #define GEN_HANDLER_E_2(name, opc1, opc2, opc3, opc4, inval, type, type2) \ 1294 GEN_OPCODE3(name, opc1, opc2, opc3, opc4, inval, type, type2) 1295 1296 #define GEN_HANDLER2_E_2(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) \ 1297 GEN_OPCODE4(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) 1298 1299 typedef struct opcode_t { 1300 unsigned char opc1, opc2, opc3, opc4; 1301 #if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */ 1302 unsigned char pad[4]; 1303 #endif 1304 opc_handler_t handler; 1305 const char *oname; 1306 } opcode_t; 1307 1308 /* Helpers for priv. check */ 1309 #define GEN_PRIV \ 1310 do { \ 1311 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); return; \ 1312 } while (0) 1313 1314 #if defined(CONFIG_USER_ONLY) 1315 #define CHK_HV GEN_PRIV 1316 #define CHK_SV GEN_PRIV 1317 #define CHK_HVRM GEN_PRIV 1318 #else 1319 #define CHK_HV \ 1320 do { \ 1321 if (unlikely(ctx->pr || !ctx->hv)) { \ 1322 GEN_PRIV; \ 1323 } \ 1324 } while (0) 1325 #define CHK_SV \ 1326 do { \ 1327 if (unlikely(ctx->pr)) { \ 1328 GEN_PRIV; \ 1329 } \ 1330 } while (0) 1331 #define CHK_HVRM \ 1332 do { \ 1333 if (unlikely(ctx->pr || !ctx->hv || ctx->dr)) { \ 1334 GEN_PRIV; \ 1335 } \ 1336 } while (0) 1337 #endif 1338 1339 #define CHK_NONE 1340 1341 /*****************************************************************************/ 1342 /* PowerPC instructions table */ 1343 1344 #if defined(DO_PPC_STATISTICS) 1345 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \ 1346 { \ 1347 .opc1 = op1, \ 1348 .opc2 = op2, \ 1349 .opc3 = op3, \ 1350 .opc4 = 0xff, \ 1351 .handler = { \ 1352 .inval1 = invl, \ 1353 .type = _typ, \ 1354 .type2 = _typ2, \ 1355 .handler = &gen_##name, \ 1356 .oname = stringify(name), \ 1357 }, \ 1358 .oname = stringify(name), \ 1359 } 1360 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \ 1361 { \ 1362 .opc1 = op1, \ 1363 .opc2 = op2, \ 1364 .opc3 = op3, \ 1365 .opc4 = 0xff, \ 1366 .handler = { \ 1367 .inval1 = invl1, \ 1368 .inval2 = invl2, \ 1369 .type = _typ, \ 1370 .type2 = _typ2, \ 1371 .handler = &gen_##name, \ 1372 .oname = stringify(name), \ 1373 }, \ 1374 .oname = stringify(name), \ 1375 } 1376 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \ 1377 { \ 1378 .opc1 = op1, \ 1379 .opc2 = op2, \ 1380 .opc3 = op3, \ 1381 .opc4 = 0xff, \ 1382 .handler = { \ 1383 .inval1 = invl, \ 1384 .type = _typ, \ 1385 .type2 = _typ2, \ 1386 .handler = &gen_##name, \ 1387 .oname = onam, \ 1388 }, \ 1389 .oname = onam, \ 1390 } 1391 #define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2) \ 1392 { \ 1393 .opc1 = op1, \ 1394 .opc2 = op2, \ 1395 .opc3 = op3, \ 1396 .opc4 = op4, \ 1397 .handler = { \ 1398 .inval1 = invl, \ 1399 .type = _typ, \ 1400 .type2 = _typ2, \ 1401 .handler = &gen_##name, \ 1402 .oname = stringify(name), \ 1403 }, \ 1404 .oname = stringify(name), \ 1405 } 1406 #define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2) \ 1407 { \ 1408 .opc1 = op1, \ 1409 .opc2 = op2, \ 1410 .opc3 = op3, \ 1411 .opc4 = op4, \ 1412 .handler = { \ 1413 .inval1 = invl, \ 1414 .type = _typ, \ 1415 .type2 = _typ2, \ 1416 .handler = &gen_##name, \ 1417 .oname = onam, \ 1418 }, \ 1419 .oname = onam, \ 1420 } 1421 #else 1422 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \ 1423 { \ 1424 .opc1 = op1, \ 1425 .opc2 = op2, \ 1426 .opc3 = op3, \ 1427 .opc4 = 0xff, \ 1428 .handler = { \ 1429 .inval1 = invl, \ 1430 .type = _typ, \ 1431 .type2 = _typ2, \ 1432 .handler = &gen_##name, \ 1433 }, \ 1434 .oname = stringify(name), \ 1435 } 1436 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \ 1437 { \ 1438 .opc1 = op1, \ 1439 .opc2 = op2, \ 1440 .opc3 = op3, \ 1441 .opc4 = 0xff, \ 1442 .handler = { \ 1443 .inval1 = invl1, \ 1444 .inval2 = invl2, \ 1445 .type = _typ, \ 1446 .type2 = _typ2, \ 1447 .handler = &gen_##name, \ 1448 }, \ 1449 .oname = stringify(name), \ 1450 } 1451 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \ 1452 { \ 1453 .opc1 = op1, \ 1454 .opc2 = op2, \ 1455 .opc3 = op3, \ 1456 .opc4 = 0xff, \ 1457 .handler = { \ 1458 .inval1 = invl, \ 1459 .type = _typ, \ 1460 .type2 = _typ2, \ 1461 .handler = &gen_##name, \ 1462 }, \ 1463 .oname = onam, \ 1464 } 1465 #define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2) \ 1466 { \ 1467 .opc1 = op1, \ 1468 .opc2 = op2, \ 1469 .opc3 = op3, \ 1470 .opc4 = op4, \ 1471 .handler = { \ 1472 .inval1 = invl, \ 1473 .type = _typ, \ 1474 .type2 = _typ2, \ 1475 .handler = &gen_##name, \ 1476 }, \ 1477 .oname = stringify(name), \ 1478 } 1479 #define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2) \ 1480 { \ 1481 .opc1 = op1, \ 1482 .opc2 = op2, \ 1483 .opc3 = op3, \ 1484 .opc4 = op4, \ 1485 .handler = { \ 1486 .inval1 = invl, \ 1487 .type = _typ, \ 1488 .type2 = _typ2, \ 1489 .handler = &gen_##name, \ 1490 }, \ 1491 .oname = onam, \ 1492 } 1493 #endif 1494 1495 /* Invalid instruction */ 1496 static void gen_invalid(DisasContext *ctx) 1497 { 1498 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 1499 } 1500 1501 static opc_handler_t invalid_handler = { 1502 .inval1 = 0xFFFFFFFF, 1503 .inval2 = 0xFFFFFFFF, 1504 .type = PPC_NONE, 1505 .type2 = PPC_NONE, 1506 .handler = gen_invalid, 1507 }; 1508 1509 /*** Integer comparison ***/ 1510 1511 static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf) 1512 { 1513 TCGv t0 = tcg_temp_new(); 1514 TCGv t1 = tcg_temp_new(); 1515 TCGv_i32 t = tcg_temp_new_i32(); 1516 1517 tcg_gen_movi_tl(t0, CRF_EQ); 1518 tcg_gen_movi_tl(t1, CRF_LT); 1519 tcg_gen_movcond_tl((s ? TCG_COND_LT : TCG_COND_LTU), 1520 t0, arg0, arg1, t1, t0); 1521 tcg_gen_movi_tl(t1, CRF_GT); 1522 tcg_gen_movcond_tl((s ? TCG_COND_GT : TCG_COND_GTU), 1523 t0, arg0, arg1, t1, t0); 1524 1525 tcg_gen_trunc_tl_i32(t, t0); 1526 tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so); 1527 tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t); 1528 1529 tcg_temp_free(t0); 1530 tcg_temp_free(t1); 1531 tcg_temp_free_i32(t); 1532 } 1533 1534 static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf) 1535 { 1536 TCGv t0 = tcg_const_tl(arg1); 1537 gen_op_cmp(arg0, t0, s, crf); 1538 tcg_temp_free(t0); 1539 } 1540 1541 static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf) 1542 { 1543 TCGv t0, t1; 1544 t0 = tcg_temp_new(); 1545 t1 = tcg_temp_new(); 1546 if (s) { 1547 tcg_gen_ext32s_tl(t0, arg0); 1548 tcg_gen_ext32s_tl(t1, arg1); 1549 } else { 1550 tcg_gen_ext32u_tl(t0, arg0); 1551 tcg_gen_ext32u_tl(t1, arg1); 1552 } 1553 gen_op_cmp(t0, t1, s, crf); 1554 tcg_temp_free(t1); 1555 tcg_temp_free(t0); 1556 } 1557 1558 static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf) 1559 { 1560 TCGv t0 = tcg_const_tl(arg1); 1561 gen_op_cmp32(arg0, t0, s, crf); 1562 tcg_temp_free(t0); 1563 } 1564 1565 static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg) 1566 { 1567 if (NARROW_MODE(ctx)) { 1568 gen_op_cmpi32(reg, 0, 1, 0); 1569 } else { 1570 gen_op_cmpi(reg, 0, 1, 0); 1571 } 1572 } 1573 1574 /* cmp */ 1575 static void gen_cmp(DisasContext *ctx) 1576 { 1577 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) { 1578 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 1579 1, crfD(ctx->opcode)); 1580 } else { 1581 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 1582 1, crfD(ctx->opcode)); 1583 } 1584 } 1585 1586 /* cmpi */ 1587 static void gen_cmpi(DisasContext *ctx) 1588 { 1589 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) { 1590 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode), 1591 1, crfD(ctx->opcode)); 1592 } else { 1593 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode), 1594 1, crfD(ctx->opcode)); 1595 } 1596 } 1597 1598 /* cmpl */ 1599 static void gen_cmpl(DisasContext *ctx) 1600 { 1601 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) { 1602 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 1603 0, crfD(ctx->opcode)); 1604 } else { 1605 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 1606 0, crfD(ctx->opcode)); 1607 } 1608 } 1609 1610 /* cmpli */ 1611 static void gen_cmpli(DisasContext *ctx) 1612 { 1613 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) { 1614 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode), 1615 0, crfD(ctx->opcode)); 1616 } else { 1617 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode), 1618 0, crfD(ctx->opcode)); 1619 } 1620 } 1621 1622 /* cmprb - range comparison: isupper, isaplha, islower*/ 1623 static void gen_cmprb(DisasContext *ctx) 1624 { 1625 TCGv_i32 src1 = tcg_temp_new_i32(); 1626 TCGv_i32 src2 = tcg_temp_new_i32(); 1627 TCGv_i32 src2lo = tcg_temp_new_i32(); 1628 TCGv_i32 src2hi = tcg_temp_new_i32(); 1629 TCGv_i32 crf = cpu_crf[crfD(ctx->opcode)]; 1630 1631 tcg_gen_trunc_tl_i32(src1, cpu_gpr[rA(ctx->opcode)]); 1632 tcg_gen_trunc_tl_i32(src2, cpu_gpr[rB(ctx->opcode)]); 1633 1634 tcg_gen_andi_i32(src1, src1, 0xFF); 1635 tcg_gen_ext8u_i32(src2lo, src2); 1636 tcg_gen_shri_i32(src2, src2, 8); 1637 tcg_gen_ext8u_i32(src2hi, src2); 1638 1639 tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1); 1640 tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi); 1641 tcg_gen_and_i32(crf, src2lo, src2hi); 1642 1643 if (ctx->opcode & 0x00200000) { 1644 tcg_gen_shri_i32(src2, src2, 8); 1645 tcg_gen_ext8u_i32(src2lo, src2); 1646 tcg_gen_shri_i32(src2, src2, 8); 1647 tcg_gen_ext8u_i32(src2hi, src2); 1648 tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1); 1649 tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi); 1650 tcg_gen_and_i32(src2lo, src2lo, src2hi); 1651 tcg_gen_or_i32(crf, crf, src2lo); 1652 } 1653 tcg_gen_shli_i32(crf, crf, CRF_GT_BIT); 1654 tcg_temp_free_i32(src1); 1655 tcg_temp_free_i32(src2); 1656 tcg_temp_free_i32(src2lo); 1657 tcg_temp_free_i32(src2hi); 1658 } 1659 1660 #if defined(TARGET_PPC64) 1661 /* cmpeqb */ 1662 static void gen_cmpeqb(DisasContext *ctx) 1663 { 1664 gen_helper_cmpeqb(cpu_crf[crfD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1665 cpu_gpr[rB(ctx->opcode)]); 1666 } 1667 #endif 1668 1669 /* isel (PowerPC 2.03 specification) */ 1670 static void gen_isel(DisasContext *ctx) 1671 { 1672 uint32_t bi = rC(ctx->opcode); 1673 uint32_t mask = 0x08 >> (bi & 0x03); 1674 TCGv t0 = tcg_temp_new(); 1675 TCGv zr; 1676 1677 tcg_gen_extu_i32_tl(t0, cpu_crf[bi >> 2]); 1678 tcg_gen_andi_tl(t0, t0, mask); 1679 1680 zr = tcg_const_tl(0); 1681 tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rD(ctx->opcode)], t0, zr, 1682 rA(ctx->opcode) ? cpu_gpr[rA(ctx->opcode)] : zr, 1683 cpu_gpr[rB(ctx->opcode)]); 1684 tcg_temp_free(zr); 1685 tcg_temp_free(t0); 1686 } 1687 1688 /* cmpb: PowerPC 2.05 specification */ 1689 static void gen_cmpb(DisasContext *ctx) 1690 { 1691 gen_helper_cmpb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 1692 cpu_gpr[rB(ctx->opcode)]); 1693 } 1694 1695 /*** Integer arithmetic ***/ 1696 1697 static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0, 1698 TCGv arg1, TCGv arg2, int sub) 1699 { 1700 TCGv t0 = tcg_temp_new(); 1701 1702 tcg_gen_xor_tl(cpu_ov, arg0, arg2); 1703 tcg_gen_xor_tl(t0, arg1, arg2); 1704 if (sub) { 1705 tcg_gen_and_tl(cpu_ov, cpu_ov, t0); 1706 } else { 1707 tcg_gen_andc_tl(cpu_ov, cpu_ov, t0); 1708 } 1709 tcg_temp_free(t0); 1710 if (NARROW_MODE(ctx)) { 1711 tcg_gen_extract_tl(cpu_ov, cpu_ov, 31, 1); 1712 if (is_isa300(ctx)) { 1713 tcg_gen_mov_tl(cpu_ov32, cpu_ov); 1714 } 1715 } else { 1716 if (is_isa300(ctx)) { 1717 tcg_gen_extract_tl(cpu_ov32, cpu_ov, 31, 1); 1718 } 1719 tcg_gen_extract_tl(cpu_ov, cpu_ov, TARGET_LONG_BITS - 1, 1); 1720 } 1721 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 1722 } 1723 1724 static inline void gen_op_arith_compute_ca32(DisasContext *ctx, 1725 TCGv res, TCGv arg0, TCGv arg1, 1726 TCGv ca32, int sub) 1727 { 1728 TCGv t0; 1729 1730 if (!is_isa300(ctx)) { 1731 return; 1732 } 1733 1734 t0 = tcg_temp_new(); 1735 if (sub) { 1736 tcg_gen_eqv_tl(t0, arg0, arg1); 1737 } else { 1738 tcg_gen_xor_tl(t0, arg0, arg1); 1739 } 1740 tcg_gen_xor_tl(t0, t0, res); 1741 tcg_gen_extract_tl(ca32, t0, 32, 1); 1742 tcg_temp_free(t0); 1743 } 1744 1745 /* Common add function */ 1746 static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1, 1747 TCGv arg2, TCGv ca, TCGv ca32, 1748 bool add_ca, bool compute_ca, 1749 bool compute_ov, bool compute_rc0) 1750 { 1751 TCGv t0 = ret; 1752 1753 if (compute_ca || compute_ov) { 1754 t0 = tcg_temp_new(); 1755 } 1756 1757 if (compute_ca) { 1758 if (NARROW_MODE(ctx)) { 1759 /* 1760 * Caution: a non-obvious corner case of the spec is that 1761 * we must produce the *entire* 64-bit addition, but 1762 * produce the carry into bit 32. 1763 */ 1764 TCGv t1 = tcg_temp_new(); 1765 tcg_gen_xor_tl(t1, arg1, arg2); /* add without carry */ 1766 tcg_gen_add_tl(t0, arg1, arg2); 1767 if (add_ca) { 1768 tcg_gen_add_tl(t0, t0, ca); 1769 } 1770 tcg_gen_xor_tl(ca, t0, t1); /* bits changed w/ carry */ 1771 tcg_temp_free(t1); 1772 tcg_gen_extract_tl(ca, ca, 32, 1); 1773 if (is_isa300(ctx)) { 1774 tcg_gen_mov_tl(ca32, ca); 1775 } 1776 } else { 1777 TCGv zero = tcg_const_tl(0); 1778 if (add_ca) { 1779 tcg_gen_add2_tl(t0, ca, arg1, zero, ca, zero); 1780 tcg_gen_add2_tl(t0, ca, t0, ca, arg2, zero); 1781 } else { 1782 tcg_gen_add2_tl(t0, ca, arg1, zero, arg2, zero); 1783 } 1784 gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, ca32, 0); 1785 tcg_temp_free(zero); 1786 } 1787 } else { 1788 tcg_gen_add_tl(t0, arg1, arg2); 1789 if (add_ca) { 1790 tcg_gen_add_tl(t0, t0, ca); 1791 } 1792 } 1793 1794 if (compute_ov) { 1795 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0); 1796 } 1797 if (unlikely(compute_rc0)) { 1798 gen_set_Rc0(ctx, t0); 1799 } 1800 1801 if (t0 != ret) { 1802 tcg_gen_mov_tl(ret, t0); 1803 tcg_temp_free(t0); 1804 } 1805 } 1806 /* Add functions with two operands */ 1807 #define GEN_INT_ARITH_ADD(name, opc3, ca, add_ca, compute_ca, compute_ov) \ 1808 static void glue(gen_, name)(DisasContext *ctx) \ 1809 { \ 1810 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \ 1811 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 1812 ca, glue(ca, 32), \ 1813 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 1814 } 1815 /* Add functions with one operand and one immediate */ 1816 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, ca, \ 1817 add_ca, compute_ca, compute_ov) \ 1818 static void glue(gen_, name)(DisasContext *ctx) \ 1819 { \ 1820 TCGv t0 = tcg_const_tl(const_val); \ 1821 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \ 1822 cpu_gpr[rA(ctx->opcode)], t0, \ 1823 ca, glue(ca, 32), \ 1824 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 1825 tcg_temp_free(t0); \ 1826 } 1827 1828 /* add add. addo addo. */ 1829 GEN_INT_ARITH_ADD(add, 0x08, cpu_ca, 0, 0, 0) 1830 GEN_INT_ARITH_ADD(addo, 0x18, cpu_ca, 0, 0, 1) 1831 /* addc addc. addco addco. */ 1832 GEN_INT_ARITH_ADD(addc, 0x00, cpu_ca, 0, 1, 0) 1833 GEN_INT_ARITH_ADD(addco, 0x10, cpu_ca, 0, 1, 1) 1834 /* adde adde. addeo addeo. */ 1835 GEN_INT_ARITH_ADD(adde, 0x04, cpu_ca, 1, 1, 0) 1836 GEN_INT_ARITH_ADD(addeo, 0x14, cpu_ca, 1, 1, 1) 1837 /* addme addme. addmeo addmeo. */ 1838 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, cpu_ca, 1, 1, 0) 1839 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, cpu_ca, 1, 1, 1) 1840 /* addex */ 1841 GEN_INT_ARITH_ADD(addex, 0x05, cpu_ov, 1, 1, 0); 1842 /* addze addze. addzeo addzeo.*/ 1843 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, cpu_ca, 1, 1, 0) 1844 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, cpu_ca, 1, 1, 1) 1845 /* addi */ 1846 static void gen_addi(DisasContext *ctx) 1847 { 1848 target_long simm = SIMM(ctx->opcode); 1849 1850 if (rA(ctx->opcode) == 0) { 1851 /* li case */ 1852 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm); 1853 } else { 1854 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], 1855 cpu_gpr[rA(ctx->opcode)], simm); 1856 } 1857 } 1858 /* addic addic.*/ 1859 static inline void gen_op_addic(DisasContext *ctx, bool compute_rc0) 1860 { 1861 TCGv c = tcg_const_tl(SIMM(ctx->opcode)); 1862 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1863 c, cpu_ca, cpu_ca32, 0, 1, 0, compute_rc0); 1864 tcg_temp_free(c); 1865 } 1866 1867 static void gen_addic(DisasContext *ctx) 1868 { 1869 gen_op_addic(ctx, 0); 1870 } 1871 1872 static void gen_addic_(DisasContext *ctx) 1873 { 1874 gen_op_addic(ctx, 1); 1875 } 1876 1877 /* addis */ 1878 static void gen_addis(DisasContext *ctx) 1879 { 1880 target_long simm = SIMM(ctx->opcode); 1881 1882 if (rA(ctx->opcode) == 0) { 1883 /* lis case */ 1884 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm << 16); 1885 } else { 1886 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], 1887 cpu_gpr[rA(ctx->opcode)], simm << 16); 1888 } 1889 } 1890 1891 /* addpcis */ 1892 static void gen_addpcis(DisasContext *ctx) 1893 { 1894 target_long d = DX(ctx->opcode); 1895 1896 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], ctx->base.pc_next + (d << 16)); 1897 } 1898 1899 static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1, 1900 TCGv arg2, int sign, int compute_ov) 1901 { 1902 TCGv_i32 t0 = tcg_temp_new_i32(); 1903 TCGv_i32 t1 = tcg_temp_new_i32(); 1904 TCGv_i32 t2 = tcg_temp_new_i32(); 1905 TCGv_i32 t3 = tcg_temp_new_i32(); 1906 1907 tcg_gen_trunc_tl_i32(t0, arg1); 1908 tcg_gen_trunc_tl_i32(t1, arg2); 1909 if (sign) { 1910 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN); 1911 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1); 1912 tcg_gen_and_i32(t2, t2, t3); 1913 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0); 1914 tcg_gen_or_i32(t2, t2, t3); 1915 tcg_gen_movi_i32(t3, 0); 1916 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1); 1917 tcg_gen_div_i32(t3, t0, t1); 1918 tcg_gen_extu_i32_tl(ret, t3); 1919 } else { 1920 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t1, 0); 1921 tcg_gen_movi_i32(t3, 0); 1922 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1); 1923 tcg_gen_divu_i32(t3, t0, t1); 1924 tcg_gen_extu_i32_tl(ret, t3); 1925 } 1926 if (compute_ov) { 1927 tcg_gen_extu_i32_tl(cpu_ov, t2); 1928 if (is_isa300(ctx)) { 1929 tcg_gen_extu_i32_tl(cpu_ov32, t2); 1930 } 1931 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 1932 } 1933 tcg_temp_free_i32(t0); 1934 tcg_temp_free_i32(t1); 1935 tcg_temp_free_i32(t2); 1936 tcg_temp_free_i32(t3); 1937 1938 if (unlikely(Rc(ctx->opcode) != 0)) { 1939 gen_set_Rc0(ctx, ret); 1940 } 1941 } 1942 /* Div functions */ 1943 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \ 1944 static void glue(gen_, name)(DisasContext *ctx) \ 1945 { \ 1946 gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)], \ 1947 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 1948 sign, compute_ov); \ 1949 } 1950 /* divwu divwu. divwuo divwuo. */ 1951 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0); 1952 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1); 1953 /* divw divw. divwo divwo. */ 1954 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0); 1955 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1); 1956 1957 /* div[wd]eu[o][.] */ 1958 #define GEN_DIVE(name, hlpr, compute_ov) \ 1959 static void gen_##name(DisasContext *ctx) \ 1960 { \ 1961 TCGv_i32 t0 = tcg_const_i32(compute_ov); \ 1962 gen_helper_##hlpr(cpu_gpr[rD(ctx->opcode)], cpu_env, \ 1963 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); \ 1964 tcg_temp_free_i32(t0); \ 1965 if (unlikely(Rc(ctx->opcode) != 0)) { \ 1966 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); \ 1967 } \ 1968 } 1969 1970 GEN_DIVE(divweu, divweu, 0); 1971 GEN_DIVE(divweuo, divweu, 1); 1972 GEN_DIVE(divwe, divwe, 0); 1973 GEN_DIVE(divweo, divwe, 1); 1974 1975 #if defined(TARGET_PPC64) 1976 static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1, 1977 TCGv arg2, int sign, int compute_ov) 1978 { 1979 TCGv_i64 t0 = tcg_temp_new_i64(); 1980 TCGv_i64 t1 = tcg_temp_new_i64(); 1981 TCGv_i64 t2 = tcg_temp_new_i64(); 1982 TCGv_i64 t3 = tcg_temp_new_i64(); 1983 1984 tcg_gen_mov_i64(t0, arg1); 1985 tcg_gen_mov_i64(t1, arg2); 1986 if (sign) { 1987 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN); 1988 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1); 1989 tcg_gen_and_i64(t2, t2, t3); 1990 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0); 1991 tcg_gen_or_i64(t2, t2, t3); 1992 tcg_gen_movi_i64(t3, 0); 1993 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1); 1994 tcg_gen_div_i64(ret, t0, t1); 1995 } else { 1996 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t1, 0); 1997 tcg_gen_movi_i64(t3, 0); 1998 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1); 1999 tcg_gen_divu_i64(ret, t0, t1); 2000 } 2001 if (compute_ov) { 2002 tcg_gen_mov_tl(cpu_ov, t2); 2003 if (is_isa300(ctx)) { 2004 tcg_gen_mov_tl(cpu_ov32, t2); 2005 } 2006 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 2007 } 2008 tcg_temp_free_i64(t0); 2009 tcg_temp_free_i64(t1); 2010 tcg_temp_free_i64(t2); 2011 tcg_temp_free_i64(t3); 2012 2013 if (unlikely(Rc(ctx->opcode) != 0)) { 2014 gen_set_Rc0(ctx, ret); 2015 } 2016 } 2017 2018 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \ 2019 static void glue(gen_, name)(DisasContext *ctx) \ 2020 { \ 2021 gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)], \ 2022 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 2023 sign, compute_ov); \ 2024 } 2025 /* divdu divdu. divduo divduo. */ 2026 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0); 2027 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1); 2028 /* divd divd. divdo divdo. */ 2029 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0); 2030 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1); 2031 2032 GEN_DIVE(divdeu, divdeu, 0); 2033 GEN_DIVE(divdeuo, divdeu, 1); 2034 GEN_DIVE(divde, divde, 0); 2035 GEN_DIVE(divdeo, divde, 1); 2036 #endif 2037 2038 static inline void gen_op_arith_modw(DisasContext *ctx, TCGv ret, TCGv arg1, 2039 TCGv arg2, int sign) 2040 { 2041 TCGv_i32 t0 = tcg_temp_new_i32(); 2042 TCGv_i32 t1 = tcg_temp_new_i32(); 2043 2044 tcg_gen_trunc_tl_i32(t0, arg1); 2045 tcg_gen_trunc_tl_i32(t1, arg2); 2046 if (sign) { 2047 TCGv_i32 t2 = tcg_temp_new_i32(); 2048 TCGv_i32 t3 = tcg_temp_new_i32(); 2049 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN); 2050 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1); 2051 tcg_gen_and_i32(t2, t2, t3); 2052 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0); 2053 tcg_gen_or_i32(t2, t2, t3); 2054 tcg_gen_movi_i32(t3, 0); 2055 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1); 2056 tcg_gen_rem_i32(t3, t0, t1); 2057 tcg_gen_ext_i32_tl(ret, t3); 2058 tcg_temp_free_i32(t2); 2059 tcg_temp_free_i32(t3); 2060 } else { 2061 TCGv_i32 t2 = tcg_const_i32(1); 2062 TCGv_i32 t3 = tcg_const_i32(0); 2063 tcg_gen_movcond_i32(TCG_COND_EQ, t1, t1, t3, t2, t1); 2064 tcg_gen_remu_i32(t3, t0, t1); 2065 tcg_gen_extu_i32_tl(ret, t3); 2066 tcg_temp_free_i32(t2); 2067 tcg_temp_free_i32(t3); 2068 } 2069 tcg_temp_free_i32(t0); 2070 tcg_temp_free_i32(t1); 2071 } 2072 2073 #define GEN_INT_ARITH_MODW(name, opc3, sign) \ 2074 static void glue(gen_, name)(DisasContext *ctx) \ 2075 { \ 2076 gen_op_arith_modw(ctx, cpu_gpr[rD(ctx->opcode)], \ 2077 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 2078 sign); \ 2079 } 2080 2081 GEN_INT_ARITH_MODW(moduw, 0x08, 0); 2082 GEN_INT_ARITH_MODW(modsw, 0x18, 1); 2083 2084 #if defined(TARGET_PPC64) 2085 static inline void gen_op_arith_modd(DisasContext *ctx, TCGv ret, TCGv arg1, 2086 TCGv arg2, int sign) 2087 { 2088 TCGv_i64 t0 = tcg_temp_new_i64(); 2089 TCGv_i64 t1 = tcg_temp_new_i64(); 2090 2091 tcg_gen_mov_i64(t0, arg1); 2092 tcg_gen_mov_i64(t1, arg2); 2093 if (sign) { 2094 TCGv_i64 t2 = tcg_temp_new_i64(); 2095 TCGv_i64 t3 = tcg_temp_new_i64(); 2096 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN); 2097 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1); 2098 tcg_gen_and_i64(t2, t2, t3); 2099 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0); 2100 tcg_gen_or_i64(t2, t2, t3); 2101 tcg_gen_movi_i64(t3, 0); 2102 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1); 2103 tcg_gen_rem_i64(ret, t0, t1); 2104 tcg_temp_free_i64(t2); 2105 tcg_temp_free_i64(t3); 2106 } else { 2107 TCGv_i64 t2 = tcg_const_i64(1); 2108 TCGv_i64 t3 = tcg_const_i64(0); 2109 tcg_gen_movcond_i64(TCG_COND_EQ, t1, t1, t3, t2, t1); 2110 tcg_gen_remu_i64(ret, t0, t1); 2111 tcg_temp_free_i64(t2); 2112 tcg_temp_free_i64(t3); 2113 } 2114 tcg_temp_free_i64(t0); 2115 tcg_temp_free_i64(t1); 2116 } 2117 2118 #define GEN_INT_ARITH_MODD(name, opc3, sign) \ 2119 static void glue(gen_, name)(DisasContext *ctx) \ 2120 { \ 2121 gen_op_arith_modd(ctx, cpu_gpr[rD(ctx->opcode)], \ 2122 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 2123 sign); \ 2124 } 2125 2126 GEN_INT_ARITH_MODD(modud, 0x08, 0); 2127 GEN_INT_ARITH_MODD(modsd, 0x18, 1); 2128 #endif 2129 2130 /* mulhw mulhw. */ 2131 static void gen_mulhw(DisasContext *ctx) 2132 { 2133 TCGv_i32 t0 = tcg_temp_new_i32(); 2134 TCGv_i32 t1 = tcg_temp_new_i32(); 2135 2136 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); 2137 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); 2138 tcg_gen_muls2_i32(t0, t1, t0, t1); 2139 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1); 2140 tcg_temp_free_i32(t0); 2141 tcg_temp_free_i32(t1); 2142 if (unlikely(Rc(ctx->opcode) != 0)) { 2143 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2144 } 2145 } 2146 2147 /* mulhwu mulhwu. */ 2148 static void gen_mulhwu(DisasContext *ctx) 2149 { 2150 TCGv_i32 t0 = tcg_temp_new_i32(); 2151 TCGv_i32 t1 = tcg_temp_new_i32(); 2152 2153 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); 2154 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); 2155 tcg_gen_mulu2_i32(t0, t1, t0, t1); 2156 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1); 2157 tcg_temp_free_i32(t0); 2158 tcg_temp_free_i32(t1); 2159 if (unlikely(Rc(ctx->opcode) != 0)) { 2160 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2161 } 2162 } 2163 2164 /* mullw mullw. */ 2165 static void gen_mullw(DisasContext *ctx) 2166 { 2167 #if defined(TARGET_PPC64) 2168 TCGv_i64 t0, t1; 2169 t0 = tcg_temp_new_i64(); 2170 t1 = tcg_temp_new_i64(); 2171 tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]); 2172 tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]); 2173 tcg_gen_mul_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); 2174 tcg_temp_free(t0); 2175 tcg_temp_free(t1); 2176 #else 2177 tcg_gen_mul_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 2178 cpu_gpr[rB(ctx->opcode)]); 2179 #endif 2180 if (unlikely(Rc(ctx->opcode) != 0)) { 2181 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2182 } 2183 } 2184 2185 /* mullwo mullwo. */ 2186 static void gen_mullwo(DisasContext *ctx) 2187 { 2188 TCGv_i32 t0 = tcg_temp_new_i32(); 2189 TCGv_i32 t1 = tcg_temp_new_i32(); 2190 2191 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); 2192 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); 2193 tcg_gen_muls2_i32(t0, t1, t0, t1); 2194 #if defined(TARGET_PPC64) 2195 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); 2196 #else 2197 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], t0); 2198 #endif 2199 2200 tcg_gen_sari_i32(t0, t0, 31); 2201 tcg_gen_setcond_i32(TCG_COND_NE, t0, t0, t1); 2202 tcg_gen_extu_i32_tl(cpu_ov, t0); 2203 if (is_isa300(ctx)) { 2204 tcg_gen_mov_tl(cpu_ov32, cpu_ov); 2205 } 2206 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 2207 2208 tcg_temp_free_i32(t0); 2209 tcg_temp_free_i32(t1); 2210 if (unlikely(Rc(ctx->opcode) != 0)) { 2211 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2212 } 2213 } 2214 2215 /* mulli */ 2216 static void gen_mulli(DisasContext *ctx) 2217 { 2218 tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 2219 SIMM(ctx->opcode)); 2220 } 2221 2222 #if defined(TARGET_PPC64) 2223 /* mulhd mulhd. */ 2224 static void gen_mulhd(DisasContext *ctx) 2225 { 2226 TCGv lo = tcg_temp_new(); 2227 tcg_gen_muls2_tl(lo, cpu_gpr[rD(ctx->opcode)], 2228 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2229 tcg_temp_free(lo); 2230 if (unlikely(Rc(ctx->opcode) != 0)) { 2231 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2232 } 2233 } 2234 2235 /* mulhdu mulhdu. */ 2236 static void gen_mulhdu(DisasContext *ctx) 2237 { 2238 TCGv lo = tcg_temp_new(); 2239 tcg_gen_mulu2_tl(lo, cpu_gpr[rD(ctx->opcode)], 2240 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2241 tcg_temp_free(lo); 2242 if (unlikely(Rc(ctx->opcode) != 0)) { 2243 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2244 } 2245 } 2246 2247 /* mulld mulld. */ 2248 static void gen_mulld(DisasContext *ctx) 2249 { 2250 tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 2251 cpu_gpr[rB(ctx->opcode)]); 2252 if (unlikely(Rc(ctx->opcode) != 0)) { 2253 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2254 } 2255 } 2256 2257 /* mulldo mulldo. */ 2258 static void gen_mulldo(DisasContext *ctx) 2259 { 2260 TCGv_i64 t0 = tcg_temp_new_i64(); 2261 TCGv_i64 t1 = tcg_temp_new_i64(); 2262 2263 tcg_gen_muls2_i64(t0, t1, cpu_gpr[rA(ctx->opcode)], 2264 cpu_gpr[rB(ctx->opcode)]); 2265 tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], t0); 2266 2267 tcg_gen_sari_i64(t0, t0, 63); 2268 tcg_gen_setcond_i64(TCG_COND_NE, cpu_ov, t0, t1); 2269 if (is_isa300(ctx)) { 2270 tcg_gen_mov_tl(cpu_ov32, cpu_ov); 2271 } 2272 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 2273 2274 tcg_temp_free_i64(t0); 2275 tcg_temp_free_i64(t1); 2276 2277 if (unlikely(Rc(ctx->opcode) != 0)) { 2278 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2279 } 2280 } 2281 #endif 2282 2283 /* Common subf function */ 2284 static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1, 2285 TCGv arg2, bool add_ca, bool compute_ca, 2286 bool compute_ov, bool compute_rc0) 2287 { 2288 TCGv t0 = ret; 2289 2290 if (compute_ca || compute_ov) { 2291 t0 = tcg_temp_new(); 2292 } 2293 2294 if (compute_ca) { 2295 /* dest = ~arg1 + arg2 [+ ca]. */ 2296 if (NARROW_MODE(ctx)) { 2297 /* 2298 * Caution: a non-obvious corner case of the spec is that 2299 * we must produce the *entire* 64-bit addition, but 2300 * produce the carry into bit 32. 2301 */ 2302 TCGv inv1 = tcg_temp_new(); 2303 TCGv t1 = tcg_temp_new(); 2304 tcg_gen_not_tl(inv1, arg1); 2305 if (add_ca) { 2306 tcg_gen_add_tl(t0, arg2, cpu_ca); 2307 } else { 2308 tcg_gen_addi_tl(t0, arg2, 1); 2309 } 2310 tcg_gen_xor_tl(t1, arg2, inv1); /* add without carry */ 2311 tcg_gen_add_tl(t0, t0, inv1); 2312 tcg_temp_free(inv1); 2313 tcg_gen_xor_tl(cpu_ca, t0, t1); /* bits changes w/ carry */ 2314 tcg_temp_free(t1); 2315 tcg_gen_extract_tl(cpu_ca, cpu_ca, 32, 1); 2316 if (is_isa300(ctx)) { 2317 tcg_gen_mov_tl(cpu_ca32, cpu_ca); 2318 } 2319 } else if (add_ca) { 2320 TCGv zero, inv1 = tcg_temp_new(); 2321 tcg_gen_not_tl(inv1, arg1); 2322 zero = tcg_const_tl(0); 2323 tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero); 2324 tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero); 2325 gen_op_arith_compute_ca32(ctx, t0, inv1, arg2, cpu_ca32, 0); 2326 tcg_temp_free(zero); 2327 tcg_temp_free(inv1); 2328 } else { 2329 tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1); 2330 tcg_gen_sub_tl(t0, arg2, arg1); 2331 gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, cpu_ca32, 1); 2332 } 2333 } else if (add_ca) { 2334 /* 2335 * Since we're ignoring carry-out, we can simplify the 2336 * standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1. 2337 */ 2338 tcg_gen_sub_tl(t0, arg2, arg1); 2339 tcg_gen_add_tl(t0, t0, cpu_ca); 2340 tcg_gen_subi_tl(t0, t0, 1); 2341 } else { 2342 tcg_gen_sub_tl(t0, arg2, arg1); 2343 } 2344 2345 if (compute_ov) { 2346 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1); 2347 } 2348 if (unlikely(compute_rc0)) { 2349 gen_set_Rc0(ctx, t0); 2350 } 2351 2352 if (t0 != ret) { 2353 tcg_gen_mov_tl(ret, t0); 2354 tcg_temp_free(t0); 2355 } 2356 } 2357 /* Sub functions with Two operands functions */ 2358 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \ 2359 static void glue(gen_, name)(DisasContext *ctx) \ 2360 { \ 2361 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \ 2362 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 2363 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 2364 } 2365 /* Sub functions with one operand and one immediate */ 2366 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \ 2367 add_ca, compute_ca, compute_ov) \ 2368 static void glue(gen_, name)(DisasContext *ctx) \ 2369 { \ 2370 TCGv t0 = tcg_const_tl(const_val); \ 2371 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \ 2372 cpu_gpr[rA(ctx->opcode)], t0, \ 2373 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 2374 tcg_temp_free(t0); \ 2375 } 2376 /* subf subf. subfo subfo. */ 2377 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0) 2378 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1) 2379 /* subfc subfc. subfco subfco. */ 2380 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0) 2381 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1) 2382 /* subfe subfe. subfeo subfo. */ 2383 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0) 2384 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1) 2385 /* subfme subfme. subfmeo subfmeo. */ 2386 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0) 2387 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1) 2388 /* subfze subfze. subfzeo subfzeo.*/ 2389 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0) 2390 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1) 2391 2392 /* subfic */ 2393 static void gen_subfic(DisasContext *ctx) 2394 { 2395 TCGv c = tcg_const_tl(SIMM(ctx->opcode)); 2396 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 2397 c, 0, 1, 0, 0); 2398 tcg_temp_free(c); 2399 } 2400 2401 /* neg neg. nego nego. */ 2402 static inline void gen_op_arith_neg(DisasContext *ctx, bool compute_ov) 2403 { 2404 TCGv zero = tcg_const_tl(0); 2405 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 2406 zero, 0, 0, compute_ov, Rc(ctx->opcode)); 2407 tcg_temp_free(zero); 2408 } 2409 2410 static void gen_neg(DisasContext *ctx) 2411 { 2412 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 2413 if (unlikely(Rc(ctx->opcode))) { 2414 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2415 } 2416 } 2417 2418 static void gen_nego(DisasContext *ctx) 2419 { 2420 gen_op_arith_neg(ctx, 1); 2421 } 2422 2423 /*** Integer logical ***/ 2424 #define GEN_LOGICAL2(name, tcg_op, opc, type) \ 2425 static void glue(gen_, name)(DisasContext *ctx) \ 2426 { \ 2427 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], \ 2428 cpu_gpr[rB(ctx->opcode)]); \ 2429 if (unlikely(Rc(ctx->opcode) != 0)) \ 2430 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \ 2431 } 2432 2433 #define GEN_LOGICAL1(name, tcg_op, opc, type) \ 2434 static void glue(gen_, name)(DisasContext *ctx) \ 2435 { \ 2436 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); \ 2437 if (unlikely(Rc(ctx->opcode) != 0)) \ 2438 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \ 2439 } 2440 2441 /* and & and. */ 2442 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER); 2443 /* andc & andc. */ 2444 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER); 2445 2446 /* andi. */ 2447 static void gen_andi_(DisasContext *ctx) 2448 { 2449 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 2450 UIMM(ctx->opcode)); 2451 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2452 } 2453 2454 /* andis. */ 2455 static void gen_andis_(DisasContext *ctx) 2456 { 2457 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 2458 UIMM(ctx->opcode) << 16); 2459 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2460 } 2461 2462 /* cntlzw */ 2463 static void gen_cntlzw(DisasContext *ctx) 2464 { 2465 TCGv_i32 t = tcg_temp_new_i32(); 2466 2467 tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]); 2468 tcg_gen_clzi_i32(t, t, 32); 2469 tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t); 2470 tcg_temp_free_i32(t); 2471 2472 if (unlikely(Rc(ctx->opcode) != 0)) { 2473 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2474 } 2475 } 2476 2477 /* cnttzw */ 2478 static void gen_cnttzw(DisasContext *ctx) 2479 { 2480 TCGv_i32 t = tcg_temp_new_i32(); 2481 2482 tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]); 2483 tcg_gen_ctzi_i32(t, t, 32); 2484 tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t); 2485 tcg_temp_free_i32(t); 2486 2487 if (unlikely(Rc(ctx->opcode) != 0)) { 2488 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2489 } 2490 } 2491 2492 /* eqv & eqv. */ 2493 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER); 2494 /* extsb & extsb. */ 2495 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER); 2496 /* extsh & extsh. */ 2497 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER); 2498 /* nand & nand. */ 2499 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER); 2500 /* nor & nor. */ 2501 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER); 2502 2503 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) 2504 static void gen_pause(DisasContext *ctx) 2505 { 2506 TCGv_i32 t0 = tcg_const_i32(0); 2507 tcg_gen_st_i32(t0, cpu_env, 2508 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted)); 2509 tcg_temp_free_i32(t0); 2510 2511 /* Stop translation, this gives other CPUs a chance to run */ 2512 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 2513 } 2514 #endif /* defined(TARGET_PPC64) */ 2515 2516 /* or & or. */ 2517 static void gen_or(DisasContext *ctx) 2518 { 2519 int rs, ra, rb; 2520 2521 rs = rS(ctx->opcode); 2522 ra = rA(ctx->opcode); 2523 rb = rB(ctx->opcode); 2524 /* Optimisation for mr. ri case */ 2525 if (rs != ra || rs != rb) { 2526 if (rs != rb) { 2527 tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]); 2528 } else { 2529 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]); 2530 } 2531 if (unlikely(Rc(ctx->opcode) != 0)) { 2532 gen_set_Rc0(ctx, cpu_gpr[ra]); 2533 } 2534 } else if (unlikely(Rc(ctx->opcode) != 0)) { 2535 gen_set_Rc0(ctx, cpu_gpr[rs]); 2536 #if defined(TARGET_PPC64) 2537 } else if (rs != 0) { /* 0 is nop */ 2538 int prio = 0; 2539 2540 switch (rs) { 2541 case 1: 2542 /* Set process priority to low */ 2543 prio = 2; 2544 break; 2545 case 6: 2546 /* Set process priority to medium-low */ 2547 prio = 3; 2548 break; 2549 case 2: 2550 /* Set process priority to normal */ 2551 prio = 4; 2552 break; 2553 #if !defined(CONFIG_USER_ONLY) 2554 case 31: 2555 if (!ctx->pr) { 2556 /* Set process priority to very low */ 2557 prio = 1; 2558 } 2559 break; 2560 case 5: 2561 if (!ctx->pr) { 2562 /* Set process priority to medium-hight */ 2563 prio = 5; 2564 } 2565 break; 2566 case 3: 2567 if (!ctx->pr) { 2568 /* Set process priority to high */ 2569 prio = 6; 2570 } 2571 break; 2572 case 7: 2573 if (ctx->hv && !ctx->pr) { 2574 /* Set process priority to very high */ 2575 prio = 7; 2576 } 2577 break; 2578 #endif 2579 default: 2580 break; 2581 } 2582 if (prio) { 2583 TCGv t0 = tcg_temp_new(); 2584 gen_load_spr(t0, SPR_PPR); 2585 tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL); 2586 tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50); 2587 gen_store_spr(SPR_PPR, t0); 2588 tcg_temp_free(t0); 2589 } 2590 #if !defined(CONFIG_USER_ONLY) 2591 /* 2592 * Pause out of TCG otherwise spin loops with smt_low eat too 2593 * much CPU and the kernel hangs. This applies to all 2594 * encodings other than no-op, e.g., miso(rs=26), yield(27), 2595 * mdoio(29), mdoom(30), and all currently undefined. 2596 */ 2597 gen_pause(ctx); 2598 #endif 2599 #endif 2600 } 2601 } 2602 /* orc & orc. */ 2603 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER); 2604 2605 /* xor & xor. */ 2606 static void gen_xor(DisasContext *ctx) 2607 { 2608 /* Optimisation for "set to zero" case */ 2609 if (rS(ctx->opcode) != rB(ctx->opcode)) { 2610 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 2611 cpu_gpr[rB(ctx->opcode)]); 2612 } else { 2613 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 2614 } 2615 if (unlikely(Rc(ctx->opcode) != 0)) { 2616 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2617 } 2618 } 2619 2620 /* ori */ 2621 static void gen_ori(DisasContext *ctx) 2622 { 2623 target_ulong uimm = UIMM(ctx->opcode); 2624 2625 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 2626 return; 2627 } 2628 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm); 2629 } 2630 2631 /* oris */ 2632 static void gen_oris(DisasContext *ctx) 2633 { 2634 target_ulong uimm = UIMM(ctx->opcode); 2635 2636 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 2637 /* NOP */ 2638 return; 2639 } 2640 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 2641 uimm << 16); 2642 } 2643 2644 /* xori */ 2645 static void gen_xori(DisasContext *ctx) 2646 { 2647 target_ulong uimm = UIMM(ctx->opcode); 2648 2649 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 2650 /* NOP */ 2651 return; 2652 } 2653 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm); 2654 } 2655 2656 /* xoris */ 2657 static void gen_xoris(DisasContext *ctx) 2658 { 2659 target_ulong uimm = UIMM(ctx->opcode); 2660 2661 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 2662 /* NOP */ 2663 return; 2664 } 2665 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 2666 uimm << 16); 2667 } 2668 2669 /* popcntb : PowerPC 2.03 specification */ 2670 static void gen_popcntb(DisasContext *ctx) 2671 { 2672 gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 2673 } 2674 2675 static void gen_popcntw(DisasContext *ctx) 2676 { 2677 #if defined(TARGET_PPC64) 2678 gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 2679 #else 2680 tcg_gen_ctpop_i32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 2681 #endif 2682 } 2683 2684 #if defined(TARGET_PPC64) 2685 /* popcntd: PowerPC 2.06 specification */ 2686 static void gen_popcntd(DisasContext *ctx) 2687 { 2688 tcg_gen_ctpop_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 2689 } 2690 #endif 2691 2692 /* prtyw: PowerPC 2.05 specification */ 2693 static void gen_prtyw(DisasContext *ctx) 2694 { 2695 TCGv ra = cpu_gpr[rA(ctx->opcode)]; 2696 TCGv rs = cpu_gpr[rS(ctx->opcode)]; 2697 TCGv t0 = tcg_temp_new(); 2698 tcg_gen_shri_tl(t0, rs, 16); 2699 tcg_gen_xor_tl(ra, rs, t0); 2700 tcg_gen_shri_tl(t0, ra, 8); 2701 tcg_gen_xor_tl(ra, ra, t0); 2702 tcg_gen_andi_tl(ra, ra, (target_ulong)0x100000001ULL); 2703 tcg_temp_free(t0); 2704 } 2705 2706 #if defined(TARGET_PPC64) 2707 /* prtyd: PowerPC 2.05 specification */ 2708 static void gen_prtyd(DisasContext *ctx) 2709 { 2710 TCGv ra = cpu_gpr[rA(ctx->opcode)]; 2711 TCGv rs = cpu_gpr[rS(ctx->opcode)]; 2712 TCGv t0 = tcg_temp_new(); 2713 tcg_gen_shri_tl(t0, rs, 32); 2714 tcg_gen_xor_tl(ra, rs, t0); 2715 tcg_gen_shri_tl(t0, ra, 16); 2716 tcg_gen_xor_tl(ra, ra, t0); 2717 tcg_gen_shri_tl(t0, ra, 8); 2718 tcg_gen_xor_tl(ra, ra, t0); 2719 tcg_gen_andi_tl(ra, ra, 1); 2720 tcg_temp_free(t0); 2721 } 2722 #endif 2723 2724 #if defined(TARGET_PPC64) 2725 /* bpermd */ 2726 static void gen_bpermd(DisasContext *ctx) 2727 { 2728 gen_helper_bpermd(cpu_gpr[rA(ctx->opcode)], 2729 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2730 } 2731 #endif 2732 2733 #if defined(TARGET_PPC64) 2734 /* extsw & extsw. */ 2735 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B); 2736 2737 /* cntlzd */ 2738 static void gen_cntlzd(DisasContext *ctx) 2739 { 2740 tcg_gen_clzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64); 2741 if (unlikely(Rc(ctx->opcode) != 0)) { 2742 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2743 } 2744 } 2745 2746 /* cnttzd */ 2747 static void gen_cnttzd(DisasContext *ctx) 2748 { 2749 tcg_gen_ctzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64); 2750 if (unlikely(Rc(ctx->opcode) != 0)) { 2751 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2752 } 2753 } 2754 2755 /* darn */ 2756 static void gen_darn(DisasContext *ctx) 2757 { 2758 int l = L(ctx->opcode); 2759 2760 if (l > 2) { 2761 tcg_gen_movi_i64(cpu_gpr[rD(ctx->opcode)], -1); 2762 } else { 2763 gen_icount_io_start(ctx); 2764 if (l == 0) { 2765 gen_helper_darn32(cpu_gpr[rD(ctx->opcode)]); 2766 } else { 2767 /* Return 64-bit random for both CRN and RRN */ 2768 gen_helper_darn64(cpu_gpr[rD(ctx->opcode)]); 2769 } 2770 } 2771 } 2772 #endif 2773 2774 /*** Integer rotate ***/ 2775 2776 /* rlwimi & rlwimi. */ 2777 static void gen_rlwimi(DisasContext *ctx) 2778 { 2779 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2780 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2781 uint32_t sh = SH(ctx->opcode); 2782 uint32_t mb = MB(ctx->opcode); 2783 uint32_t me = ME(ctx->opcode); 2784 2785 if (sh == (31 - me) && mb <= me) { 2786 tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1); 2787 } else { 2788 target_ulong mask; 2789 bool mask_in_32b = true; 2790 TCGv t1; 2791 2792 #if defined(TARGET_PPC64) 2793 mb += 32; 2794 me += 32; 2795 #endif 2796 mask = MASK(mb, me); 2797 2798 #if defined(TARGET_PPC64) 2799 if (mask > 0xffffffffu) { 2800 mask_in_32b = false; 2801 } 2802 #endif 2803 t1 = tcg_temp_new(); 2804 if (mask_in_32b) { 2805 TCGv_i32 t0 = tcg_temp_new_i32(); 2806 tcg_gen_trunc_tl_i32(t0, t_rs); 2807 tcg_gen_rotli_i32(t0, t0, sh); 2808 tcg_gen_extu_i32_tl(t1, t0); 2809 tcg_temp_free_i32(t0); 2810 } else { 2811 #if defined(TARGET_PPC64) 2812 tcg_gen_deposit_i64(t1, t_rs, t_rs, 32, 32); 2813 tcg_gen_rotli_i64(t1, t1, sh); 2814 #else 2815 g_assert_not_reached(); 2816 #endif 2817 } 2818 2819 tcg_gen_andi_tl(t1, t1, mask); 2820 tcg_gen_andi_tl(t_ra, t_ra, ~mask); 2821 tcg_gen_or_tl(t_ra, t_ra, t1); 2822 tcg_temp_free(t1); 2823 } 2824 if (unlikely(Rc(ctx->opcode) != 0)) { 2825 gen_set_Rc0(ctx, t_ra); 2826 } 2827 } 2828 2829 /* rlwinm & rlwinm. */ 2830 static void gen_rlwinm(DisasContext *ctx) 2831 { 2832 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2833 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2834 int sh = SH(ctx->opcode); 2835 int mb = MB(ctx->opcode); 2836 int me = ME(ctx->opcode); 2837 int len = me - mb + 1; 2838 int rsh = (32 - sh) & 31; 2839 2840 if (sh != 0 && len > 0 && me == (31 - sh)) { 2841 tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len); 2842 } else if (me == 31 && rsh + len <= 32) { 2843 tcg_gen_extract_tl(t_ra, t_rs, rsh, len); 2844 } else { 2845 target_ulong mask; 2846 bool mask_in_32b = true; 2847 #if defined(TARGET_PPC64) 2848 mb += 32; 2849 me += 32; 2850 #endif 2851 mask = MASK(mb, me); 2852 #if defined(TARGET_PPC64) 2853 if (mask > 0xffffffffu) { 2854 mask_in_32b = false; 2855 } 2856 #endif 2857 if (mask_in_32b) { 2858 if (sh == 0) { 2859 tcg_gen_andi_tl(t_ra, t_rs, mask); 2860 } else { 2861 TCGv_i32 t0 = tcg_temp_new_i32(); 2862 tcg_gen_trunc_tl_i32(t0, t_rs); 2863 tcg_gen_rotli_i32(t0, t0, sh); 2864 tcg_gen_andi_i32(t0, t0, mask); 2865 tcg_gen_extu_i32_tl(t_ra, t0); 2866 tcg_temp_free_i32(t0); 2867 } 2868 } else { 2869 #if defined(TARGET_PPC64) 2870 tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32); 2871 tcg_gen_rotli_i64(t_ra, t_ra, sh); 2872 tcg_gen_andi_i64(t_ra, t_ra, mask); 2873 #else 2874 g_assert_not_reached(); 2875 #endif 2876 } 2877 } 2878 if (unlikely(Rc(ctx->opcode) != 0)) { 2879 gen_set_Rc0(ctx, t_ra); 2880 } 2881 } 2882 2883 /* rlwnm & rlwnm. */ 2884 static void gen_rlwnm(DisasContext *ctx) 2885 { 2886 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2887 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2888 TCGv t_rb = cpu_gpr[rB(ctx->opcode)]; 2889 uint32_t mb = MB(ctx->opcode); 2890 uint32_t me = ME(ctx->opcode); 2891 target_ulong mask; 2892 bool mask_in_32b = true; 2893 2894 #if defined(TARGET_PPC64) 2895 mb += 32; 2896 me += 32; 2897 #endif 2898 mask = MASK(mb, me); 2899 2900 #if defined(TARGET_PPC64) 2901 if (mask > 0xffffffffu) { 2902 mask_in_32b = false; 2903 } 2904 #endif 2905 if (mask_in_32b) { 2906 TCGv_i32 t0 = tcg_temp_new_i32(); 2907 TCGv_i32 t1 = tcg_temp_new_i32(); 2908 tcg_gen_trunc_tl_i32(t0, t_rb); 2909 tcg_gen_trunc_tl_i32(t1, t_rs); 2910 tcg_gen_andi_i32(t0, t0, 0x1f); 2911 tcg_gen_rotl_i32(t1, t1, t0); 2912 tcg_gen_extu_i32_tl(t_ra, t1); 2913 tcg_temp_free_i32(t0); 2914 tcg_temp_free_i32(t1); 2915 } else { 2916 #if defined(TARGET_PPC64) 2917 TCGv_i64 t0 = tcg_temp_new_i64(); 2918 tcg_gen_andi_i64(t0, t_rb, 0x1f); 2919 tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32); 2920 tcg_gen_rotl_i64(t_ra, t_ra, t0); 2921 tcg_temp_free_i64(t0); 2922 #else 2923 g_assert_not_reached(); 2924 #endif 2925 } 2926 2927 tcg_gen_andi_tl(t_ra, t_ra, mask); 2928 2929 if (unlikely(Rc(ctx->opcode) != 0)) { 2930 gen_set_Rc0(ctx, t_ra); 2931 } 2932 } 2933 2934 #if defined(TARGET_PPC64) 2935 #define GEN_PPC64_R2(name, opc1, opc2) \ 2936 static void glue(gen_, name##0)(DisasContext *ctx) \ 2937 { \ 2938 gen_##name(ctx, 0); \ 2939 } \ 2940 \ 2941 static void glue(gen_, name##1)(DisasContext *ctx) \ 2942 { \ 2943 gen_##name(ctx, 1); \ 2944 } 2945 #define GEN_PPC64_R4(name, opc1, opc2) \ 2946 static void glue(gen_, name##0)(DisasContext *ctx) \ 2947 { \ 2948 gen_##name(ctx, 0, 0); \ 2949 } \ 2950 \ 2951 static void glue(gen_, name##1)(DisasContext *ctx) \ 2952 { \ 2953 gen_##name(ctx, 0, 1); \ 2954 } \ 2955 \ 2956 static void glue(gen_, name##2)(DisasContext *ctx) \ 2957 { \ 2958 gen_##name(ctx, 1, 0); \ 2959 } \ 2960 \ 2961 static void glue(gen_, name##3)(DisasContext *ctx) \ 2962 { \ 2963 gen_##name(ctx, 1, 1); \ 2964 } 2965 2966 static void gen_rldinm(DisasContext *ctx, int mb, int me, int sh) 2967 { 2968 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2969 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2970 int len = me - mb + 1; 2971 int rsh = (64 - sh) & 63; 2972 2973 if (sh != 0 && len > 0 && me == (63 - sh)) { 2974 tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len); 2975 } else if (me == 63 && rsh + len <= 64) { 2976 tcg_gen_extract_tl(t_ra, t_rs, rsh, len); 2977 } else { 2978 tcg_gen_rotli_tl(t_ra, t_rs, sh); 2979 tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me)); 2980 } 2981 if (unlikely(Rc(ctx->opcode) != 0)) { 2982 gen_set_Rc0(ctx, t_ra); 2983 } 2984 } 2985 2986 /* rldicl - rldicl. */ 2987 static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn) 2988 { 2989 uint32_t sh, mb; 2990 2991 sh = SH(ctx->opcode) | (shn << 5); 2992 mb = MB(ctx->opcode) | (mbn << 5); 2993 gen_rldinm(ctx, mb, 63, sh); 2994 } 2995 GEN_PPC64_R4(rldicl, 0x1E, 0x00); 2996 2997 /* rldicr - rldicr. */ 2998 static inline void gen_rldicr(DisasContext *ctx, int men, int shn) 2999 { 3000 uint32_t sh, me; 3001 3002 sh = SH(ctx->opcode) | (shn << 5); 3003 me = MB(ctx->opcode) | (men << 5); 3004 gen_rldinm(ctx, 0, me, sh); 3005 } 3006 GEN_PPC64_R4(rldicr, 0x1E, 0x02); 3007 3008 /* rldic - rldic. */ 3009 static inline void gen_rldic(DisasContext *ctx, int mbn, int shn) 3010 { 3011 uint32_t sh, mb; 3012 3013 sh = SH(ctx->opcode) | (shn << 5); 3014 mb = MB(ctx->opcode) | (mbn << 5); 3015 gen_rldinm(ctx, mb, 63 - sh, sh); 3016 } 3017 GEN_PPC64_R4(rldic, 0x1E, 0x04); 3018 3019 static void gen_rldnm(DisasContext *ctx, int mb, int me) 3020 { 3021 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 3022 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 3023 TCGv t_rb = cpu_gpr[rB(ctx->opcode)]; 3024 TCGv t0; 3025 3026 t0 = tcg_temp_new(); 3027 tcg_gen_andi_tl(t0, t_rb, 0x3f); 3028 tcg_gen_rotl_tl(t_ra, t_rs, t0); 3029 tcg_temp_free(t0); 3030 3031 tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me)); 3032 if (unlikely(Rc(ctx->opcode) != 0)) { 3033 gen_set_Rc0(ctx, t_ra); 3034 } 3035 } 3036 3037 /* rldcl - rldcl. */ 3038 static inline void gen_rldcl(DisasContext *ctx, int mbn) 3039 { 3040 uint32_t mb; 3041 3042 mb = MB(ctx->opcode) | (mbn << 5); 3043 gen_rldnm(ctx, mb, 63); 3044 } 3045 GEN_PPC64_R2(rldcl, 0x1E, 0x08); 3046 3047 /* rldcr - rldcr. */ 3048 static inline void gen_rldcr(DisasContext *ctx, int men) 3049 { 3050 uint32_t me; 3051 3052 me = MB(ctx->opcode) | (men << 5); 3053 gen_rldnm(ctx, 0, me); 3054 } 3055 GEN_PPC64_R2(rldcr, 0x1E, 0x09); 3056 3057 /* rldimi - rldimi. */ 3058 static void gen_rldimi(DisasContext *ctx, int mbn, int shn) 3059 { 3060 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 3061 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 3062 uint32_t sh = SH(ctx->opcode) | (shn << 5); 3063 uint32_t mb = MB(ctx->opcode) | (mbn << 5); 3064 uint32_t me = 63 - sh; 3065 3066 if (mb <= me) { 3067 tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1); 3068 } else { 3069 target_ulong mask = MASK(mb, me); 3070 TCGv t1 = tcg_temp_new(); 3071 3072 tcg_gen_rotli_tl(t1, t_rs, sh); 3073 tcg_gen_andi_tl(t1, t1, mask); 3074 tcg_gen_andi_tl(t_ra, t_ra, ~mask); 3075 tcg_gen_or_tl(t_ra, t_ra, t1); 3076 tcg_temp_free(t1); 3077 } 3078 if (unlikely(Rc(ctx->opcode) != 0)) { 3079 gen_set_Rc0(ctx, t_ra); 3080 } 3081 } 3082 GEN_PPC64_R4(rldimi, 0x1E, 0x06); 3083 #endif 3084 3085 /*** Integer shift ***/ 3086 3087 /* slw & slw. */ 3088 static void gen_slw(DisasContext *ctx) 3089 { 3090 TCGv t0, t1; 3091 3092 t0 = tcg_temp_new(); 3093 /* AND rS with a mask that is 0 when rB >= 0x20 */ 3094 #if defined(TARGET_PPC64) 3095 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a); 3096 tcg_gen_sari_tl(t0, t0, 0x3f); 3097 #else 3098 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a); 3099 tcg_gen_sari_tl(t0, t0, 0x1f); 3100 #endif 3101 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 3102 t1 = tcg_temp_new(); 3103 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f); 3104 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 3105 tcg_temp_free(t1); 3106 tcg_temp_free(t0); 3107 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 3108 if (unlikely(Rc(ctx->opcode) != 0)) { 3109 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 3110 } 3111 } 3112 3113 /* sraw & sraw. */ 3114 static void gen_sraw(DisasContext *ctx) 3115 { 3116 gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], cpu_env, 3117 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 3118 if (unlikely(Rc(ctx->opcode) != 0)) { 3119 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 3120 } 3121 } 3122 3123 /* srawi & srawi. */ 3124 static void gen_srawi(DisasContext *ctx) 3125 { 3126 int sh = SH(ctx->opcode); 3127 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 3128 TCGv src = cpu_gpr[rS(ctx->opcode)]; 3129 if (sh == 0) { 3130 tcg_gen_ext32s_tl(dst, src); 3131 tcg_gen_movi_tl(cpu_ca, 0); 3132 if (is_isa300(ctx)) { 3133 tcg_gen_movi_tl(cpu_ca32, 0); 3134 } 3135 } else { 3136 TCGv t0; 3137 tcg_gen_ext32s_tl(dst, src); 3138 tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1); 3139 t0 = tcg_temp_new(); 3140 tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1); 3141 tcg_gen_and_tl(cpu_ca, cpu_ca, t0); 3142 tcg_temp_free(t0); 3143 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0); 3144 if (is_isa300(ctx)) { 3145 tcg_gen_mov_tl(cpu_ca32, cpu_ca); 3146 } 3147 tcg_gen_sari_tl(dst, dst, sh); 3148 } 3149 if (unlikely(Rc(ctx->opcode) != 0)) { 3150 gen_set_Rc0(ctx, dst); 3151 } 3152 } 3153 3154 /* srw & srw. */ 3155 static void gen_srw(DisasContext *ctx) 3156 { 3157 TCGv t0, t1; 3158 3159 t0 = tcg_temp_new(); 3160 /* AND rS with a mask that is 0 when rB >= 0x20 */ 3161 #if defined(TARGET_PPC64) 3162 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a); 3163 tcg_gen_sari_tl(t0, t0, 0x3f); 3164 #else 3165 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a); 3166 tcg_gen_sari_tl(t0, t0, 0x1f); 3167 #endif 3168 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 3169 tcg_gen_ext32u_tl(t0, t0); 3170 t1 = tcg_temp_new(); 3171 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f); 3172 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 3173 tcg_temp_free(t1); 3174 tcg_temp_free(t0); 3175 if (unlikely(Rc(ctx->opcode) != 0)) { 3176 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 3177 } 3178 } 3179 3180 #if defined(TARGET_PPC64) 3181 /* sld & sld. */ 3182 static void gen_sld(DisasContext *ctx) 3183 { 3184 TCGv t0, t1; 3185 3186 t0 = tcg_temp_new(); 3187 /* AND rS with a mask that is 0 when rB >= 0x40 */ 3188 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39); 3189 tcg_gen_sari_tl(t0, t0, 0x3f); 3190 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 3191 t1 = tcg_temp_new(); 3192 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f); 3193 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 3194 tcg_temp_free(t1); 3195 tcg_temp_free(t0); 3196 if (unlikely(Rc(ctx->opcode) != 0)) { 3197 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 3198 } 3199 } 3200 3201 /* srad & srad. */ 3202 static void gen_srad(DisasContext *ctx) 3203 { 3204 gen_helper_srad(cpu_gpr[rA(ctx->opcode)], cpu_env, 3205 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 3206 if (unlikely(Rc(ctx->opcode) != 0)) { 3207 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 3208 } 3209 } 3210 /* sradi & sradi. */ 3211 static inline void gen_sradi(DisasContext *ctx, int n) 3212 { 3213 int sh = SH(ctx->opcode) + (n << 5); 3214 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 3215 TCGv src = cpu_gpr[rS(ctx->opcode)]; 3216 if (sh == 0) { 3217 tcg_gen_mov_tl(dst, src); 3218 tcg_gen_movi_tl(cpu_ca, 0); 3219 if (is_isa300(ctx)) { 3220 tcg_gen_movi_tl(cpu_ca32, 0); 3221 } 3222 } else { 3223 TCGv t0; 3224 tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1); 3225 t0 = tcg_temp_new(); 3226 tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1); 3227 tcg_gen_and_tl(cpu_ca, cpu_ca, t0); 3228 tcg_temp_free(t0); 3229 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0); 3230 if (is_isa300(ctx)) { 3231 tcg_gen_mov_tl(cpu_ca32, cpu_ca); 3232 } 3233 tcg_gen_sari_tl(dst, src, sh); 3234 } 3235 if (unlikely(Rc(ctx->opcode) != 0)) { 3236 gen_set_Rc0(ctx, dst); 3237 } 3238 } 3239 3240 static void gen_sradi0(DisasContext *ctx) 3241 { 3242 gen_sradi(ctx, 0); 3243 } 3244 3245 static void gen_sradi1(DisasContext *ctx) 3246 { 3247 gen_sradi(ctx, 1); 3248 } 3249 3250 /* extswsli & extswsli. */ 3251 static inline void gen_extswsli(DisasContext *ctx, int n) 3252 { 3253 int sh = SH(ctx->opcode) + (n << 5); 3254 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 3255 TCGv src = cpu_gpr[rS(ctx->opcode)]; 3256 3257 tcg_gen_ext32s_tl(dst, src); 3258 tcg_gen_shli_tl(dst, dst, sh); 3259 if (unlikely(Rc(ctx->opcode) != 0)) { 3260 gen_set_Rc0(ctx, dst); 3261 } 3262 } 3263 3264 static void gen_extswsli0(DisasContext *ctx) 3265 { 3266 gen_extswsli(ctx, 0); 3267 } 3268 3269 static void gen_extswsli1(DisasContext *ctx) 3270 { 3271 gen_extswsli(ctx, 1); 3272 } 3273 3274 /* srd & srd. */ 3275 static void gen_srd(DisasContext *ctx) 3276 { 3277 TCGv t0, t1; 3278 3279 t0 = tcg_temp_new(); 3280 /* AND rS with a mask that is 0 when rB >= 0x40 */ 3281 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39); 3282 tcg_gen_sari_tl(t0, t0, 0x3f); 3283 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 3284 t1 = tcg_temp_new(); 3285 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f); 3286 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 3287 tcg_temp_free(t1); 3288 tcg_temp_free(t0); 3289 if (unlikely(Rc(ctx->opcode) != 0)) { 3290 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 3291 } 3292 } 3293 #endif 3294 3295 /*** Addressing modes ***/ 3296 /* Register indirect with immediate index : EA = (rA|0) + SIMM */ 3297 static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA, 3298 target_long maskl) 3299 { 3300 target_long simm = SIMM(ctx->opcode); 3301 3302 simm &= ~maskl; 3303 if (rA(ctx->opcode) == 0) { 3304 if (NARROW_MODE(ctx)) { 3305 simm = (uint32_t)simm; 3306 } 3307 tcg_gen_movi_tl(EA, simm); 3308 } else if (likely(simm != 0)) { 3309 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm); 3310 if (NARROW_MODE(ctx)) { 3311 tcg_gen_ext32u_tl(EA, EA); 3312 } 3313 } else { 3314 if (NARROW_MODE(ctx)) { 3315 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]); 3316 } else { 3317 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]); 3318 } 3319 } 3320 } 3321 3322 static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA) 3323 { 3324 if (rA(ctx->opcode) == 0) { 3325 if (NARROW_MODE(ctx)) { 3326 tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]); 3327 } else { 3328 tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]); 3329 } 3330 } else { 3331 tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 3332 if (NARROW_MODE(ctx)) { 3333 tcg_gen_ext32u_tl(EA, EA); 3334 } 3335 } 3336 } 3337 3338 static inline void gen_addr_register(DisasContext *ctx, TCGv EA) 3339 { 3340 if (rA(ctx->opcode) == 0) { 3341 tcg_gen_movi_tl(EA, 0); 3342 } else if (NARROW_MODE(ctx)) { 3343 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]); 3344 } else { 3345 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]); 3346 } 3347 } 3348 3349 static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1, 3350 target_long val) 3351 { 3352 tcg_gen_addi_tl(ret, arg1, val); 3353 if (NARROW_MODE(ctx)) { 3354 tcg_gen_ext32u_tl(ret, ret); 3355 } 3356 } 3357 3358 static inline void gen_align_no_le(DisasContext *ctx) 3359 { 3360 gen_exception_err(ctx, POWERPC_EXCP_ALIGN, 3361 (ctx->opcode & 0x03FF0000) | POWERPC_EXCP_ALIGN_LE); 3362 } 3363 3364 /*** Integer load ***/ 3365 #define DEF_MEMOP(op) ((op) | ctx->default_tcg_memop_mask) 3366 #define BSWAP_MEMOP(op) ((op) | (ctx->default_tcg_memop_mask ^ MO_BSWAP)) 3367 3368 #define GEN_QEMU_LOAD_TL(ldop, op) \ 3369 static void glue(gen_qemu_, ldop)(DisasContext *ctx, \ 3370 TCGv val, \ 3371 TCGv addr) \ 3372 { \ 3373 tcg_gen_qemu_ld_tl(val, addr, ctx->mem_idx, op); \ 3374 } 3375 3376 GEN_QEMU_LOAD_TL(ld8u, DEF_MEMOP(MO_UB)) 3377 GEN_QEMU_LOAD_TL(ld16u, DEF_MEMOP(MO_UW)) 3378 GEN_QEMU_LOAD_TL(ld16s, DEF_MEMOP(MO_SW)) 3379 GEN_QEMU_LOAD_TL(ld32u, DEF_MEMOP(MO_UL)) 3380 GEN_QEMU_LOAD_TL(ld32s, DEF_MEMOP(MO_SL)) 3381 3382 GEN_QEMU_LOAD_TL(ld16ur, BSWAP_MEMOP(MO_UW)) 3383 GEN_QEMU_LOAD_TL(ld32ur, BSWAP_MEMOP(MO_UL)) 3384 3385 #define GEN_QEMU_LOAD_64(ldop, op) \ 3386 static void glue(gen_qemu_, glue(ldop, _i64))(DisasContext *ctx, \ 3387 TCGv_i64 val, \ 3388 TCGv addr) \ 3389 { \ 3390 tcg_gen_qemu_ld_i64(val, addr, ctx->mem_idx, op); \ 3391 } 3392 3393 GEN_QEMU_LOAD_64(ld8u, DEF_MEMOP(MO_UB)) 3394 GEN_QEMU_LOAD_64(ld16u, DEF_MEMOP(MO_UW)) 3395 GEN_QEMU_LOAD_64(ld32u, DEF_MEMOP(MO_UL)) 3396 GEN_QEMU_LOAD_64(ld32s, DEF_MEMOP(MO_SL)) 3397 GEN_QEMU_LOAD_64(ld64, DEF_MEMOP(MO_Q)) 3398 3399 #if defined(TARGET_PPC64) 3400 GEN_QEMU_LOAD_64(ld64ur, BSWAP_MEMOP(MO_Q)) 3401 #endif 3402 3403 #define GEN_QEMU_STORE_TL(stop, op) \ 3404 static void glue(gen_qemu_, stop)(DisasContext *ctx, \ 3405 TCGv val, \ 3406 TCGv addr) \ 3407 { \ 3408 tcg_gen_qemu_st_tl(val, addr, ctx->mem_idx, op); \ 3409 } 3410 3411 GEN_QEMU_STORE_TL(st8, DEF_MEMOP(MO_UB)) 3412 GEN_QEMU_STORE_TL(st16, DEF_MEMOP(MO_UW)) 3413 GEN_QEMU_STORE_TL(st32, DEF_MEMOP(MO_UL)) 3414 3415 GEN_QEMU_STORE_TL(st16r, BSWAP_MEMOP(MO_UW)) 3416 GEN_QEMU_STORE_TL(st32r, BSWAP_MEMOP(MO_UL)) 3417 3418 #define GEN_QEMU_STORE_64(stop, op) \ 3419 static void glue(gen_qemu_, glue(stop, _i64))(DisasContext *ctx, \ 3420 TCGv_i64 val, \ 3421 TCGv addr) \ 3422 { \ 3423 tcg_gen_qemu_st_i64(val, addr, ctx->mem_idx, op); \ 3424 } 3425 3426 GEN_QEMU_STORE_64(st8, DEF_MEMOP(MO_UB)) 3427 GEN_QEMU_STORE_64(st16, DEF_MEMOP(MO_UW)) 3428 GEN_QEMU_STORE_64(st32, DEF_MEMOP(MO_UL)) 3429 GEN_QEMU_STORE_64(st64, DEF_MEMOP(MO_Q)) 3430 3431 #if defined(TARGET_PPC64) 3432 GEN_QEMU_STORE_64(st64r, BSWAP_MEMOP(MO_Q)) 3433 #endif 3434 3435 #define GEN_LD(name, ldop, opc, type) \ 3436 static void glue(gen_, name)(DisasContext *ctx) \ 3437 { \ 3438 TCGv EA; \ 3439 gen_set_access_type(ctx, ACCESS_INT); \ 3440 EA = tcg_temp_new(); \ 3441 gen_addr_imm_index(ctx, EA, 0); \ 3442 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 3443 tcg_temp_free(EA); \ 3444 } 3445 3446 #define GEN_LDU(name, ldop, opc, type) \ 3447 static void glue(gen_, name##u)(DisasContext *ctx) \ 3448 { \ 3449 TCGv EA; \ 3450 if (unlikely(rA(ctx->opcode) == 0 || \ 3451 rA(ctx->opcode) == rD(ctx->opcode))) { \ 3452 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 3453 return; \ 3454 } \ 3455 gen_set_access_type(ctx, ACCESS_INT); \ 3456 EA = tcg_temp_new(); \ 3457 if (type == PPC_64B) \ 3458 gen_addr_imm_index(ctx, EA, 0x03); \ 3459 else \ 3460 gen_addr_imm_index(ctx, EA, 0); \ 3461 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 3462 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 3463 tcg_temp_free(EA); \ 3464 } 3465 3466 #define GEN_LDUX(name, ldop, opc2, opc3, type) \ 3467 static void glue(gen_, name##ux)(DisasContext *ctx) \ 3468 { \ 3469 TCGv EA; \ 3470 if (unlikely(rA(ctx->opcode) == 0 || \ 3471 rA(ctx->opcode) == rD(ctx->opcode))) { \ 3472 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 3473 return; \ 3474 } \ 3475 gen_set_access_type(ctx, ACCESS_INT); \ 3476 EA = tcg_temp_new(); \ 3477 gen_addr_reg_index(ctx, EA); \ 3478 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 3479 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 3480 tcg_temp_free(EA); \ 3481 } 3482 3483 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \ 3484 static void glue(gen_, name##x)(DisasContext *ctx) \ 3485 { \ 3486 TCGv EA; \ 3487 chk; \ 3488 gen_set_access_type(ctx, ACCESS_INT); \ 3489 EA = tcg_temp_new(); \ 3490 gen_addr_reg_index(ctx, EA); \ 3491 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 3492 tcg_temp_free(EA); \ 3493 } 3494 3495 #define GEN_LDX(name, ldop, opc2, opc3, type) \ 3496 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_NONE) 3497 3498 #define GEN_LDX_HVRM(name, ldop, opc2, opc3, type) \ 3499 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_HVRM) 3500 3501 #define GEN_LDS(name, ldop, op, type) \ 3502 GEN_LD(name, ldop, op | 0x20, type); \ 3503 GEN_LDU(name, ldop, op | 0x21, type); \ 3504 GEN_LDUX(name, ldop, 0x17, op | 0x01, type); \ 3505 GEN_LDX(name, ldop, 0x17, op | 0x00, type) 3506 3507 /* lbz lbzu lbzux lbzx */ 3508 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER); 3509 /* lha lhau lhaux lhax */ 3510 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER); 3511 /* lhz lhzu lhzux lhzx */ 3512 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER); 3513 /* lwz lwzu lwzux lwzx */ 3514 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER); 3515 3516 #define GEN_LDEPX(name, ldop, opc2, opc3) \ 3517 static void glue(gen_, name##epx)(DisasContext *ctx) \ 3518 { \ 3519 TCGv EA; \ 3520 CHK_SV; \ 3521 gen_set_access_type(ctx, ACCESS_INT); \ 3522 EA = tcg_temp_new(); \ 3523 gen_addr_reg_index(ctx, EA); \ 3524 tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_LOAD, ldop);\ 3525 tcg_temp_free(EA); \ 3526 } 3527 3528 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02) 3529 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08) 3530 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00) 3531 #if defined(TARGET_PPC64) 3532 GEN_LDEPX(ld, DEF_MEMOP(MO_Q), 0x1D, 0x00) 3533 #endif 3534 3535 #if defined(TARGET_PPC64) 3536 /* lwaux */ 3537 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B); 3538 /* lwax */ 3539 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B); 3540 /* ldux */ 3541 GEN_LDUX(ld, ld64_i64, 0x15, 0x01, PPC_64B); 3542 /* ldx */ 3543 GEN_LDX(ld, ld64_i64, 0x15, 0x00, PPC_64B); 3544 3545 /* CI load/store variants */ 3546 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST) 3547 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x15, PPC_CILDST) 3548 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST) 3549 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST) 3550 3551 static void gen_ld(DisasContext *ctx) 3552 { 3553 TCGv EA; 3554 if (Rc(ctx->opcode)) { 3555 if (unlikely(rA(ctx->opcode) == 0 || 3556 rA(ctx->opcode) == rD(ctx->opcode))) { 3557 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3558 return; 3559 } 3560 } 3561 gen_set_access_type(ctx, ACCESS_INT); 3562 EA = tcg_temp_new(); 3563 gen_addr_imm_index(ctx, EA, 0x03); 3564 if (ctx->opcode & 0x02) { 3565 /* lwa (lwau is undefined) */ 3566 gen_qemu_ld32s(ctx, cpu_gpr[rD(ctx->opcode)], EA); 3567 } else { 3568 /* ld - ldu */ 3569 gen_qemu_ld64_i64(ctx, cpu_gpr[rD(ctx->opcode)], EA); 3570 } 3571 if (Rc(ctx->opcode)) { 3572 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); 3573 } 3574 tcg_temp_free(EA); 3575 } 3576 3577 /* lq */ 3578 static void gen_lq(DisasContext *ctx) 3579 { 3580 int ra, rd; 3581 TCGv EA, hi, lo; 3582 3583 /* lq is a legal user mode instruction starting in ISA 2.07 */ 3584 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 3585 bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 3586 3587 if (!legal_in_user_mode && ctx->pr) { 3588 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); 3589 return; 3590 } 3591 3592 if (!le_is_supported && ctx->le_mode) { 3593 gen_align_no_le(ctx); 3594 return; 3595 } 3596 ra = rA(ctx->opcode); 3597 rd = rD(ctx->opcode); 3598 if (unlikely((rd & 1) || rd == ra)) { 3599 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3600 return; 3601 } 3602 3603 gen_set_access_type(ctx, ACCESS_INT); 3604 EA = tcg_temp_new(); 3605 gen_addr_imm_index(ctx, EA, 0x0F); 3606 3607 /* Note that the low part is always in RD+1, even in LE mode. */ 3608 lo = cpu_gpr[rd + 1]; 3609 hi = cpu_gpr[rd]; 3610 3611 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3612 if (HAVE_ATOMIC128) { 3613 TCGv_i32 oi = tcg_temp_new_i32(); 3614 if (ctx->le_mode) { 3615 tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ, ctx->mem_idx)); 3616 gen_helper_lq_le_parallel(lo, cpu_env, EA, oi); 3617 } else { 3618 tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ, ctx->mem_idx)); 3619 gen_helper_lq_be_parallel(lo, cpu_env, EA, oi); 3620 } 3621 tcg_temp_free_i32(oi); 3622 tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh)); 3623 } else { 3624 /* Restart with exclusive lock. */ 3625 gen_helper_exit_atomic(cpu_env); 3626 ctx->base.is_jmp = DISAS_NORETURN; 3627 } 3628 } else if (ctx->le_mode) { 3629 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEQ); 3630 gen_addr_add(ctx, EA, EA, 8); 3631 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEQ); 3632 } else { 3633 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEQ); 3634 gen_addr_add(ctx, EA, EA, 8); 3635 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEQ); 3636 } 3637 tcg_temp_free(EA); 3638 } 3639 #endif 3640 3641 /*** Integer store ***/ 3642 #define GEN_ST(name, stop, opc, type) \ 3643 static void glue(gen_, name)(DisasContext *ctx) \ 3644 { \ 3645 TCGv EA; \ 3646 gen_set_access_type(ctx, ACCESS_INT); \ 3647 EA = tcg_temp_new(); \ 3648 gen_addr_imm_index(ctx, EA, 0); \ 3649 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 3650 tcg_temp_free(EA); \ 3651 } 3652 3653 #define GEN_STU(name, stop, opc, type) \ 3654 static void glue(gen_, stop##u)(DisasContext *ctx) \ 3655 { \ 3656 TCGv EA; \ 3657 if (unlikely(rA(ctx->opcode) == 0)) { \ 3658 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 3659 return; \ 3660 } \ 3661 gen_set_access_type(ctx, ACCESS_INT); \ 3662 EA = tcg_temp_new(); \ 3663 if (type == PPC_64B) \ 3664 gen_addr_imm_index(ctx, EA, 0x03); \ 3665 else \ 3666 gen_addr_imm_index(ctx, EA, 0); \ 3667 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 3668 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 3669 tcg_temp_free(EA); \ 3670 } 3671 3672 #define GEN_STUX(name, stop, opc2, opc3, type) \ 3673 static void glue(gen_, name##ux)(DisasContext *ctx) \ 3674 { \ 3675 TCGv EA; \ 3676 if (unlikely(rA(ctx->opcode) == 0)) { \ 3677 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 3678 return; \ 3679 } \ 3680 gen_set_access_type(ctx, ACCESS_INT); \ 3681 EA = tcg_temp_new(); \ 3682 gen_addr_reg_index(ctx, EA); \ 3683 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 3684 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 3685 tcg_temp_free(EA); \ 3686 } 3687 3688 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \ 3689 static void glue(gen_, name##x)(DisasContext *ctx) \ 3690 { \ 3691 TCGv EA; \ 3692 chk; \ 3693 gen_set_access_type(ctx, ACCESS_INT); \ 3694 EA = tcg_temp_new(); \ 3695 gen_addr_reg_index(ctx, EA); \ 3696 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 3697 tcg_temp_free(EA); \ 3698 } 3699 #define GEN_STX(name, stop, opc2, opc3, type) \ 3700 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_NONE) 3701 3702 #define GEN_STX_HVRM(name, stop, opc2, opc3, type) \ 3703 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_HVRM) 3704 3705 #define GEN_STS(name, stop, op, type) \ 3706 GEN_ST(name, stop, op | 0x20, type); \ 3707 GEN_STU(name, stop, op | 0x21, type); \ 3708 GEN_STUX(name, stop, 0x17, op | 0x01, type); \ 3709 GEN_STX(name, stop, 0x17, op | 0x00, type) 3710 3711 /* stb stbu stbux stbx */ 3712 GEN_STS(stb, st8, 0x06, PPC_INTEGER); 3713 /* sth sthu sthux sthx */ 3714 GEN_STS(sth, st16, 0x0C, PPC_INTEGER); 3715 /* stw stwu stwux stwx */ 3716 GEN_STS(stw, st32, 0x04, PPC_INTEGER); 3717 3718 #define GEN_STEPX(name, stop, opc2, opc3) \ 3719 static void glue(gen_, name##epx)(DisasContext *ctx) \ 3720 { \ 3721 TCGv EA; \ 3722 CHK_SV; \ 3723 gen_set_access_type(ctx, ACCESS_INT); \ 3724 EA = tcg_temp_new(); \ 3725 gen_addr_reg_index(ctx, EA); \ 3726 tcg_gen_qemu_st_tl( \ 3727 cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_STORE, stop); \ 3728 tcg_temp_free(EA); \ 3729 } 3730 3731 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06) 3732 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C) 3733 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04) 3734 #if defined(TARGET_PPC64) 3735 GEN_STEPX(std, DEF_MEMOP(MO_Q), 0x1d, 0x04) 3736 #endif 3737 3738 #if defined(TARGET_PPC64) 3739 GEN_STUX(std, st64_i64, 0x15, 0x05, PPC_64B); 3740 GEN_STX(std, st64_i64, 0x15, 0x04, PPC_64B); 3741 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST) 3742 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST) 3743 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST) 3744 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST) 3745 3746 static void gen_std(DisasContext *ctx) 3747 { 3748 int rs; 3749 TCGv EA; 3750 3751 rs = rS(ctx->opcode); 3752 if ((ctx->opcode & 0x3) == 0x2) { /* stq */ 3753 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 3754 bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 3755 TCGv hi, lo; 3756 3757 if (!(ctx->insns_flags & PPC_64BX)) { 3758 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3759 } 3760 3761 if (!legal_in_user_mode && ctx->pr) { 3762 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); 3763 return; 3764 } 3765 3766 if (!le_is_supported && ctx->le_mode) { 3767 gen_align_no_le(ctx); 3768 return; 3769 } 3770 3771 if (unlikely(rs & 1)) { 3772 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3773 return; 3774 } 3775 gen_set_access_type(ctx, ACCESS_INT); 3776 EA = tcg_temp_new(); 3777 gen_addr_imm_index(ctx, EA, 0x03); 3778 3779 /* Note that the low part is always in RS+1, even in LE mode. */ 3780 lo = cpu_gpr[rs + 1]; 3781 hi = cpu_gpr[rs]; 3782 3783 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3784 if (HAVE_ATOMIC128) { 3785 TCGv_i32 oi = tcg_temp_new_i32(); 3786 if (ctx->le_mode) { 3787 tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ, ctx->mem_idx)); 3788 gen_helper_stq_le_parallel(cpu_env, EA, lo, hi, oi); 3789 } else { 3790 tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ, ctx->mem_idx)); 3791 gen_helper_stq_be_parallel(cpu_env, EA, lo, hi, oi); 3792 } 3793 tcg_temp_free_i32(oi); 3794 } else { 3795 /* Restart with exclusive lock. */ 3796 gen_helper_exit_atomic(cpu_env); 3797 ctx->base.is_jmp = DISAS_NORETURN; 3798 } 3799 } else if (ctx->le_mode) { 3800 tcg_gen_qemu_st_i64(lo, EA, ctx->mem_idx, MO_LEQ); 3801 gen_addr_add(ctx, EA, EA, 8); 3802 tcg_gen_qemu_st_i64(hi, EA, ctx->mem_idx, MO_LEQ); 3803 } else { 3804 tcg_gen_qemu_st_i64(hi, EA, ctx->mem_idx, MO_BEQ); 3805 gen_addr_add(ctx, EA, EA, 8); 3806 tcg_gen_qemu_st_i64(lo, EA, ctx->mem_idx, MO_BEQ); 3807 } 3808 tcg_temp_free(EA); 3809 } else { 3810 /* std / stdu */ 3811 if (Rc(ctx->opcode)) { 3812 if (unlikely(rA(ctx->opcode) == 0)) { 3813 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3814 return; 3815 } 3816 } 3817 gen_set_access_type(ctx, ACCESS_INT); 3818 EA = tcg_temp_new(); 3819 gen_addr_imm_index(ctx, EA, 0x03); 3820 gen_qemu_st64_i64(ctx, cpu_gpr[rs], EA); 3821 if (Rc(ctx->opcode)) { 3822 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); 3823 } 3824 tcg_temp_free(EA); 3825 } 3826 } 3827 #endif 3828 /*** Integer load and store with byte reverse ***/ 3829 3830 /* lhbrx */ 3831 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER); 3832 3833 /* lwbrx */ 3834 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER); 3835 3836 #if defined(TARGET_PPC64) 3837 /* ldbrx */ 3838 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE); 3839 /* stdbrx */ 3840 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE); 3841 #endif /* TARGET_PPC64 */ 3842 3843 /* sthbrx */ 3844 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER); 3845 /* stwbrx */ 3846 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER); 3847 3848 /*** Integer load and store multiple ***/ 3849 3850 /* lmw */ 3851 static void gen_lmw(DisasContext *ctx) 3852 { 3853 TCGv t0; 3854 TCGv_i32 t1; 3855 3856 if (ctx->le_mode) { 3857 gen_align_no_le(ctx); 3858 return; 3859 } 3860 gen_set_access_type(ctx, ACCESS_INT); 3861 t0 = tcg_temp_new(); 3862 t1 = tcg_const_i32(rD(ctx->opcode)); 3863 gen_addr_imm_index(ctx, t0, 0); 3864 gen_helper_lmw(cpu_env, t0, t1); 3865 tcg_temp_free(t0); 3866 tcg_temp_free_i32(t1); 3867 } 3868 3869 /* stmw */ 3870 static void gen_stmw(DisasContext *ctx) 3871 { 3872 TCGv t0; 3873 TCGv_i32 t1; 3874 3875 if (ctx->le_mode) { 3876 gen_align_no_le(ctx); 3877 return; 3878 } 3879 gen_set_access_type(ctx, ACCESS_INT); 3880 t0 = tcg_temp_new(); 3881 t1 = tcg_const_i32(rS(ctx->opcode)); 3882 gen_addr_imm_index(ctx, t0, 0); 3883 gen_helper_stmw(cpu_env, t0, t1); 3884 tcg_temp_free(t0); 3885 tcg_temp_free_i32(t1); 3886 } 3887 3888 /*** Integer load and store strings ***/ 3889 3890 /* lswi */ 3891 /* 3892 * PowerPC32 specification says we must generate an exception if rA is 3893 * in the range of registers to be loaded. In an other hand, IBM says 3894 * this is valid, but rA won't be loaded. For now, I'll follow the 3895 * spec... 3896 */ 3897 static void gen_lswi(DisasContext *ctx) 3898 { 3899 TCGv t0; 3900 TCGv_i32 t1, t2; 3901 int nb = NB(ctx->opcode); 3902 int start = rD(ctx->opcode); 3903 int ra = rA(ctx->opcode); 3904 int nr; 3905 3906 if (ctx->le_mode) { 3907 gen_align_no_le(ctx); 3908 return; 3909 } 3910 if (nb == 0) { 3911 nb = 32; 3912 } 3913 nr = DIV_ROUND_UP(nb, 4); 3914 if (unlikely(lsw_reg_in_range(start, nr, ra))) { 3915 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX); 3916 return; 3917 } 3918 gen_set_access_type(ctx, ACCESS_INT); 3919 t0 = tcg_temp_new(); 3920 gen_addr_register(ctx, t0); 3921 t1 = tcg_const_i32(nb); 3922 t2 = tcg_const_i32(start); 3923 gen_helper_lsw(cpu_env, t0, t1, t2); 3924 tcg_temp_free(t0); 3925 tcg_temp_free_i32(t1); 3926 tcg_temp_free_i32(t2); 3927 } 3928 3929 /* lswx */ 3930 static void gen_lswx(DisasContext *ctx) 3931 { 3932 TCGv t0; 3933 TCGv_i32 t1, t2, t3; 3934 3935 if (ctx->le_mode) { 3936 gen_align_no_le(ctx); 3937 return; 3938 } 3939 gen_set_access_type(ctx, ACCESS_INT); 3940 t0 = tcg_temp_new(); 3941 gen_addr_reg_index(ctx, t0); 3942 t1 = tcg_const_i32(rD(ctx->opcode)); 3943 t2 = tcg_const_i32(rA(ctx->opcode)); 3944 t3 = tcg_const_i32(rB(ctx->opcode)); 3945 gen_helper_lswx(cpu_env, t0, t1, t2, t3); 3946 tcg_temp_free(t0); 3947 tcg_temp_free_i32(t1); 3948 tcg_temp_free_i32(t2); 3949 tcg_temp_free_i32(t3); 3950 } 3951 3952 /* stswi */ 3953 static void gen_stswi(DisasContext *ctx) 3954 { 3955 TCGv t0; 3956 TCGv_i32 t1, t2; 3957 int nb = NB(ctx->opcode); 3958 3959 if (ctx->le_mode) { 3960 gen_align_no_le(ctx); 3961 return; 3962 } 3963 gen_set_access_type(ctx, ACCESS_INT); 3964 t0 = tcg_temp_new(); 3965 gen_addr_register(ctx, t0); 3966 if (nb == 0) { 3967 nb = 32; 3968 } 3969 t1 = tcg_const_i32(nb); 3970 t2 = tcg_const_i32(rS(ctx->opcode)); 3971 gen_helper_stsw(cpu_env, t0, t1, t2); 3972 tcg_temp_free(t0); 3973 tcg_temp_free_i32(t1); 3974 tcg_temp_free_i32(t2); 3975 } 3976 3977 /* stswx */ 3978 static void gen_stswx(DisasContext *ctx) 3979 { 3980 TCGv t0; 3981 TCGv_i32 t1, t2; 3982 3983 if (ctx->le_mode) { 3984 gen_align_no_le(ctx); 3985 return; 3986 } 3987 gen_set_access_type(ctx, ACCESS_INT); 3988 t0 = tcg_temp_new(); 3989 gen_addr_reg_index(ctx, t0); 3990 t1 = tcg_temp_new_i32(); 3991 tcg_gen_trunc_tl_i32(t1, cpu_xer); 3992 tcg_gen_andi_i32(t1, t1, 0x7F); 3993 t2 = tcg_const_i32(rS(ctx->opcode)); 3994 gen_helper_stsw(cpu_env, t0, t1, t2); 3995 tcg_temp_free(t0); 3996 tcg_temp_free_i32(t1); 3997 tcg_temp_free_i32(t2); 3998 } 3999 4000 /*** Memory synchronisation ***/ 4001 /* eieio */ 4002 static void gen_eieio(DisasContext *ctx) 4003 { 4004 TCGBar bar = TCG_MO_LD_ST; 4005 4006 /* 4007 * POWER9 has a eieio instruction variant using bit 6 as a hint to 4008 * tell the CPU it is a store-forwarding barrier. 4009 */ 4010 if (ctx->opcode & 0x2000000) { 4011 /* 4012 * ISA says that "Reserved fields in instructions are ignored 4013 * by the processor". So ignore the bit 6 on non-POWER9 CPU but 4014 * as this is not an instruction software should be using, 4015 * complain to the user. 4016 */ 4017 if (!(ctx->insns_flags2 & PPC2_ISA300)) { 4018 qemu_log_mask(LOG_GUEST_ERROR, "invalid eieio using bit 6 at @" 4019 TARGET_FMT_lx "\n", ctx->cia); 4020 } else { 4021 bar = TCG_MO_ST_LD; 4022 } 4023 } 4024 4025 tcg_gen_mb(bar | TCG_BAR_SC); 4026 } 4027 4028 #if !defined(CONFIG_USER_ONLY) 4029 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) 4030 { 4031 TCGv_i32 t; 4032 TCGLabel *l; 4033 4034 if (!ctx->lazy_tlb_flush) { 4035 return; 4036 } 4037 l = gen_new_label(); 4038 t = tcg_temp_new_i32(); 4039 tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, tlb_need_flush)); 4040 tcg_gen_brcondi_i32(TCG_COND_EQ, t, 0, l); 4041 if (global) { 4042 gen_helper_check_tlb_flush_global(cpu_env); 4043 } else { 4044 gen_helper_check_tlb_flush_local(cpu_env); 4045 } 4046 gen_set_label(l); 4047 tcg_temp_free_i32(t); 4048 } 4049 #else 4050 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) { } 4051 #endif 4052 4053 /* isync */ 4054 static void gen_isync(DisasContext *ctx) 4055 { 4056 /* 4057 * We need to check for a pending TLB flush. This can only happen in 4058 * kernel mode however so check MSR_PR 4059 */ 4060 if (!ctx->pr) { 4061 gen_check_tlb_flush(ctx, false); 4062 } 4063 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC); 4064 ctx->base.is_jmp = DISAS_EXIT_UPDATE; 4065 } 4066 4067 #define MEMOP_GET_SIZE(x) (1 << ((x) & MO_SIZE)) 4068 4069 static void gen_load_locked(DisasContext *ctx, MemOp memop) 4070 { 4071 TCGv gpr = cpu_gpr[rD(ctx->opcode)]; 4072 TCGv t0 = tcg_temp_new(); 4073 4074 gen_set_access_type(ctx, ACCESS_RES); 4075 gen_addr_reg_index(ctx, t0); 4076 tcg_gen_qemu_ld_tl(gpr, t0, ctx->mem_idx, memop | MO_ALIGN); 4077 tcg_gen_mov_tl(cpu_reserve, t0); 4078 tcg_gen_mov_tl(cpu_reserve_val, gpr); 4079 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_LDAQ); 4080 tcg_temp_free(t0); 4081 } 4082 4083 #define LARX(name, memop) \ 4084 static void gen_##name(DisasContext *ctx) \ 4085 { \ 4086 gen_load_locked(ctx, memop); \ 4087 } 4088 4089 /* lwarx */ 4090 LARX(lbarx, DEF_MEMOP(MO_UB)) 4091 LARX(lharx, DEF_MEMOP(MO_UW)) 4092 LARX(lwarx, DEF_MEMOP(MO_UL)) 4093 4094 static void gen_fetch_inc_conditional(DisasContext *ctx, MemOp memop, 4095 TCGv EA, TCGCond cond, int addend) 4096 { 4097 TCGv t = tcg_temp_new(); 4098 TCGv t2 = tcg_temp_new(); 4099 TCGv u = tcg_temp_new(); 4100 4101 tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop); 4102 tcg_gen_addi_tl(t2, EA, MEMOP_GET_SIZE(memop)); 4103 tcg_gen_qemu_ld_tl(t2, t2, ctx->mem_idx, memop); 4104 tcg_gen_addi_tl(u, t, addend); 4105 4106 /* E.g. for fetch and increment bounded... */ 4107 /* mem(EA,s) = (t != t2 ? u = t + 1 : t) */ 4108 tcg_gen_movcond_tl(cond, u, t, t2, u, t); 4109 tcg_gen_qemu_st_tl(u, EA, ctx->mem_idx, memop); 4110 4111 /* RT = (t != t2 ? t : u = 1<<(s*8-1)) */ 4112 tcg_gen_movi_tl(u, 1 << (MEMOP_GET_SIZE(memop) * 8 - 1)); 4113 tcg_gen_movcond_tl(cond, cpu_gpr[rD(ctx->opcode)], t, t2, t, u); 4114 4115 tcg_temp_free(t); 4116 tcg_temp_free(t2); 4117 tcg_temp_free(u); 4118 } 4119 4120 static void gen_ld_atomic(DisasContext *ctx, MemOp memop) 4121 { 4122 uint32_t gpr_FC = FC(ctx->opcode); 4123 TCGv EA = tcg_temp_new(); 4124 int rt = rD(ctx->opcode); 4125 bool need_serial; 4126 TCGv src, dst; 4127 4128 gen_addr_register(ctx, EA); 4129 dst = cpu_gpr[rt]; 4130 src = cpu_gpr[(rt + 1) & 31]; 4131 4132 need_serial = false; 4133 memop |= MO_ALIGN; 4134 switch (gpr_FC) { 4135 case 0: /* Fetch and add */ 4136 tcg_gen_atomic_fetch_add_tl(dst, EA, src, ctx->mem_idx, memop); 4137 break; 4138 case 1: /* Fetch and xor */ 4139 tcg_gen_atomic_fetch_xor_tl(dst, EA, src, ctx->mem_idx, memop); 4140 break; 4141 case 2: /* Fetch and or */ 4142 tcg_gen_atomic_fetch_or_tl(dst, EA, src, ctx->mem_idx, memop); 4143 break; 4144 case 3: /* Fetch and 'and' */ 4145 tcg_gen_atomic_fetch_and_tl(dst, EA, src, ctx->mem_idx, memop); 4146 break; 4147 case 4: /* Fetch and max unsigned */ 4148 tcg_gen_atomic_fetch_umax_tl(dst, EA, src, ctx->mem_idx, memop); 4149 break; 4150 case 5: /* Fetch and max signed */ 4151 tcg_gen_atomic_fetch_smax_tl(dst, EA, src, ctx->mem_idx, memop); 4152 break; 4153 case 6: /* Fetch and min unsigned */ 4154 tcg_gen_atomic_fetch_umin_tl(dst, EA, src, ctx->mem_idx, memop); 4155 break; 4156 case 7: /* Fetch and min signed */ 4157 tcg_gen_atomic_fetch_smin_tl(dst, EA, src, ctx->mem_idx, memop); 4158 break; 4159 case 8: /* Swap */ 4160 tcg_gen_atomic_xchg_tl(dst, EA, src, ctx->mem_idx, memop); 4161 break; 4162 4163 case 16: /* Compare and swap not equal */ 4164 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 4165 need_serial = true; 4166 } else { 4167 TCGv t0 = tcg_temp_new(); 4168 TCGv t1 = tcg_temp_new(); 4169 4170 tcg_gen_qemu_ld_tl(t0, EA, ctx->mem_idx, memop); 4171 if ((memop & MO_SIZE) == MO_64 || TARGET_LONG_BITS == 32) { 4172 tcg_gen_mov_tl(t1, src); 4173 } else { 4174 tcg_gen_ext32u_tl(t1, src); 4175 } 4176 tcg_gen_movcond_tl(TCG_COND_NE, t1, t0, t1, 4177 cpu_gpr[(rt + 2) & 31], t0); 4178 tcg_gen_qemu_st_tl(t1, EA, ctx->mem_idx, memop); 4179 tcg_gen_mov_tl(dst, t0); 4180 4181 tcg_temp_free(t0); 4182 tcg_temp_free(t1); 4183 } 4184 break; 4185 4186 case 24: /* Fetch and increment bounded */ 4187 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 4188 need_serial = true; 4189 } else { 4190 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, 1); 4191 } 4192 break; 4193 case 25: /* Fetch and increment equal */ 4194 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 4195 need_serial = true; 4196 } else { 4197 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_EQ, 1); 4198 } 4199 break; 4200 case 28: /* Fetch and decrement bounded */ 4201 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 4202 need_serial = true; 4203 } else { 4204 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, -1); 4205 } 4206 break; 4207 4208 default: 4209 /* invoke data storage error handler */ 4210 gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL); 4211 } 4212 tcg_temp_free(EA); 4213 4214 if (need_serial) { 4215 /* Restart with exclusive lock. */ 4216 gen_helper_exit_atomic(cpu_env); 4217 ctx->base.is_jmp = DISAS_NORETURN; 4218 } 4219 } 4220 4221 static void gen_lwat(DisasContext *ctx) 4222 { 4223 gen_ld_atomic(ctx, DEF_MEMOP(MO_UL)); 4224 } 4225 4226 #ifdef TARGET_PPC64 4227 static void gen_ldat(DisasContext *ctx) 4228 { 4229 gen_ld_atomic(ctx, DEF_MEMOP(MO_Q)); 4230 } 4231 #endif 4232 4233 static void gen_st_atomic(DisasContext *ctx, MemOp memop) 4234 { 4235 uint32_t gpr_FC = FC(ctx->opcode); 4236 TCGv EA = tcg_temp_new(); 4237 TCGv src, discard; 4238 4239 gen_addr_register(ctx, EA); 4240 src = cpu_gpr[rD(ctx->opcode)]; 4241 discard = tcg_temp_new(); 4242 4243 memop |= MO_ALIGN; 4244 switch (gpr_FC) { 4245 case 0: /* add and Store */ 4246 tcg_gen_atomic_add_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 4247 break; 4248 case 1: /* xor and Store */ 4249 tcg_gen_atomic_xor_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 4250 break; 4251 case 2: /* Or and Store */ 4252 tcg_gen_atomic_or_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 4253 break; 4254 case 3: /* 'and' and Store */ 4255 tcg_gen_atomic_and_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 4256 break; 4257 case 4: /* Store max unsigned */ 4258 tcg_gen_atomic_umax_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 4259 break; 4260 case 5: /* Store max signed */ 4261 tcg_gen_atomic_smax_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 4262 break; 4263 case 6: /* Store min unsigned */ 4264 tcg_gen_atomic_umin_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 4265 break; 4266 case 7: /* Store min signed */ 4267 tcg_gen_atomic_smin_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 4268 break; 4269 case 24: /* Store twin */ 4270 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 4271 /* Restart with exclusive lock. */ 4272 gen_helper_exit_atomic(cpu_env); 4273 ctx->base.is_jmp = DISAS_NORETURN; 4274 } else { 4275 TCGv t = tcg_temp_new(); 4276 TCGv t2 = tcg_temp_new(); 4277 TCGv s = tcg_temp_new(); 4278 TCGv s2 = tcg_temp_new(); 4279 TCGv ea_plus_s = tcg_temp_new(); 4280 4281 tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop); 4282 tcg_gen_addi_tl(ea_plus_s, EA, MEMOP_GET_SIZE(memop)); 4283 tcg_gen_qemu_ld_tl(t2, ea_plus_s, ctx->mem_idx, memop); 4284 tcg_gen_movcond_tl(TCG_COND_EQ, s, t, t2, src, t); 4285 tcg_gen_movcond_tl(TCG_COND_EQ, s2, t, t2, src, t2); 4286 tcg_gen_qemu_st_tl(s, EA, ctx->mem_idx, memop); 4287 tcg_gen_qemu_st_tl(s2, ea_plus_s, ctx->mem_idx, memop); 4288 4289 tcg_temp_free(ea_plus_s); 4290 tcg_temp_free(s2); 4291 tcg_temp_free(s); 4292 tcg_temp_free(t2); 4293 tcg_temp_free(t); 4294 } 4295 break; 4296 default: 4297 /* invoke data storage error handler */ 4298 gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL); 4299 } 4300 tcg_temp_free(discard); 4301 tcg_temp_free(EA); 4302 } 4303 4304 static void gen_stwat(DisasContext *ctx) 4305 { 4306 gen_st_atomic(ctx, DEF_MEMOP(MO_UL)); 4307 } 4308 4309 #ifdef TARGET_PPC64 4310 static void gen_stdat(DisasContext *ctx) 4311 { 4312 gen_st_atomic(ctx, DEF_MEMOP(MO_Q)); 4313 } 4314 #endif 4315 4316 static void gen_conditional_store(DisasContext *ctx, MemOp memop) 4317 { 4318 TCGLabel *l1 = gen_new_label(); 4319 TCGLabel *l2 = gen_new_label(); 4320 TCGv t0 = tcg_temp_new(); 4321 int reg = rS(ctx->opcode); 4322 4323 gen_set_access_type(ctx, ACCESS_RES); 4324 gen_addr_reg_index(ctx, t0); 4325 tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1); 4326 tcg_temp_free(t0); 4327 4328 t0 = tcg_temp_new(); 4329 tcg_gen_atomic_cmpxchg_tl(t0, cpu_reserve, cpu_reserve_val, 4330 cpu_gpr[reg], ctx->mem_idx, 4331 DEF_MEMOP(memop) | MO_ALIGN); 4332 tcg_gen_setcond_tl(TCG_COND_EQ, t0, t0, cpu_reserve_val); 4333 tcg_gen_shli_tl(t0, t0, CRF_EQ_BIT); 4334 tcg_gen_or_tl(t0, t0, cpu_so); 4335 tcg_gen_trunc_tl_i32(cpu_crf[0], t0); 4336 tcg_temp_free(t0); 4337 tcg_gen_br(l2); 4338 4339 gen_set_label(l1); 4340 4341 /* 4342 * Address mismatch implies failure. But we still need to provide 4343 * the memory barrier semantics of the instruction. 4344 */ 4345 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_STRL); 4346 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 4347 4348 gen_set_label(l2); 4349 tcg_gen_movi_tl(cpu_reserve, -1); 4350 } 4351 4352 #define STCX(name, memop) \ 4353 static void gen_##name(DisasContext *ctx) \ 4354 { \ 4355 gen_conditional_store(ctx, memop); \ 4356 } 4357 4358 STCX(stbcx_, DEF_MEMOP(MO_UB)) 4359 STCX(sthcx_, DEF_MEMOP(MO_UW)) 4360 STCX(stwcx_, DEF_MEMOP(MO_UL)) 4361 4362 #if defined(TARGET_PPC64) 4363 /* ldarx */ 4364 LARX(ldarx, DEF_MEMOP(MO_Q)) 4365 /* stdcx. */ 4366 STCX(stdcx_, DEF_MEMOP(MO_Q)) 4367 4368 /* lqarx */ 4369 static void gen_lqarx(DisasContext *ctx) 4370 { 4371 int rd = rD(ctx->opcode); 4372 TCGv EA, hi, lo; 4373 4374 if (unlikely((rd & 1) || (rd == rA(ctx->opcode)) || 4375 (rd == rB(ctx->opcode)))) { 4376 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 4377 return; 4378 } 4379 4380 gen_set_access_type(ctx, ACCESS_RES); 4381 EA = tcg_temp_new(); 4382 gen_addr_reg_index(ctx, EA); 4383 4384 /* Note that the low part is always in RD+1, even in LE mode. */ 4385 lo = cpu_gpr[rd + 1]; 4386 hi = cpu_gpr[rd]; 4387 4388 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 4389 if (HAVE_ATOMIC128) { 4390 TCGv_i32 oi = tcg_temp_new_i32(); 4391 if (ctx->le_mode) { 4392 tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ | MO_ALIGN_16, 4393 ctx->mem_idx)); 4394 gen_helper_lq_le_parallel(lo, cpu_env, EA, oi); 4395 } else { 4396 tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ | MO_ALIGN_16, 4397 ctx->mem_idx)); 4398 gen_helper_lq_be_parallel(lo, cpu_env, EA, oi); 4399 } 4400 tcg_temp_free_i32(oi); 4401 tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh)); 4402 } else { 4403 /* Restart with exclusive lock. */ 4404 gen_helper_exit_atomic(cpu_env); 4405 ctx->base.is_jmp = DISAS_NORETURN; 4406 tcg_temp_free(EA); 4407 return; 4408 } 4409 } else if (ctx->le_mode) { 4410 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEQ | MO_ALIGN_16); 4411 tcg_gen_mov_tl(cpu_reserve, EA); 4412 gen_addr_add(ctx, EA, EA, 8); 4413 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEQ); 4414 } else { 4415 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEQ | MO_ALIGN_16); 4416 tcg_gen_mov_tl(cpu_reserve, EA); 4417 gen_addr_add(ctx, EA, EA, 8); 4418 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEQ); 4419 } 4420 tcg_temp_free(EA); 4421 4422 tcg_gen_st_tl(hi, cpu_env, offsetof(CPUPPCState, reserve_val)); 4423 tcg_gen_st_tl(lo, cpu_env, offsetof(CPUPPCState, reserve_val2)); 4424 } 4425 4426 /* stqcx. */ 4427 static void gen_stqcx_(DisasContext *ctx) 4428 { 4429 int rs = rS(ctx->opcode); 4430 TCGv EA, hi, lo; 4431 4432 if (unlikely(rs & 1)) { 4433 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 4434 return; 4435 } 4436 4437 gen_set_access_type(ctx, ACCESS_RES); 4438 EA = tcg_temp_new(); 4439 gen_addr_reg_index(ctx, EA); 4440 4441 /* Note that the low part is always in RS+1, even in LE mode. */ 4442 lo = cpu_gpr[rs + 1]; 4443 hi = cpu_gpr[rs]; 4444 4445 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 4446 if (HAVE_CMPXCHG128) { 4447 TCGv_i32 oi = tcg_const_i32(DEF_MEMOP(MO_Q) | MO_ALIGN_16); 4448 if (ctx->le_mode) { 4449 gen_helper_stqcx_le_parallel(cpu_crf[0], cpu_env, 4450 EA, lo, hi, oi); 4451 } else { 4452 gen_helper_stqcx_be_parallel(cpu_crf[0], cpu_env, 4453 EA, lo, hi, oi); 4454 } 4455 tcg_temp_free_i32(oi); 4456 } else { 4457 /* Restart with exclusive lock. */ 4458 gen_helper_exit_atomic(cpu_env); 4459 ctx->base.is_jmp = DISAS_NORETURN; 4460 } 4461 tcg_temp_free(EA); 4462 } else { 4463 TCGLabel *lab_fail = gen_new_label(); 4464 TCGLabel *lab_over = gen_new_label(); 4465 TCGv_i64 t0 = tcg_temp_new_i64(); 4466 TCGv_i64 t1 = tcg_temp_new_i64(); 4467 4468 tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, lab_fail); 4469 tcg_temp_free(EA); 4470 4471 gen_qemu_ld64_i64(ctx, t0, cpu_reserve); 4472 tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode 4473 ? offsetof(CPUPPCState, reserve_val2) 4474 : offsetof(CPUPPCState, reserve_val))); 4475 tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail); 4476 4477 tcg_gen_addi_i64(t0, cpu_reserve, 8); 4478 gen_qemu_ld64_i64(ctx, t0, t0); 4479 tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode 4480 ? offsetof(CPUPPCState, reserve_val) 4481 : offsetof(CPUPPCState, reserve_val2))); 4482 tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail); 4483 4484 /* Success */ 4485 gen_qemu_st64_i64(ctx, ctx->le_mode ? lo : hi, cpu_reserve); 4486 tcg_gen_addi_i64(t0, cpu_reserve, 8); 4487 gen_qemu_st64_i64(ctx, ctx->le_mode ? hi : lo, t0); 4488 4489 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 4490 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ); 4491 tcg_gen_br(lab_over); 4492 4493 gen_set_label(lab_fail); 4494 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 4495 4496 gen_set_label(lab_over); 4497 tcg_gen_movi_tl(cpu_reserve, -1); 4498 tcg_temp_free_i64(t0); 4499 tcg_temp_free_i64(t1); 4500 } 4501 } 4502 #endif /* defined(TARGET_PPC64) */ 4503 4504 /* sync */ 4505 static void gen_sync(DisasContext *ctx) 4506 { 4507 uint32_t l = (ctx->opcode >> 21) & 3; 4508 4509 /* 4510 * We may need to check for a pending TLB flush. 4511 * 4512 * We do this on ptesync (l == 2) on ppc64 and any sync pn ppc32. 4513 * 4514 * Additionally, this can only happen in kernel mode however so 4515 * check MSR_PR as well. 4516 */ 4517 if (((l == 2) || !(ctx->insns_flags & PPC_64B)) && !ctx->pr) { 4518 gen_check_tlb_flush(ctx, true); 4519 } 4520 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC); 4521 } 4522 4523 /* wait */ 4524 static void gen_wait(DisasContext *ctx) 4525 { 4526 TCGv_i32 t0 = tcg_const_i32(1); 4527 tcg_gen_st_i32(t0, cpu_env, 4528 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted)); 4529 tcg_temp_free_i32(t0); 4530 /* Stop translation, as the CPU is supposed to sleep from now */ 4531 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 4532 } 4533 4534 #if defined(TARGET_PPC64) 4535 static void gen_doze(DisasContext *ctx) 4536 { 4537 #if defined(CONFIG_USER_ONLY) 4538 GEN_PRIV; 4539 #else 4540 TCGv_i32 t; 4541 4542 CHK_HV; 4543 t = tcg_const_i32(PPC_PM_DOZE); 4544 gen_helper_pminsn(cpu_env, t); 4545 tcg_temp_free_i32(t); 4546 /* Stop translation, as the CPU is supposed to sleep from now */ 4547 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 4548 #endif /* defined(CONFIG_USER_ONLY) */ 4549 } 4550 4551 static void gen_nap(DisasContext *ctx) 4552 { 4553 #if defined(CONFIG_USER_ONLY) 4554 GEN_PRIV; 4555 #else 4556 TCGv_i32 t; 4557 4558 CHK_HV; 4559 t = tcg_const_i32(PPC_PM_NAP); 4560 gen_helper_pminsn(cpu_env, t); 4561 tcg_temp_free_i32(t); 4562 /* Stop translation, as the CPU is supposed to sleep from now */ 4563 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 4564 #endif /* defined(CONFIG_USER_ONLY) */ 4565 } 4566 4567 static void gen_stop(DisasContext *ctx) 4568 { 4569 #if defined(CONFIG_USER_ONLY) 4570 GEN_PRIV; 4571 #else 4572 TCGv_i32 t; 4573 4574 CHK_HV; 4575 t = tcg_const_i32(PPC_PM_STOP); 4576 gen_helper_pminsn(cpu_env, t); 4577 tcg_temp_free_i32(t); 4578 /* Stop translation, as the CPU is supposed to sleep from now */ 4579 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 4580 #endif /* defined(CONFIG_USER_ONLY) */ 4581 } 4582 4583 static void gen_sleep(DisasContext *ctx) 4584 { 4585 #if defined(CONFIG_USER_ONLY) 4586 GEN_PRIV; 4587 #else 4588 TCGv_i32 t; 4589 4590 CHK_HV; 4591 t = tcg_const_i32(PPC_PM_SLEEP); 4592 gen_helper_pminsn(cpu_env, t); 4593 tcg_temp_free_i32(t); 4594 /* Stop translation, as the CPU is supposed to sleep from now */ 4595 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 4596 #endif /* defined(CONFIG_USER_ONLY) */ 4597 } 4598 4599 static void gen_rvwinkle(DisasContext *ctx) 4600 { 4601 #if defined(CONFIG_USER_ONLY) 4602 GEN_PRIV; 4603 #else 4604 TCGv_i32 t; 4605 4606 CHK_HV; 4607 t = tcg_const_i32(PPC_PM_RVWINKLE); 4608 gen_helper_pminsn(cpu_env, t); 4609 tcg_temp_free_i32(t); 4610 /* Stop translation, as the CPU is supposed to sleep from now */ 4611 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 4612 #endif /* defined(CONFIG_USER_ONLY) */ 4613 } 4614 #endif /* #if defined(TARGET_PPC64) */ 4615 4616 static inline void gen_update_cfar(DisasContext *ctx, target_ulong nip) 4617 { 4618 #if defined(TARGET_PPC64) 4619 if (ctx->has_cfar) { 4620 tcg_gen_movi_tl(cpu_cfar, nip); 4621 } 4622 #endif 4623 } 4624 4625 static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest) 4626 { 4627 if (unlikely(ctx->singlestep_enabled)) { 4628 return false; 4629 } 4630 4631 #ifndef CONFIG_USER_ONLY 4632 return (ctx->base.tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK); 4633 #else 4634 return true; 4635 #endif 4636 } 4637 4638 static void gen_lookup_and_goto_ptr(DisasContext *ctx) 4639 { 4640 int sse = ctx->singlestep_enabled; 4641 if (unlikely(sse)) { 4642 if (sse & GDBSTUB_SINGLE_STEP) { 4643 gen_debug_exception(ctx); 4644 } else if (sse & (CPU_SINGLE_STEP | CPU_BRANCH_STEP)) { 4645 uint32_t excp = gen_prep_dbgex(ctx); 4646 gen_exception(ctx, excp); 4647 } else { 4648 tcg_gen_exit_tb(NULL, 0); 4649 } 4650 } else { 4651 tcg_gen_lookup_and_goto_ptr(); 4652 } 4653 } 4654 4655 /*** Branch ***/ 4656 static void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest) 4657 { 4658 if (NARROW_MODE(ctx)) { 4659 dest = (uint32_t) dest; 4660 } 4661 if (use_goto_tb(ctx, dest)) { 4662 tcg_gen_goto_tb(n); 4663 tcg_gen_movi_tl(cpu_nip, dest & ~3); 4664 tcg_gen_exit_tb(ctx->base.tb, n); 4665 } else { 4666 tcg_gen_movi_tl(cpu_nip, dest & ~3); 4667 gen_lookup_and_goto_ptr(ctx); 4668 } 4669 } 4670 4671 static inline void gen_setlr(DisasContext *ctx, target_ulong nip) 4672 { 4673 if (NARROW_MODE(ctx)) { 4674 nip = (uint32_t)nip; 4675 } 4676 tcg_gen_movi_tl(cpu_lr, nip); 4677 } 4678 4679 /* b ba bl bla */ 4680 static void gen_b(DisasContext *ctx) 4681 { 4682 target_ulong li, target; 4683 4684 /* sign extend LI */ 4685 li = LI(ctx->opcode); 4686 li = (li ^ 0x02000000) - 0x02000000; 4687 if (likely(AA(ctx->opcode) == 0)) { 4688 target = ctx->cia + li; 4689 } else { 4690 target = li; 4691 } 4692 if (LK(ctx->opcode)) { 4693 gen_setlr(ctx, ctx->base.pc_next); 4694 } 4695 gen_update_cfar(ctx, ctx->cia); 4696 gen_goto_tb(ctx, 0, target); 4697 ctx->base.is_jmp = DISAS_NORETURN; 4698 } 4699 4700 #define BCOND_IM 0 4701 #define BCOND_LR 1 4702 #define BCOND_CTR 2 4703 #define BCOND_TAR 3 4704 4705 static void gen_bcond(DisasContext *ctx, int type) 4706 { 4707 uint32_t bo = BO(ctx->opcode); 4708 TCGLabel *l1; 4709 TCGv target; 4710 4711 if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) { 4712 target = tcg_temp_local_new(); 4713 if (type == BCOND_CTR) { 4714 tcg_gen_mov_tl(target, cpu_ctr); 4715 } else if (type == BCOND_TAR) { 4716 gen_load_spr(target, SPR_TAR); 4717 } else { 4718 tcg_gen_mov_tl(target, cpu_lr); 4719 } 4720 } else { 4721 target = NULL; 4722 } 4723 if (LK(ctx->opcode)) { 4724 gen_setlr(ctx, ctx->base.pc_next); 4725 } 4726 l1 = gen_new_label(); 4727 if ((bo & 0x4) == 0) { 4728 /* Decrement and test CTR */ 4729 TCGv temp = tcg_temp_new(); 4730 4731 if (type == BCOND_CTR) { 4732 /* 4733 * All ISAs up to v3 describe this form of bcctr as invalid but 4734 * some processors, ie. 64-bit server processors compliant with 4735 * arch 2.x, do implement a "test and decrement" logic instead, 4736 * as described in their respective UMs. This logic involves CTR 4737 * to act as both the branch target and a counter, which makes 4738 * it basically useless and thus never used in real code. 4739 * 4740 * This form was hence chosen to trigger extra micro-architectural 4741 * side-effect on real HW needed for the Spectre v2 workaround. 4742 * It is up to guests that implement such workaround, ie. linux, to 4743 * use this form in a way it just triggers the side-effect without 4744 * doing anything else harmful. 4745 */ 4746 if (unlikely(!is_book3s_arch2x(ctx))) { 4747 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 4748 tcg_temp_free(temp); 4749 tcg_temp_free(target); 4750 return; 4751 } 4752 4753 if (NARROW_MODE(ctx)) { 4754 tcg_gen_ext32u_tl(temp, cpu_ctr); 4755 } else { 4756 tcg_gen_mov_tl(temp, cpu_ctr); 4757 } 4758 if (bo & 0x2) { 4759 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1); 4760 } else { 4761 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1); 4762 } 4763 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1); 4764 } else { 4765 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1); 4766 if (NARROW_MODE(ctx)) { 4767 tcg_gen_ext32u_tl(temp, cpu_ctr); 4768 } else { 4769 tcg_gen_mov_tl(temp, cpu_ctr); 4770 } 4771 if (bo & 0x2) { 4772 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1); 4773 } else { 4774 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1); 4775 } 4776 } 4777 tcg_temp_free(temp); 4778 } 4779 if ((bo & 0x10) == 0) { 4780 /* Test CR */ 4781 uint32_t bi = BI(ctx->opcode); 4782 uint32_t mask = 0x08 >> (bi & 0x03); 4783 TCGv_i32 temp = tcg_temp_new_i32(); 4784 4785 if (bo & 0x8) { 4786 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask); 4787 tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1); 4788 } else { 4789 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask); 4790 tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1); 4791 } 4792 tcg_temp_free_i32(temp); 4793 } 4794 gen_update_cfar(ctx, ctx->cia); 4795 if (type == BCOND_IM) { 4796 target_ulong li = (target_long)((int16_t)(BD(ctx->opcode))); 4797 if (likely(AA(ctx->opcode) == 0)) { 4798 gen_goto_tb(ctx, 0, ctx->cia + li); 4799 } else { 4800 gen_goto_tb(ctx, 0, li); 4801 } 4802 } else { 4803 if (NARROW_MODE(ctx)) { 4804 tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3); 4805 } else { 4806 tcg_gen_andi_tl(cpu_nip, target, ~3); 4807 } 4808 gen_lookup_and_goto_ptr(ctx); 4809 tcg_temp_free(target); 4810 } 4811 if ((bo & 0x14) != 0x14) { 4812 /* fallthrough case */ 4813 gen_set_label(l1); 4814 gen_goto_tb(ctx, 1, ctx->base.pc_next); 4815 } 4816 ctx->base.is_jmp = DISAS_NORETURN; 4817 } 4818 4819 static void gen_bc(DisasContext *ctx) 4820 { 4821 gen_bcond(ctx, BCOND_IM); 4822 } 4823 4824 static void gen_bcctr(DisasContext *ctx) 4825 { 4826 gen_bcond(ctx, BCOND_CTR); 4827 } 4828 4829 static void gen_bclr(DisasContext *ctx) 4830 { 4831 gen_bcond(ctx, BCOND_LR); 4832 } 4833 4834 static void gen_bctar(DisasContext *ctx) 4835 { 4836 gen_bcond(ctx, BCOND_TAR); 4837 } 4838 4839 /*** Condition register logical ***/ 4840 #define GEN_CRLOGIC(name, tcg_op, opc) \ 4841 static void glue(gen_, name)(DisasContext *ctx) \ 4842 { \ 4843 uint8_t bitmask; \ 4844 int sh; \ 4845 TCGv_i32 t0, t1; \ 4846 sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03); \ 4847 t0 = tcg_temp_new_i32(); \ 4848 if (sh > 0) \ 4849 tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh); \ 4850 else if (sh < 0) \ 4851 tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh); \ 4852 else \ 4853 tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]); \ 4854 t1 = tcg_temp_new_i32(); \ 4855 sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03); \ 4856 if (sh > 0) \ 4857 tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh); \ 4858 else if (sh < 0) \ 4859 tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh); \ 4860 else \ 4861 tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]); \ 4862 tcg_op(t0, t0, t1); \ 4863 bitmask = 0x08 >> (crbD(ctx->opcode) & 0x03); \ 4864 tcg_gen_andi_i32(t0, t0, bitmask); \ 4865 tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask); \ 4866 tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1); \ 4867 tcg_temp_free_i32(t0); \ 4868 tcg_temp_free_i32(t1); \ 4869 } 4870 4871 /* crand */ 4872 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08); 4873 /* crandc */ 4874 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04); 4875 /* creqv */ 4876 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09); 4877 /* crnand */ 4878 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07); 4879 /* crnor */ 4880 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01); 4881 /* cror */ 4882 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E); 4883 /* crorc */ 4884 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D); 4885 /* crxor */ 4886 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06); 4887 4888 /* mcrf */ 4889 static void gen_mcrf(DisasContext *ctx) 4890 { 4891 tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]); 4892 } 4893 4894 /*** System linkage ***/ 4895 4896 /* rfi (supervisor only) */ 4897 static void gen_rfi(DisasContext *ctx) 4898 { 4899 #if defined(CONFIG_USER_ONLY) 4900 GEN_PRIV; 4901 #else 4902 /* 4903 * This instruction doesn't exist anymore on 64-bit server 4904 * processors compliant with arch 2.x 4905 */ 4906 if (is_book3s_arch2x(ctx)) { 4907 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 4908 return; 4909 } 4910 /* Restore CPU state */ 4911 CHK_SV; 4912 gen_icount_io_start(ctx); 4913 gen_update_cfar(ctx, ctx->cia); 4914 gen_helper_rfi(cpu_env); 4915 ctx->base.is_jmp = DISAS_EXIT; 4916 #endif 4917 } 4918 4919 #if defined(TARGET_PPC64) 4920 static void gen_rfid(DisasContext *ctx) 4921 { 4922 #if defined(CONFIG_USER_ONLY) 4923 GEN_PRIV; 4924 #else 4925 /* Restore CPU state */ 4926 CHK_SV; 4927 gen_icount_io_start(ctx); 4928 gen_update_cfar(ctx, ctx->cia); 4929 gen_helper_rfid(cpu_env); 4930 ctx->base.is_jmp = DISAS_EXIT; 4931 #endif 4932 } 4933 4934 #if !defined(CONFIG_USER_ONLY) 4935 static void gen_rfscv(DisasContext *ctx) 4936 { 4937 #if defined(CONFIG_USER_ONLY) 4938 GEN_PRIV; 4939 #else 4940 /* Restore CPU state */ 4941 CHK_SV; 4942 gen_icount_io_start(ctx); 4943 gen_update_cfar(ctx, ctx->cia); 4944 gen_helper_rfscv(cpu_env); 4945 ctx->base.is_jmp = DISAS_EXIT; 4946 #endif 4947 } 4948 #endif 4949 4950 static void gen_hrfid(DisasContext *ctx) 4951 { 4952 #if defined(CONFIG_USER_ONLY) 4953 GEN_PRIV; 4954 #else 4955 /* Restore CPU state */ 4956 CHK_HV; 4957 gen_helper_hrfid(cpu_env); 4958 ctx->base.is_jmp = DISAS_EXIT; 4959 #endif 4960 } 4961 #endif 4962 4963 /* sc */ 4964 #if defined(CONFIG_USER_ONLY) 4965 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER 4966 #else 4967 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL 4968 #define POWERPC_SYSCALL_VECTORED POWERPC_EXCP_SYSCALL_VECTORED 4969 #endif 4970 static void gen_sc(DisasContext *ctx) 4971 { 4972 uint32_t lev; 4973 4974 lev = (ctx->opcode >> 5) & 0x7F; 4975 gen_exception_err(ctx, POWERPC_SYSCALL, lev); 4976 } 4977 4978 #if defined(TARGET_PPC64) 4979 #if !defined(CONFIG_USER_ONLY) 4980 static void gen_scv(DisasContext *ctx) 4981 { 4982 uint32_t lev = (ctx->opcode >> 5) & 0x7F; 4983 4984 /* Set the PC back to the faulting instruction. */ 4985 gen_update_nip(ctx, ctx->cia); 4986 gen_helper_scv(cpu_env, tcg_constant_i32(lev)); 4987 4988 ctx->base.is_jmp = DISAS_NORETURN; 4989 } 4990 #endif 4991 #endif 4992 4993 /*** Trap ***/ 4994 4995 /* Check for unconditional traps (always or never) */ 4996 static bool check_unconditional_trap(DisasContext *ctx) 4997 { 4998 /* Trap never */ 4999 if (TO(ctx->opcode) == 0) { 5000 return true; 5001 } 5002 /* Trap always */ 5003 if (TO(ctx->opcode) == 31) { 5004 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_TRAP); 5005 return true; 5006 } 5007 return false; 5008 } 5009 5010 /* tw */ 5011 static void gen_tw(DisasContext *ctx) 5012 { 5013 TCGv_i32 t0; 5014 5015 if (check_unconditional_trap(ctx)) { 5016 return; 5017 } 5018 t0 = tcg_const_i32(TO(ctx->opcode)); 5019 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 5020 t0); 5021 tcg_temp_free_i32(t0); 5022 } 5023 5024 /* twi */ 5025 static void gen_twi(DisasContext *ctx) 5026 { 5027 TCGv t0; 5028 TCGv_i32 t1; 5029 5030 if (check_unconditional_trap(ctx)) { 5031 return; 5032 } 5033 t0 = tcg_const_tl(SIMM(ctx->opcode)); 5034 t1 = tcg_const_i32(TO(ctx->opcode)); 5035 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1); 5036 tcg_temp_free(t0); 5037 tcg_temp_free_i32(t1); 5038 } 5039 5040 #if defined(TARGET_PPC64) 5041 /* td */ 5042 static void gen_td(DisasContext *ctx) 5043 { 5044 TCGv_i32 t0; 5045 5046 if (check_unconditional_trap(ctx)) { 5047 return; 5048 } 5049 t0 = tcg_const_i32(TO(ctx->opcode)); 5050 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 5051 t0); 5052 tcg_temp_free_i32(t0); 5053 } 5054 5055 /* tdi */ 5056 static void gen_tdi(DisasContext *ctx) 5057 { 5058 TCGv t0; 5059 TCGv_i32 t1; 5060 5061 if (check_unconditional_trap(ctx)) { 5062 return; 5063 } 5064 t0 = tcg_const_tl(SIMM(ctx->opcode)); 5065 t1 = tcg_const_i32(TO(ctx->opcode)); 5066 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1); 5067 tcg_temp_free(t0); 5068 tcg_temp_free_i32(t1); 5069 } 5070 #endif 5071 5072 /*** Processor control ***/ 5073 5074 /* mcrxr */ 5075 static void gen_mcrxr(DisasContext *ctx) 5076 { 5077 TCGv_i32 t0 = tcg_temp_new_i32(); 5078 TCGv_i32 t1 = tcg_temp_new_i32(); 5079 TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)]; 5080 5081 tcg_gen_trunc_tl_i32(t0, cpu_so); 5082 tcg_gen_trunc_tl_i32(t1, cpu_ov); 5083 tcg_gen_trunc_tl_i32(dst, cpu_ca); 5084 tcg_gen_shli_i32(t0, t0, 3); 5085 tcg_gen_shli_i32(t1, t1, 2); 5086 tcg_gen_shli_i32(dst, dst, 1); 5087 tcg_gen_or_i32(dst, dst, t0); 5088 tcg_gen_or_i32(dst, dst, t1); 5089 tcg_temp_free_i32(t0); 5090 tcg_temp_free_i32(t1); 5091 5092 tcg_gen_movi_tl(cpu_so, 0); 5093 tcg_gen_movi_tl(cpu_ov, 0); 5094 tcg_gen_movi_tl(cpu_ca, 0); 5095 } 5096 5097 #ifdef TARGET_PPC64 5098 /* mcrxrx */ 5099 static void gen_mcrxrx(DisasContext *ctx) 5100 { 5101 TCGv t0 = tcg_temp_new(); 5102 TCGv t1 = tcg_temp_new(); 5103 TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)]; 5104 5105 /* copy OV and OV32 */ 5106 tcg_gen_shli_tl(t0, cpu_ov, 1); 5107 tcg_gen_or_tl(t0, t0, cpu_ov32); 5108 tcg_gen_shli_tl(t0, t0, 2); 5109 /* copy CA and CA32 */ 5110 tcg_gen_shli_tl(t1, cpu_ca, 1); 5111 tcg_gen_or_tl(t1, t1, cpu_ca32); 5112 tcg_gen_or_tl(t0, t0, t1); 5113 tcg_gen_trunc_tl_i32(dst, t0); 5114 tcg_temp_free(t0); 5115 tcg_temp_free(t1); 5116 } 5117 #endif 5118 5119 /* mfcr mfocrf */ 5120 static void gen_mfcr(DisasContext *ctx) 5121 { 5122 uint32_t crm, crn; 5123 5124 if (likely(ctx->opcode & 0x00100000)) { 5125 crm = CRM(ctx->opcode); 5126 if (likely(crm && ((crm & (crm - 1)) == 0))) { 5127 crn = ctz32(crm); 5128 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]); 5129 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], 5130 cpu_gpr[rD(ctx->opcode)], crn * 4); 5131 } 5132 } else { 5133 TCGv_i32 t0 = tcg_temp_new_i32(); 5134 tcg_gen_mov_i32(t0, cpu_crf[0]); 5135 tcg_gen_shli_i32(t0, t0, 4); 5136 tcg_gen_or_i32(t0, t0, cpu_crf[1]); 5137 tcg_gen_shli_i32(t0, t0, 4); 5138 tcg_gen_or_i32(t0, t0, cpu_crf[2]); 5139 tcg_gen_shli_i32(t0, t0, 4); 5140 tcg_gen_or_i32(t0, t0, cpu_crf[3]); 5141 tcg_gen_shli_i32(t0, t0, 4); 5142 tcg_gen_or_i32(t0, t0, cpu_crf[4]); 5143 tcg_gen_shli_i32(t0, t0, 4); 5144 tcg_gen_or_i32(t0, t0, cpu_crf[5]); 5145 tcg_gen_shli_i32(t0, t0, 4); 5146 tcg_gen_or_i32(t0, t0, cpu_crf[6]); 5147 tcg_gen_shli_i32(t0, t0, 4); 5148 tcg_gen_or_i32(t0, t0, cpu_crf[7]); 5149 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0); 5150 tcg_temp_free_i32(t0); 5151 } 5152 } 5153 5154 /* mfmsr */ 5155 static void gen_mfmsr(DisasContext *ctx) 5156 { 5157 CHK_SV; 5158 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr); 5159 } 5160 5161 /* mfspr */ 5162 static inline void gen_op_mfspr(DisasContext *ctx) 5163 { 5164 void (*read_cb)(DisasContext *ctx, int gprn, int sprn); 5165 uint32_t sprn = SPR(ctx->opcode); 5166 5167 #if defined(CONFIG_USER_ONLY) 5168 read_cb = ctx->spr_cb[sprn].uea_read; 5169 #else 5170 if (ctx->pr) { 5171 read_cb = ctx->spr_cb[sprn].uea_read; 5172 } else if (ctx->hv) { 5173 read_cb = ctx->spr_cb[sprn].hea_read; 5174 } else { 5175 read_cb = ctx->spr_cb[sprn].oea_read; 5176 } 5177 #endif 5178 if (likely(read_cb != NULL)) { 5179 if (likely(read_cb != SPR_NOACCESS)) { 5180 (*read_cb)(ctx, rD(ctx->opcode), sprn); 5181 } else { 5182 /* Privilege exception */ 5183 /* 5184 * This is a hack to avoid warnings when running Linux: 5185 * this OS breaks the PowerPC virtualisation model, 5186 * allowing userland application to read the PVR 5187 */ 5188 if (sprn != SPR_PVR) { 5189 qemu_log_mask(LOG_GUEST_ERROR, "Trying to read privileged spr " 5190 "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn, 5191 ctx->cia); 5192 } 5193 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG); 5194 } 5195 } else { 5196 /* ISA 2.07 defines these as no-ops */ 5197 if ((ctx->insns_flags2 & PPC2_ISA207S) && 5198 (sprn >= 808 && sprn <= 811)) { 5199 /* This is a nop */ 5200 return; 5201 } 5202 /* Not defined */ 5203 qemu_log_mask(LOG_GUEST_ERROR, 5204 "Trying to read invalid spr %d (0x%03x) at " 5205 TARGET_FMT_lx "\n", sprn, sprn, ctx->cia); 5206 5207 /* 5208 * The behaviour depends on MSR:PR and SPR# bit 0x10, it can 5209 * generate a priv, a hv emu or a no-op 5210 */ 5211 if (sprn & 0x10) { 5212 if (ctx->pr) { 5213 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 5214 } 5215 } else { 5216 if (ctx->pr || sprn == 0 || sprn == 4 || sprn == 5 || sprn == 6) { 5217 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 5218 } 5219 } 5220 } 5221 } 5222 5223 static void gen_mfspr(DisasContext *ctx) 5224 { 5225 gen_op_mfspr(ctx); 5226 } 5227 5228 /* mftb */ 5229 static void gen_mftb(DisasContext *ctx) 5230 { 5231 gen_op_mfspr(ctx); 5232 } 5233 5234 /* mtcrf mtocrf*/ 5235 static void gen_mtcrf(DisasContext *ctx) 5236 { 5237 uint32_t crm, crn; 5238 5239 crm = CRM(ctx->opcode); 5240 if (likely((ctx->opcode & 0x00100000))) { 5241 if (crm && ((crm & (crm - 1)) == 0)) { 5242 TCGv_i32 temp = tcg_temp_new_i32(); 5243 crn = ctz32(crm); 5244 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]); 5245 tcg_gen_shri_i32(temp, temp, crn * 4); 5246 tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf); 5247 tcg_temp_free_i32(temp); 5248 } 5249 } else { 5250 TCGv_i32 temp = tcg_temp_new_i32(); 5251 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]); 5252 for (crn = 0 ; crn < 8 ; crn++) { 5253 if (crm & (1 << crn)) { 5254 tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4); 5255 tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf); 5256 } 5257 } 5258 tcg_temp_free_i32(temp); 5259 } 5260 } 5261 5262 /* mtmsr */ 5263 #if defined(TARGET_PPC64) 5264 static void gen_mtmsrd(DisasContext *ctx) 5265 { 5266 CHK_SV; 5267 5268 #if !defined(CONFIG_USER_ONLY) 5269 gen_icount_io_start(ctx); 5270 if (ctx->opcode & 0x00010000) { 5271 /* L=1 form only updates EE and RI */ 5272 TCGv t0 = tcg_temp_new(); 5273 TCGv t1 = tcg_temp_new(); 5274 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], 5275 (1 << MSR_RI) | (1 << MSR_EE)); 5276 tcg_gen_andi_tl(t1, cpu_msr, 5277 ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE))); 5278 tcg_gen_or_tl(t1, t1, t0); 5279 5280 gen_helper_store_msr(cpu_env, t1); 5281 tcg_temp_free(t0); 5282 tcg_temp_free(t1); 5283 5284 } else { 5285 /* 5286 * XXX: we need to update nip before the store if we enter 5287 * power saving mode, we will exit the loop directly from 5288 * ppc_store_msr 5289 */ 5290 gen_update_nip(ctx, ctx->base.pc_next); 5291 gen_helper_store_msr(cpu_env, cpu_gpr[rS(ctx->opcode)]); 5292 } 5293 /* Must stop the translation as machine state (may have) changed */ 5294 ctx->base.is_jmp = DISAS_EXIT_UPDATE; 5295 #endif /* !defined(CONFIG_USER_ONLY) */ 5296 } 5297 #endif /* defined(TARGET_PPC64) */ 5298 5299 static void gen_mtmsr(DisasContext *ctx) 5300 { 5301 CHK_SV; 5302 5303 #if !defined(CONFIG_USER_ONLY) 5304 gen_icount_io_start(ctx); 5305 if (ctx->opcode & 0x00010000) { 5306 /* L=1 form only updates EE and RI */ 5307 TCGv t0 = tcg_temp_new(); 5308 TCGv t1 = tcg_temp_new(); 5309 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], 5310 (1 << MSR_RI) | (1 << MSR_EE)); 5311 tcg_gen_andi_tl(t1, cpu_msr, 5312 ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE))); 5313 tcg_gen_or_tl(t1, t1, t0); 5314 5315 gen_helper_store_msr(cpu_env, t1); 5316 tcg_temp_free(t0); 5317 tcg_temp_free(t1); 5318 5319 } else { 5320 TCGv msr = tcg_temp_new(); 5321 5322 /* 5323 * XXX: we need to update nip before the store if we enter 5324 * power saving mode, we will exit the loop directly from 5325 * ppc_store_msr 5326 */ 5327 gen_update_nip(ctx, ctx->base.pc_next); 5328 #if defined(TARGET_PPC64) 5329 tcg_gen_deposit_tl(msr, cpu_msr, cpu_gpr[rS(ctx->opcode)], 0, 32); 5330 #else 5331 tcg_gen_mov_tl(msr, cpu_gpr[rS(ctx->opcode)]); 5332 #endif 5333 gen_helper_store_msr(cpu_env, msr); 5334 tcg_temp_free(msr); 5335 } 5336 /* Must stop the translation as machine state (may have) changed */ 5337 ctx->base.is_jmp = DISAS_EXIT_UPDATE; 5338 #endif 5339 } 5340 5341 /* mtspr */ 5342 static void gen_mtspr(DisasContext *ctx) 5343 { 5344 void (*write_cb)(DisasContext *ctx, int sprn, int gprn); 5345 uint32_t sprn = SPR(ctx->opcode); 5346 5347 #if defined(CONFIG_USER_ONLY) 5348 write_cb = ctx->spr_cb[sprn].uea_write; 5349 #else 5350 if (ctx->pr) { 5351 write_cb = ctx->spr_cb[sprn].uea_write; 5352 } else if (ctx->hv) { 5353 write_cb = ctx->spr_cb[sprn].hea_write; 5354 } else { 5355 write_cb = ctx->spr_cb[sprn].oea_write; 5356 } 5357 #endif 5358 if (likely(write_cb != NULL)) { 5359 if (likely(write_cb != SPR_NOACCESS)) { 5360 (*write_cb)(ctx, sprn, rS(ctx->opcode)); 5361 } else { 5362 /* Privilege exception */ 5363 qemu_log_mask(LOG_GUEST_ERROR, "Trying to write privileged spr " 5364 "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn, 5365 ctx->cia); 5366 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG); 5367 } 5368 } else { 5369 /* ISA 2.07 defines these as no-ops */ 5370 if ((ctx->insns_flags2 & PPC2_ISA207S) && 5371 (sprn >= 808 && sprn <= 811)) { 5372 /* This is a nop */ 5373 return; 5374 } 5375 5376 /* Not defined */ 5377 qemu_log_mask(LOG_GUEST_ERROR, 5378 "Trying to write invalid spr %d (0x%03x) at " 5379 TARGET_FMT_lx "\n", sprn, sprn, ctx->cia); 5380 5381 5382 /* 5383 * The behaviour depends on MSR:PR and SPR# bit 0x10, it can 5384 * generate a priv, a hv emu or a no-op 5385 */ 5386 if (sprn & 0x10) { 5387 if (ctx->pr) { 5388 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 5389 } 5390 } else { 5391 if (ctx->pr || sprn == 0) { 5392 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 5393 } 5394 } 5395 } 5396 } 5397 5398 #if defined(TARGET_PPC64) 5399 /* setb */ 5400 static void gen_setb(DisasContext *ctx) 5401 { 5402 TCGv_i32 t0 = tcg_temp_new_i32(); 5403 TCGv_i32 t8 = tcg_temp_new_i32(); 5404 TCGv_i32 tm1 = tcg_temp_new_i32(); 5405 int crf = crfS(ctx->opcode); 5406 5407 tcg_gen_setcondi_i32(TCG_COND_GEU, t0, cpu_crf[crf], 4); 5408 tcg_gen_movi_i32(t8, 8); 5409 tcg_gen_movi_i32(tm1, -1); 5410 tcg_gen_movcond_i32(TCG_COND_GEU, t0, cpu_crf[crf], t8, tm1, t0); 5411 tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0); 5412 5413 tcg_temp_free_i32(t0); 5414 tcg_temp_free_i32(t8); 5415 tcg_temp_free_i32(tm1); 5416 } 5417 #endif 5418 5419 /*** Cache management ***/ 5420 5421 /* dcbf */ 5422 static void gen_dcbf(DisasContext *ctx) 5423 { 5424 /* XXX: specification says this is treated as a load by the MMU */ 5425 TCGv t0; 5426 gen_set_access_type(ctx, ACCESS_CACHE); 5427 t0 = tcg_temp_new(); 5428 gen_addr_reg_index(ctx, t0); 5429 gen_qemu_ld8u(ctx, t0, t0); 5430 tcg_temp_free(t0); 5431 } 5432 5433 /* dcbfep (external PID dcbf) */ 5434 static void gen_dcbfep(DisasContext *ctx) 5435 { 5436 /* XXX: specification says this is treated as a load by the MMU */ 5437 TCGv t0; 5438 CHK_SV; 5439 gen_set_access_type(ctx, ACCESS_CACHE); 5440 t0 = tcg_temp_new(); 5441 gen_addr_reg_index(ctx, t0); 5442 tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB)); 5443 tcg_temp_free(t0); 5444 } 5445 5446 /* dcbi (Supervisor only) */ 5447 static void gen_dcbi(DisasContext *ctx) 5448 { 5449 #if defined(CONFIG_USER_ONLY) 5450 GEN_PRIV; 5451 #else 5452 TCGv EA, val; 5453 5454 CHK_SV; 5455 EA = tcg_temp_new(); 5456 gen_set_access_type(ctx, ACCESS_CACHE); 5457 gen_addr_reg_index(ctx, EA); 5458 val = tcg_temp_new(); 5459 /* XXX: specification says this should be treated as a store by the MMU */ 5460 gen_qemu_ld8u(ctx, val, EA); 5461 gen_qemu_st8(ctx, val, EA); 5462 tcg_temp_free(val); 5463 tcg_temp_free(EA); 5464 #endif /* defined(CONFIG_USER_ONLY) */ 5465 } 5466 5467 /* dcdst */ 5468 static void gen_dcbst(DisasContext *ctx) 5469 { 5470 /* XXX: specification say this is treated as a load by the MMU */ 5471 TCGv t0; 5472 gen_set_access_type(ctx, ACCESS_CACHE); 5473 t0 = tcg_temp_new(); 5474 gen_addr_reg_index(ctx, t0); 5475 gen_qemu_ld8u(ctx, t0, t0); 5476 tcg_temp_free(t0); 5477 } 5478 5479 /* dcbstep (dcbstep External PID version) */ 5480 static void gen_dcbstep(DisasContext *ctx) 5481 { 5482 /* XXX: specification say this is treated as a load by the MMU */ 5483 TCGv t0; 5484 gen_set_access_type(ctx, ACCESS_CACHE); 5485 t0 = tcg_temp_new(); 5486 gen_addr_reg_index(ctx, t0); 5487 tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB)); 5488 tcg_temp_free(t0); 5489 } 5490 5491 /* dcbt */ 5492 static void gen_dcbt(DisasContext *ctx) 5493 { 5494 /* 5495 * interpreted as no-op 5496 * XXX: specification say this is treated as a load by the MMU but 5497 * does not generate any exception 5498 */ 5499 } 5500 5501 /* dcbtep */ 5502 static void gen_dcbtep(DisasContext *ctx) 5503 { 5504 /* 5505 * interpreted as no-op 5506 * XXX: specification say this is treated as a load by the MMU but 5507 * does not generate any exception 5508 */ 5509 } 5510 5511 /* dcbtst */ 5512 static void gen_dcbtst(DisasContext *ctx) 5513 { 5514 /* 5515 * interpreted as no-op 5516 * XXX: specification say this is treated as a load by the MMU but 5517 * does not generate any exception 5518 */ 5519 } 5520 5521 /* dcbtstep */ 5522 static void gen_dcbtstep(DisasContext *ctx) 5523 { 5524 /* 5525 * interpreted as no-op 5526 * XXX: specification say this is treated as a load by the MMU but 5527 * does not generate any exception 5528 */ 5529 } 5530 5531 /* dcbtls */ 5532 static void gen_dcbtls(DisasContext *ctx) 5533 { 5534 /* Always fails locking the cache */ 5535 TCGv t0 = tcg_temp_new(); 5536 gen_load_spr(t0, SPR_Exxx_L1CSR0); 5537 tcg_gen_ori_tl(t0, t0, L1CSR0_CUL); 5538 gen_store_spr(SPR_Exxx_L1CSR0, t0); 5539 tcg_temp_free(t0); 5540 } 5541 5542 /* dcbz */ 5543 static void gen_dcbz(DisasContext *ctx) 5544 { 5545 TCGv tcgv_addr; 5546 TCGv_i32 tcgv_op; 5547 5548 gen_set_access_type(ctx, ACCESS_CACHE); 5549 tcgv_addr = tcg_temp_new(); 5550 tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000); 5551 gen_addr_reg_index(ctx, tcgv_addr); 5552 gen_helper_dcbz(cpu_env, tcgv_addr, tcgv_op); 5553 tcg_temp_free(tcgv_addr); 5554 tcg_temp_free_i32(tcgv_op); 5555 } 5556 5557 /* dcbzep */ 5558 static void gen_dcbzep(DisasContext *ctx) 5559 { 5560 TCGv tcgv_addr; 5561 TCGv_i32 tcgv_op; 5562 5563 gen_set_access_type(ctx, ACCESS_CACHE); 5564 tcgv_addr = tcg_temp_new(); 5565 tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000); 5566 gen_addr_reg_index(ctx, tcgv_addr); 5567 gen_helper_dcbzep(cpu_env, tcgv_addr, tcgv_op); 5568 tcg_temp_free(tcgv_addr); 5569 tcg_temp_free_i32(tcgv_op); 5570 } 5571 5572 /* dst / dstt */ 5573 static void gen_dst(DisasContext *ctx) 5574 { 5575 if (rA(ctx->opcode) == 0) { 5576 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5577 } else { 5578 /* interpreted as no-op */ 5579 } 5580 } 5581 5582 /* dstst /dststt */ 5583 static void gen_dstst(DisasContext *ctx) 5584 { 5585 if (rA(ctx->opcode) == 0) { 5586 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5587 } else { 5588 /* interpreted as no-op */ 5589 } 5590 5591 } 5592 5593 /* dss / dssall */ 5594 static void gen_dss(DisasContext *ctx) 5595 { 5596 /* interpreted as no-op */ 5597 } 5598 5599 /* icbi */ 5600 static void gen_icbi(DisasContext *ctx) 5601 { 5602 TCGv t0; 5603 gen_set_access_type(ctx, ACCESS_CACHE); 5604 t0 = tcg_temp_new(); 5605 gen_addr_reg_index(ctx, t0); 5606 gen_helper_icbi(cpu_env, t0); 5607 tcg_temp_free(t0); 5608 } 5609 5610 /* icbiep */ 5611 static void gen_icbiep(DisasContext *ctx) 5612 { 5613 TCGv t0; 5614 gen_set_access_type(ctx, ACCESS_CACHE); 5615 t0 = tcg_temp_new(); 5616 gen_addr_reg_index(ctx, t0); 5617 gen_helper_icbiep(cpu_env, t0); 5618 tcg_temp_free(t0); 5619 } 5620 5621 /* Optional: */ 5622 /* dcba */ 5623 static void gen_dcba(DisasContext *ctx) 5624 { 5625 /* 5626 * interpreted as no-op 5627 * XXX: specification say this is treated as a store by the MMU 5628 * but does not generate any exception 5629 */ 5630 } 5631 5632 /*** Segment register manipulation ***/ 5633 /* Supervisor only: */ 5634 5635 /* mfsr */ 5636 static void gen_mfsr(DisasContext *ctx) 5637 { 5638 #if defined(CONFIG_USER_ONLY) 5639 GEN_PRIV; 5640 #else 5641 TCGv t0; 5642 5643 CHK_SV; 5644 t0 = tcg_const_tl(SR(ctx->opcode)); 5645 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5646 tcg_temp_free(t0); 5647 #endif /* defined(CONFIG_USER_ONLY) */ 5648 } 5649 5650 /* mfsrin */ 5651 static void gen_mfsrin(DisasContext *ctx) 5652 { 5653 #if defined(CONFIG_USER_ONLY) 5654 GEN_PRIV; 5655 #else 5656 TCGv t0; 5657 5658 CHK_SV; 5659 t0 = tcg_temp_new(); 5660 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 5661 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5662 tcg_temp_free(t0); 5663 #endif /* defined(CONFIG_USER_ONLY) */ 5664 } 5665 5666 /* mtsr */ 5667 static void gen_mtsr(DisasContext *ctx) 5668 { 5669 #if defined(CONFIG_USER_ONLY) 5670 GEN_PRIV; 5671 #else 5672 TCGv t0; 5673 5674 CHK_SV; 5675 t0 = tcg_const_tl(SR(ctx->opcode)); 5676 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 5677 tcg_temp_free(t0); 5678 #endif /* defined(CONFIG_USER_ONLY) */ 5679 } 5680 5681 /* mtsrin */ 5682 static void gen_mtsrin(DisasContext *ctx) 5683 { 5684 #if defined(CONFIG_USER_ONLY) 5685 GEN_PRIV; 5686 #else 5687 TCGv t0; 5688 CHK_SV; 5689 5690 t0 = tcg_temp_new(); 5691 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 5692 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rD(ctx->opcode)]); 5693 tcg_temp_free(t0); 5694 #endif /* defined(CONFIG_USER_ONLY) */ 5695 } 5696 5697 #if defined(TARGET_PPC64) 5698 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */ 5699 5700 /* mfsr */ 5701 static void gen_mfsr_64b(DisasContext *ctx) 5702 { 5703 #if defined(CONFIG_USER_ONLY) 5704 GEN_PRIV; 5705 #else 5706 TCGv t0; 5707 5708 CHK_SV; 5709 t0 = tcg_const_tl(SR(ctx->opcode)); 5710 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5711 tcg_temp_free(t0); 5712 #endif /* defined(CONFIG_USER_ONLY) */ 5713 } 5714 5715 /* mfsrin */ 5716 static void gen_mfsrin_64b(DisasContext *ctx) 5717 { 5718 #if defined(CONFIG_USER_ONLY) 5719 GEN_PRIV; 5720 #else 5721 TCGv t0; 5722 5723 CHK_SV; 5724 t0 = tcg_temp_new(); 5725 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 5726 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5727 tcg_temp_free(t0); 5728 #endif /* defined(CONFIG_USER_ONLY) */ 5729 } 5730 5731 /* mtsr */ 5732 static void gen_mtsr_64b(DisasContext *ctx) 5733 { 5734 #if defined(CONFIG_USER_ONLY) 5735 GEN_PRIV; 5736 #else 5737 TCGv t0; 5738 5739 CHK_SV; 5740 t0 = tcg_const_tl(SR(ctx->opcode)); 5741 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 5742 tcg_temp_free(t0); 5743 #endif /* defined(CONFIG_USER_ONLY) */ 5744 } 5745 5746 /* mtsrin */ 5747 static void gen_mtsrin_64b(DisasContext *ctx) 5748 { 5749 #if defined(CONFIG_USER_ONLY) 5750 GEN_PRIV; 5751 #else 5752 TCGv t0; 5753 5754 CHK_SV; 5755 t0 = tcg_temp_new(); 5756 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 5757 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 5758 tcg_temp_free(t0); 5759 #endif /* defined(CONFIG_USER_ONLY) */ 5760 } 5761 5762 /* slbmte */ 5763 static void gen_slbmte(DisasContext *ctx) 5764 { 5765 #if defined(CONFIG_USER_ONLY) 5766 GEN_PRIV; 5767 #else 5768 CHK_SV; 5769 5770 gen_helper_store_slb(cpu_env, cpu_gpr[rB(ctx->opcode)], 5771 cpu_gpr[rS(ctx->opcode)]); 5772 #endif /* defined(CONFIG_USER_ONLY) */ 5773 } 5774 5775 static void gen_slbmfee(DisasContext *ctx) 5776 { 5777 #if defined(CONFIG_USER_ONLY) 5778 GEN_PRIV; 5779 #else 5780 CHK_SV; 5781 5782 gen_helper_load_slb_esid(cpu_gpr[rS(ctx->opcode)], cpu_env, 5783 cpu_gpr[rB(ctx->opcode)]); 5784 #endif /* defined(CONFIG_USER_ONLY) */ 5785 } 5786 5787 static void gen_slbmfev(DisasContext *ctx) 5788 { 5789 #if defined(CONFIG_USER_ONLY) 5790 GEN_PRIV; 5791 #else 5792 CHK_SV; 5793 5794 gen_helper_load_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env, 5795 cpu_gpr[rB(ctx->opcode)]); 5796 #endif /* defined(CONFIG_USER_ONLY) */ 5797 } 5798 5799 static void gen_slbfee_(DisasContext *ctx) 5800 { 5801 #if defined(CONFIG_USER_ONLY) 5802 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); 5803 #else 5804 TCGLabel *l1, *l2; 5805 5806 if (unlikely(ctx->pr)) { 5807 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); 5808 return; 5809 } 5810 gen_helper_find_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env, 5811 cpu_gpr[rB(ctx->opcode)]); 5812 l1 = gen_new_label(); 5813 l2 = gen_new_label(); 5814 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 5815 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rS(ctx->opcode)], -1, l1); 5816 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ); 5817 tcg_gen_br(l2); 5818 gen_set_label(l1); 5819 tcg_gen_movi_tl(cpu_gpr[rS(ctx->opcode)], 0); 5820 gen_set_label(l2); 5821 #endif 5822 } 5823 #endif /* defined(TARGET_PPC64) */ 5824 5825 /*** Lookaside buffer management ***/ 5826 /* Optional & supervisor only: */ 5827 5828 /* tlbia */ 5829 static void gen_tlbia(DisasContext *ctx) 5830 { 5831 #if defined(CONFIG_USER_ONLY) 5832 GEN_PRIV; 5833 #else 5834 CHK_HV; 5835 5836 gen_helper_tlbia(cpu_env); 5837 #endif /* defined(CONFIG_USER_ONLY) */ 5838 } 5839 5840 /* tlbiel */ 5841 static void gen_tlbiel(DisasContext *ctx) 5842 { 5843 #if defined(CONFIG_USER_ONLY) 5844 GEN_PRIV; 5845 #else 5846 CHK_SV; 5847 5848 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5849 #endif /* defined(CONFIG_USER_ONLY) */ 5850 } 5851 5852 /* tlbie */ 5853 static void gen_tlbie(DisasContext *ctx) 5854 { 5855 #if defined(CONFIG_USER_ONLY) 5856 GEN_PRIV; 5857 #else 5858 TCGv_i32 t1; 5859 5860 if (ctx->gtse) { 5861 CHK_SV; /* If gtse is set then tlbie is supervisor privileged */ 5862 } else { 5863 CHK_HV; /* Else hypervisor privileged */ 5864 } 5865 5866 if (NARROW_MODE(ctx)) { 5867 TCGv t0 = tcg_temp_new(); 5868 tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]); 5869 gen_helper_tlbie(cpu_env, t0); 5870 tcg_temp_free(t0); 5871 } else { 5872 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5873 } 5874 t1 = tcg_temp_new_i32(); 5875 tcg_gen_ld_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush)); 5876 tcg_gen_ori_i32(t1, t1, TLB_NEED_GLOBAL_FLUSH); 5877 tcg_gen_st_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush)); 5878 tcg_temp_free_i32(t1); 5879 #endif /* defined(CONFIG_USER_ONLY) */ 5880 } 5881 5882 /* tlbsync */ 5883 static void gen_tlbsync(DisasContext *ctx) 5884 { 5885 #if defined(CONFIG_USER_ONLY) 5886 GEN_PRIV; 5887 #else 5888 5889 if (ctx->gtse) { 5890 CHK_SV; /* If gtse is set then tlbsync is supervisor privileged */ 5891 } else { 5892 CHK_HV; /* Else hypervisor privileged */ 5893 } 5894 5895 /* BookS does both ptesync and tlbsync make tlbsync a nop for server */ 5896 if (ctx->insns_flags & PPC_BOOKE) { 5897 gen_check_tlb_flush(ctx, true); 5898 } 5899 #endif /* defined(CONFIG_USER_ONLY) */ 5900 } 5901 5902 #if defined(TARGET_PPC64) 5903 /* slbia */ 5904 static void gen_slbia(DisasContext *ctx) 5905 { 5906 #if defined(CONFIG_USER_ONLY) 5907 GEN_PRIV; 5908 #else 5909 uint32_t ih = (ctx->opcode >> 21) & 0x7; 5910 TCGv_i32 t0 = tcg_const_i32(ih); 5911 5912 CHK_SV; 5913 5914 gen_helper_slbia(cpu_env, t0); 5915 tcg_temp_free_i32(t0); 5916 #endif /* defined(CONFIG_USER_ONLY) */ 5917 } 5918 5919 /* slbie */ 5920 static void gen_slbie(DisasContext *ctx) 5921 { 5922 #if defined(CONFIG_USER_ONLY) 5923 GEN_PRIV; 5924 #else 5925 CHK_SV; 5926 5927 gen_helper_slbie(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5928 #endif /* defined(CONFIG_USER_ONLY) */ 5929 } 5930 5931 /* slbieg */ 5932 static void gen_slbieg(DisasContext *ctx) 5933 { 5934 #if defined(CONFIG_USER_ONLY) 5935 GEN_PRIV; 5936 #else 5937 CHK_SV; 5938 5939 gen_helper_slbieg(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5940 #endif /* defined(CONFIG_USER_ONLY) */ 5941 } 5942 5943 /* slbsync */ 5944 static void gen_slbsync(DisasContext *ctx) 5945 { 5946 #if defined(CONFIG_USER_ONLY) 5947 GEN_PRIV; 5948 #else 5949 CHK_SV; 5950 gen_check_tlb_flush(ctx, true); 5951 #endif /* defined(CONFIG_USER_ONLY) */ 5952 } 5953 5954 #endif /* defined(TARGET_PPC64) */ 5955 5956 /*** External control ***/ 5957 /* Optional: */ 5958 5959 /* eciwx */ 5960 static void gen_eciwx(DisasContext *ctx) 5961 { 5962 TCGv t0; 5963 /* Should check EAR[E] ! */ 5964 gen_set_access_type(ctx, ACCESS_EXT); 5965 t0 = tcg_temp_new(); 5966 gen_addr_reg_index(ctx, t0); 5967 tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx, 5968 DEF_MEMOP(MO_UL | MO_ALIGN)); 5969 tcg_temp_free(t0); 5970 } 5971 5972 /* ecowx */ 5973 static void gen_ecowx(DisasContext *ctx) 5974 { 5975 TCGv t0; 5976 /* Should check EAR[E] ! */ 5977 gen_set_access_type(ctx, ACCESS_EXT); 5978 t0 = tcg_temp_new(); 5979 gen_addr_reg_index(ctx, t0); 5980 tcg_gen_qemu_st_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx, 5981 DEF_MEMOP(MO_UL | MO_ALIGN)); 5982 tcg_temp_free(t0); 5983 } 5984 5985 /* PowerPC 601 specific instructions */ 5986 5987 /* abs - abs. */ 5988 static void gen_abs(DisasContext *ctx) 5989 { 5990 TCGv d = cpu_gpr[rD(ctx->opcode)]; 5991 TCGv a = cpu_gpr[rA(ctx->opcode)]; 5992 5993 tcg_gen_abs_tl(d, a); 5994 if (unlikely(Rc(ctx->opcode) != 0)) { 5995 gen_set_Rc0(ctx, d); 5996 } 5997 } 5998 5999 /* abso - abso. */ 6000 static void gen_abso(DisasContext *ctx) 6001 { 6002 TCGv d = cpu_gpr[rD(ctx->opcode)]; 6003 TCGv a = cpu_gpr[rA(ctx->opcode)]; 6004 6005 tcg_gen_setcondi_tl(TCG_COND_EQ, cpu_ov, a, 0x80000000); 6006 tcg_gen_abs_tl(d, a); 6007 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 6008 if (unlikely(Rc(ctx->opcode) != 0)) { 6009 gen_set_Rc0(ctx, d); 6010 } 6011 } 6012 6013 /* clcs */ 6014 static void gen_clcs(DisasContext *ctx) 6015 { 6016 TCGv_i32 t0 = tcg_const_i32(rA(ctx->opcode)); 6017 gen_helper_clcs(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 6018 tcg_temp_free_i32(t0); 6019 /* Rc=1 sets CR0 to an undefined state */ 6020 } 6021 6022 /* div - div. */ 6023 static void gen_div(DisasContext *ctx) 6024 { 6025 gen_helper_div(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 6026 cpu_gpr[rB(ctx->opcode)]); 6027 if (unlikely(Rc(ctx->opcode) != 0)) { 6028 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 6029 } 6030 } 6031 6032 /* divo - divo. */ 6033 static void gen_divo(DisasContext *ctx) 6034 { 6035 gen_helper_divo(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 6036 cpu_gpr[rB(ctx->opcode)]); 6037 if (unlikely(Rc(ctx->opcode) != 0)) { 6038 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 6039 } 6040 } 6041 6042 /* divs - divs. */ 6043 static void gen_divs(DisasContext *ctx) 6044 { 6045 gen_helper_divs(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 6046 cpu_gpr[rB(ctx->opcode)]); 6047 if (unlikely(Rc(ctx->opcode) != 0)) { 6048 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 6049 } 6050 } 6051 6052 /* divso - divso. */ 6053 static void gen_divso(DisasContext *ctx) 6054 { 6055 gen_helper_divso(cpu_gpr[rD(ctx->opcode)], cpu_env, 6056 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 6057 if (unlikely(Rc(ctx->opcode) != 0)) { 6058 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 6059 } 6060 } 6061 6062 /* doz - doz. */ 6063 static void gen_doz(DisasContext *ctx) 6064 { 6065 TCGLabel *l1 = gen_new_label(); 6066 TCGLabel *l2 = gen_new_label(); 6067 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], 6068 cpu_gpr[rA(ctx->opcode)], l1); 6069 tcg_gen_sub_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 6070 cpu_gpr[rA(ctx->opcode)]); 6071 tcg_gen_br(l2); 6072 gen_set_label(l1); 6073 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 6074 gen_set_label(l2); 6075 if (unlikely(Rc(ctx->opcode) != 0)) { 6076 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 6077 } 6078 } 6079 6080 /* dozo - dozo. */ 6081 static void gen_dozo(DisasContext *ctx) 6082 { 6083 TCGLabel *l1 = gen_new_label(); 6084 TCGLabel *l2 = gen_new_label(); 6085 TCGv t0 = tcg_temp_new(); 6086 TCGv t1 = tcg_temp_new(); 6087 TCGv t2 = tcg_temp_new(); 6088 /* Start with XER OV disabled, the most likely case */ 6089 tcg_gen_movi_tl(cpu_ov, 0); 6090 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], 6091 cpu_gpr[rA(ctx->opcode)], l1); 6092 tcg_gen_sub_tl(t0, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 6093 tcg_gen_xor_tl(t1, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 6094 tcg_gen_xor_tl(t2, cpu_gpr[rA(ctx->opcode)], t0); 6095 tcg_gen_andc_tl(t1, t1, t2); 6096 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0); 6097 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2); 6098 tcg_gen_movi_tl(cpu_ov, 1); 6099 tcg_gen_movi_tl(cpu_so, 1); 6100 tcg_gen_br(l2); 6101 gen_set_label(l1); 6102 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 6103 gen_set_label(l2); 6104 tcg_temp_free(t0); 6105 tcg_temp_free(t1); 6106 tcg_temp_free(t2); 6107 if (unlikely(Rc(ctx->opcode) != 0)) { 6108 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 6109 } 6110 } 6111 6112 /* dozi */ 6113 static void gen_dozi(DisasContext *ctx) 6114 { 6115 target_long simm = SIMM(ctx->opcode); 6116 TCGLabel *l1 = gen_new_label(); 6117 TCGLabel *l2 = gen_new_label(); 6118 tcg_gen_brcondi_tl(TCG_COND_LT, cpu_gpr[rA(ctx->opcode)], simm, l1); 6119 tcg_gen_subfi_tl(cpu_gpr[rD(ctx->opcode)], simm, cpu_gpr[rA(ctx->opcode)]); 6120 tcg_gen_br(l2); 6121 gen_set_label(l1); 6122 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 6123 gen_set_label(l2); 6124 if (unlikely(Rc(ctx->opcode) != 0)) { 6125 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 6126 } 6127 } 6128 6129 /* lscbx - lscbx. */ 6130 static void gen_lscbx(DisasContext *ctx) 6131 { 6132 TCGv t0 = tcg_temp_new(); 6133 TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode)); 6134 TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode)); 6135 TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode)); 6136 6137 gen_addr_reg_index(ctx, t0); 6138 gen_helper_lscbx(t0, cpu_env, t0, t1, t2, t3); 6139 tcg_temp_free_i32(t1); 6140 tcg_temp_free_i32(t2); 6141 tcg_temp_free_i32(t3); 6142 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F); 6143 tcg_gen_or_tl(cpu_xer, cpu_xer, t0); 6144 if (unlikely(Rc(ctx->opcode) != 0)) { 6145 gen_set_Rc0(ctx, t0); 6146 } 6147 tcg_temp_free(t0); 6148 } 6149 6150 /* maskg - maskg. */ 6151 static void gen_maskg(DisasContext *ctx) 6152 { 6153 TCGLabel *l1 = gen_new_label(); 6154 TCGv t0 = tcg_temp_new(); 6155 TCGv t1 = tcg_temp_new(); 6156 TCGv t2 = tcg_temp_new(); 6157 TCGv t3 = tcg_temp_new(); 6158 tcg_gen_movi_tl(t3, 0xFFFFFFFF); 6159 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 6160 tcg_gen_andi_tl(t1, cpu_gpr[rS(ctx->opcode)], 0x1F); 6161 tcg_gen_addi_tl(t2, t0, 1); 6162 tcg_gen_shr_tl(t2, t3, t2); 6163 tcg_gen_shr_tl(t3, t3, t1); 6164 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], t2, t3); 6165 tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1); 6166 tcg_gen_neg_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 6167 gen_set_label(l1); 6168 tcg_temp_free(t0); 6169 tcg_temp_free(t1); 6170 tcg_temp_free(t2); 6171 tcg_temp_free(t3); 6172 if (unlikely(Rc(ctx->opcode) != 0)) { 6173 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6174 } 6175 } 6176 6177 /* maskir - maskir. */ 6178 static void gen_maskir(DisasContext *ctx) 6179 { 6180 TCGv t0 = tcg_temp_new(); 6181 TCGv t1 = tcg_temp_new(); 6182 tcg_gen_and_tl(t0, cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 6183 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 6184 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 6185 tcg_temp_free(t0); 6186 tcg_temp_free(t1); 6187 if (unlikely(Rc(ctx->opcode) != 0)) { 6188 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6189 } 6190 } 6191 6192 /* mul - mul. */ 6193 static void gen_mul(DisasContext *ctx) 6194 { 6195 TCGv_i64 t0 = tcg_temp_new_i64(); 6196 TCGv_i64 t1 = tcg_temp_new_i64(); 6197 TCGv t2 = tcg_temp_new(); 6198 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]); 6199 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]); 6200 tcg_gen_mul_i64(t0, t0, t1); 6201 tcg_gen_trunc_i64_tl(t2, t0); 6202 gen_store_spr(SPR_MQ, t2); 6203 tcg_gen_shri_i64(t1, t0, 32); 6204 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1); 6205 tcg_temp_free_i64(t0); 6206 tcg_temp_free_i64(t1); 6207 tcg_temp_free(t2); 6208 if (unlikely(Rc(ctx->opcode) != 0)) { 6209 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 6210 } 6211 } 6212 6213 /* mulo - mulo. */ 6214 static void gen_mulo(DisasContext *ctx) 6215 { 6216 TCGLabel *l1 = gen_new_label(); 6217 TCGv_i64 t0 = tcg_temp_new_i64(); 6218 TCGv_i64 t1 = tcg_temp_new_i64(); 6219 TCGv t2 = tcg_temp_new(); 6220 /* Start with XER OV disabled, the most likely case */ 6221 tcg_gen_movi_tl(cpu_ov, 0); 6222 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]); 6223 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]); 6224 tcg_gen_mul_i64(t0, t0, t1); 6225 tcg_gen_trunc_i64_tl(t2, t0); 6226 gen_store_spr(SPR_MQ, t2); 6227 tcg_gen_shri_i64(t1, t0, 32); 6228 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1); 6229 tcg_gen_ext32s_i64(t1, t0); 6230 tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1); 6231 tcg_gen_movi_tl(cpu_ov, 1); 6232 tcg_gen_movi_tl(cpu_so, 1); 6233 gen_set_label(l1); 6234 tcg_temp_free_i64(t0); 6235 tcg_temp_free_i64(t1); 6236 tcg_temp_free(t2); 6237 if (unlikely(Rc(ctx->opcode) != 0)) { 6238 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 6239 } 6240 } 6241 6242 /* nabs - nabs. */ 6243 static void gen_nabs(DisasContext *ctx) 6244 { 6245 TCGv d = cpu_gpr[rD(ctx->opcode)]; 6246 TCGv a = cpu_gpr[rA(ctx->opcode)]; 6247 6248 tcg_gen_abs_tl(d, a); 6249 tcg_gen_neg_tl(d, d); 6250 if (unlikely(Rc(ctx->opcode) != 0)) { 6251 gen_set_Rc0(ctx, d); 6252 } 6253 } 6254 6255 /* nabso - nabso. */ 6256 static void gen_nabso(DisasContext *ctx) 6257 { 6258 TCGv d = cpu_gpr[rD(ctx->opcode)]; 6259 TCGv a = cpu_gpr[rA(ctx->opcode)]; 6260 6261 tcg_gen_abs_tl(d, a); 6262 tcg_gen_neg_tl(d, d); 6263 /* nabs never overflows */ 6264 tcg_gen_movi_tl(cpu_ov, 0); 6265 if (unlikely(Rc(ctx->opcode) != 0)) { 6266 gen_set_Rc0(ctx, d); 6267 } 6268 } 6269 6270 /* rlmi - rlmi. */ 6271 static void gen_rlmi(DisasContext *ctx) 6272 { 6273 uint32_t mb = MB(ctx->opcode); 6274 uint32_t me = ME(ctx->opcode); 6275 TCGv t0 = tcg_temp_new(); 6276 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 6277 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 6278 tcg_gen_andi_tl(t0, t0, MASK(mb, me)); 6279 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 6280 ~MASK(mb, me)); 6281 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], t0); 6282 tcg_temp_free(t0); 6283 if (unlikely(Rc(ctx->opcode) != 0)) { 6284 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6285 } 6286 } 6287 6288 /* rrib - rrib. */ 6289 static void gen_rrib(DisasContext *ctx) 6290 { 6291 TCGv t0 = tcg_temp_new(); 6292 TCGv t1 = tcg_temp_new(); 6293 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 6294 tcg_gen_movi_tl(t1, 0x80000000); 6295 tcg_gen_shr_tl(t1, t1, t0); 6296 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 6297 tcg_gen_and_tl(t0, t0, t1); 6298 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], t1); 6299 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 6300 tcg_temp_free(t0); 6301 tcg_temp_free(t1); 6302 if (unlikely(Rc(ctx->opcode) != 0)) { 6303 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6304 } 6305 } 6306 6307 /* sle - sle. */ 6308 static void gen_sle(DisasContext *ctx) 6309 { 6310 TCGv t0 = tcg_temp_new(); 6311 TCGv t1 = tcg_temp_new(); 6312 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 6313 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 6314 tcg_gen_subfi_tl(t1, 32, t1); 6315 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 6316 tcg_gen_or_tl(t1, t0, t1); 6317 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 6318 gen_store_spr(SPR_MQ, t1); 6319 tcg_temp_free(t0); 6320 tcg_temp_free(t1); 6321 if (unlikely(Rc(ctx->opcode) != 0)) { 6322 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6323 } 6324 } 6325 6326 /* sleq - sleq. */ 6327 static void gen_sleq(DisasContext *ctx) 6328 { 6329 TCGv t0 = tcg_temp_new(); 6330 TCGv t1 = tcg_temp_new(); 6331 TCGv t2 = tcg_temp_new(); 6332 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 6333 tcg_gen_movi_tl(t2, 0xFFFFFFFF); 6334 tcg_gen_shl_tl(t2, t2, t0); 6335 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 6336 gen_load_spr(t1, SPR_MQ); 6337 gen_store_spr(SPR_MQ, t0); 6338 tcg_gen_and_tl(t0, t0, t2); 6339 tcg_gen_andc_tl(t1, t1, t2); 6340 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 6341 tcg_temp_free(t0); 6342 tcg_temp_free(t1); 6343 tcg_temp_free(t2); 6344 if (unlikely(Rc(ctx->opcode) != 0)) { 6345 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6346 } 6347 } 6348 6349 /* sliq - sliq. */ 6350 static void gen_sliq(DisasContext *ctx) 6351 { 6352 int sh = SH(ctx->opcode); 6353 TCGv t0 = tcg_temp_new(); 6354 TCGv t1 = tcg_temp_new(); 6355 tcg_gen_shli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 6356 tcg_gen_shri_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 6357 tcg_gen_or_tl(t1, t0, t1); 6358 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 6359 gen_store_spr(SPR_MQ, t1); 6360 tcg_temp_free(t0); 6361 tcg_temp_free(t1); 6362 if (unlikely(Rc(ctx->opcode) != 0)) { 6363 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6364 } 6365 } 6366 6367 /* slliq - slliq. */ 6368 static void gen_slliq(DisasContext *ctx) 6369 { 6370 int sh = SH(ctx->opcode); 6371 TCGv t0 = tcg_temp_new(); 6372 TCGv t1 = tcg_temp_new(); 6373 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 6374 gen_load_spr(t1, SPR_MQ); 6375 gen_store_spr(SPR_MQ, t0); 6376 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU << sh)); 6377 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU << sh)); 6378 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 6379 tcg_temp_free(t0); 6380 tcg_temp_free(t1); 6381 if (unlikely(Rc(ctx->opcode) != 0)) { 6382 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6383 } 6384 } 6385 6386 /* sllq - sllq. */ 6387 static void gen_sllq(DisasContext *ctx) 6388 { 6389 TCGLabel *l1 = gen_new_label(); 6390 TCGLabel *l2 = gen_new_label(); 6391 TCGv t0 = tcg_temp_local_new(); 6392 TCGv t1 = tcg_temp_local_new(); 6393 TCGv t2 = tcg_temp_local_new(); 6394 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 6395 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 6396 tcg_gen_shl_tl(t1, t1, t2); 6397 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 6398 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 6399 gen_load_spr(t0, SPR_MQ); 6400 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 6401 tcg_gen_br(l2); 6402 gen_set_label(l1); 6403 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 6404 gen_load_spr(t2, SPR_MQ); 6405 tcg_gen_andc_tl(t1, t2, t1); 6406 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 6407 gen_set_label(l2); 6408 tcg_temp_free(t0); 6409 tcg_temp_free(t1); 6410 tcg_temp_free(t2); 6411 if (unlikely(Rc(ctx->opcode) != 0)) { 6412 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6413 } 6414 } 6415 6416 /* slq - slq. */ 6417 static void gen_slq(DisasContext *ctx) 6418 { 6419 TCGLabel *l1 = gen_new_label(); 6420 TCGv t0 = tcg_temp_new(); 6421 TCGv t1 = tcg_temp_new(); 6422 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 6423 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 6424 tcg_gen_subfi_tl(t1, 32, t1); 6425 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 6426 tcg_gen_or_tl(t1, t0, t1); 6427 gen_store_spr(SPR_MQ, t1); 6428 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20); 6429 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 6430 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1); 6431 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 6432 gen_set_label(l1); 6433 tcg_temp_free(t0); 6434 tcg_temp_free(t1); 6435 if (unlikely(Rc(ctx->opcode) != 0)) { 6436 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6437 } 6438 } 6439 6440 /* sraiq - sraiq. */ 6441 static void gen_sraiq(DisasContext *ctx) 6442 { 6443 int sh = SH(ctx->opcode); 6444 TCGLabel *l1 = gen_new_label(); 6445 TCGv t0 = tcg_temp_new(); 6446 TCGv t1 = tcg_temp_new(); 6447 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 6448 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 6449 tcg_gen_or_tl(t0, t0, t1); 6450 gen_store_spr(SPR_MQ, t0); 6451 tcg_gen_movi_tl(cpu_ca, 0); 6452 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1); 6453 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1); 6454 tcg_gen_movi_tl(cpu_ca, 1); 6455 gen_set_label(l1); 6456 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh); 6457 tcg_temp_free(t0); 6458 tcg_temp_free(t1); 6459 if (unlikely(Rc(ctx->opcode) != 0)) { 6460 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6461 } 6462 } 6463 6464 /* sraq - sraq. */ 6465 static void gen_sraq(DisasContext *ctx) 6466 { 6467 TCGLabel *l1 = gen_new_label(); 6468 TCGLabel *l2 = gen_new_label(); 6469 TCGv t0 = tcg_temp_new(); 6470 TCGv t1 = tcg_temp_local_new(); 6471 TCGv t2 = tcg_temp_local_new(); 6472 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 6473 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 6474 tcg_gen_sar_tl(t1, cpu_gpr[rS(ctx->opcode)], t2); 6475 tcg_gen_subfi_tl(t2, 32, t2); 6476 tcg_gen_shl_tl(t2, cpu_gpr[rS(ctx->opcode)], t2); 6477 tcg_gen_or_tl(t0, t0, t2); 6478 gen_store_spr(SPR_MQ, t0); 6479 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 6480 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l1); 6481 tcg_gen_mov_tl(t2, cpu_gpr[rS(ctx->opcode)]); 6482 tcg_gen_sari_tl(t1, cpu_gpr[rS(ctx->opcode)], 31); 6483 gen_set_label(l1); 6484 tcg_temp_free(t0); 6485 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t1); 6486 tcg_gen_movi_tl(cpu_ca, 0); 6487 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2); 6488 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l2); 6489 tcg_gen_movi_tl(cpu_ca, 1); 6490 gen_set_label(l2); 6491 tcg_temp_free(t1); 6492 tcg_temp_free(t2); 6493 if (unlikely(Rc(ctx->opcode) != 0)) { 6494 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6495 } 6496 } 6497 6498 /* sre - sre. */ 6499 static void gen_sre(DisasContext *ctx) 6500 { 6501 TCGv t0 = tcg_temp_new(); 6502 TCGv t1 = tcg_temp_new(); 6503 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 6504 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 6505 tcg_gen_subfi_tl(t1, 32, t1); 6506 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 6507 tcg_gen_or_tl(t1, t0, t1); 6508 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 6509 gen_store_spr(SPR_MQ, t1); 6510 tcg_temp_free(t0); 6511 tcg_temp_free(t1); 6512 if (unlikely(Rc(ctx->opcode) != 0)) { 6513 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6514 } 6515 } 6516 6517 /* srea - srea. */ 6518 static void gen_srea(DisasContext *ctx) 6519 { 6520 TCGv t0 = tcg_temp_new(); 6521 TCGv t1 = tcg_temp_new(); 6522 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 6523 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 6524 gen_store_spr(SPR_MQ, t0); 6525 tcg_gen_sar_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t1); 6526 tcg_temp_free(t0); 6527 tcg_temp_free(t1); 6528 if (unlikely(Rc(ctx->opcode) != 0)) { 6529 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6530 } 6531 } 6532 6533 /* sreq */ 6534 static void gen_sreq(DisasContext *ctx) 6535 { 6536 TCGv t0 = tcg_temp_new(); 6537 TCGv t1 = tcg_temp_new(); 6538 TCGv t2 = tcg_temp_new(); 6539 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 6540 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 6541 tcg_gen_shr_tl(t1, t1, t0); 6542 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 6543 gen_load_spr(t2, SPR_MQ); 6544 gen_store_spr(SPR_MQ, t0); 6545 tcg_gen_and_tl(t0, t0, t1); 6546 tcg_gen_andc_tl(t2, t2, t1); 6547 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t2); 6548 tcg_temp_free(t0); 6549 tcg_temp_free(t1); 6550 tcg_temp_free(t2); 6551 if (unlikely(Rc(ctx->opcode) != 0)) { 6552 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6553 } 6554 } 6555 6556 /* sriq */ 6557 static void gen_sriq(DisasContext *ctx) 6558 { 6559 int sh = SH(ctx->opcode); 6560 TCGv t0 = tcg_temp_new(); 6561 TCGv t1 = tcg_temp_new(); 6562 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 6563 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 6564 tcg_gen_or_tl(t1, t0, t1); 6565 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 6566 gen_store_spr(SPR_MQ, t1); 6567 tcg_temp_free(t0); 6568 tcg_temp_free(t1); 6569 if (unlikely(Rc(ctx->opcode) != 0)) { 6570 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6571 } 6572 } 6573 6574 /* srliq */ 6575 static void gen_srliq(DisasContext *ctx) 6576 { 6577 int sh = SH(ctx->opcode); 6578 TCGv t0 = tcg_temp_new(); 6579 TCGv t1 = tcg_temp_new(); 6580 tcg_gen_rotri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 6581 gen_load_spr(t1, SPR_MQ); 6582 gen_store_spr(SPR_MQ, t0); 6583 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU >> sh)); 6584 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU >> sh)); 6585 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 6586 tcg_temp_free(t0); 6587 tcg_temp_free(t1); 6588 if (unlikely(Rc(ctx->opcode) != 0)) { 6589 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6590 } 6591 } 6592 6593 /* srlq */ 6594 static void gen_srlq(DisasContext *ctx) 6595 { 6596 TCGLabel *l1 = gen_new_label(); 6597 TCGLabel *l2 = gen_new_label(); 6598 TCGv t0 = tcg_temp_local_new(); 6599 TCGv t1 = tcg_temp_local_new(); 6600 TCGv t2 = tcg_temp_local_new(); 6601 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 6602 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 6603 tcg_gen_shr_tl(t2, t1, t2); 6604 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 6605 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 6606 gen_load_spr(t0, SPR_MQ); 6607 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t2); 6608 tcg_gen_br(l2); 6609 gen_set_label(l1); 6610 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 6611 tcg_gen_and_tl(t0, t0, t2); 6612 gen_load_spr(t1, SPR_MQ); 6613 tcg_gen_andc_tl(t1, t1, t2); 6614 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 6615 gen_set_label(l2); 6616 tcg_temp_free(t0); 6617 tcg_temp_free(t1); 6618 tcg_temp_free(t2); 6619 if (unlikely(Rc(ctx->opcode) != 0)) { 6620 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6621 } 6622 } 6623 6624 /* srq */ 6625 static void gen_srq(DisasContext *ctx) 6626 { 6627 TCGLabel *l1 = gen_new_label(); 6628 TCGv t0 = tcg_temp_new(); 6629 TCGv t1 = tcg_temp_new(); 6630 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 6631 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 6632 tcg_gen_subfi_tl(t1, 32, t1); 6633 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 6634 tcg_gen_or_tl(t1, t0, t1); 6635 gen_store_spr(SPR_MQ, t1); 6636 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20); 6637 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 6638 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 6639 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 6640 gen_set_label(l1); 6641 tcg_temp_free(t0); 6642 tcg_temp_free(t1); 6643 if (unlikely(Rc(ctx->opcode) != 0)) { 6644 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6645 } 6646 } 6647 6648 /* PowerPC 602 specific instructions */ 6649 6650 /* dsa */ 6651 static void gen_dsa(DisasContext *ctx) 6652 { 6653 /* XXX: TODO */ 6654 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6655 } 6656 6657 /* esa */ 6658 static void gen_esa(DisasContext *ctx) 6659 { 6660 /* XXX: TODO */ 6661 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6662 } 6663 6664 /* mfrom */ 6665 static void gen_mfrom(DisasContext *ctx) 6666 { 6667 #if defined(CONFIG_USER_ONLY) 6668 GEN_PRIV; 6669 #else 6670 CHK_SV; 6671 gen_helper_602_mfrom(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 6672 #endif /* defined(CONFIG_USER_ONLY) */ 6673 } 6674 6675 /* 602 - 603 - G2 TLB management */ 6676 6677 /* tlbld */ 6678 static void gen_tlbld_6xx(DisasContext *ctx) 6679 { 6680 #if defined(CONFIG_USER_ONLY) 6681 GEN_PRIV; 6682 #else 6683 CHK_SV; 6684 gen_helper_6xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]); 6685 #endif /* defined(CONFIG_USER_ONLY) */ 6686 } 6687 6688 /* tlbli */ 6689 static void gen_tlbli_6xx(DisasContext *ctx) 6690 { 6691 #if defined(CONFIG_USER_ONLY) 6692 GEN_PRIV; 6693 #else 6694 CHK_SV; 6695 gen_helper_6xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]); 6696 #endif /* defined(CONFIG_USER_ONLY) */ 6697 } 6698 6699 /* 74xx TLB management */ 6700 6701 /* tlbld */ 6702 static void gen_tlbld_74xx(DisasContext *ctx) 6703 { 6704 #if defined(CONFIG_USER_ONLY) 6705 GEN_PRIV; 6706 #else 6707 CHK_SV; 6708 gen_helper_74xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]); 6709 #endif /* defined(CONFIG_USER_ONLY) */ 6710 } 6711 6712 /* tlbli */ 6713 static void gen_tlbli_74xx(DisasContext *ctx) 6714 { 6715 #if defined(CONFIG_USER_ONLY) 6716 GEN_PRIV; 6717 #else 6718 CHK_SV; 6719 gen_helper_74xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]); 6720 #endif /* defined(CONFIG_USER_ONLY) */ 6721 } 6722 6723 /* POWER instructions not in PowerPC 601 */ 6724 6725 /* clf */ 6726 static void gen_clf(DisasContext *ctx) 6727 { 6728 /* Cache line flush: implemented as no-op */ 6729 } 6730 6731 /* cli */ 6732 static void gen_cli(DisasContext *ctx) 6733 { 6734 #if defined(CONFIG_USER_ONLY) 6735 GEN_PRIV; 6736 #else 6737 /* Cache line invalidate: privileged and treated as no-op */ 6738 CHK_SV; 6739 #endif /* defined(CONFIG_USER_ONLY) */ 6740 } 6741 6742 /* dclst */ 6743 static void gen_dclst(DisasContext *ctx) 6744 { 6745 /* Data cache line store: treated as no-op */ 6746 } 6747 6748 static void gen_mfsri(DisasContext *ctx) 6749 { 6750 #if defined(CONFIG_USER_ONLY) 6751 GEN_PRIV; 6752 #else 6753 int ra = rA(ctx->opcode); 6754 int rd = rD(ctx->opcode); 6755 TCGv t0; 6756 6757 CHK_SV; 6758 t0 = tcg_temp_new(); 6759 gen_addr_reg_index(ctx, t0); 6760 tcg_gen_extract_tl(t0, t0, 28, 4); 6761 gen_helper_load_sr(cpu_gpr[rd], cpu_env, t0); 6762 tcg_temp_free(t0); 6763 if (ra != 0 && ra != rd) { 6764 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rd]); 6765 } 6766 #endif /* defined(CONFIG_USER_ONLY) */ 6767 } 6768 6769 static void gen_rac(DisasContext *ctx) 6770 { 6771 #if defined(CONFIG_USER_ONLY) 6772 GEN_PRIV; 6773 #else 6774 TCGv t0; 6775 6776 CHK_SV; 6777 t0 = tcg_temp_new(); 6778 gen_addr_reg_index(ctx, t0); 6779 gen_helper_rac(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 6780 tcg_temp_free(t0); 6781 #endif /* defined(CONFIG_USER_ONLY) */ 6782 } 6783 6784 static void gen_rfsvc(DisasContext *ctx) 6785 { 6786 #if defined(CONFIG_USER_ONLY) 6787 GEN_PRIV; 6788 #else 6789 CHK_SV; 6790 6791 gen_helper_rfsvc(cpu_env); 6792 ctx->base.is_jmp = DISAS_EXIT; 6793 #endif /* defined(CONFIG_USER_ONLY) */ 6794 } 6795 6796 /* svc is not implemented for now */ 6797 6798 /* BookE specific instructions */ 6799 6800 /* XXX: not implemented on 440 ? */ 6801 static void gen_mfapidi(DisasContext *ctx) 6802 { 6803 /* XXX: TODO */ 6804 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6805 } 6806 6807 /* XXX: not implemented on 440 ? */ 6808 static void gen_tlbiva(DisasContext *ctx) 6809 { 6810 #if defined(CONFIG_USER_ONLY) 6811 GEN_PRIV; 6812 #else 6813 TCGv t0; 6814 6815 CHK_SV; 6816 t0 = tcg_temp_new(); 6817 gen_addr_reg_index(ctx, t0); 6818 gen_helper_tlbiva(cpu_env, cpu_gpr[rB(ctx->opcode)]); 6819 tcg_temp_free(t0); 6820 #endif /* defined(CONFIG_USER_ONLY) */ 6821 } 6822 6823 /* All 405 MAC instructions are translated here */ 6824 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3, 6825 int ra, int rb, int rt, int Rc) 6826 { 6827 TCGv t0, t1; 6828 6829 t0 = tcg_temp_local_new(); 6830 t1 = tcg_temp_local_new(); 6831 6832 switch (opc3 & 0x0D) { 6833 case 0x05: 6834 /* macchw - macchw. - macchwo - macchwo. */ 6835 /* macchws - macchws. - macchwso - macchwso. */ 6836 /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */ 6837 /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */ 6838 /* mulchw - mulchw. */ 6839 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]); 6840 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16); 6841 tcg_gen_ext16s_tl(t1, t1); 6842 break; 6843 case 0x04: 6844 /* macchwu - macchwu. - macchwuo - macchwuo. */ 6845 /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */ 6846 /* mulchwu - mulchwu. */ 6847 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]); 6848 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16); 6849 tcg_gen_ext16u_tl(t1, t1); 6850 break; 6851 case 0x01: 6852 /* machhw - machhw. - machhwo - machhwo. */ 6853 /* machhws - machhws. - machhwso - machhwso. */ 6854 /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */ 6855 /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */ 6856 /* mulhhw - mulhhw. */ 6857 tcg_gen_sari_tl(t0, cpu_gpr[ra], 16); 6858 tcg_gen_ext16s_tl(t0, t0); 6859 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16); 6860 tcg_gen_ext16s_tl(t1, t1); 6861 break; 6862 case 0x00: 6863 /* machhwu - machhwu. - machhwuo - machhwuo. */ 6864 /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */ 6865 /* mulhhwu - mulhhwu. */ 6866 tcg_gen_shri_tl(t0, cpu_gpr[ra], 16); 6867 tcg_gen_ext16u_tl(t0, t0); 6868 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16); 6869 tcg_gen_ext16u_tl(t1, t1); 6870 break; 6871 case 0x0D: 6872 /* maclhw - maclhw. - maclhwo - maclhwo. */ 6873 /* maclhws - maclhws. - maclhwso - maclhwso. */ 6874 /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */ 6875 /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */ 6876 /* mullhw - mullhw. */ 6877 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]); 6878 tcg_gen_ext16s_tl(t1, cpu_gpr[rb]); 6879 break; 6880 case 0x0C: 6881 /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */ 6882 /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */ 6883 /* mullhwu - mullhwu. */ 6884 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]); 6885 tcg_gen_ext16u_tl(t1, cpu_gpr[rb]); 6886 break; 6887 } 6888 if (opc2 & 0x04) { 6889 /* (n)multiply-and-accumulate (0x0C / 0x0E) */ 6890 tcg_gen_mul_tl(t1, t0, t1); 6891 if (opc2 & 0x02) { 6892 /* nmultiply-and-accumulate (0x0E) */ 6893 tcg_gen_sub_tl(t0, cpu_gpr[rt], t1); 6894 } else { 6895 /* multiply-and-accumulate (0x0C) */ 6896 tcg_gen_add_tl(t0, cpu_gpr[rt], t1); 6897 } 6898 6899 if (opc3 & 0x12) { 6900 /* Check overflow and/or saturate */ 6901 TCGLabel *l1 = gen_new_label(); 6902 6903 if (opc3 & 0x10) { 6904 /* Start with XER OV disabled, the most likely case */ 6905 tcg_gen_movi_tl(cpu_ov, 0); 6906 } 6907 if (opc3 & 0x01) { 6908 /* Signed */ 6909 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1); 6910 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1); 6911 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0); 6912 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1); 6913 if (opc3 & 0x02) { 6914 /* Saturate */ 6915 tcg_gen_sari_tl(t0, cpu_gpr[rt], 31); 6916 tcg_gen_xori_tl(t0, t0, 0x7fffffff); 6917 } 6918 } else { 6919 /* Unsigned */ 6920 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1); 6921 if (opc3 & 0x02) { 6922 /* Saturate */ 6923 tcg_gen_movi_tl(t0, UINT32_MAX); 6924 } 6925 } 6926 if (opc3 & 0x10) { 6927 /* Check overflow */ 6928 tcg_gen_movi_tl(cpu_ov, 1); 6929 tcg_gen_movi_tl(cpu_so, 1); 6930 } 6931 gen_set_label(l1); 6932 tcg_gen_mov_tl(cpu_gpr[rt], t0); 6933 } 6934 } else { 6935 tcg_gen_mul_tl(cpu_gpr[rt], t0, t1); 6936 } 6937 tcg_temp_free(t0); 6938 tcg_temp_free(t1); 6939 if (unlikely(Rc) != 0) { 6940 /* Update Rc0 */ 6941 gen_set_Rc0(ctx, cpu_gpr[rt]); 6942 } 6943 } 6944 6945 #define GEN_MAC_HANDLER(name, opc2, opc3) \ 6946 static void glue(gen_, name)(DisasContext *ctx) \ 6947 { \ 6948 gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode), \ 6949 rD(ctx->opcode), Rc(ctx->opcode)); \ 6950 } 6951 6952 /* macchw - macchw. */ 6953 GEN_MAC_HANDLER(macchw, 0x0C, 0x05); 6954 /* macchwo - macchwo. */ 6955 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15); 6956 /* macchws - macchws. */ 6957 GEN_MAC_HANDLER(macchws, 0x0C, 0x07); 6958 /* macchwso - macchwso. */ 6959 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17); 6960 /* macchwsu - macchwsu. */ 6961 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06); 6962 /* macchwsuo - macchwsuo. */ 6963 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16); 6964 /* macchwu - macchwu. */ 6965 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04); 6966 /* macchwuo - macchwuo. */ 6967 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14); 6968 /* machhw - machhw. */ 6969 GEN_MAC_HANDLER(machhw, 0x0C, 0x01); 6970 /* machhwo - machhwo. */ 6971 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11); 6972 /* machhws - machhws. */ 6973 GEN_MAC_HANDLER(machhws, 0x0C, 0x03); 6974 /* machhwso - machhwso. */ 6975 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13); 6976 /* machhwsu - machhwsu. */ 6977 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02); 6978 /* machhwsuo - machhwsuo. */ 6979 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12); 6980 /* machhwu - machhwu. */ 6981 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00); 6982 /* machhwuo - machhwuo. */ 6983 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10); 6984 /* maclhw - maclhw. */ 6985 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D); 6986 /* maclhwo - maclhwo. */ 6987 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D); 6988 /* maclhws - maclhws. */ 6989 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F); 6990 /* maclhwso - maclhwso. */ 6991 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F); 6992 /* maclhwu - maclhwu. */ 6993 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C); 6994 /* maclhwuo - maclhwuo. */ 6995 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C); 6996 /* maclhwsu - maclhwsu. */ 6997 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E); 6998 /* maclhwsuo - maclhwsuo. */ 6999 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E); 7000 /* nmacchw - nmacchw. */ 7001 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05); 7002 /* nmacchwo - nmacchwo. */ 7003 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15); 7004 /* nmacchws - nmacchws. */ 7005 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07); 7006 /* nmacchwso - nmacchwso. */ 7007 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17); 7008 /* nmachhw - nmachhw. */ 7009 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01); 7010 /* nmachhwo - nmachhwo. */ 7011 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11); 7012 /* nmachhws - nmachhws. */ 7013 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03); 7014 /* nmachhwso - nmachhwso. */ 7015 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13); 7016 /* nmaclhw - nmaclhw. */ 7017 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D); 7018 /* nmaclhwo - nmaclhwo. */ 7019 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D); 7020 /* nmaclhws - nmaclhws. */ 7021 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F); 7022 /* nmaclhwso - nmaclhwso. */ 7023 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F); 7024 7025 /* mulchw - mulchw. */ 7026 GEN_MAC_HANDLER(mulchw, 0x08, 0x05); 7027 /* mulchwu - mulchwu. */ 7028 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04); 7029 /* mulhhw - mulhhw. */ 7030 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01); 7031 /* mulhhwu - mulhhwu. */ 7032 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00); 7033 /* mullhw - mullhw. */ 7034 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D); 7035 /* mullhwu - mullhwu. */ 7036 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C); 7037 7038 /* mfdcr */ 7039 static void gen_mfdcr(DisasContext *ctx) 7040 { 7041 #if defined(CONFIG_USER_ONLY) 7042 GEN_PRIV; 7043 #else 7044 TCGv dcrn; 7045 7046 CHK_SV; 7047 dcrn = tcg_const_tl(SPR(ctx->opcode)); 7048 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, dcrn); 7049 tcg_temp_free(dcrn); 7050 #endif /* defined(CONFIG_USER_ONLY) */ 7051 } 7052 7053 /* mtdcr */ 7054 static void gen_mtdcr(DisasContext *ctx) 7055 { 7056 #if defined(CONFIG_USER_ONLY) 7057 GEN_PRIV; 7058 #else 7059 TCGv dcrn; 7060 7061 CHK_SV; 7062 dcrn = tcg_const_tl(SPR(ctx->opcode)); 7063 gen_helper_store_dcr(cpu_env, dcrn, cpu_gpr[rS(ctx->opcode)]); 7064 tcg_temp_free(dcrn); 7065 #endif /* defined(CONFIG_USER_ONLY) */ 7066 } 7067 7068 /* mfdcrx */ 7069 /* XXX: not implemented on 440 ? */ 7070 static void gen_mfdcrx(DisasContext *ctx) 7071 { 7072 #if defined(CONFIG_USER_ONLY) 7073 GEN_PRIV; 7074 #else 7075 CHK_SV; 7076 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, 7077 cpu_gpr[rA(ctx->opcode)]); 7078 /* Note: Rc update flag set leads to undefined state of Rc0 */ 7079 #endif /* defined(CONFIG_USER_ONLY) */ 7080 } 7081 7082 /* mtdcrx */ 7083 /* XXX: not implemented on 440 ? */ 7084 static void gen_mtdcrx(DisasContext *ctx) 7085 { 7086 #if defined(CONFIG_USER_ONLY) 7087 GEN_PRIV; 7088 #else 7089 CHK_SV; 7090 gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)], 7091 cpu_gpr[rS(ctx->opcode)]); 7092 /* Note: Rc update flag set leads to undefined state of Rc0 */ 7093 #endif /* defined(CONFIG_USER_ONLY) */ 7094 } 7095 7096 /* mfdcrux (PPC 460) : user-mode access to DCR */ 7097 static void gen_mfdcrux(DisasContext *ctx) 7098 { 7099 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, 7100 cpu_gpr[rA(ctx->opcode)]); 7101 /* Note: Rc update flag set leads to undefined state of Rc0 */ 7102 } 7103 7104 /* mtdcrux (PPC 460) : user-mode access to DCR */ 7105 static void gen_mtdcrux(DisasContext *ctx) 7106 { 7107 gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)], 7108 cpu_gpr[rS(ctx->opcode)]); 7109 /* Note: Rc update flag set leads to undefined state of Rc0 */ 7110 } 7111 7112 /* dccci */ 7113 static void gen_dccci(DisasContext *ctx) 7114 { 7115 CHK_SV; 7116 /* interpreted as no-op */ 7117 } 7118 7119 /* dcread */ 7120 static void gen_dcread(DisasContext *ctx) 7121 { 7122 #if defined(CONFIG_USER_ONLY) 7123 GEN_PRIV; 7124 #else 7125 TCGv EA, val; 7126 7127 CHK_SV; 7128 gen_set_access_type(ctx, ACCESS_CACHE); 7129 EA = tcg_temp_new(); 7130 gen_addr_reg_index(ctx, EA); 7131 val = tcg_temp_new(); 7132 gen_qemu_ld32u(ctx, val, EA); 7133 tcg_temp_free(val); 7134 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA); 7135 tcg_temp_free(EA); 7136 #endif /* defined(CONFIG_USER_ONLY) */ 7137 } 7138 7139 /* icbt */ 7140 static void gen_icbt_40x(DisasContext *ctx) 7141 { 7142 /* 7143 * interpreted as no-op 7144 * XXX: specification say this is treated as a load by the MMU but 7145 * does not generate any exception 7146 */ 7147 } 7148 7149 /* iccci */ 7150 static void gen_iccci(DisasContext *ctx) 7151 { 7152 CHK_SV; 7153 /* interpreted as no-op */ 7154 } 7155 7156 /* icread */ 7157 static void gen_icread(DisasContext *ctx) 7158 { 7159 CHK_SV; 7160 /* interpreted as no-op */ 7161 } 7162 7163 /* rfci (supervisor only) */ 7164 static void gen_rfci_40x(DisasContext *ctx) 7165 { 7166 #if defined(CONFIG_USER_ONLY) 7167 GEN_PRIV; 7168 #else 7169 CHK_SV; 7170 /* Restore CPU state */ 7171 gen_helper_40x_rfci(cpu_env); 7172 ctx->base.is_jmp = DISAS_EXIT; 7173 #endif /* defined(CONFIG_USER_ONLY) */ 7174 } 7175 7176 static void gen_rfci(DisasContext *ctx) 7177 { 7178 #if defined(CONFIG_USER_ONLY) 7179 GEN_PRIV; 7180 #else 7181 CHK_SV; 7182 /* Restore CPU state */ 7183 gen_helper_rfci(cpu_env); 7184 ctx->base.is_jmp = DISAS_EXIT; 7185 #endif /* defined(CONFIG_USER_ONLY) */ 7186 } 7187 7188 /* BookE specific */ 7189 7190 /* XXX: not implemented on 440 ? */ 7191 static void gen_rfdi(DisasContext *ctx) 7192 { 7193 #if defined(CONFIG_USER_ONLY) 7194 GEN_PRIV; 7195 #else 7196 CHK_SV; 7197 /* Restore CPU state */ 7198 gen_helper_rfdi(cpu_env); 7199 ctx->base.is_jmp = DISAS_EXIT; 7200 #endif /* defined(CONFIG_USER_ONLY) */ 7201 } 7202 7203 /* XXX: not implemented on 440 ? */ 7204 static void gen_rfmci(DisasContext *ctx) 7205 { 7206 #if defined(CONFIG_USER_ONLY) 7207 GEN_PRIV; 7208 #else 7209 CHK_SV; 7210 /* Restore CPU state */ 7211 gen_helper_rfmci(cpu_env); 7212 ctx->base.is_jmp = DISAS_EXIT; 7213 #endif /* defined(CONFIG_USER_ONLY) */ 7214 } 7215 7216 /* TLB management - PowerPC 405 implementation */ 7217 7218 /* tlbre */ 7219 static void gen_tlbre_40x(DisasContext *ctx) 7220 { 7221 #if defined(CONFIG_USER_ONLY) 7222 GEN_PRIV; 7223 #else 7224 CHK_SV; 7225 switch (rB(ctx->opcode)) { 7226 case 0: 7227 gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_env, 7228 cpu_gpr[rA(ctx->opcode)]); 7229 break; 7230 case 1: 7231 gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_env, 7232 cpu_gpr[rA(ctx->opcode)]); 7233 break; 7234 default: 7235 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 7236 break; 7237 } 7238 #endif /* defined(CONFIG_USER_ONLY) */ 7239 } 7240 7241 /* tlbsx - tlbsx. */ 7242 static void gen_tlbsx_40x(DisasContext *ctx) 7243 { 7244 #if defined(CONFIG_USER_ONLY) 7245 GEN_PRIV; 7246 #else 7247 TCGv t0; 7248 7249 CHK_SV; 7250 t0 = tcg_temp_new(); 7251 gen_addr_reg_index(ctx, t0); 7252 gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 7253 tcg_temp_free(t0); 7254 if (Rc(ctx->opcode)) { 7255 TCGLabel *l1 = gen_new_label(); 7256 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 7257 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1); 7258 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02); 7259 gen_set_label(l1); 7260 } 7261 #endif /* defined(CONFIG_USER_ONLY) */ 7262 } 7263 7264 /* tlbwe */ 7265 static void gen_tlbwe_40x(DisasContext *ctx) 7266 { 7267 #if defined(CONFIG_USER_ONLY) 7268 GEN_PRIV; 7269 #else 7270 CHK_SV; 7271 7272 switch (rB(ctx->opcode)) { 7273 case 0: 7274 gen_helper_4xx_tlbwe_hi(cpu_env, cpu_gpr[rA(ctx->opcode)], 7275 cpu_gpr[rS(ctx->opcode)]); 7276 break; 7277 case 1: 7278 gen_helper_4xx_tlbwe_lo(cpu_env, cpu_gpr[rA(ctx->opcode)], 7279 cpu_gpr[rS(ctx->opcode)]); 7280 break; 7281 default: 7282 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 7283 break; 7284 } 7285 #endif /* defined(CONFIG_USER_ONLY) */ 7286 } 7287 7288 /* TLB management - PowerPC 440 implementation */ 7289 7290 /* tlbre */ 7291 static void gen_tlbre_440(DisasContext *ctx) 7292 { 7293 #if defined(CONFIG_USER_ONLY) 7294 GEN_PRIV; 7295 #else 7296 CHK_SV; 7297 7298 switch (rB(ctx->opcode)) { 7299 case 0: 7300 case 1: 7301 case 2: 7302 { 7303 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode)); 7304 gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], cpu_env, 7305 t0, cpu_gpr[rA(ctx->opcode)]); 7306 tcg_temp_free_i32(t0); 7307 } 7308 break; 7309 default: 7310 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 7311 break; 7312 } 7313 #endif /* defined(CONFIG_USER_ONLY) */ 7314 } 7315 7316 /* tlbsx - tlbsx. */ 7317 static void gen_tlbsx_440(DisasContext *ctx) 7318 { 7319 #if defined(CONFIG_USER_ONLY) 7320 GEN_PRIV; 7321 #else 7322 TCGv t0; 7323 7324 CHK_SV; 7325 t0 = tcg_temp_new(); 7326 gen_addr_reg_index(ctx, t0); 7327 gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 7328 tcg_temp_free(t0); 7329 if (Rc(ctx->opcode)) { 7330 TCGLabel *l1 = gen_new_label(); 7331 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 7332 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1); 7333 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02); 7334 gen_set_label(l1); 7335 } 7336 #endif /* defined(CONFIG_USER_ONLY) */ 7337 } 7338 7339 /* tlbwe */ 7340 static void gen_tlbwe_440(DisasContext *ctx) 7341 { 7342 #if defined(CONFIG_USER_ONLY) 7343 GEN_PRIV; 7344 #else 7345 CHK_SV; 7346 switch (rB(ctx->opcode)) { 7347 case 0: 7348 case 1: 7349 case 2: 7350 { 7351 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode)); 7352 gen_helper_440_tlbwe(cpu_env, t0, cpu_gpr[rA(ctx->opcode)], 7353 cpu_gpr[rS(ctx->opcode)]); 7354 tcg_temp_free_i32(t0); 7355 } 7356 break; 7357 default: 7358 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 7359 break; 7360 } 7361 #endif /* defined(CONFIG_USER_ONLY) */ 7362 } 7363 7364 /* TLB management - PowerPC BookE 2.06 implementation */ 7365 7366 /* tlbre */ 7367 static void gen_tlbre_booke206(DisasContext *ctx) 7368 { 7369 #if defined(CONFIG_USER_ONLY) 7370 GEN_PRIV; 7371 #else 7372 CHK_SV; 7373 gen_helper_booke206_tlbre(cpu_env); 7374 #endif /* defined(CONFIG_USER_ONLY) */ 7375 } 7376 7377 /* tlbsx - tlbsx. */ 7378 static void gen_tlbsx_booke206(DisasContext *ctx) 7379 { 7380 #if defined(CONFIG_USER_ONLY) 7381 GEN_PRIV; 7382 #else 7383 TCGv t0; 7384 7385 CHK_SV; 7386 if (rA(ctx->opcode)) { 7387 t0 = tcg_temp_new(); 7388 tcg_gen_mov_tl(t0, cpu_gpr[rD(ctx->opcode)]); 7389 } else { 7390 t0 = tcg_const_tl(0); 7391 } 7392 7393 tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]); 7394 gen_helper_booke206_tlbsx(cpu_env, t0); 7395 tcg_temp_free(t0); 7396 #endif /* defined(CONFIG_USER_ONLY) */ 7397 } 7398 7399 /* tlbwe */ 7400 static void gen_tlbwe_booke206(DisasContext *ctx) 7401 { 7402 #if defined(CONFIG_USER_ONLY) 7403 GEN_PRIV; 7404 #else 7405 CHK_SV; 7406 gen_helper_booke206_tlbwe(cpu_env); 7407 #endif /* defined(CONFIG_USER_ONLY) */ 7408 } 7409 7410 static void gen_tlbivax_booke206(DisasContext *ctx) 7411 { 7412 #if defined(CONFIG_USER_ONLY) 7413 GEN_PRIV; 7414 #else 7415 TCGv t0; 7416 7417 CHK_SV; 7418 t0 = tcg_temp_new(); 7419 gen_addr_reg_index(ctx, t0); 7420 gen_helper_booke206_tlbivax(cpu_env, t0); 7421 tcg_temp_free(t0); 7422 #endif /* defined(CONFIG_USER_ONLY) */ 7423 } 7424 7425 static void gen_tlbilx_booke206(DisasContext *ctx) 7426 { 7427 #if defined(CONFIG_USER_ONLY) 7428 GEN_PRIV; 7429 #else 7430 TCGv t0; 7431 7432 CHK_SV; 7433 t0 = tcg_temp_new(); 7434 gen_addr_reg_index(ctx, t0); 7435 7436 switch ((ctx->opcode >> 21) & 0x3) { 7437 case 0: 7438 gen_helper_booke206_tlbilx0(cpu_env, t0); 7439 break; 7440 case 1: 7441 gen_helper_booke206_tlbilx1(cpu_env, t0); 7442 break; 7443 case 3: 7444 gen_helper_booke206_tlbilx3(cpu_env, t0); 7445 break; 7446 default: 7447 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 7448 break; 7449 } 7450 7451 tcg_temp_free(t0); 7452 #endif /* defined(CONFIG_USER_ONLY) */ 7453 } 7454 7455 7456 /* wrtee */ 7457 static void gen_wrtee(DisasContext *ctx) 7458 { 7459 #if defined(CONFIG_USER_ONLY) 7460 GEN_PRIV; 7461 #else 7462 TCGv t0; 7463 7464 CHK_SV; 7465 t0 = tcg_temp_new(); 7466 tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE)); 7467 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE)); 7468 tcg_gen_or_tl(cpu_msr, cpu_msr, t0); 7469 tcg_temp_free(t0); 7470 /* 7471 * Stop translation to have a chance to raise an exception if we 7472 * just set msr_ee to 1 7473 */ 7474 ctx->base.is_jmp = DISAS_EXIT_UPDATE; 7475 #endif /* defined(CONFIG_USER_ONLY) */ 7476 } 7477 7478 /* wrteei */ 7479 static void gen_wrteei(DisasContext *ctx) 7480 { 7481 #if defined(CONFIG_USER_ONLY) 7482 GEN_PRIV; 7483 #else 7484 CHK_SV; 7485 if (ctx->opcode & 0x00008000) { 7486 tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE)); 7487 /* Stop translation to have a chance to raise an exception */ 7488 ctx->base.is_jmp = DISAS_EXIT_UPDATE; 7489 } else { 7490 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE)); 7491 } 7492 #endif /* defined(CONFIG_USER_ONLY) */ 7493 } 7494 7495 /* PowerPC 440 specific instructions */ 7496 7497 /* dlmzb */ 7498 static void gen_dlmzb(DisasContext *ctx) 7499 { 7500 TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode)); 7501 gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_env, 7502 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); 7503 tcg_temp_free_i32(t0); 7504 } 7505 7506 /* mbar replaces eieio on 440 */ 7507 static void gen_mbar(DisasContext *ctx) 7508 { 7509 /* interpreted as no-op */ 7510 } 7511 7512 /* msync replaces sync on 440 */ 7513 static void gen_msync_4xx(DisasContext *ctx) 7514 { 7515 /* Only e500 seems to treat reserved bits as invalid */ 7516 if ((ctx->insns_flags2 & PPC2_BOOKE206) && 7517 (ctx->opcode & 0x03FFF801)) { 7518 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 7519 } 7520 /* otherwise interpreted as no-op */ 7521 } 7522 7523 /* icbt */ 7524 static void gen_icbt_440(DisasContext *ctx) 7525 { 7526 /* 7527 * interpreted as no-op 7528 * XXX: specification say this is treated as a load by the MMU but 7529 * does not generate any exception 7530 */ 7531 } 7532 7533 /* Embedded.Processor Control */ 7534 7535 static void gen_msgclr(DisasContext *ctx) 7536 { 7537 #if defined(CONFIG_USER_ONLY) 7538 GEN_PRIV; 7539 #else 7540 CHK_HV; 7541 if (is_book3s_arch2x(ctx)) { 7542 gen_helper_book3s_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]); 7543 } else { 7544 gen_helper_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]); 7545 } 7546 #endif /* defined(CONFIG_USER_ONLY) */ 7547 } 7548 7549 static void gen_msgsnd(DisasContext *ctx) 7550 { 7551 #if defined(CONFIG_USER_ONLY) 7552 GEN_PRIV; 7553 #else 7554 CHK_HV; 7555 if (is_book3s_arch2x(ctx)) { 7556 gen_helper_book3s_msgsnd(cpu_gpr[rB(ctx->opcode)]); 7557 } else { 7558 gen_helper_msgsnd(cpu_gpr[rB(ctx->opcode)]); 7559 } 7560 #endif /* defined(CONFIG_USER_ONLY) */ 7561 } 7562 7563 #if defined(TARGET_PPC64) 7564 static void gen_msgclrp(DisasContext *ctx) 7565 { 7566 #if defined(CONFIG_USER_ONLY) 7567 GEN_PRIV; 7568 #else 7569 CHK_SV; 7570 gen_helper_book3s_msgclrp(cpu_env, cpu_gpr[rB(ctx->opcode)]); 7571 #endif /* defined(CONFIG_USER_ONLY) */ 7572 } 7573 7574 static void gen_msgsndp(DisasContext *ctx) 7575 { 7576 #if defined(CONFIG_USER_ONLY) 7577 GEN_PRIV; 7578 #else 7579 CHK_SV; 7580 gen_helper_book3s_msgsndp(cpu_env, cpu_gpr[rB(ctx->opcode)]); 7581 #endif /* defined(CONFIG_USER_ONLY) */ 7582 } 7583 #endif 7584 7585 static void gen_msgsync(DisasContext *ctx) 7586 { 7587 #if defined(CONFIG_USER_ONLY) 7588 GEN_PRIV; 7589 #else 7590 CHK_HV; 7591 #endif /* defined(CONFIG_USER_ONLY) */ 7592 /* interpreted as no-op */ 7593 } 7594 7595 #if defined(TARGET_PPC64) 7596 static void gen_maddld(DisasContext *ctx) 7597 { 7598 TCGv_i64 t1 = tcg_temp_new_i64(); 7599 7600 tcg_gen_mul_i64(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 7601 tcg_gen_add_i64(cpu_gpr[rD(ctx->opcode)], t1, cpu_gpr[rC(ctx->opcode)]); 7602 tcg_temp_free_i64(t1); 7603 } 7604 7605 /* maddhd maddhdu */ 7606 static void gen_maddhd_maddhdu(DisasContext *ctx) 7607 { 7608 TCGv_i64 lo = tcg_temp_new_i64(); 7609 TCGv_i64 hi = tcg_temp_new_i64(); 7610 TCGv_i64 t1 = tcg_temp_new_i64(); 7611 7612 if (Rc(ctx->opcode)) { 7613 tcg_gen_mulu2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)], 7614 cpu_gpr[rB(ctx->opcode)]); 7615 tcg_gen_movi_i64(t1, 0); 7616 } else { 7617 tcg_gen_muls2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)], 7618 cpu_gpr[rB(ctx->opcode)]); 7619 tcg_gen_sari_i64(t1, cpu_gpr[rC(ctx->opcode)], 63); 7620 } 7621 tcg_gen_add2_i64(t1, cpu_gpr[rD(ctx->opcode)], lo, hi, 7622 cpu_gpr[rC(ctx->opcode)], t1); 7623 tcg_temp_free_i64(lo); 7624 tcg_temp_free_i64(hi); 7625 tcg_temp_free_i64(t1); 7626 } 7627 #endif /* defined(TARGET_PPC64) */ 7628 7629 static void gen_tbegin(DisasContext *ctx) 7630 { 7631 if (unlikely(!ctx->tm_enabled)) { 7632 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); 7633 return; 7634 } 7635 gen_helper_tbegin(cpu_env); 7636 } 7637 7638 #define GEN_TM_NOOP(name) \ 7639 static inline void gen_##name(DisasContext *ctx) \ 7640 { \ 7641 if (unlikely(!ctx->tm_enabled)) { \ 7642 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \ 7643 return; \ 7644 } \ 7645 /* \ 7646 * Because tbegin always fails in QEMU, these user \ 7647 * space instructions all have a simple implementation: \ 7648 * \ 7649 * CR[0] = 0b0 || MSR[TS] || 0b0 \ 7650 * = 0b0 || 0b00 || 0b0 \ 7651 */ \ 7652 tcg_gen_movi_i32(cpu_crf[0], 0); \ 7653 } 7654 7655 GEN_TM_NOOP(tend); 7656 GEN_TM_NOOP(tabort); 7657 GEN_TM_NOOP(tabortwc); 7658 GEN_TM_NOOP(tabortwci); 7659 GEN_TM_NOOP(tabortdc); 7660 GEN_TM_NOOP(tabortdci); 7661 GEN_TM_NOOP(tsr); 7662 7663 static inline void gen_cp_abort(DisasContext *ctx) 7664 { 7665 /* Do Nothing */ 7666 } 7667 7668 #define GEN_CP_PASTE_NOOP(name) \ 7669 static inline void gen_##name(DisasContext *ctx) \ 7670 { \ 7671 /* \ 7672 * Generate invalid exception until we have an \ 7673 * implementation of the copy paste facility \ 7674 */ \ 7675 gen_invalid(ctx); \ 7676 } 7677 7678 GEN_CP_PASTE_NOOP(copy) 7679 GEN_CP_PASTE_NOOP(paste) 7680 7681 static void gen_tcheck(DisasContext *ctx) 7682 { 7683 if (unlikely(!ctx->tm_enabled)) { 7684 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); 7685 return; 7686 } 7687 /* 7688 * Because tbegin always fails, the tcheck implementation is 7689 * simple: 7690 * 7691 * CR[CRF] = TDOOMED || MSR[TS] || 0b0 7692 * = 0b1 || 0b00 || 0b0 7693 */ 7694 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0x8); 7695 } 7696 7697 #if defined(CONFIG_USER_ONLY) 7698 #define GEN_TM_PRIV_NOOP(name) \ 7699 static inline void gen_##name(DisasContext *ctx) \ 7700 { \ 7701 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); \ 7702 } 7703 7704 #else 7705 7706 #define GEN_TM_PRIV_NOOP(name) \ 7707 static inline void gen_##name(DisasContext *ctx) \ 7708 { \ 7709 CHK_SV; \ 7710 if (unlikely(!ctx->tm_enabled)) { \ 7711 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \ 7712 return; \ 7713 } \ 7714 /* \ 7715 * Because tbegin always fails, the implementation is \ 7716 * simple: \ 7717 * \ 7718 * CR[0] = 0b0 || MSR[TS] || 0b0 \ 7719 * = 0b0 || 0b00 | 0b0 \ 7720 */ \ 7721 tcg_gen_movi_i32(cpu_crf[0], 0); \ 7722 } 7723 7724 #endif 7725 7726 GEN_TM_PRIV_NOOP(treclaim); 7727 GEN_TM_PRIV_NOOP(trechkpt); 7728 7729 static inline void get_fpr(TCGv_i64 dst, int regno) 7730 { 7731 tcg_gen_ld_i64(dst, cpu_env, fpr_offset(regno)); 7732 } 7733 7734 static inline void set_fpr(int regno, TCGv_i64 src) 7735 { 7736 tcg_gen_st_i64(src, cpu_env, fpr_offset(regno)); 7737 } 7738 7739 static inline void get_avr64(TCGv_i64 dst, int regno, bool high) 7740 { 7741 tcg_gen_ld_i64(dst, cpu_env, avr64_offset(regno, high)); 7742 } 7743 7744 static inline void set_avr64(int regno, TCGv_i64 src, bool high) 7745 { 7746 tcg_gen_st_i64(src, cpu_env, avr64_offset(regno, high)); 7747 } 7748 7749 #include "translate/fp-impl.c.inc" 7750 7751 #include "translate/vmx-impl.c.inc" 7752 7753 #include "translate/vsx-impl.c.inc" 7754 7755 #include "translate/dfp-impl.c.inc" 7756 7757 #include "translate/spe-impl.c.inc" 7758 7759 /* Handles lfdp, lxsd, lxssp */ 7760 static void gen_dform39(DisasContext *ctx) 7761 { 7762 switch (ctx->opcode & 0x3) { 7763 case 0: /* lfdp */ 7764 if (ctx->insns_flags2 & PPC2_ISA205) { 7765 return gen_lfdp(ctx); 7766 } 7767 break; 7768 case 2: /* lxsd */ 7769 if (ctx->insns_flags2 & PPC2_ISA300) { 7770 return gen_lxsd(ctx); 7771 } 7772 break; 7773 case 3: /* lxssp */ 7774 if (ctx->insns_flags2 & PPC2_ISA300) { 7775 return gen_lxssp(ctx); 7776 } 7777 break; 7778 } 7779 return gen_invalid(ctx); 7780 } 7781 7782 /* handles stfdp, lxv, stxsd, stxssp lxvx */ 7783 static void gen_dform3D(DisasContext *ctx) 7784 { 7785 if ((ctx->opcode & 3) == 1) { /* DQ-FORM */ 7786 switch (ctx->opcode & 0x7) { 7787 case 1: /* lxv */ 7788 if (ctx->insns_flags2 & PPC2_ISA300) { 7789 return gen_lxv(ctx); 7790 } 7791 break; 7792 case 5: /* stxv */ 7793 if (ctx->insns_flags2 & PPC2_ISA300) { 7794 return gen_stxv(ctx); 7795 } 7796 break; 7797 } 7798 } else { /* DS-FORM */ 7799 switch (ctx->opcode & 0x3) { 7800 case 0: /* stfdp */ 7801 if (ctx->insns_flags2 & PPC2_ISA205) { 7802 return gen_stfdp(ctx); 7803 } 7804 break; 7805 case 2: /* stxsd */ 7806 if (ctx->insns_flags2 & PPC2_ISA300) { 7807 return gen_stxsd(ctx); 7808 } 7809 break; 7810 case 3: /* stxssp */ 7811 if (ctx->insns_flags2 & PPC2_ISA300) { 7812 return gen_stxssp(ctx); 7813 } 7814 break; 7815 } 7816 } 7817 return gen_invalid(ctx); 7818 } 7819 7820 #if defined(TARGET_PPC64) 7821 /* brd */ 7822 static void gen_brd(DisasContext *ctx) 7823 { 7824 tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 7825 } 7826 7827 /* brw */ 7828 static void gen_brw(DisasContext *ctx) 7829 { 7830 tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 7831 tcg_gen_rotli_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 32); 7832 7833 } 7834 7835 /* brh */ 7836 static void gen_brh(DisasContext *ctx) 7837 { 7838 TCGv_i64 t0 = tcg_temp_new_i64(); 7839 TCGv_i64 t1 = tcg_temp_new_i64(); 7840 TCGv_i64 t2 = tcg_temp_new_i64(); 7841 7842 tcg_gen_movi_i64(t0, 0x00ff00ff00ff00ffull); 7843 tcg_gen_shri_i64(t1, cpu_gpr[rS(ctx->opcode)], 8); 7844 tcg_gen_and_i64(t2, t1, t0); 7845 tcg_gen_and_i64(t1, cpu_gpr[rS(ctx->opcode)], t0); 7846 tcg_gen_shli_i64(t1, t1, 8); 7847 tcg_gen_or_i64(cpu_gpr[rA(ctx->opcode)], t1, t2); 7848 7849 tcg_temp_free_i64(t0); 7850 tcg_temp_free_i64(t1); 7851 tcg_temp_free_i64(t2); 7852 } 7853 #endif 7854 7855 static opcode_t opcodes[] = { 7856 #if defined(TARGET_PPC64) 7857 GEN_HANDLER_E(brd, 0x1F, 0x1B, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA310), 7858 GEN_HANDLER_E(brw, 0x1F, 0x1B, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA310), 7859 GEN_HANDLER_E(brh, 0x1F, 0x1B, 0x06, 0x0000F801, PPC_NONE, PPC2_ISA310), 7860 #endif 7861 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE), 7862 GEN_HANDLER(cmp, 0x1F, 0x00, 0x00, 0x00400000, PPC_INTEGER), 7863 GEN_HANDLER(cmpi, 0x0B, 0xFF, 0xFF, 0x00400000, PPC_INTEGER), 7864 GEN_HANDLER(cmpl, 0x1F, 0x00, 0x01, 0x00400001, PPC_INTEGER), 7865 GEN_HANDLER(cmpli, 0x0A, 0xFF, 0xFF, 0x00400000, PPC_INTEGER), 7866 #if defined(TARGET_PPC64) 7867 GEN_HANDLER_E(cmpeqb, 0x1F, 0x00, 0x07, 0x00600000, PPC_NONE, PPC2_ISA300), 7868 #endif 7869 GEN_HANDLER_E(cmpb, 0x1F, 0x1C, 0x0F, 0x00000001, PPC_NONE, PPC2_ISA205), 7870 GEN_HANDLER_E(cmprb, 0x1F, 0x00, 0x06, 0x00400001, PPC_NONE, PPC2_ISA300), 7871 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL), 7872 GEN_HANDLER(addi, 0x0E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7873 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7874 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7875 GEN_HANDLER(addis, 0x0F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7876 GEN_HANDLER_E(addpcis, 0x13, 0x2, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300), 7877 GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER), 7878 GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER), 7879 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER), 7880 GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER), 7881 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7882 #if defined(TARGET_PPC64) 7883 GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B), 7884 #endif 7885 GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER), 7886 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER), 7887 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7888 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7889 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7890 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER), 7891 GEN_HANDLER_E(cnttzw, 0x1F, 0x1A, 0x10, 0x00000000, PPC_NONE, PPC2_ISA300), 7892 GEN_HANDLER_E(copy, 0x1F, 0x06, 0x18, 0x03C00001, PPC_NONE, PPC2_ISA300), 7893 GEN_HANDLER_E(cp_abort, 0x1F, 0x06, 0x1A, 0x03FFF801, PPC_NONE, PPC2_ISA300), 7894 GEN_HANDLER_E(paste, 0x1F, 0x06, 0x1C, 0x03C00000, PPC_NONE, PPC2_ISA300), 7895 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER), 7896 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER), 7897 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7898 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7899 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7900 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7901 GEN_HANDLER(popcntb, 0x1F, 0x1A, 0x03, 0x0000F801, PPC_POPCNTB), 7902 GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD), 7903 GEN_HANDLER_E(prtyw, 0x1F, 0x1A, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA205), 7904 #if defined(TARGET_PPC64) 7905 GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD), 7906 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B), 7907 GEN_HANDLER_E(cnttzd, 0x1F, 0x1A, 0x11, 0x00000000, PPC_NONE, PPC2_ISA300), 7908 GEN_HANDLER_E(darn, 0x1F, 0x13, 0x17, 0x001CF801, PPC_NONE, PPC2_ISA300), 7909 GEN_HANDLER_E(prtyd, 0x1F, 0x1A, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA205), 7910 GEN_HANDLER_E(bpermd, 0x1F, 0x1C, 0x07, 0x00000001, PPC_NONE, PPC2_PERM_ISA206), 7911 #endif 7912 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7913 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7914 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7915 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER), 7916 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER), 7917 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER), 7918 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER), 7919 #if defined(TARGET_PPC64) 7920 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B), 7921 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B), 7922 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B), 7923 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B), 7924 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B), 7925 GEN_HANDLER2_E(extswsli0, "extswsli", 0x1F, 0x1A, 0x1B, 0x00000000, 7926 PPC_NONE, PPC2_ISA300), 7927 GEN_HANDLER2_E(extswsli1, "extswsli", 0x1F, 0x1B, 0x1B, 0x00000000, 7928 PPC_NONE, PPC2_ISA300), 7929 #endif 7930 #if defined(TARGET_PPC64) 7931 GEN_HANDLER(ld, 0x3A, 0xFF, 0xFF, 0x00000000, PPC_64B), 7932 GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX), 7933 GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B), 7934 #endif 7935 /* handles lfdp, lxsd, lxssp */ 7936 GEN_HANDLER_E(dform39, 0x39, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205), 7937 /* handles stfdp, lxv, stxsd, stxssp, stxv */ 7938 GEN_HANDLER_E(dform3D, 0x3D, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205), 7939 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7940 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7941 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING), 7942 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING), 7943 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING), 7944 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING), 7945 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x01FFF801, PPC_MEM_EIEIO), 7946 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM), 7947 GEN_HANDLER_E(lbarx, 0x1F, 0x14, 0x01, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 7948 GEN_HANDLER_E(lharx, 0x1F, 0x14, 0x03, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 7949 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES), 7950 GEN_HANDLER_E(lwat, 0x1F, 0x06, 0x12, 0x00000001, PPC_NONE, PPC2_ISA300), 7951 GEN_HANDLER_E(stwat, 0x1F, 0x06, 0x16, 0x00000001, PPC_NONE, PPC2_ISA300), 7952 GEN_HANDLER_E(stbcx_, 0x1F, 0x16, 0x15, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 7953 GEN_HANDLER_E(sthcx_, 0x1F, 0x16, 0x16, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 7954 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES), 7955 #if defined(TARGET_PPC64) 7956 GEN_HANDLER_E(ldat, 0x1F, 0x06, 0x13, 0x00000001, PPC_NONE, PPC2_ISA300), 7957 GEN_HANDLER_E(stdat, 0x1F, 0x06, 0x17, 0x00000001, PPC_NONE, PPC2_ISA300), 7958 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B), 7959 GEN_HANDLER_E(lqarx, 0x1F, 0x14, 0x08, 0, PPC_NONE, PPC2_LSQ_ISA207), 7960 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B), 7961 GEN_HANDLER_E(stqcx_, 0x1F, 0x16, 0x05, 0, PPC_NONE, PPC2_LSQ_ISA207), 7962 #endif 7963 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC), 7964 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT), 7965 GEN_HANDLER_E(wait, 0x1F, 0x1E, 0x00, 0x039FF801, PPC_NONE, PPC2_ISA300), 7966 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 7967 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 7968 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW), 7969 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW), 7970 GEN_HANDLER_E(bctar, 0x13, 0x10, 0x11, 0x0000E000, PPC_NONE, PPC2_BCTAR_ISA207), 7971 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER), 7972 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW), 7973 #if defined(TARGET_PPC64) 7974 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B), 7975 #if !defined(CONFIG_USER_ONLY) 7976 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */ 7977 GEN_HANDLER_E(scv, 0x11, 0x10, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300), 7978 GEN_HANDLER_E(scv, 0x11, 0x00, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300), 7979 GEN_HANDLER_E(rfscv, 0x13, 0x12, 0x02, 0x03FF8001, PPC_NONE, PPC2_ISA300), 7980 #endif 7981 GEN_HANDLER_E(stop, 0x13, 0x12, 0x0b, 0x03FFF801, PPC_NONE, PPC2_ISA300), 7982 GEN_HANDLER_E(doze, 0x13, 0x12, 0x0c, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 7983 GEN_HANDLER_E(nap, 0x13, 0x12, 0x0d, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 7984 GEN_HANDLER_E(sleep, 0x13, 0x12, 0x0e, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 7985 GEN_HANDLER_E(rvwinkle, 0x13, 0x12, 0x0f, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 7986 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H), 7987 #endif 7988 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */ 7989 GEN_HANDLER(sc, 0x11, 0x11, 0xFF, 0x03FFF01D, PPC_FLOW), 7990 GEN_HANDLER(sc, 0x11, 0x01, 0xFF, 0x03FFF01D, PPC_FLOW), 7991 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW), 7992 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 7993 #if defined(TARGET_PPC64) 7994 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B), 7995 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B), 7996 #endif 7997 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC), 7998 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC), 7999 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC), 8000 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC), 8001 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB), 8002 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC), 8003 #if defined(TARGET_PPC64) 8004 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B), 8005 GEN_HANDLER_E(setb, 0x1F, 0x00, 0x04, 0x0003F801, PPC_NONE, PPC2_ISA300), 8006 GEN_HANDLER_E(mcrxrx, 0x1F, 0x00, 0x12, 0x007FF801, PPC_NONE, PPC2_ISA300), 8007 #endif 8008 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001EF801, PPC_MISC), 8009 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000000, PPC_MISC), 8010 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE), 8011 GEN_HANDLER_E(dcbfep, 0x1F, 0x1F, 0x03, 0x03C00001, PPC_NONE, PPC2_BOOKE206), 8012 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE), 8013 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE), 8014 GEN_HANDLER_E(dcbstep, 0x1F, 0x1F, 0x01, 0x03E00001, PPC_NONE, PPC2_BOOKE206), 8015 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x00000001, PPC_CACHE), 8016 GEN_HANDLER_E(dcbtep, 0x1F, 0x1F, 0x09, 0x00000001, PPC_NONE, PPC2_BOOKE206), 8017 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x00000001, PPC_CACHE), 8018 GEN_HANDLER_E(dcbtstep, 0x1F, 0x1F, 0x07, 0x00000001, PPC_NONE, PPC2_BOOKE206), 8019 GEN_HANDLER_E(dcbtls, 0x1F, 0x06, 0x05, 0x02000001, PPC_BOOKE, PPC2_BOOKE206), 8020 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ), 8021 GEN_HANDLER_E(dcbzep, 0x1F, 0x1F, 0x1F, 0x03C00001, PPC_NONE, PPC2_BOOKE206), 8022 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC), 8023 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x01800001, PPC_ALTIVEC), 8024 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC), 8025 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI), 8026 GEN_HANDLER_E(icbiep, 0x1F, 0x1F, 0x1E, 0x03E00001, PPC_NONE, PPC2_BOOKE206), 8027 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA), 8028 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT), 8029 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT), 8030 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT), 8031 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT), 8032 #if defined(TARGET_PPC64) 8033 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B), 8034 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001, 8035 PPC_SEGMENT_64B), 8036 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B), 8037 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001, 8038 PPC_SEGMENT_64B), 8039 GEN_HANDLER2(slbmte, "slbmte", 0x1F, 0x12, 0x0C, 0x001F0001, PPC_SEGMENT_64B), 8040 GEN_HANDLER2(slbmfee, "slbmfee", 0x1F, 0x13, 0x1C, 0x001F0001, PPC_SEGMENT_64B), 8041 GEN_HANDLER2(slbmfev, "slbmfev", 0x1F, 0x13, 0x1A, 0x001F0001, PPC_SEGMENT_64B), 8042 GEN_HANDLER2(slbfee_, "slbfee.", 0x1F, 0x13, 0x1E, 0x001F0000, PPC_SEGMENT_64B), 8043 #endif 8044 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA), 8045 /* 8046 * XXX Those instructions will need to be handled differently for 8047 * different ISA versions 8048 */ 8049 GEN_HANDLER(tlbiel, 0x1F, 0x12, 0x08, 0x001F0001, PPC_MEM_TLBIE), 8050 GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x001F0001, PPC_MEM_TLBIE), 8051 GEN_HANDLER_E(tlbiel, 0x1F, 0x12, 0x08, 0x00100001, PPC_NONE, PPC2_ISA300), 8052 GEN_HANDLER_E(tlbie, 0x1F, 0x12, 0x09, 0x00100001, PPC_NONE, PPC2_ISA300), 8053 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC), 8054 #if defined(TARGET_PPC64) 8055 GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x031FFC01, PPC_SLBI), 8056 GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI), 8057 GEN_HANDLER_E(slbieg, 0x1F, 0x12, 0x0E, 0x001F0001, PPC_NONE, PPC2_ISA300), 8058 GEN_HANDLER_E(slbsync, 0x1F, 0x12, 0x0A, 0x03FFF801, PPC_NONE, PPC2_ISA300), 8059 #endif 8060 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN), 8061 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN), 8062 GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR), 8063 GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR), 8064 GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR), 8065 GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR), 8066 GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR), 8067 GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR), 8068 GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR), 8069 GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR), 8070 GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR), 8071 GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR), 8072 GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR), 8073 GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR), 8074 GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR), 8075 GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR), 8076 GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR), 8077 GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR), 8078 GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR), 8079 GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR), 8080 GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR), 8081 GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR), 8082 GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR), 8083 GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR), 8084 GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR), 8085 GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR), 8086 GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR), 8087 GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR), 8088 GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR), 8089 GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR), 8090 GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR), 8091 GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR), 8092 GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR), 8093 GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR), 8094 GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR), 8095 GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR), 8096 GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC), 8097 GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC), 8098 GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC), 8099 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB), 8100 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB), 8101 GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB), 8102 GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB), 8103 GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER), 8104 GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER), 8105 GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER), 8106 GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER), 8107 GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER), 8108 GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER), 8109 GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 8110 GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 8111 GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2), 8112 GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2), 8113 GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 8114 GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 8115 GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2), 8116 GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2), 8117 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI), 8118 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA), 8119 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR), 8120 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR), 8121 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX), 8122 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX), 8123 GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX), 8124 GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX), 8125 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON), 8126 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON), 8127 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT), 8128 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON), 8129 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON), 8130 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP), 8131 GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206), 8132 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI), 8133 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI), 8134 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB), 8135 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB), 8136 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB), 8137 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE), 8138 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE), 8139 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE), 8140 GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, 8141 PPC_NONE, PPC2_BOOKE206), 8142 GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, 8143 PPC_NONE, PPC2_BOOKE206), 8144 GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, 8145 PPC_NONE, PPC2_BOOKE206), 8146 GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001, 8147 PPC_NONE, PPC2_BOOKE206), 8148 GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001, 8149 PPC_NONE, PPC2_BOOKE206), 8150 GEN_HANDLER2_E(msgsnd, "msgsnd", 0x1F, 0x0E, 0x06, 0x03ff0001, 8151 PPC_NONE, PPC2_PRCNTL), 8152 GEN_HANDLER2_E(msgclr, "msgclr", 0x1F, 0x0E, 0x07, 0x03ff0001, 8153 PPC_NONE, PPC2_PRCNTL), 8154 GEN_HANDLER2_E(msgsync, "msgsync", 0x1F, 0x16, 0x1B, 0x00000000, 8155 PPC_NONE, PPC2_PRCNTL), 8156 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE), 8157 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE), 8158 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC), 8159 GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801, 8160 PPC_BOOKE, PPC2_BOOKE206), 8161 GEN_HANDLER(msync_4xx, 0x1F, 0x16, 0x12, 0x039FF801, PPC_BOOKE), 8162 GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001, 8163 PPC_BOOKE, PPC2_BOOKE206), 8164 GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, 8165 PPC_440_SPEC), 8166 GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC), 8167 GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC), 8168 GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC), 8169 GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC), 8170 GEN_HANDLER(vmladduhm, 0x04, 0x11, 0xFF, 0x00000000, PPC_ALTIVEC), 8171 #if defined(TARGET_PPC64) 8172 GEN_HANDLER_E(maddhd_maddhdu, 0x04, 0x18, 0xFF, 0x00000000, PPC_NONE, 8173 PPC2_ISA300), 8174 GEN_HANDLER_E(maddld, 0x04, 0x19, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300), 8175 GEN_HANDLER2_E(msgsndp, "msgsndp", 0x1F, 0x0E, 0x04, 0x03ff0001, 8176 PPC_NONE, PPC2_ISA207S), 8177 GEN_HANDLER2_E(msgclrp, "msgclrp", 0x1F, 0x0E, 0x05, 0x03ff0001, 8178 PPC_NONE, PPC2_ISA207S), 8179 #endif 8180 8181 #undef GEN_INT_ARITH_ADD 8182 #undef GEN_INT_ARITH_ADD_CONST 8183 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \ 8184 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER), 8185 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \ 8186 add_ca, compute_ca, compute_ov) \ 8187 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER), 8188 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0) 8189 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1) 8190 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0) 8191 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1) 8192 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0) 8193 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1) 8194 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0) 8195 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1) 8196 GEN_HANDLER_E(addex, 0x1F, 0x0A, 0x05, 0x00000000, PPC_NONE, PPC2_ISA300), 8197 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0) 8198 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1) 8199 8200 #undef GEN_INT_ARITH_DIVW 8201 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \ 8202 GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER) 8203 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0), 8204 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1), 8205 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0), 8206 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1), 8207 GEN_HANDLER_E(divwe, 0x1F, 0x0B, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206), 8208 GEN_HANDLER_E(divweo, 0x1F, 0x0B, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206), 8209 GEN_HANDLER_E(divweu, 0x1F, 0x0B, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206), 8210 GEN_HANDLER_E(divweuo, 0x1F, 0x0B, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206), 8211 GEN_HANDLER_E(modsw, 0x1F, 0x0B, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300), 8212 GEN_HANDLER_E(moduw, 0x1F, 0x0B, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300), 8213 8214 #if defined(TARGET_PPC64) 8215 #undef GEN_INT_ARITH_DIVD 8216 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \ 8217 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B) 8218 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0), 8219 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1), 8220 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0), 8221 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1), 8222 8223 GEN_HANDLER_E(divdeu, 0x1F, 0x09, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206), 8224 GEN_HANDLER_E(divdeuo, 0x1F, 0x09, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206), 8225 GEN_HANDLER_E(divde, 0x1F, 0x09, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206), 8226 GEN_HANDLER_E(divdeo, 0x1F, 0x09, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206), 8227 GEN_HANDLER_E(modsd, 0x1F, 0x09, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300), 8228 GEN_HANDLER_E(modud, 0x1F, 0x09, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300), 8229 8230 #undef GEN_INT_ARITH_MUL_HELPER 8231 #define GEN_INT_ARITH_MUL_HELPER(name, opc3) \ 8232 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B) 8233 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00), 8234 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02), 8235 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17), 8236 #endif 8237 8238 #undef GEN_INT_ARITH_SUBF 8239 #undef GEN_INT_ARITH_SUBF_CONST 8240 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \ 8241 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER), 8242 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \ 8243 add_ca, compute_ca, compute_ov) \ 8244 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER), 8245 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0) 8246 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1) 8247 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0) 8248 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1) 8249 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0) 8250 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1) 8251 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0) 8252 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1) 8253 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0) 8254 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1) 8255 8256 #undef GEN_LOGICAL1 8257 #undef GEN_LOGICAL2 8258 #define GEN_LOGICAL2(name, tcg_op, opc, type) \ 8259 GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type) 8260 #define GEN_LOGICAL1(name, tcg_op, opc, type) \ 8261 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type) 8262 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER), 8263 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER), 8264 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER), 8265 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER), 8266 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER), 8267 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER), 8268 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER), 8269 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER), 8270 #if defined(TARGET_PPC64) 8271 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B), 8272 #endif 8273 8274 #if defined(TARGET_PPC64) 8275 #undef GEN_PPC64_R2 8276 #undef GEN_PPC64_R4 8277 #define GEN_PPC64_R2(name, opc1, opc2) \ 8278 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\ 8279 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \ 8280 PPC_64B) 8281 #define GEN_PPC64_R4(name, opc1, opc2) \ 8282 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\ 8283 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \ 8284 PPC_64B), \ 8285 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \ 8286 PPC_64B), \ 8287 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \ 8288 PPC_64B) 8289 GEN_PPC64_R4(rldicl, 0x1E, 0x00), 8290 GEN_PPC64_R4(rldicr, 0x1E, 0x02), 8291 GEN_PPC64_R4(rldic, 0x1E, 0x04), 8292 GEN_PPC64_R2(rldcl, 0x1E, 0x08), 8293 GEN_PPC64_R2(rldcr, 0x1E, 0x09), 8294 GEN_PPC64_R4(rldimi, 0x1E, 0x06), 8295 #endif 8296 8297 #undef GEN_LD 8298 #undef GEN_LDU 8299 #undef GEN_LDUX 8300 #undef GEN_LDX_E 8301 #undef GEN_LDS 8302 #define GEN_LD(name, ldop, opc, type) \ 8303 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type), 8304 #define GEN_LDU(name, ldop, opc, type) \ 8305 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type), 8306 #define GEN_LDUX(name, ldop, opc2, opc3, type) \ 8307 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type), 8308 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \ 8309 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2), 8310 #define GEN_LDS(name, ldop, op, type) \ 8311 GEN_LD(name, ldop, op | 0x20, type) \ 8312 GEN_LDU(name, ldop, op | 0x21, type) \ 8313 GEN_LDUX(name, ldop, 0x17, op | 0x01, type) \ 8314 GEN_LDX(name, ldop, 0x17, op | 0x00, type) 8315 8316 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER) 8317 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER) 8318 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER) 8319 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER) 8320 #if defined(TARGET_PPC64) 8321 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B) 8322 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B) 8323 GEN_LDUX(ld, ld64_i64, 0x15, 0x01, PPC_64B) 8324 GEN_LDX(ld, ld64_i64, 0x15, 0x00, PPC_64B) 8325 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE) 8326 8327 /* HV/P7 and later only */ 8328 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST) 8329 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x18, PPC_CILDST) 8330 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST) 8331 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST) 8332 #endif 8333 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER) 8334 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER) 8335 8336 /* External PID based load */ 8337 #undef GEN_LDEPX 8338 #define GEN_LDEPX(name, ldop, opc2, opc3) \ 8339 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3, \ 8340 0x00000001, PPC_NONE, PPC2_BOOKE206), 8341 8342 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02) 8343 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08) 8344 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00) 8345 #if defined(TARGET_PPC64) 8346 GEN_LDEPX(ld, DEF_MEMOP(MO_Q), 0x1D, 0x00) 8347 #endif 8348 8349 #undef GEN_ST 8350 #undef GEN_STU 8351 #undef GEN_STUX 8352 #undef GEN_STX_E 8353 #undef GEN_STS 8354 #define GEN_ST(name, stop, opc, type) \ 8355 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type), 8356 #define GEN_STU(name, stop, opc, type) \ 8357 GEN_HANDLER(stop##u, opc, 0xFF, 0xFF, 0x00000000, type), 8358 #define GEN_STUX(name, stop, opc2, opc3, type) \ 8359 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type), 8360 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \ 8361 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000000, type, type2), 8362 #define GEN_STS(name, stop, op, type) \ 8363 GEN_ST(name, stop, op | 0x20, type) \ 8364 GEN_STU(name, stop, op | 0x21, type) \ 8365 GEN_STUX(name, stop, 0x17, op | 0x01, type) \ 8366 GEN_STX(name, stop, 0x17, op | 0x00, type) 8367 8368 GEN_STS(stb, st8, 0x06, PPC_INTEGER) 8369 GEN_STS(sth, st16, 0x0C, PPC_INTEGER) 8370 GEN_STS(stw, st32, 0x04, PPC_INTEGER) 8371 #if defined(TARGET_PPC64) 8372 GEN_STUX(std, st64_i64, 0x15, 0x05, PPC_64B) 8373 GEN_STX(std, st64_i64, 0x15, 0x04, PPC_64B) 8374 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE) 8375 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST) 8376 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST) 8377 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST) 8378 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST) 8379 #endif 8380 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER) 8381 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER) 8382 8383 #undef GEN_STEPX 8384 #define GEN_STEPX(name, ldop, opc2, opc3) \ 8385 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3, \ 8386 0x00000001, PPC_NONE, PPC2_BOOKE206), 8387 8388 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06) 8389 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C) 8390 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04) 8391 #if defined(TARGET_PPC64) 8392 GEN_STEPX(std, DEF_MEMOP(MO_Q), 0x1D, 0x04) 8393 #endif 8394 8395 #undef GEN_CRLOGIC 8396 #define GEN_CRLOGIC(name, tcg_op, opc) \ 8397 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER) 8398 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08), 8399 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04), 8400 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09), 8401 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07), 8402 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01), 8403 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E), 8404 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D), 8405 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06), 8406 8407 #undef GEN_MAC_HANDLER 8408 #define GEN_MAC_HANDLER(name, opc2, opc3) \ 8409 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC) 8410 GEN_MAC_HANDLER(macchw, 0x0C, 0x05), 8411 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15), 8412 GEN_MAC_HANDLER(macchws, 0x0C, 0x07), 8413 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17), 8414 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06), 8415 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16), 8416 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04), 8417 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14), 8418 GEN_MAC_HANDLER(machhw, 0x0C, 0x01), 8419 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11), 8420 GEN_MAC_HANDLER(machhws, 0x0C, 0x03), 8421 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13), 8422 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02), 8423 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12), 8424 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00), 8425 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10), 8426 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D), 8427 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D), 8428 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F), 8429 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F), 8430 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C), 8431 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C), 8432 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E), 8433 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E), 8434 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05), 8435 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15), 8436 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07), 8437 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17), 8438 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01), 8439 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11), 8440 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03), 8441 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13), 8442 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D), 8443 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D), 8444 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F), 8445 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F), 8446 GEN_MAC_HANDLER(mulchw, 0x08, 0x05), 8447 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04), 8448 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01), 8449 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00), 8450 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D), 8451 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C), 8452 8453 GEN_HANDLER2_E(tbegin, "tbegin", 0x1F, 0x0E, 0x14, 0x01DFF800, \ 8454 PPC_NONE, PPC2_TM), 8455 GEN_HANDLER2_E(tend, "tend", 0x1F, 0x0E, 0x15, 0x01FFF800, \ 8456 PPC_NONE, PPC2_TM), 8457 GEN_HANDLER2_E(tabort, "tabort", 0x1F, 0x0E, 0x1C, 0x03E0F800, \ 8458 PPC_NONE, PPC2_TM), 8459 GEN_HANDLER2_E(tabortwc, "tabortwc", 0x1F, 0x0E, 0x18, 0x00000000, \ 8460 PPC_NONE, PPC2_TM), 8461 GEN_HANDLER2_E(tabortwci, "tabortwci", 0x1F, 0x0E, 0x1A, 0x00000000, \ 8462 PPC_NONE, PPC2_TM), 8463 GEN_HANDLER2_E(tabortdc, "tabortdc", 0x1F, 0x0E, 0x19, 0x00000000, \ 8464 PPC_NONE, PPC2_TM), 8465 GEN_HANDLER2_E(tabortdci, "tabortdci", 0x1F, 0x0E, 0x1B, 0x00000000, \ 8466 PPC_NONE, PPC2_TM), 8467 GEN_HANDLER2_E(tsr, "tsr", 0x1F, 0x0E, 0x17, 0x03DFF800, \ 8468 PPC_NONE, PPC2_TM), 8469 GEN_HANDLER2_E(tcheck, "tcheck", 0x1F, 0x0E, 0x16, 0x007FF800, \ 8470 PPC_NONE, PPC2_TM), 8471 GEN_HANDLER2_E(treclaim, "treclaim", 0x1F, 0x0E, 0x1D, 0x03E0F800, \ 8472 PPC_NONE, PPC2_TM), 8473 GEN_HANDLER2_E(trechkpt, "trechkpt", 0x1F, 0x0E, 0x1F, 0x03FFF800, \ 8474 PPC_NONE, PPC2_TM), 8475 8476 #include "translate/fp-ops.c.inc" 8477 8478 #include "translate/vmx-ops.c.inc" 8479 8480 #include "translate/vsx-ops.c.inc" 8481 8482 #include "translate/dfp-ops.c.inc" 8483 8484 #include "translate/spe-ops.c.inc" 8485 }; 8486 8487 /*****************************************************************************/ 8488 /* Opcode types */ 8489 enum { 8490 PPC_DIRECT = 0, /* Opcode routine */ 8491 PPC_INDIRECT = 1, /* Indirect opcode table */ 8492 }; 8493 8494 #define PPC_OPCODE_MASK 0x3 8495 8496 static inline int is_indirect_opcode(void *handler) 8497 { 8498 return ((uintptr_t)handler & PPC_OPCODE_MASK) == PPC_INDIRECT; 8499 } 8500 8501 static inline opc_handler_t **ind_table(void *handler) 8502 { 8503 return (opc_handler_t **)((uintptr_t)handler & ~PPC_OPCODE_MASK); 8504 } 8505 8506 /* Instruction table creation */ 8507 /* Opcodes tables creation */ 8508 static void fill_new_table(opc_handler_t **table, int len) 8509 { 8510 int i; 8511 8512 for (i = 0; i < len; i++) { 8513 table[i] = &invalid_handler; 8514 } 8515 } 8516 8517 static int create_new_table(opc_handler_t **table, unsigned char idx) 8518 { 8519 opc_handler_t **tmp; 8520 8521 tmp = g_new(opc_handler_t *, PPC_CPU_INDIRECT_OPCODES_LEN); 8522 fill_new_table(tmp, PPC_CPU_INDIRECT_OPCODES_LEN); 8523 table[idx] = (opc_handler_t *)((uintptr_t)tmp | PPC_INDIRECT); 8524 8525 return 0; 8526 } 8527 8528 static int insert_in_table(opc_handler_t **table, unsigned char idx, 8529 opc_handler_t *handler) 8530 { 8531 if (table[idx] != &invalid_handler) { 8532 return -1; 8533 } 8534 table[idx] = handler; 8535 8536 return 0; 8537 } 8538 8539 static int register_direct_insn(opc_handler_t **ppc_opcodes, 8540 unsigned char idx, opc_handler_t *handler) 8541 { 8542 if (insert_in_table(ppc_opcodes, idx, handler) < 0) { 8543 printf("*** ERROR: opcode %02x already assigned in main " 8544 "opcode table\n", idx); 8545 #if defined(PPC_DUMP_CPU) 8546 printf(" Registered handler '%s' - new handler '%s'\n", 8547 ppc_opcodes[idx]->oname, handler->oname); 8548 #endif 8549 return -1; 8550 } 8551 8552 return 0; 8553 } 8554 8555 static int register_ind_in_table(opc_handler_t **table, 8556 unsigned char idx1, unsigned char idx2, 8557 opc_handler_t *handler) 8558 { 8559 if (table[idx1] == &invalid_handler) { 8560 if (create_new_table(table, idx1) < 0) { 8561 printf("*** ERROR: unable to create indirect table " 8562 "idx=%02x\n", idx1); 8563 return -1; 8564 } 8565 } else { 8566 if (!is_indirect_opcode(table[idx1])) { 8567 printf("*** ERROR: idx %02x already assigned to a direct " 8568 "opcode\n", idx1); 8569 #if defined(PPC_DUMP_CPU) 8570 printf(" Registered handler '%s' - new handler '%s'\n", 8571 ind_table(table[idx1])[idx2]->oname, handler->oname); 8572 #endif 8573 return -1; 8574 } 8575 } 8576 if (handler != NULL && 8577 insert_in_table(ind_table(table[idx1]), idx2, handler) < 0) { 8578 printf("*** ERROR: opcode %02x already assigned in " 8579 "opcode table %02x\n", idx2, idx1); 8580 #if defined(PPC_DUMP_CPU) 8581 printf(" Registered handler '%s' - new handler '%s'\n", 8582 ind_table(table[idx1])[idx2]->oname, handler->oname); 8583 #endif 8584 return -1; 8585 } 8586 8587 return 0; 8588 } 8589 8590 static int register_ind_insn(opc_handler_t **ppc_opcodes, 8591 unsigned char idx1, unsigned char idx2, 8592 opc_handler_t *handler) 8593 { 8594 return register_ind_in_table(ppc_opcodes, idx1, idx2, handler); 8595 } 8596 8597 static int register_dblind_insn(opc_handler_t **ppc_opcodes, 8598 unsigned char idx1, unsigned char idx2, 8599 unsigned char idx3, opc_handler_t *handler) 8600 { 8601 if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) { 8602 printf("*** ERROR: unable to join indirect table idx " 8603 "[%02x-%02x]\n", idx1, idx2); 8604 return -1; 8605 } 8606 if (register_ind_in_table(ind_table(ppc_opcodes[idx1]), idx2, idx3, 8607 handler) < 0) { 8608 printf("*** ERROR: unable to insert opcode " 8609 "[%02x-%02x-%02x]\n", idx1, idx2, idx3); 8610 return -1; 8611 } 8612 8613 return 0; 8614 } 8615 8616 static int register_trplind_insn(opc_handler_t **ppc_opcodes, 8617 unsigned char idx1, unsigned char idx2, 8618 unsigned char idx3, unsigned char idx4, 8619 opc_handler_t *handler) 8620 { 8621 opc_handler_t **table; 8622 8623 if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) { 8624 printf("*** ERROR: unable to join indirect table idx " 8625 "[%02x-%02x]\n", idx1, idx2); 8626 return -1; 8627 } 8628 table = ind_table(ppc_opcodes[idx1]); 8629 if (register_ind_in_table(table, idx2, idx3, NULL) < 0) { 8630 printf("*** ERROR: unable to join 2nd-level indirect table idx " 8631 "[%02x-%02x-%02x]\n", idx1, idx2, idx3); 8632 return -1; 8633 } 8634 table = ind_table(table[idx2]); 8635 if (register_ind_in_table(table, idx3, idx4, handler) < 0) { 8636 printf("*** ERROR: unable to insert opcode " 8637 "[%02x-%02x-%02x-%02x]\n", idx1, idx2, idx3, idx4); 8638 return -1; 8639 } 8640 return 0; 8641 } 8642 static int register_insn(opc_handler_t **ppc_opcodes, opcode_t *insn) 8643 { 8644 if (insn->opc2 != 0xFF) { 8645 if (insn->opc3 != 0xFF) { 8646 if (insn->opc4 != 0xFF) { 8647 if (register_trplind_insn(ppc_opcodes, insn->opc1, insn->opc2, 8648 insn->opc3, insn->opc4, 8649 &insn->handler) < 0) { 8650 return -1; 8651 } 8652 } else { 8653 if (register_dblind_insn(ppc_opcodes, insn->opc1, insn->opc2, 8654 insn->opc3, &insn->handler) < 0) { 8655 return -1; 8656 } 8657 } 8658 } else { 8659 if (register_ind_insn(ppc_opcodes, insn->opc1, 8660 insn->opc2, &insn->handler) < 0) { 8661 return -1; 8662 } 8663 } 8664 } else { 8665 if (register_direct_insn(ppc_opcodes, insn->opc1, &insn->handler) < 0) { 8666 return -1; 8667 } 8668 } 8669 8670 return 0; 8671 } 8672 8673 static int test_opcode_table(opc_handler_t **table, int len) 8674 { 8675 int i, count, tmp; 8676 8677 for (i = 0, count = 0; i < len; i++) { 8678 /* Consistency fixup */ 8679 if (table[i] == NULL) { 8680 table[i] = &invalid_handler; 8681 } 8682 if (table[i] != &invalid_handler) { 8683 if (is_indirect_opcode(table[i])) { 8684 tmp = test_opcode_table(ind_table(table[i]), 8685 PPC_CPU_INDIRECT_OPCODES_LEN); 8686 if (tmp == 0) { 8687 free(table[i]); 8688 table[i] = &invalid_handler; 8689 } else { 8690 count++; 8691 } 8692 } else { 8693 count++; 8694 } 8695 } 8696 } 8697 8698 return count; 8699 } 8700 8701 static void fix_opcode_tables(opc_handler_t **ppc_opcodes) 8702 { 8703 if (test_opcode_table(ppc_opcodes, PPC_CPU_OPCODES_LEN) == 0) { 8704 printf("*** WARNING: no opcode defined !\n"); 8705 } 8706 } 8707 8708 /*****************************************************************************/ 8709 void create_ppc_opcodes(PowerPCCPU *cpu, Error **errp) 8710 { 8711 PowerPCCPUClass *pcc = POWERPC_CPU_GET_CLASS(cpu); 8712 opcode_t *opc; 8713 8714 fill_new_table(cpu->opcodes, PPC_CPU_OPCODES_LEN); 8715 for (opc = opcodes; opc < &opcodes[ARRAY_SIZE(opcodes)]; opc++) { 8716 if (((opc->handler.type & pcc->insns_flags) != 0) || 8717 ((opc->handler.type2 & pcc->insns_flags2) != 0)) { 8718 if (register_insn(cpu->opcodes, opc) < 0) { 8719 error_setg(errp, "ERROR initializing PowerPC instruction " 8720 "0x%02x 0x%02x 0x%02x", opc->opc1, opc->opc2, 8721 opc->opc3); 8722 return; 8723 } 8724 } 8725 } 8726 fix_opcode_tables(cpu->opcodes); 8727 fflush(stdout); 8728 fflush(stderr); 8729 } 8730 8731 void destroy_ppc_opcodes(PowerPCCPU *cpu) 8732 { 8733 opc_handler_t **table, **table_2; 8734 int i, j, k; 8735 8736 for (i = 0; i < PPC_CPU_OPCODES_LEN; i++) { 8737 if (cpu->opcodes[i] == &invalid_handler) { 8738 continue; 8739 } 8740 if (is_indirect_opcode(cpu->opcodes[i])) { 8741 table = ind_table(cpu->opcodes[i]); 8742 for (j = 0; j < PPC_CPU_INDIRECT_OPCODES_LEN; j++) { 8743 if (table[j] == &invalid_handler) { 8744 continue; 8745 } 8746 if (is_indirect_opcode(table[j])) { 8747 table_2 = ind_table(table[j]); 8748 for (k = 0; k < PPC_CPU_INDIRECT_OPCODES_LEN; k++) { 8749 if (table_2[k] != &invalid_handler && 8750 is_indirect_opcode(table_2[k])) { 8751 g_free((opc_handler_t *)((uintptr_t)table_2[k] & 8752 ~PPC_INDIRECT)); 8753 } 8754 } 8755 g_free((opc_handler_t *)((uintptr_t)table[j] & 8756 ~PPC_INDIRECT)); 8757 } 8758 } 8759 g_free((opc_handler_t *)((uintptr_t)cpu->opcodes[i] & 8760 ~PPC_INDIRECT)); 8761 } 8762 } 8763 } 8764 8765 #if defined(PPC_DUMP_CPU) 8766 static void dump_ppc_insns(CPUPPCState *env) 8767 { 8768 opc_handler_t **table, *handler; 8769 const char *p, *q; 8770 uint8_t opc1, opc2, opc3, opc4; 8771 8772 printf("Instructions set:\n"); 8773 /* opc1 is 6 bits long */ 8774 for (opc1 = 0x00; opc1 < PPC_CPU_OPCODES_LEN; opc1++) { 8775 table = env->opcodes; 8776 handler = table[opc1]; 8777 if (is_indirect_opcode(handler)) { 8778 /* opc2 is 5 bits long */ 8779 for (opc2 = 0; opc2 < PPC_CPU_INDIRECT_OPCODES_LEN; opc2++) { 8780 table = env->opcodes; 8781 handler = env->opcodes[opc1]; 8782 table = ind_table(handler); 8783 handler = table[opc2]; 8784 if (is_indirect_opcode(handler)) { 8785 table = ind_table(handler); 8786 /* opc3 is 5 bits long */ 8787 for (opc3 = 0; opc3 < PPC_CPU_INDIRECT_OPCODES_LEN; 8788 opc3++) { 8789 handler = table[opc3]; 8790 if (is_indirect_opcode(handler)) { 8791 table = ind_table(handler); 8792 /* opc4 is 5 bits long */ 8793 for (opc4 = 0; opc4 < PPC_CPU_INDIRECT_OPCODES_LEN; 8794 opc4++) { 8795 handler = table[opc4]; 8796 if (handler->handler != &gen_invalid) { 8797 printf("INSN: %02x %02x %02x %02x -- " 8798 "(%02d %04d %02d) : %s\n", 8799 opc1, opc2, opc3, opc4, 8800 opc1, (opc3 << 5) | opc2, opc4, 8801 handler->oname); 8802 } 8803 } 8804 } else { 8805 if (handler->handler != &gen_invalid) { 8806 /* Special hack to properly dump SPE insns */ 8807 p = strchr(handler->oname, '_'); 8808 if (p == NULL) { 8809 printf("INSN: %02x %02x %02x (%02d %04d) : " 8810 "%s\n", 8811 opc1, opc2, opc3, opc1, 8812 (opc3 << 5) | opc2, 8813 handler->oname); 8814 } else { 8815 q = "speundef"; 8816 if ((p - handler->oname) != strlen(q) 8817 || (memcmp(handler->oname, q, strlen(q)) 8818 != 0)) { 8819 /* First instruction */ 8820 printf("INSN: %02x %02x %02x" 8821 "(%02d %04d) : %.*s\n", 8822 opc1, opc2 << 1, opc3, opc1, 8823 (opc3 << 6) | (opc2 << 1), 8824 (int)(p - handler->oname), 8825 handler->oname); 8826 } 8827 if (strcmp(p + 1, q) != 0) { 8828 /* Second instruction */ 8829 printf("INSN: %02x %02x %02x " 8830 "(%02d %04d) : %s\n", opc1, 8831 (opc2 << 1) | 1, opc3, opc1, 8832 (opc3 << 6) | (opc2 << 1) | 1, 8833 p + 1); 8834 } 8835 } 8836 } 8837 } 8838 } 8839 } else { 8840 if (handler->handler != &gen_invalid) { 8841 printf("INSN: %02x %02x -- (%02d %04d) : %s\n", 8842 opc1, opc2, opc1, opc2, handler->oname); 8843 } 8844 } 8845 } 8846 } else { 8847 if (handler->handler != &gen_invalid) { 8848 printf("INSN: %02x -- -- (%02d ----) : %s\n", 8849 opc1, opc1, handler->oname); 8850 } 8851 } 8852 } 8853 } 8854 #endif 8855 int ppc_fixup_cpu(PowerPCCPU *cpu) 8856 { 8857 CPUPPCState *env = &cpu->env; 8858 8859 /* 8860 * TCG doesn't (yet) emulate some groups of instructions that are 8861 * implemented on some otherwise supported CPUs (e.g. VSX and 8862 * decimal floating point instructions on POWER7). We remove 8863 * unsupported instruction groups from the cpu state's instruction 8864 * masks and hope the guest can cope. For at least the pseries 8865 * machine, the unavailability of these instructions can be 8866 * advertised to the guest via the device tree. 8867 */ 8868 if ((env->insns_flags & ~PPC_TCG_INSNS) 8869 || (env->insns_flags2 & ~PPC_TCG_INSNS2)) { 8870 warn_report("Disabling some instructions which are not " 8871 "emulated by TCG (0x%" PRIx64 ", 0x%" PRIx64 ")", 8872 env->insns_flags & ~PPC_TCG_INSNS, 8873 env->insns_flags2 & ~PPC_TCG_INSNS2); 8874 } 8875 env->insns_flags &= PPC_TCG_INSNS; 8876 env->insns_flags2 &= PPC_TCG_INSNS2; 8877 return 0; 8878 } 8879 8880 static bool decode_legacy(PowerPCCPU *cpu, DisasContext *ctx, uint32_t insn) 8881 { 8882 opc_handler_t **table, *handler; 8883 uint32_t inval; 8884 8885 ctx->opcode = insn; 8886 8887 LOG_DISAS("translate opcode %08x (%02x %02x %02x %02x) (%s)\n", 8888 insn, opc1(insn), opc2(insn), opc3(insn), opc4(insn), 8889 ctx->le_mode ? "little" : "big"); 8890 8891 table = cpu->opcodes; 8892 handler = table[opc1(insn)]; 8893 if (is_indirect_opcode(handler)) { 8894 table = ind_table(handler); 8895 handler = table[opc2(insn)]; 8896 if (is_indirect_opcode(handler)) { 8897 table = ind_table(handler); 8898 handler = table[opc3(insn)]; 8899 if (is_indirect_opcode(handler)) { 8900 table = ind_table(handler); 8901 handler = table[opc4(insn)]; 8902 } 8903 } 8904 } 8905 8906 /* Is opcode *REALLY* valid ? */ 8907 if (unlikely(handler->handler == &gen_invalid)) { 8908 qemu_log_mask(LOG_GUEST_ERROR, "invalid/unsupported opcode: " 8909 "%02x - %02x - %02x - %02x (%08x) " 8910 TARGET_FMT_lx "\n", 8911 opc1(insn), opc2(insn), opc3(insn), opc4(insn), 8912 insn, ctx->cia); 8913 return false; 8914 } 8915 8916 if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE) 8917 && Rc(insn))) { 8918 inval = handler->inval2; 8919 } else { 8920 inval = handler->inval1; 8921 } 8922 8923 if (unlikely((insn & inval) != 0)) { 8924 qemu_log_mask(LOG_GUEST_ERROR, "invalid bits: %08x for opcode: " 8925 "%02x - %02x - %02x - %02x (%08x) " 8926 TARGET_FMT_lx "\n", insn & inval, 8927 opc1(insn), opc2(insn), opc3(insn), opc4(insn), 8928 insn, ctx->cia); 8929 return false; 8930 } 8931 8932 handler->handler(ctx); 8933 return true; 8934 } 8935 8936 static void ppc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs) 8937 { 8938 DisasContext *ctx = container_of(dcbase, DisasContext, base); 8939 CPUPPCState *env = cs->env_ptr; 8940 uint32_t hflags = ctx->base.tb->flags; 8941 8942 ctx->spr_cb = env->spr_cb; 8943 ctx->pr = (hflags >> HFLAGS_PR) & 1; 8944 ctx->mem_idx = (hflags >> HFLAGS_DMMU_IDX) & 7; 8945 ctx->dr = (hflags >> HFLAGS_DR) & 1; 8946 ctx->hv = (hflags >> HFLAGS_HV) & 1; 8947 ctx->insns_flags = env->insns_flags; 8948 ctx->insns_flags2 = env->insns_flags2; 8949 ctx->access_type = -1; 8950 ctx->need_access_type = !mmu_is_64bit(env->mmu_model); 8951 ctx->le_mode = (hflags >> HFLAGS_LE) & 1; 8952 ctx->default_tcg_memop_mask = ctx->le_mode ? MO_LE : MO_BE; 8953 ctx->flags = env->flags; 8954 #if defined(TARGET_PPC64) 8955 ctx->sf_mode = (hflags >> HFLAGS_64) & 1; 8956 ctx->has_cfar = !!(env->flags & POWERPC_FLAG_CFAR); 8957 #endif 8958 ctx->lazy_tlb_flush = env->mmu_model == POWERPC_MMU_32B 8959 || env->mmu_model == POWERPC_MMU_601 8960 || env->mmu_model & POWERPC_MMU_64; 8961 8962 ctx->fpu_enabled = (hflags >> HFLAGS_FP) & 1; 8963 ctx->spe_enabled = (hflags >> HFLAGS_SPE) & 1; 8964 ctx->altivec_enabled = (hflags >> HFLAGS_VR) & 1; 8965 ctx->vsx_enabled = (hflags >> HFLAGS_VSX) & 1; 8966 ctx->tm_enabled = (hflags >> HFLAGS_TM) & 1; 8967 ctx->gtse = (hflags >> HFLAGS_GTSE) & 1; 8968 8969 ctx->singlestep_enabled = 0; 8970 if ((hflags >> HFLAGS_SE) & 1) { 8971 ctx->singlestep_enabled |= CPU_SINGLE_STEP; 8972 } 8973 if ((hflags >> HFLAGS_BE) & 1) { 8974 ctx->singlestep_enabled |= CPU_BRANCH_STEP; 8975 } 8976 if (unlikely(ctx->base.singlestep_enabled)) { 8977 ctx->singlestep_enabled |= GDBSTUB_SINGLE_STEP; 8978 } 8979 8980 if (ctx->singlestep_enabled & (CPU_SINGLE_STEP | GDBSTUB_SINGLE_STEP)) { 8981 ctx->base.max_insns = 1; 8982 } else { 8983 int bound = -(ctx->base.pc_first | TARGET_PAGE_MASK) / 4; 8984 ctx->base.max_insns = MIN(ctx->base.max_insns, bound); 8985 } 8986 } 8987 8988 static void ppc_tr_tb_start(DisasContextBase *db, CPUState *cs) 8989 { 8990 } 8991 8992 static void ppc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs) 8993 { 8994 tcg_gen_insn_start(dcbase->pc_next); 8995 } 8996 8997 static bool ppc_tr_breakpoint_check(DisasContextBase *dcbase, CPUState *cs, 8998 const CPUBreakpoint *bp) 8999 { 9000 DisasContext *ctx = container_of(dcbase, DisasContext, base); 9001 9002 gen_update_nip(ctx, ctx->base.pc_next); 9003 gen_debug_exception(ctx); 9004 /* 9005 * The address covered by the breakpoint must be included in 9006 * [tb->pc, tb->pc + tb->size) in order to for it to be properly 9007 * cleared -- thus we increment the PC here so that the logic 9008 * setting tb->size below does the right thing. 9009 */ 9010 ctx->base.pc_next += 4; 9011 return true; 9012 } 9013 9014 static void ppc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs) 9015 { 9016 DisasContext *ctx = container_of(dcbase, DisasContext, base); 9017 PowerPCCPU *cpu = POWERPC_CPU(cs); 9018 CPUPPCState *env = cs->env_ptr; 9019 uint32_t insn; 9020 bool ok; 9021 9022 LOG_DISAS("----------------\n"); 9023 LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n", 9024 ctx->base.pc_next, ctx->mem_idx, (int)msr_ir); 9025 9026 ctx->cia = ctx->base.pc_next; 9027 insn = translator_ldl_swap(env, ctx->base.pc_next, need_byteswap(ctx)); 9028 ctx->base.pc_next += 4; 9029 9030 ok = decode_legacy(cpu, ctx, insn); 9031 if (!ok) { 9032 gen_invalid(ctx); 9033 } 9034 9035 translator_loop_temp_check(&ctx->base); 9036 } 9037 9038 static void ppc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs) 9039 { 9040 DisasContext *ctx = container_of(dcbase, DisasContext, base); 9041 DisasJumpType is_jmp = ctx->base.is_jmp; 9042 target_ulong nip = ctx->base.pc_next; 9043 int sse; 9044 9045 if (is_jmp == DISAS_NORETURN) { 9046 /* We have already exited the TB. */ 9047 return; 9048 } 9049 9050 /* Honor single stepping. */ 9051 sse = ctx->singlestep_enabled & (CPU_SINGLE_STEP | GDBSTUB_SINGLE_STEP); 9052 if (unlikely(sse)) { 9053 switch (is_jmp) { 9054 case DISAS_TOO_MANY: 9055 case DISAS_EXIT_UPDATE: 9056 case DISAS_CHAIN_UPDATE: 9057 gen_update_nip(ctx, nip); 9058 break; 9059 case DISAS_EXIT: 9060 case DISAS_CHAIN: 9061 break; 9062 default: 9063 g_assert_not_reached(); 9064 } 9065 9066 if (sse & GDBSTUB_SINGLE_STEP) { 9067 gen_debug_exception(ctx); 9068 return; 9069 } 9070 /* else CPU_SINGLE_STEP... */ 9071 if (nip <= 0x100 || nip > 0xf00) { 9072 gen_exception(ctx, gen_prep_dbgex(ctx)); 9073 return; 9074 } 9075 } 9076 9077 switch (is_jmp) { 9078 case DISAS_TOO_MANY: 9079 if (use_goto_tb(ctx, nip)) { 9080 tcg_gen_goto_tb(0); 9081 gen_update_nip(ctx, nip); 9082 tcg_gen_exit_tb(ctx->base.tb, 0); 9083 break; 9084 } 9085 /* fall through */ 9086 case DISAS_CHAIN_UPDATE: 9087 gen_update_nip(ctx, nip); 9088 /* fall through */ 9089 case DISAS_CHAIN: 9090 tcg_gen_lookup_and_goto_ptr(); 9091 break; 9092 9093 case DISAS_EXIT_UPDATE: 9094 gen_update_nip(ctx, nip); 9095 /* fall through */ 9096 case DISAS_EXIT: 9097 tcg_gen_exit_tb(NULL, 0); 9098 break; 9099 9100 default: 9101 g_assert_not_reached(); 9102 } 9103 } 9104 9105 static void ppc_tr_disas_log(const DisasContextBase *dcbase, CPUState *cs) 9106 { 9107 qemu_log("IN: %s\n", lookup_symbol(dcbase->pc_first)); 9108 log_target_disas(cs, dcbase->pc_first, dcbase->tb->size); 9109 } 9110 9111 static const TranslatorOps ppc_tr_ops = { 9112 .init_disas_context = ppc_tr_init_disas_context, 9113 .tb_start = ppc_tr_tb_start, 9114 .insn_start = ppc_tr_insn_start, 9115 .breakpoint_check = ppc_tr_breakpoint_check, 9116 .translate_insn = ppc_tr_translate_insn, 9117 .tb_stop = ppc_tr_tb_stop, 9118 .disas_log = ppc_tr_disas_log, 9119 }; 9120 9121 void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int max_insns) 9122 { 9123 DisasContext ctx; 9124 9125 translator_loop(&ppc_tr_ops, &ctx.base, cs, tb, max_insns); 9126 } 9127 9128 void restore_state_to_opc(CPUPPCState *env, TranslationBlock *tb, 9129 target_ulong *data) 9130 { 9131 env->nip = data[0]; 9132 } 9133