1 /* 2 * PowerPC emulation for qemu: main translation routines. 3 * 4 * Copyright (c) 2003-2007 Jocelyn Mayer 5 * Copyright (C) 2011 Freescale Semiconductor, Inc. 6 * 7 * This library is free software; you can redistribute it and/or 8 * modify it under the terms of the GNU Lesser General Public 9 * License as published by the Free Software Foundation; either 10 * version 2.1 of the License, or (at your option) any later version. 11 * 12 * This library is distributed in the hope that it will be useful, 13 * but WITHOUT ANY WARRANTY; without even the implied warranty of 14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 15 * Lesser General Public License for more details. 16 * 17 * You should have received a copy of the GNU Lesser General Public 18 * License along with this library; if not, see <http://www.gnu.org/licenses/>. 19 */ 20 21 #include "qemu/osdep.h" 22 #include "cpu.h" 23 #include "internal.h" 24 #include "disas/disas.h" 25 #include "exec/exec-all.h" 26 #include "tcg/tcg-op.h" 27 #include "tcg/tcg-op-gvec.h" 28 #include "qemu/host-utils.h" 29 #include "qemu/main-loop.h" 30 #include "exec/cpu_ldst.h" 31 32 #include "exec/helper-proto.h" 33 #include "exec/helper-gen.h" 34 35 #include "exec/translator.h" 36 #include "exec/log.h" 37 #include "qemu/atomic128.h" 38 #include "spr_tcg.h" 39 40 #include "qemu/qemu-print.h" 41 #include "qapi/error.h" 42 43 #define CPU_SINGLE_STEP 0x1 44 #define CPU_BRANCH_STEP 0x2 45 46 /* Include definitions for instructions classes and implementations flags */ 47 /* #define PPC_DEBUG_DISAS */ 48 49 #ifdef PPC_DEBUG_DISAS 50 # define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__) 51 #else 52 # define LOG_DISAS(...) do { } while (0) 53 #endif 54 /*****************************************************************************/ 55 /* Code translation helpers */ 56 57 /* global register indexes */ 58 static char cpu_reg_names[10 * 3 + 22 * 4 /* GPR */ 59 + 10 * 4 + 22 * 5 /* SPE GPRh */ 60 + 8 * 5 /* CRF */]; 61 static TCGv cpu_gpr[32]; 62 static TCGv cpu_gprh[32]; 63 static TCGv_i32 cpu_crf[8]; 64 static TCGv cpu_nip; 65 static TCGv cpu_msr; 66 static TCGv cpu_ctr; 67 static TCGv cpu_lr; 68 #if defined(TARGET_PPC64) 69 static TCGv cpu_cfar; 70 #endif 71 static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca, cpu_ov32, cpu_ca32; 72 static TCGv cpu_reserve; 73 static TCGv cpu_reserve_val; 74 static TCGv cpu_fpscr; 75 static TCGv_i32 cpu_access_type; 76 77 #include "exec/gen-icount.h" 78 79 void ppc_translate_init(void) 80 { 81 int i; 82 char *p; 83 size_t cpu_reg_names_size; 84 85 p = cpu_reg_names; 86 cpu_reg_names_size = sizeof(cpu_reg_names); 87 88 for (i = 0; i < 8; i++) { 89 snprintf(p, cpu_reg_names_size, "crf%d", i); 90 cpu_crf[i] = tcg_global_mem_new_i32(cpu_env, 91 offsetof(CPUPPCState, crf[i]), p); 92 p += 5; 93 cpu_reg_names_size -= 5; 94 } 95 96 for (i = 0; i < 32; i++) { 97 snprintf(p, cpu_reg_names_size, "r%d", i); 98 cpu_gpr[i] = tcg_global_mem_new(cpu_env, 99 offsetof(CPUPPCState, gpr[i]), p); 100 p += (i < 10) ? 3 : 4; 101 cpu_reg_names_size -= (i < 10) ? 3 : 4; 102 snprintf(p, cpu_reg_names_size, "r%dH", i); 103 cpu_gprh[i] = tcg_global_mem_new(cpu_env, 104 offsetof(CPUPPCState, gprh[i]), p); 105 p += (i < 10) ? 4 : 5; 106 cpu_reg_names_size -= (i < 10) ? 
4 : 5; 107 } 108 109 cpu_nip = tcg_global_mem_new(cpu_env, 110 offsetof(CPUPPCState, nip), "nip"); 111 112 cpu_msr = tcg_global_mem_new(cpu_env, 113 offsetof(CPUPPCState, msr), "msr"); 114 115 cpu_ctr = tcg_global_mem_new(cpu_env, 116 offsetof(CPUPPCState, ctr), "ctr"); 117 118 cpu_lr = tcg_global_mem_new(cpu_env, 119 offsetof(CPUPPCState, lr), "lr"); 120 121 #if defined(TARGET_PPC64) 122 cpu_cfar = tcg_global_mem_new(cpu_env, 123 offsetof(CPUPPCState, cfar), "cfar"); 124 #endif 125 126 cpu_xer = tcg_global_mem_new(cpu_env, 127 offsetof(CPUPPCState, xer), "xer"); 128 cpu_so = tcg_global_mem_new(cpu_env, 129 offsetof(CPUPPCState, so), "SO"); 130 cpu_ov = tcg_global_mem_new(cpu_env, 131 offsetof(CPUPPCState, ov), "OV"); 132 cpu_ca = tcg_global_mem_new(cpu_env, 133 offsetof(CPUPPCState, ca), "CA"); 134 cpu_ov32 = tcg_global_mem_new(cpu_env, 135 offsetof(CPUPPCState, ov32), "OV32"); 136 cpu_ca32 = tcg_global_mem_new(cpu_env, 137 offsetof(CPUPPCState, ca32), "CA32"); 138 139 cpu_reserve = tcg_global_mem_new(cpu_env, 140 offsetof(CPUPPCState, reserve_addr), 141 "reserve_addr"); 142 cpu_reserve_val = tcg_global_mem_new(cpu_env, 143 offsetof(CPUPPCState, reserve_val), 144 "reserve_val"); 145 146 cpu_fpscr = tcg_global_mem_new(cpu_env, 147 offsetof(CPUPPCState, fpscr), "fpscr"); 148 149 cpu_access_type = tcg_global_mem_new_i32(cpu_env, 150 offsetof(CPUPPCState, access_type), 151 "access_type"); 152 } 153 154 /* internal defines */ 155 struct DisasContext { 156 DisasContextBase base; 157 target_ulong cia; /* current instruction address */ 158 uint32_t opcode; 159 /* Routine used to access memory */ 160 bool pr, hv, dr, le_mode; 161 bool lazy_tlb_flush; 162 bool need_access_type; 163 int mem_idx; 164 int access_type; 165 /* Translation flags */ 166 MemOp default_tcg_memop_mask; 167 #if defined(TARGET_PPC64) 168 bool sf_mode; 169 bool has_cfar; 170 #endif 171 bool fpu_enabled; 172 bool altivec_enabled; 173 bool vsx_enabled; 174 bool spe_enabled; 175 bool tm_enabled; 176 bool gtse; 177 bool hr; 178 bool mmcr0_pmcc0; 179 bool mmcr0_pmcc1; 180 ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */ 181 int singlestep_enabled; 182 uint32_t flags; 183 uint64_t insns_flags; 184 uint64_t insns_flags2; 185 }; 186 187 #define DISAS_EXIT DISAS_TARGET_0 /* exit to main loop, pc updated */ 188 #define DISAS_EXIT_UPDATE DISAS_TARGET_1 /* exit to main loop, pc stale */ 189 #define DISAS_CHAIN DISAS_TARGET_2 /* lookup next tb, pc updated */ 190 #define DISAS_CHAIN_UPDATE DISAS_TARGET_3 /* lookup next tb, pc stale */ 191 192 /* Return true iff byteswap is needed in a scalar memop */ 193 static inline bool need_byteswap(const DisasContext *ctx) 194 { 195 #if defined(TARGET_WORDS_BIGENDIAN) 196 return ctx->le_mode; 197 #else 198 return !ctx->le_mode; 199 #endif 200 } 201 202 /* True when active word size < size of target_long. 
*/ 203 #ifdef TARGET_PPC64 204 # define NARROW_MODE(C) (!(C)->sf_mode) 205 #else 206 # define NARROW_MODE(C) 0 207 #endif 208 209 struct opc_handler_t { 210 /* invalid bits for instruction 1 (Rc(opcode) == 0) */ 211 uint32_t inval1; 212 /* invalid bits for instruction 2 (Rc(opcode) == 1) */ 213 uint32_t inval2; 214 /* instruction type */ 215 uint64_t type; 216 /* extended instruction type */ 217 uint64_t type2; 218 /* handler */ 219 void (*handler)(DisasContext *ctx); 220 }; 221 222 /* SPR load/store helpers */ 223 static inline void gen_load_spr(TCGv t, int reg) 224 { 225 tcg_gen_ld_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg])); 226 } 227 228 static inline void gen_store_spr(int reg, TCGv t) 229 { 230 tcg_gen_st_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg])); 231 } 232 233 static inline void gen_set_access_type(DisasContext *ctx, int access_type) 234 { 235 if (ctx->need_access_type && ctx->access_type != access_type) { 236 tcg_gen_movi_i32(cpu_access_type, access_type); 237 ctx->access_type = access_type; 238 } 239 } 240 241 static inline void gen_update_nip(DisasContext *ctx, target_ulong nip) 242 { 243 if (NARROW_MODE(ctx)) { 244 nip = (uint32_t)nip; 245 } 246 tcg_gen_movi_tl(cpu_nip, nip); 247 } 248 249 static void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error) 250 { 251 TCGv_i32 t0, t1; 252 253 /* 254 * These are all synchronous exceptions, we set the PC back to the 255 * faulting instruction 256 */ 257 gen_update_nip(ctx, ctx->cia); 258 t0 = tcg_const_i32(excp); 259 t1 = tcg_const_i32(error); 260 gen_helper_raise_exception_err(cpu_env, t0, t1); 261 tcg_temp_free_i32(t0); 262 tcg_temp_free_i32(t1); 263 ctx->base.is_jmp = DISAS_NORETURN; 264 } 265 266 static void gen_exception(DisasContext *ctx, uint32_t excp) 267 { 268 TCGv_i32 t0; 269 270 /* 271 * These are all synchronous exceptions, we set the PC back to the 272 * faulting instruction 273 */ 274 gen_update_nip(ctx, ctx->cia); 275 t0 = tcg_const_i32(excp); 276 gen_helper_raise_exception(cpu_env, t0); 277 tcg_temp_free_i32(t0); 278 ctx->base.is_jmp = DISAS_NORETURN; 279 } 280 281 static void gen_exception_nip(DisasContext *ctx, uint32_t excp, 282 target_ulong nip) 283 { 284 TCGv_i32 t0; 285 286 gen_update_nip(ctx, nip); 287 t0 = tcg_const_i32(excp); 288 gen_helper_raise_exception(cpu_env, t0); 289 tcg_temp_free_i32(t0); 290 ctx->base.is_jmp = DISAS_NORETURN; 291 } 292 293 static void gen_icount_io_start(DisasContext *ctx) 294 { 295 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) { 296 gen_io_start(); 297 /* 298 * An I/O instruction must be last in the TB. 299 * Chain to the next TB, and let the code from gen_tb_start 300 * decide if we need to return to the main loop. 301 * Doing this first also allows this value to be overridden. 302 */ 303 ctx->base.is_jmp = DISAS_TOO_MANY; 304 } 305 } 306 307 /* 308 * Tells the caller what is the appropriate exception to generate and prepares 309 * SPR registers for this exception. 310 * 311 * The exception can be either POWERPC_EXCP_TRACE (on most PowerPCs) or 312 * POWERPC_EXCP_DEBUG (on BookE). 
313 */ 314 static uint32_t gen_prep_dbgex(DisasContext *ctx) 315 { 316 if (ctx->flags & POWERPC_FLAG_DE) { 317 target_ulong dbsr = 0; 318 if (ctx->singlestep_enabled & CPU_SINGLE_STEP) { 319 dbsr = DBCR0_ICMP; 320 } else { 321 /* Must have been branch */ 322 dbsr = DBCR0_BRT; 323 } 324 TCGv t0 = tcg_temp_new(); 325 gen_load_spr(t0, SPR_BOOKE_DBSR); 326 tcg_gen_ori_tl(t0, t0, dbsr); 327 gen_store_spr(SPR_BOOKE_DBSR, t0); 328 tcg_temp_free(t0); 329 return POWERPC_EXCP_DEBUG; 330 } else { 331 return POWERPC_EXCP_TRACE; 332 } 333 } 334 335 static void gen_debug_exception(DisasContext *ctx) 336 { 337 gen_helper_raise_exception(cpu_env, tcg_constant_i32(gen_prep_dbgex(ctx))); 338 ctx->base.is_jmp = DISAS_NORETURN; 339 } 340 341 static inline void gen_inval_exception(DisasContext *ctx, uint32_t error) 342 { 343 /* Will be converted to program check if needed */ 344 gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_INVAL | error); 345 } 346 347 static inline void gen_priv_exception(DisasContext *ctx, uint32_t error) 348 { 349 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_PRIV | error); 350 } 351 352 static inline void gen_hvpriv_exception(DisasContext *ctx, uint32_t error) 353 { 354 /* Will be converted to program check if needed */ 355 gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_PRIV | error); 356 } 357 358 /*****************************************************************************/ 359 /* SPR READ/WRITE CALLBACKS */ 360 361 void spr_noaccess(DisasContext *ctx, int gprn, int sprn) 362 { 363 #if 0 364 sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5); 365 printf("ERROR: try to access SPR %d !\n", sprn); 366 #endif 367 } 368 369 /* #define PPC_DUMP_SPR_ACCESSES */ 370 371 /* 372 * Generic callbacks: 373 * do nothing but store/retrieve spr value 374 */ 375 static void spr_load_dump_spr(int sprn) 376 { 377 #ifdef PPC_DUMP_SPR_ACCESSES 378 TCGv_i32 t0 = tcg_const_i32(sprn); 379 gen_helper_load_dump_spr(cpu_env, t0); 380 tcg_temp_free_i32(t0); 381 #endif 382 } 383 384 void spr_read_generic(DisasContext *ctx, int gprn, int sprn) 385 { 386 gen_load_spr(cpu_gpr[gprn], sprn); 387 spr_load_dump_spr(sprn); 388 } 389 390 static void spr_store_dump_spr(int sprn) 391 { 392 #ifdef PPC_DUMP_SPR_ACCESSES 393 TCGv_i32 t0 = tcg_const_i32(sprn); 394 gen_helper_store_dump_spr(cpu_env, t0); 395 tcg_temp_free_i32(t0); 396 #endif 397 } 398 399 void spr_write_generic(DisasContext *ctx, int sprn, int gprn) 400 { 401 gen_store_spr(sprn, cpu_gpr[gprn]); 402 spr_store_dump_spr(sprn); 403 } 404 405 #if !defined(CONFIG_USER_ONLY) 406 void spr_write_generic32(DisasContext *ctx, int sprn, int gprn) 407 { 408 #ifdef TARGET_PPC64 409 TCGv t0 = tcg_temp_new(); 410 tcg_gen_ext32u_tl(t0, cpu_gpr[gprn]); 411 gen_store_spr(sprn, t0); 412 tcg_temp_free(t0); 413 spr_store_dump_spr(sprn); 414 #else 415 spr_write_generic(ctx, sprn, gprn); 416 #endif 417 } 418 419 void spr_write_clear(DisasContext *ctx, int sprn, int gprn) 420 { 421 TCGv t0 = tcg_temp_new(); 422 TCGv t1 = tcg_temp_new(); 423 gen_load_spr(t0, sprn); 424 tcg_gen_neg_tl(t1, cpu_gpr[gprn]); 425 tcg_gen_and_tl(t0, t0, t1); 426 gen_store_spr(sprn, t0); 427 tcg_temp_free(t0); 428 tcg_temp_free(t1); 429 } 430 431 void spr_access_nop(DisasContext *ctx, int sprn, int gprn) 432 { 433 } 434 435 #endif 436 437 /* SPR common to all PowerPC */ 438 /* XER */ 439 void spr_read_xer(DisasContext *ctx, int gprn, int sprn) 440 { 441 TCGv dst = cpu_gpr[gprn]; 442 TCGv t0 = tcg_temp_new(); 443 TCGv t1 = tcg_temp_new(); 444 TCGv t2 = tcg_temp_new(); 445 
tcg_gen_mov_tl(dst, cpu_xer); 446 tcg_gen_shli_tl(t0, cpu_so, XER_SO); 447 tcg_gen_shli_tl(t1, cpu_ov, XER_OV); 448 tcg_gen_shli_tl(t2, cpu_ca, XER_CA); 449 tcg_gen_or_tl(t0, t0, t1); 450 tcg_gen_or_tl(dst, dst, t2); 451 tcg_gen_or_tl(dst, dst, t0); 452 if (is_isa300(ctx)) { 453 tcg_gen_shli_tl(t0, cpu_ov32, XER_OV32); 454 tcg_gen_or_tl(dst, dst, t0); 455 tcg_gen_shli_tl(t0, cpu_ca32, XER_CA32); 456 tcg_gen_or_tl(dst, dst, t0); 457 } 458 tcg_temp_free(t0); 459 tcg_temp_free(t1); 460 tcg_temp_free(t2); 461 } 462 463 void spr_write_xer(DisasContext *ctx, int sprn, int gprn) 464 { 465 TCGv src = cpu_gpr[gprn]; 466 /* Write all flags, while reading back check for isa300 */ 467 tcg_gen_andi_tl(cpu_xer, src, 468 ~((1u << XER_SO) | 469 (1u << XER_OV) | (1u << XER_OV32) | 470 (1u << XER_CA) | (1u << XER_CA32))); 471 tcg_gen_extract_tl(cpu_ov32, src, XER_OV32, 1); 472 tcg_gen_extract_tl(cpu_ca32, src, XER_CA32, 1); 473 tcg_gen_extract_tl(cpu_so, src, XER_SO, 1); 474 tcg_gen_extract_tl(cpu_ov, src, XER_OV, 1); 475 tcg_gen_extract_tl(cpu_ca, src, XER_CA, 1); 476 } 477 478 /* LR */ 479 void spr_read_lr(DisasContext *ctx, int gprn, int sprn) 480 { 481 tcg_gen_mov_tl(cpu_gpr[gprn], cpu_lr); 482 } 483 484 void spr_write_lr(DisasContext *ctx, int sprn, int gprn) 485 { 486 tcg_gen_mov_tl(cpu_lr, cpu_gpr[gprn]); 487 } 488 489 /* CFAR */ 490 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) 491 void spr_read_cfar(DisasContext *ctx, int gprn, int sprn) 492 { 493 tcg_gen_mov_tl(cpu_gpr[gprn], cpu_cfar); 494 } 495 496 void spr_write_cfar(DisasContext *ctx, int sprn, int gprn) 497 { 498 tcg_gen_mov_tl(cpu_cfar, cpu_gpr[gprn]); 499 } 500 #endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */ 501 502 /* CTR */ 503 void spr_read_ctr(DisasContext *ctx, int gprn, int sprn) 504 { 505 tcg_gen_mov_tl(cpu_gpr[gprn], cpu_ctr); 506 } 507 508 void spr_write_ctr(DisasContext *ctx, int sprn, int gprn) 509 { 510 tcg_gen_mov_tl(cpu_ctr, cpu_gpr[gprn]); 511 } 512 513 /* User read access to SPR */ 514 /* USPRx */ 515 /* UMMCRx */ 516 /* UPMCx */ 517 /* USIA */ 518 /* UDECR */ 519 void spr_read_ureg(DisasContext *ctx, int gprn, int sprn) 520 { 521 gen_load_spr(cpu_gpr[gprn], sprn + 0x10); 522 } 523 524 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) 525 void spr_write_ureg(DisasContext *ctx, int sprn, int gprn) 526 { 527 gen_store_spr(sprn + 0x10, cpu_gpr[gprn]); 528 } 529 #endif 530 531 /* SPR common to all non-embedded PowerPC */ 532 /* DECR */ 533 #if !defined(CONFIG_USER_ONLY) 534 void spr_read_decr(DisasContext *ctx, int gprn, int sprn) 535 { 536 gen_icount_io_start(ctx); 537 gen_helper_load_decr(cpu_gpr[gprn], cpu_env); 538 } 539 540 void spr_write_decr(DisasContext *ctx, int sprn, int gprn) 541 { 542 gen_icount_io_start(ctx); 543 gen_helper_store_decr(cpu_env, cpu_gpr[gprn]); 544 } 545 #endif 546 547 /* SPR common to all non-embedded PowerPC, except 601 */ 548 /* Time base */ 549 void spr_read_tbl(DisasContext *ctx, int gprn, int sprn) 550 { 551 gen_icount_io_start(ctx); 552 gen_helper_load_tbl(cpu_gpr[gprn], cpu_env); 553 } 554 555 void spr_read_tbu(DisasContext *ctx, int gprn, int sprn) 556 { 557 gen_icount_io_start(ctx); 558 gen_helper_load_tbu(cpu_gpr[gprn], cpu_env); 559 } 560 561 void spr_read_atbl(DisasContext *ctx, int gprn, int sprn) 562 { 563 gen_helper_load_atbl(cpu_gpr[gprn], cpu_env); 564 } 565 566 void spr_read_atbu(DisasContext *ctx, int gprn, int sprn) 567 { 568 gen_helper_load_atbu(cpu_gpr[gprn], cpu_env); 569 } 570 571 #if !defined(CONFIG_USER_ONLY) 572 void 
spr_write_tbl(DisasContext *ctx, int sprn, int gprn) 573 { 574 gen_icount_io_start(ctx); 575 gen_helper_store_tbl(cpu_env, cpu_gpr[gprn]); 576 } 577 578 void spr_write_tbu(DisasContext *ctx, int sprn, int gprn) 579 { 580 gen_icount_io_start(ctx); 581 gen_helper_store_tbu(cpu_env, cpu_gpr[gprn]); 582 } 583 584 void spr_write_atbl(DisasContext *ctx, int sprn, int gprn) 585 { 586 gen_helper_store_atbl(cpu_env, cpu_gpr[gprn]); 587 } 588 589 void spr_write_atbu(DisasContext *ctx, int sprn, int gprn) 590 { 591 gen_helper_store_atbu(cpu_env, cpu_gpr[gprn]); 592 } 593 594 #if defined(TARGET_PPC64) 595 void spr_read_purr(DisasContext *ctx, int gprn, int sprn) 596 { 597 gen_icount_io_start(ctx); 598 gen_helper_load_purr(cpu_gpr[gprn], cpu_env); 599 } 600 601 void spr_write_purr(DisasContext *ctx, int sprn, int gprn) 602 { 603 gen_icount_io_start(ctx); 604 gen_helper_store_purr(cpu_env, cpu_gpr[gprn]); 605 } 606 607 /* HDECR */ 608 void spr_read_hdecr(DisasContext *ctx, int gprn, int sprn) 609 { 610 gen_icount_io_start(ctx); 611 gen_helper_load_hdecr(cpu_gpr[gprn], cpu_env); 612 } 613 614 void spr_write_hdecr(DisasContext *ctx, int sprn, int gprn) 615 { 616 gen_icount_io_start(ctx); 617 gen_helper_store_hdecr(cpu_env, cpu_gpr[gprn]); 618 } 619 620 void spr_read_vtb(DisasContext *ctx, int gprn, int sprn) 621 { 622 gen_icount_io_start(ctx); 623 gen_helper_load_vtb(cpu_gpr[gprn], cpu_env); 624 } 625 626 void spr_write_vtb(DisasContext *ctx, int sprn, int gprn) 627 { 628 gen_icount_io_start(ctx); 629 gen_helper_store_vtb(cpu_env, cpu_gpr[gprn]); 630 } 631 632 void spr_write_tbu40(DisasContext *ctx, int sprn, int gprn) 633 { 634 gen_icount_io_start(ctx); 635 gen_helper_store_tbu40(cpu_env, cpu_gpr[gprn]); 636 } 637 638 #endif 639 #endif 640 641 #if !defined(CONFIG_USER_ONLY) 642 /* IBAT0U...IBAT0U */ 643 /* IBAT0L...IBAT7L */ 644 void spr_read_ibat(DisasContext *ctx, int gprn, int sprn) 645 { 646 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env, 647 offsetof(CPUPPCState, 648 IBAT[sprn & 1][(sprn - SPR_IBAT0U) / 2])); 649 } 650 651 void spr_read_ibat_h(DisasContext *ctx, int gprn, int sprn) 652 { 653 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env, 654 offsetof(CPUPPCState, 655 IBAT[sprn & 1][((sprn - SPR_IBAT4U) / 2) + 4])); 656 } 657 658 void spr_write_ibatu(DisasContext *ctx, int sprn, int gprn) 659 { 660 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2); 661 gen_helper_store_ibatu(cpu_env, t0, cpu_gpr[gprn]); 662 tcg_temp_free_i32(t0); 663 } 664 665 void spr_write_ibatu_h(DisasContext *ctx, int sprn, int gprn) 666 { 667 TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_IBAT4U) / 2) + 4); 668 gen_helper_store_ibatu(cpu_env, t0, cpu_gpr[gprn]); 669 tcg_temp_free_i32(t0); 670 } 671 672 void spr_write_ibatl(DisasContext *ctx, int sprn, int gprn) 673 { 674 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0L) / 2); 675 gen_helper_store_ibatl(cpu_env, t0, cpu_gpr[gprn]); 676 tcg_temp_free_i32(t0); 677 } 678 679 void spr_write_ibatl_h(DisasContext *ctx, int sprn, int gprn) 680 { 681 TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_IBAT4L) / 2) + 4); 682 gen_helper_store_ibatl(cpu_env, t0, cpu_gpr[gprn]); 683 tcg_temp_free_i32(t0); 684 } 685 686 /* DBAT0U...DBAT7U */ 687 /* DBAT0L...DBAT7L */ 688 void spr_read_dbat(DisasContext *ctx, int gprn, int sprn) 689 { 690 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env, 691 offsetof(CPUPPCState, 692 DBAT[sprn & 1][(sprn - SPR_DBAT0U) / 2])); 693 } 694 695 void spr_read_dbat_h(DisasContext *ctx, int gprn, int sprn) 696 { 697 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env, 698 offsetof(CPUPPCState, 699 DBAT[sprn & 1][((sprn - 
SPR_DBAT4U) / 2) + 4])); 700 } 701 702 void spr_write_dbatu(DisasContext *ctx, int sprn, int gprn) 703 { 704 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_DBAT0U) / 2); 705 gen_helper_store_dbatu(cpu_env, t0, cpu_gpr[gprn]); 706 tcg_temp_free_i32(t0); 707 } 708 709 void spr_write_dbatu_h(DisasContext *ctx, int sprn, int gprn) 710 { 711 TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_DBAT4U) / 2) + 4); 712 gen_helper_store_dbatu(cpu_env, t0, cpu_gpr[gprn]); 713 tcg_temp_free_i32(t0); 714 } 715 716 void spr_write_dbatl(DisasContext *ctx, int sprn, int gprn) 717 { 718 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_DBAT0L) / 2); 719 gen_helper_store_dbatl(cpu_env, t0, cpu_gpr[gprn]); 720 tcg_temp_free_i32(t0); 721 } 722 723 void spr_write_dbatl_h(DisasContext *ctx, int sprn, int gprn) 724 { 725 TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_DBAT4L) / 2) + 4); 726 gen_helper_store_dbatl(cpu_env, t0, cpu_gpr[gprn]); 727 tcg_temp_free_i32(t0); 728 } 729 730 /* SDR1 */ 731 void spr_write_sdr1(DisasContext *ctx, int sprn, int gprn) 732 { 733 gen_helper_store_sdr1(cpu_env, cpu_gpr[gprn]); 734 } 735 736 #if defined(TARGET_PPC64) 737 /* 64 bits PowerPC specific SPRs */ 738 /* PIDR */ 739 void spr_write_pidr(DisasContext *ctx, int sprn, int gprn) 740 { 741 gen_helper_store_pidr(cpu_env, cpu_gpr[gprn]); 742 } 743 744 void spr_write_lpidr(DisasContext *ctx, int sprn, int gprn) 745 { 746 gen_helper_store_lpidr(cpu_env, cpu_gpr[gprn]); 747 } 748 749 void spr_read_hior(DisasContext *ctx, int gprn, int sprn) 750 { 751 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env, offsetof(CPUPPCState, excp_prefix)); 752 } 753 754 void spr_write_hior(DisasContext *ctx, int sprn, int gprn) 755 { 756 TCGv t0 = tcg_temp_new(); 757 tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0x3FFFFF00000ULL); 758 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_prefix)); 759 tcg_temp_free(t0); 760 } 761 void spr_write_ptcr(DisasContext *ctx, int sprn, int gprn) 762 { 763 gen_helper_store_ptcr(cpu_env, cpu_gpr[gprn]); 764 } 765 766 void spr_write_pcr(DisasContext *ctx, int sprn, int gprn) 767 { 768 gen_helper_store_pcr(cpu_env, cpu_gpr[gprn]); 769 } 770 771 /* DPDES */ 772 void spr_read_dpdes(DisasContext *ctx, int gprn, int sprn) 773 { 774 gen_helper_load_dpdes(cpu_gpr[gprn], cpu_env); 775 } 776 777 void spr_write_dpdes(DisasContext *ctx, int sprn, int gprn) 778 { 779 gen_helper_store_dpdes(cpu_env, cpu_gpr[gprn]); 780 } 781 #endif 782 #endif 783 784 /* PowerPC 601 specific registers */ 785 /* RTC */ 786 void spr_read_601_rtcl(DisasContext *ctx, int gprn, int sprn) 787 { 788 gen_helper_load_601_rtcl(cpu_gpr[gprn], cpu_env); 789 } 790 791 void spr_read_601_rtcu(DisasContext *ctx, int gprn, int sprn) 792 { 793 gen_helper_load_601_rtcu(cpu_gpr[gprn], cpu_env); 794 } 795 796 #if !defined(CONFIG_USER_ONLY) 797 void spr_write_601_rtcu(DisasContext *ctx, int sprn, int gprn) 798 { 799 gen_helper_store_601_rtcu(cpu_env, cpu_gpr[gprn]); 800 } 801 802 void spr_write_601_rtcl(DisasContext *ctx, int sprn, int gprn) 803 { 804 gen_helper_store_601_rtcl(cpu_env, cpu_gpr[gprn]); 805 } 806 807 void spr_write_hid0_601(DisasContext *ctx, int sprn, int gprn) 808 { 809 gen_helper_store_hid0_601(cpu_env, cpu_gpr[gprn]); 810 /* Must stop the translation as endianness may have changed */ 811 ctx->base.is_jmp = DISAS_EXIT_UPDATE; 812 } 813 #endif 814 815 /* Unified bats */ 816 #if !defined(CONFIG_USER_ONLY) 817 void spr_read_601_ubat(DisasContext *ctx, int gprn, int sprn) 818 { 819 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env, 820 offsetof(CPUPPCState, 821 IBAT[sprn & 1][(sprn - SPR_IBAT0U) / 2])); 822 } 823 824 
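/*
 * Illustrative sketch (not part of the translator): the BAT callbacks above
 * and below all recover an array slot from the SPR number with the same
 * arithmetic, "(sprn - SPR_IBAT0U) / 2" (or the DBAT/xBAT4U equivalents) for
 * the pair index and "sprn & 1" to pick the upper or lower word.  The
 * standalone program below only demonstrates that mapping; the numeric base
 * value is an assumption made for the example, not taken from this file.
 *
 *   #include <stdio.h>
 *
 *   int main(void)
 *   {
 *       enum { SPR_IBAT0U = 528 };   // assumed base; upper/lower interleaved
 *
 *       for (int sprn = SPR_IBAT0U; sprn < SPR_IBAT0U + 8; sprn++) {
 *           int pair = (sprn - SPR_IBAT0U) / 2;   // BAT pair number 0..3
 *           int word = sprn & 1;                  // selects upper or lower word
 *           printf("SPR %d -> IBAT[%d][%d]\n", sprn, word, pair);
 *       }
 *       return 0;
 *   }
 */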
void spr_write_601_ubatu(DisasContext *ctx, int sprn, int gprn) 825 { 826 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2); 827 gen_helper_store_601_batl(cpu_env, t0, cpu_gpr[gprn]); 828 tcg_temp_free_i32(t0); 829 } 830 831 void spr_write_601_ubatl(DisasContext *ctx, int sprn, int gprn) 832 { 833 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2); 834 gen_helper_store_601_batu(cpu_env, t0, cpu_gpr[gprn]); 835 tcg_temp_free_i32(t0); 836 } 837 #endif 838 839 /* PowerPC 40x specific registers */ 840 #if !defined(CONFIG_USER_ONLY) 841 void spr_read_40x_pit(DisasContext *ctx, int gprn, int sprn) 842 { 843 gen_icount_io_start(ctx); 844 gen_helper_load_40x_pit(cpu_gpr[gprn], cpu_env); 845 } 846 847 void spr_write_40x_pit(DisasContext *ctx, int sprn, int gprn) 848 { 849 gen_icount_io_start(ctx); 850 gen_helper_store_40x_pit(cpu_env, cpu_gpr[gprn]); 851 } 852 853 void spr_write_40x_dbcr0(DisasContext *ctx, int sprn, int gprn) 854 { 855 gen_icount_io_start(ctx); 856 gen_store_spr(sprn, cpu_gpr[gprn]); 857 gen_helper_store_40x_dbcr0(cpu_env, cpu_gpr[gprn]); 858 /* We must stop translation as we may have rebooted */ 859 ctx->base.is_jmp = DISAS_EXIT_UPDATE; 860 } 861 862 void spr_write_40x_sler(DisasContext *ctx, int sprn, int gprn) 863 { 864 gen_icount_io_start(ctx); 865 gen_helper_store_40x_sler(cpu_env, cpu_gpr[gprn]); 866 } 867 868 void spr_write_booke_tcr(DisasContext *ctx, int sprn, int gprn) 869 { 870 gen_icount_io_start(ctx); 871 gen_helper_store_booke_tcr(cpu_env, cpu_gpr[gprn]); 872 } 873 874 void spr_write_booke_tsr(DisasContext *ctx, int sprn, int gprn) 875 { 876 gen_icount_io_start(ctx); 877 gen_helper_store_booke_tsr(cpu_env, cpu_gpr[gprn]); 878 } 879 #endif 880 881 /* PowerPC 403 specific registers */ 882 /* PBL1 / PBU1 / PBL2 / PBU2 */ 883 #if !defined(CONFIG_USER_ONLY) 884 void spr_read_403_pbr(DisasContext *ctx, int gprn, int sprn) 885 { 886 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env, 887 offsetof(CPUPPCState, pb[sprn - SPR_403_PBL1])); 888 } 889 890 void spr_write_403_pbr(DisasContext *ctx, int sprn, int gprn) 891 { 892 TCGv_i32 t0 = tcg_const_i32(sprn - SPR_403_PBL1); 893 gen_helper_store_403_pbr(cpu_env, t0, cpu_gpr[gprn]); 894 tcg_temp_free_i32(t0); 895 } 896 897 void spr_write_pir(DisasContext *ctx, int sprn, int gprn) 898 { 899 TCGv t0 = tcg_temp_new(); 900 tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0xF); 901 gen_store_spr(SPR_PIR, t0); 902 tcg_temp_free(t0); 903 } 904 #endif 905 906 /* SPE specific registers */ 907 void spr_read_spefscr(DisasContext *ctx, int gprn, int sprn) 908 { 909 TCGv_i32 t0 = tcg_temp_new_i32(); 910 tcg_gen_ld_i32(t0, cpu_env, offsetof(CPUPPCState, spe_fscr)); 911 tcg_gen_extu_i32_tl(cpu_gpr[gprn], t0); 912 tcg_temp_free_i32(t0); 913 } 914 915 void spr_write_spefscr(DisasContext *ctx, int sprn, int gprn) 916 { 917 TCGv_i32 t0 = tcg_temp_new_i32(); 918 tcg_gen_trunc_tl_i32(t0, cpu_gpr[gprn]); 919 tcg_gen_st_i32(t0, cpu_env, offsetof(CPUPPCState, spe_fscr)); 920 tcg_temp_free_i32(t0); 921 } 922 923 #if !defined(CONFIG_USER_ONLY) 924 /* Callback used to write the exception vector base */ 925 void spr_write_excp_prefix(DisasContext *ctx, int sprn, int gprn) 926 { 927 TCGv t0 = tcg_temp_new(); 928 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUPPCState, ivpr_mask)); 929 tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]); 930 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_prefix)); 931 gen_store_spr(sprn, t0); 932 tcg_temp_free(t0); 933 } 934 935 void spr_write_excp_vector(DisasContext *ctx, int sprn, int gprn) 936 { 937 int sprn_offs; 938 939 if (sprn >= SPR_BOOKE_IVOR0 && sprn <= 
SPR_BOOKE_IVOR15) { 940 sprn_offs = sprn - SPR_BOOKE_IVOR0; 941 } else if (sprn >= SPR_BOOKE_IVOR32 && sprn <= SPR_BOOKE_IVOR37) { 942 sprn_offs = sprn - SPR_BOOKE_IVOR32 + 32; 943 } else if (sprn >= SPR_BOOKE_IVOR38 && sprn <= SPR_BOOKE_IVOR42) { 944 sprn_offs = sprn - SPR_BOOKE_IVOR38 + 38; 945 } else { 946 printf("Trying to write an unknown exception vector %d %03x\n", 947 sprn, sprn); 948 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); 949 return; 950 } 951 952 TCGv t0 = tcg_temp_new(); 953 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUPPCState, ivor_mask)); 954 tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]); 955 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_vectors[sprn_offs])); 956 gen_store_spr(sprn, t0); 957 tcg_temp_free(t0); 958 } 959 #endif 960 961 #ifdef TARGET_PPC64 962 #ifndef CONFIG_USER_ONLY 963 void spr_write_amr(DisasContext *ctx, int sprn, int gprn) 964 { 965 TCGv t0 = tcg_temp_new(); 966 TCGv t1 = tcg_temp_new(); 967 TCGv t2 = tcg_temp_new(); 968 969 /* 970 * Note, the HV=1 PR=0 case is handled earlier by simply using 971 * spr_write_generic for HV mode in the SPR table 972 */ 973 974 /* Build insertion mask into t1 based on context */ 975 if (ctx->pr) { 976 gen_load_spr(t1, SPR_UAMOR); 977 } else { 978 gen_load_spr(t1, SPR_AMOR); 979 } 980 981 /* Mask new bits into t2 */ 982 tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]); 983 984 /* Load AMR and clear new bits in t0 */ 985 gen_load_spr(t0, SPR_AMR); 986 tcg_gen_andc_tl(t0, t0, t1); 987 988 /* Or'in new bits and write it out */ 989 tcg_gen_or_tl(t0, t0, t2); 990 gen_store_spr(SPR_AMR, t0); 991 spr_store_dump_spr(SPR_AMR); 992 993 tcg_temp_free(t0); 994 tcg_temp_free(t1); 995 tcg_temp_free(t2); 996 } 997 998 void spr_write_uamor(DisasContext *ctx, int sprn, int gprn) 999 { 1000 TCGv t0 = tcg_temp_new(); 1001 TCGv t1 = tcg_temp_new(); 1002 TCGv t2 = tcg_temp_new(); 1003 1004 /* 1005 * Note, the HV=1 case is handled earlier by simply using 1006 * spr_write_generic for HV mode in the SPR table 1007 */ 1008 1009 /* Build insertion mask into t1 based on context */ 1010 gen_load_spr(t1, SPR_AMOR); 1011 1012 /* Mask new bits into t2 */ 1013 tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]); 1014 1015 /* Load AMR and clear new bits in t0 */ 1016 gen_load_spr(t0, SPR_UAMOR); 1017 tcg_gen_andc_tl(t0, t0, t1); 1018 1019 /* Or'in new bits and write it out */ 1020 tcg_gen_or_tl(t0, t0, t2); 1021 gen_store_spr(SPR_UAMOR, t0); 1022 spr_store_dump_spr(SPR_UAMOR); 1023 1024 tcg_temp_free(t0); 1025 tcg_temp_free(t1); 1026 tcg_temp_free(t2); 1027 } 1028 1029 void spr_write_iamr(DisasContext *ctx, int sprn, int gprn) 1030 { 1031 TCGv t0 = tcg_temp_new(); 1032 TCGv t1 = tcg_temp_new(); 1033 TCGv t2 = tcg_temp_new(); 1034 1035 /* 1036 * Note, the HV=1 case is handled earlier by simply using 1037 * spr_write_generic for HV mode in the SPR table 1038 */ 1039 1040 /* Build insertion mask into t1 based on context */ 1041 gen_load_spr(t1, SPR_AMOR); 1042 1043 /* Mask new bits into t2 */ 1044 tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]); 1045 1046 /* Load AMR and clear new bits in t0 */ 1047 gen_load_spr(t0, SPR_IAMR); 1048 tcg_gen_andc_tl(t0, t0, t1); 1049 1050 /* Or'in new bits and write it out */ 1051 tcg_gen_or_tl(t0, t0, t2); 1052 gen_store_spr(SPR_IAMR, t0); 1053 spr_store_dump_spr(SPR_IAMR); 1054 1055 tcg_temp_free(t0); 1056 tcg_temp_free(t1); 1057 tcg_temp_free(t2); 1058 } 1059 #endif 1060 #endif 1061 1062 #ifndef CONFIG_USER_ONLY 1063 void spr_read_thrm(DisasContext *ctx, int gprn, int sprn) 1064 { 1065 gen_helper_fixup_thrm(cpu_env); 1066 gen_load_spr(cpu_gpr[gprn], sprn); 
1067 spr_load_dump_spr(sprn); 1068 } 1069 #endif /* !CONFIG_USER_ONLY */ 1070 1071 #if !defined(CONFIG_USER_ONLY) 1072 void spr_write_e500_l1csr0(DisasContext *ctx, int sprn, int gprn) 1073 { 1074 TCGv t0 = tcg_temp_new(); 1075 1076 tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR0_DCE | L1CSR0_CPE); 1077 gen_store_spr(sprn, t0); 1078 tcg_temp_free(t0); 1079 } 1080 1081 void spr_write_e500_l1csr1(DisasContext *ctx, int sprn, int gprn) 1082 { 1083 TCGv t0 = tcg_temp_new(); 1084 1085 tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR1_ICE | L1CSR1_CPE); 1086 gen_store_spr(sprn, t0); 1087 tcg_temp_free(t0); 1088 } 1089 1090 void spr_write_e500_l2csr0(DisasContext *ctx, int sprn, int gprn) 1091 { 1092 TCGv t0 = tcg_temp_new(); 1093 1094 tcg_gen_andi_tl(t0, cpu_gpr[gprn], 1095 ~(E500_L2CSR0_L2FI | E500_L2CSR0_L2FL | E500_L2CSR0_L2LFC)); 1096 gen_store_spr(sprn, t0); 1097 tcg_temp_free(t0); 1098 } 1099 1100 void spr_write_booke206_mmucsr0(DisasContext *ctx, int sprn, int gprn) 1101 { 1102 gen_helper_booke206_tlbflush(cpu_env, cpu_gpr[gprn]); 1103 } 1104 1105 void spr_write_booke_pid(DisasContext *ctx, int sprn, int gprn) 1106 { 1107 TCGv_i32 t0 = tcg_const_i32(sprn); 1108 gen_helper_booke_setpid(cpu_env, t0, cpu_gpr[gprn]); 1109 tcg_temp_free_i32(t0); 1110 } 1111 void spr_write_eplc(DisasContext *ctx, int sprn, int gprn) 1112 { 1113 gen_helper_booke_set_eplc(cpu_env, cpu_gpr[gprn]); 1114 } 1115 void spr_write_epsc(DisasContext *ctx, int sprn, int gprn) 1116 { 1117 gen_helper_booke_set_epsc(cpu_env, cpu_gpr[gprn]); 1118 } 1119 1120 #endif 1121 1122 #if !defined(CONFIG_USER_ONLY) 1123 void spr_write_mas73(DisasContext *ctx, int sprn, int gprn) 1124 { 1125 TCGv val = tcg_temp_new(); 1126 tcg_gen_ext32u_tl(val, cpu_gpr[gprn]); 1127 gen_store_spr(SPR_BOOKE_MAS3, val); 1128 tcg_gen_shri_tl(val, cpu_gpr[gprn], 32); 1129 gen_store_spr(SPR_BOOKE_MAS7, val); 1130 tcg_temp_free(val); 1131 } 1132 1133 void spr_read_mas73(DisasContext *ctx, int gprn, int sprn) 1134 { 1135 TCGv mas7 = tcg_temp_new(); 1136 TCGv mas3 = tcg_temp_new(); 1137 gen_load_spr(mas7, SPR_BOOKE_MAS7); 1138 tcg_gen_shli_tl(mas7, mas7, 32); 1139 gen_load_spr(mas3, SPR_BOOKE_MAS3); 1140 tcg_gen_or_tl(cpu_gpr[gprn], mas3, mas7); 1141 tcg_temp_free(mas3); 1142 tcg_temp_free(mas7); 1143 } 1144 1145 #endif 1146 1147 #ifdef TARGET_PPC64 1148 static void gen_fscr_facility_check(DisasContext *ctx, int facility_sprn, 1149 int bit, int sprn, int cause) 1150 { 1151 TCGv_i32 t1 = tcg_const_i32(bit); 1152 TCGv_i32 t2 = tcg_const_i32(sprn); 1153 TCGv_i32 t3 = tcg_const_i32(cause); 1154 1155 gen_helper_fscr_facility_check(cpu_env, t1, t2, t3); 1156 1157 tcg_temp_free_i32(t3); 1158 tcg_temp_free_i32(t2); 1159 tcg_temp_free_i32(t1); 1160 } 1161 1162 static void gen_msr_facility_check(DisasContext *ctx, int facility_sprn, 1163 int bit, int sprn, int cause) 1164 { 1165 TCGv_i32 t1 = tcg_const_i32(bit); 1166 TCGv_i32 t2 = tcg_const_i32(sprn); 1167 TCGv_i32 t3 = tcg_const_i32(cause); 1168 1169 gen_helper_msr_facility_check(cpu_env, t1, t2, t3); 1170 1171 tcg_temp_free_i32(t3); 1172 tcg_temp_free_i32(t2); 1173 tcg_temp_free_i32(t1); 1174 } 1175 1176 void spr_read_prev_upper32(DisasContext *ctx, int gprn, int sprn) 1177 { 1178 TCGv spr_up = tcg_temp_new(); 1179 TCGv spr = tcg_temp_new(); 1180 1181 gen_load_spr(spr, sprn - 1); 1182 tcg_gen_shri_tl(spr_up, spr, 32); 1183 tcg_gen_ext32u_tl(cpu_gpr[gprn], spr_up); 1184 1185 tcg_temp_free(spr); 1186 tcg_temp_free(spr_up); 1187 } 1188 1189 void spr_write_prev_upper32(DisasContext *ctx, int sprn, int gprn) 1190 { 1191 TCGv spr = 
tcg_temp_new(); 1192 1193 gen_load_spr(spr, sprn - 1); 1194 tcg_gen_deposit_tl(spr, spr, cpu_gpr[gprn], 32, 32); 1195 gen_store_spr(sprn - 1, spr); 1196 1197 tcg_temp_free(spr); 1198 } 1199 1200 #if !defined(CONFIG_USER_ONLY) 1201 void spr_write_hmer(DisasContext *ctx, int sprn, int gprn) 1202 { 1203 TCGv hmer = tcg_temp_new(); 1204 1205 gen_load_spr(hmer, sprn); 1206 tcg_gen_and_tl(hmer, cpu_gpr[gprn], hmer); 1207 gen_store_spr(sprn, hmer); 1208 spr_store_dump_spr(sprn); 1209 tcg_temp_free(hmer); 1210 } 1211 1212 void spr_write_lpcr(DisasContext *ctx, int sprn, int gprn) 1213 { 1214 gen_helper_store_lpcr(cpu_env, cpu_gpr[gprn]); 1215 } 1216 #endif /* !defined(CONFIG_USER_ONLY) */ 1217 1218 void spr_read_tar(DisasContext *ctx, int gprn, int sprn) 1219 { 1220 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR); 1221 spr_read_generic(ctx, gprn, sprn); 1222 } 1223 1224 void spr_write_tar(DisasContext *ctx, int sprn, int gprn) 1225 { 1226 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR); 1227 spr_write_generic(ctx, sprn, gprn); 1228 } 1229 1230 void spr_read_tm(DisasContext *ctx, int gprn, int sprn) 1231 { 1232 gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM); 1233 spr_read_generic(ctx, gprn, sprn); 1234 } 1235 1236 void spr_write_tm(DisasContext *ctx, int sprn, int gprn) 1237 { 1238 gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM); 1239 spr_write_generic(ctx, sprn, gprn); 1240 } 1241 1242 void spr_read_tm_upper32(DisasContext *ctx, int gprn, int sprn) 1243 { 1244 gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM); 1245 spr_read_prev_upper32(ctx, gprn, sprn); 1246 } 1247 1248 void spr_write_tm_upper32(DisasContext *ctx, int sprn, int gprn) 1249 { 1250 gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM); 1251 spr_write_prev_upper32(ctx, sprn, gprn); 1252 } 1253 1254 void spr_read_ebb(DisasContext *ctx, int gprn, int sprn) 1255 { 1256 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB); 1257 spr_read_generic(ctx, gprn, sprn); 1258 } 1259 1260 void spr_write_ebb(DisasContext *ctx, int sprn, int gprn) 1261 { 1262 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB); 1263 spr_write_generic(ctx, sprn, gprn); 1264 } 1265 1266 void spr_read_ebb_upper32(DisasContext *ctx, int gprn, int sprn) 1267 { 1268 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB); 1269 spr_read_prev_upper32(ctx, gprn, sprn); 1270 } 1271 1272 void spr_write_ebb_upper32(DisasContext *ctx, int sprn, int gprn) 1273 { 1274 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB); 1275 spr_write_prev_upper32(ctx, sprn, gprn); 1276 } 1277 #endif 1278 1279 #define GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \ 1280 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE) 1281 1282 #define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2) \ 1283 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2) 1284 1285 #define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type) \ 1286 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE) 1287 1288 #define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2) \ 1289 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2) 1290 1291 #define GEN_HANDLER_E_2(name, opc1, opc2, opc3, opc4, inval, type, type2) \ 1292 GEN_OPCODE3(name, opc1, opc2, opc3, opc4, inval, type, type2) 1293 1294 #define GEN_HANDLER2_E_2(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) \ 1295 GEN_OPCODE4(name, onam, opc1, opc2, opc3, 
opc4, inval, typ, typ2)

typedef struct opcode_t {
    unsigned char opc1, opc2, opc3, opc4;
#if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
    unsigned char pad[4];
#endif
    opc_handler_t handler;
    const char *oname;
} opcode_t;

/* Helpers for priv. check */
#define GEN_PRIV                                                \
    do {                                                        \
        gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); return; \
    } while (0)

#if defined(CONFIG_USER_ONLY)
#define CHK_HV GEN_PRIV
#define CHK_SV GEN_PRIV
#define CHK_HVRM GEN_PRIV
#else
#define CHK_HV                                   \
    do {                                         \
        if (unlikely(ctx->pr || !ctx->hv)) {     \
            GEN_PRIV;                            \
        }                                        \
    } while (0)
#define CHK_SV                                   \
    do {                                         \
        if (unlikely(ctx->pr)) {                 \
            GEN_PRIV;                            \
        }                                        \
    } while (0)
#define CHK_HVRM                                          \
    do {                                                  \
        if (unlikely(ctx->pr || !ctx->hv || ctx->dr)) {   \
            GEN_PRIV;                                     \
        }                                                 \
    } while (0)
#endif

#define CHK_NONE

/*****************************************************************************/
/* PowerPC instructions table */

#define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2)              \
{                                                                       \
    .opc1 = op1,                                                        \
    .opc2 = op2,                                                        \
    .opc3 = op3,                                                        \
    .opc4 = 0xff,                                                       \
    .handler = {                                                        \
        .inval1 = invl,                                                 \
        .type = _typ,                                                   \
        .type2 = _typ2,                                                 \
        .handler = &gen_##name,                                         \
    },                                                                  \
    .oname = stringify(name),                                           \
}
#define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \
{                                                                       \
    .opc1 = op1,                                                        \
    .opc2 = op2,                                                        \
    .opc3 = op3,                                                        \
    .opc4 = 0xff,                                                       \
    .handler = {                                                        \
        .inval1 = invl1,                                                \
        .inval2 = invl2,                                                \
        .type = _typ,                                                   \
        .type2 = _typ2,                                                 \
        .handler = &gen_##name,                                         \
    },                                                                  \
    .oname = stringify(name),                                           \
}
#define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2)       \
{                                                                       \
    .opc1 = op1,                                                        \
    .opc2 = op2,                                                        \
    .opc3 = op3,                                                        \
    .opc4 = 0xff,                                                       \
    .handler = {                                                        \
        .inval1 = invl,                                                 \
        .type = _typ,                                                   \
        .type2 = _typ2,                                                 \
        .handler = &gen_##name,                                         \
    },                                                                  \
    .oname = onam,                                                      \
}
#define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2)        \
{                                                                       \
    .opc1 = op1,                                                        \
    .opc2 = op2,                                                        \
    .opc3 = op3,                                                        \
    .opc4 = op4,                                                        \
    .handler = {                                                        \
        .inval1 = invl,                                                 \
        .type = _typ,                                                   \
        .type2 = _typ2,                                                 \
        .handler = &gen_##name,                                         \
    },                                                                  \
    .oname = stringify(name),                                           \
}
#define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2)  \
{                                                                       \
    .opc1 = op1,                                                        \
    .opc2 = op2,                                                        \
    .opc3 = op3,                                                        \
    .opc4 = op4,                                                        \
    .handler = {                                                        \
        .inval1 = invl,                                                 \
        .type = _typ,                                                   \
        .type2 = _typ2,                                                 \
        .handler = &gen_##name,                                         \
    },                                                                  \
    .oname = onam,                                                      \
}

/* Invalid instruction */
static void gen_invalid(DisasContext *ctx)
{
    gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
}

static opc_handler_t invalid_handler = {
    .inval1 = 0xFFFFFFFF,
    .inval2 = 0xFFFFFFFF,
    .type = PPC_NONE,
    .type2 = PPC_NONE,
    .handler = gen_invalid,
};
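/*
 * Illustrative note (the instruction name and numbers below are invented for
 * the example): a GEN_HANDLER(name, opc1, opc2, opc3, inval, type) line
 * elsewhere in this file is just a GEN_OPCODE initializer, so
 *
 *   GEN_HANDLER(foo, 0x1F, 0x15, 0x03, 0x00000000, PPC_INTEGER)
 *
 * expands to an opcode_t entry roughly like:
 *
 *   {
 *       .opc1 = 0x1F, .opc2 = 0x15, .opc3 = 0x03, .opc4 = 0xff,
 *       .handler = {
 *           .inval1  = 0x00000000,   // "invalid bits" mask, see opc_handler_t
 *           .type    = PPC_INTEGER,
 *           .type2   = PPC_NONE,     // GEN_HANDLER always passes PPC_NONE
 *           .handler = &gen_foo,
 *       },
 *       .oname = "foo",
 *   }
 */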
/*** Integer comparison ***/

static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv_i32 t = tcg_temp_new_i32();

    tcg_gen_movi_tl(t0, CRF_EQ);
    tcg_gen_movi_tl(t1, CRF_LT);
    tcg_gen_movcond_tl((s ? TCG_COND_LT : TCG_COND_LTU),
                       t0, arg0, arg1, t1, t0);
    tcg_gen_movi_tl(t1, CRF_GT);
    tcg_gen_movcond_tl((s ? TCG_COND_GT : TCG_COND_GTU),
                       t0, arg0, arg1, t1, t0);

    tcg_gen_trunc_tl_i32(t, t0);
    tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so);
    tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free_i32(t);
}
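/*
 * Illustrative sketch only (names below are ours, not QEMU's): a plain C
 * model of the value gen_op_cmp() leaves in cpu_crf[crf].  The two movconds
 * above select LT or GT over a default of EQ, and XER.SO is then OR-ed in.
 * The CRF_* constants are assumed to match the one-hot encoding used with
 * CRF_GT_BIT elsewhere in this file.
 *
 *   #include <stdint.h>
 *
 *   enum { CRF_SO = 1, CRF_EQ = 2, CRF_GT = 4, CRF_LT = 8 };  // assumed values
 *
 *   static uint32_t model_cmp(int64_t a, int64_t b, int signed_cmp, int so)
 *   {
 *       uint32_t crf = CRF_EQ;                       // default: a == b
 *
 *       if (signed_cmp ? (a < b) : ((uint64_t)a < (uint64_t)b)) {
 *           crf = CRF_LT;
 *       } else if (signed_cmp ? (a > b) : ((uint64_t)a > (uint64_t)b)) {
 *           crf = CRF_GT;
 *       }
 *       return crf | (so ? CRF_SO : 0);              // SO copied from XER
 *   }
 */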
static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
{
    TCGv t0 = tcg_const_tl(arg1);
    gen_op_cmp(arg0, t0, s, crf);
    tcg_temp_free(t0);
}

static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
{
    TCGv t0, t1;
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    if (s) {
        tcg_gen_ext32s_tl(t0, arg0);
        tcg_gen_ext32s_tl(t1, arg1);
    } else {
        tcg_gen_ext32u_tl(t0, arg0);
        tcg_gen_ext32u_tl(t1, arg1);
    }
    gen_op_cmp(t0, t1, s, crf);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
}

static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
{
    TCGv t0 = tcg_const_tl(arg1);
    gen_op_cmp32(arg0, t0, s, crf);
    tcg_temp_free(t0);
}

static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
{
    if (NARROW_MODE(ctx)) {
        gen_op_cmpi32(reg, 0, 1, 0);
    } else {
        gen_op_cmpi(reg, 0, 1, 0);
    }
}

/* cmprb - range comparison: isupper, isalpha, islower */
static void gen_cmprb(DisasContext *ctx)
{
    TCGv_i32 src1 = tcg_temp_new_i32();
    TCGv_i32 src2 = tcg_temp_new_i32();
    TCGv_i32 src2lo = tcg_temp_new_i32();
    TCGv_i32 src2hi = tcg_temp_new_i32();
    TCGv_i32 crf = cpu_crf[crfD(ctx->opcode)];

    tcg_gen_trunc_tl_i32(src1, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_trunc_tl_i32(src2, cpu_gpr[rB(ctx->opcode)]);

    tcg_gen_andi_i32(src1, src1, 0xFF);
    tcg_gen_ext8u_i32(src2lo, src2);
    tcg_gen_shri_i32(src2, src2, 8);
    tcg_gen_ext8u_i32(src2hi, src2);

    tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
    tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
    tcg_gen_and_i32(crf, src2lo, src2hi);

    if (ctx->opcode & 0x00200000) {
        tcg_gen_shri_i32(src2, src2, 8);
        tcg_gen_ext8u_i32(src2lo, src2);
        tcg_gen_shri_i32(src2, src2, 8);
        tcg_gen_ext8u_i32(src2hi, src2);
        tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
        tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
        tcg_gen_and_i32(src2lo, src2lo, src2hi);
        tcg_gen_or_i32(crf, crf, src2lo);
    }
    tcg_gen_shli_i32(crf, crf, CRF_GT_BIT);
    tcg_temp_free_i32(src1);
    tcg_temp_free_i32(src2);
    tcg_temp_free_i32(src2lo);
    tcg_temp_free_i32(src2hi);
}

#if defined(TARGET_PPC64)
/* cmpeqb */
static void gen_cmpeqb(DisasContext *ctx)
{
    gen_helper_cmpeqb(cpu_crf[crfD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                      cpu_gpr[rB(ctx->opcode)]);
}
#endif

/* isel (PowerPC 2.03 specification) */
static void gen_isel(DisasContext *ctx)
{
    uint32_t bi = rC(ctx->opcode);
    uint32_t mask = 0x08 >> (bi & 0x03);
    TCGv t0 = tcg_temp_new();
    TCGv zr;

    tcg_gen_extu_i32_tl(t0, cpu_crf[bi >> 2]);
    tcg_gen_andi_tl(t0, t0, mask);

    zr = tcg_const_tl(0);
    tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rD(ctx->opcode)], t0, zr,
                       rA(ctx->opcode) ? cpu_gpr[rA(ctx->opcode)] : zr,
                       cpu_gpr[rB(ctx->opcode)]);
    tcg_temp_free(zr);
    tcg_temp_free(t0);
}

/* cmpb: PowerPC 2.05 specification */
static void gen_cmpb(DisasContext *ctx)
{
    gen_helper_cmpb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                    cpu_gpr[rB(ctx->opcode)]);
}

/*** Integer arithmetic ***/

static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0,
                                           TCGv arg1, TCGv arg2, int sub)
{
    TCGv t0 = tcg_temp_new();

    tcg_gen_xor_tl(cpu_ov, arg0, arg2);
    tcg_gen_xor_tl(t0, arg1, arg2);
    if (sub) {
        tcg_gen_and_tl(cpu_ov, cpu_ov, t0);
    } else {
        tcg_gen_andc_tl(cpu_ov, cpu_ov, t0);
    }
    tcg_temp_free(t0);
    if (NARROW_MODE(ctx)) {
        tcg_gen_extract_tl(cpu_ov, cpu_ov, 31, 1);
        if (is_isa300(ctx)) {
            tcg_gen_mov_tl(cpu_ov32, cpu_ov);
        }
    } else {
        if (is_isa300(ctx)) {
            tcg_gen_extract_tl(cpu_ov32, cpu_ov, 31, 1);
        }
        tcg_gen_extract_tl(cpu_ov, cpu_ov, TARGET_LONG_BITS - 1, 1);
    }
    tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
}

static inline void gen_op_arith_compute_ca32(DisasContext *ctx,
                                             TCGv res, TCGv arg0, TCGv arg1,
                                             TCGv ca32, int sub)
{
    TCGv t0;

    if (!is_isa300(ctx)) {
        return;
    }

    t0 = tcg_temp_new();
    if (sub) {
        tcg_gen_eqv_tl(t0, arg0, arg1);
    } else {
        tcg_gen_xor_tl(t0, arg0, arg1);
    }
    tcg_gen_xor_tl(t0, t0, res);
    tcg_gen_extract_tl(ca32, t0, 32, 1);
    tcg_temp_free(t0);
}
/* Common add function */
static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1,
                                    TCGv arg2, TCGv ca, TCGv ca32,
                                    bool add_ca, bool compute_ca,
                                    bool compute_ov, bool compute_rc0)
{
    TCGv t0 = ret;

    if (compute_ca || compute_ov) {
        t0 = tcg_temp_new();
    }

    if (compute_ca) {
        if (NARROW_MODE(ctx)) {
            /*
             * Caution: a non-obvious corner case of the spec is that
             * we must produce the *entire* 64-bit addition, but
             * produce the carry into bit 32.
             */
            TCGv t1 = tcg_temp_new();
            tcg_gen_xor_tl(t1, arg1, arg2);        /* add without carry */
            tcg_gen_add_tl(t0, arg1, arg2);
            if (add_ca) {
                tcg_gen_add_tl(t0, t0, ca);
            }
            tcg_gen_xor_tl(ca, t0, t1);            /* bits changed w/ carry */
            tcg_temp_free(t1);
            tcg_gen_extract_tl(ca, ca, 32, 1);
            if (is_isa300(ctx)) {
                tcg_gen_mov_tl(ca32, ca);
            }
        } else {
            TCGv zero = tcg_const_tl(0);
            if (add_ca) {
                tcg_gen_add2_tl(t0, ca, arg1, zero, ca, zero);
                tcg_gen_add2_tl(t0, ca, t0, ca, arg2, zero);
            } else {
                tcg_gen_add2_tl(t0, ca, arg1, zero, arg2, zero);
            }
            gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, ca32, 0);
            tcg_temp_free(zero);
        }
    } else {
        tcg_gen_add_tl(t0, arg1, arg2);
        if (add_ca) {
            tcg_gen_add_tl(t0, t0, ca);
        }
    }

    if (compute_ov) {
        gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
    }
    if (unlikely(compute_rc0)) {
        gen_set_Rc0(ctx, t0);
    }

    if (t0 != ret) {
        tcg_gen_mov_tl(ret, t0);
        tcg_temp_free(t0);
    }
}
/* Add functions with two operands */
#define GEN_INT_ARITH_ADD(name, opc3, ca, add_ca, compute_ca, compute_ov)    \
static void glue(gen_, name)(DisasContext *ctx)                              \
{                                                                            \
    gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                     ca, glue(ca, 32),                                       \
                     add_ca, compute_ca, compute_ov, Rc(ctx->opcode));       \
}
/* Add functions with one operand and one immediate */
#define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, ca,                   \
                                add_ca, compute_ca, compute_ov)              \
static void glue(gen_, name)(DisasContext *ctx)                              \
{                                                                            \
    TCGv t0 = tcg_const_tl(const_val);                                       \
    gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                     cpu_gpr[rA(ctx->opcode)], t0,                           \
                     ca, glue(ca, 32),                                       \
                     add_ca, compute_ca, compute_ov, Rc(ctx->opcode));       \
    tcg_temp_free(t0);                                                       \
}

/* add add. addo addo. */
GEN_INT_ARITH_ADD(add, 0x08, cpu_ca, 0, 0, 0)
GEN_INT_ARITH_ADD(addo, 0x18, cpu_ca, 0, 0, 1)
/* addc addc. addco addco. */
GEN_INT_ARITH_ADD(addc, 0x00, cpu_ca, 0, 1, 0)
GEN_INT_ARITH_ADD(addco, 0x10, cpu_ca, 0, 1, 1)
/* adde adde. addeo addeo. */
GEN_INT_ARITH_ADD(adde, 0x04, cpu_ca, 1, 1, 0)
GEN_INT_ARITH_ADD(addeo, 0x14, cpu_ca, 1, 1, 1)
/* addme addme. addmeo addmeo. */
GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, cpu_ca, 1, 1, 0)
GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, cpu_ca, 1, 1, 1)
/* addex */
GEN_INT_ARITH_ADD(addex, 0x05, cpu_ov, 1, 1, 0);
/* addze addze. addzeo addzeo. */
GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, cpu_ca, 1, 1, 0)
GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, cpu_ca, 1, 1, 1)
/* addic addic. */
static inline void gen_op_addic(DisasContext *ctx, bool compute_rc0)
{
    TCGv c = tcg_const_tl(SIMM(ctx->opcode));
    gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                     c, cpu_ca, cpu_ca32, 0, 1, 0, compute_rc0);
    tcg_temp_free(c);
}

static void gen_addic(DisasContext *ctx)
{
    gen_op_addic(ctx, 0);
}

static void gen_addic_(DisasContext *ctx)
{
    gen_op_addic(ctx, 1);
}

static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign, int compute_ov)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();
    TCGv_i32 t2 = tcg_temp_new_i32();
    TCGv_i32 t3 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg1);
    tcg_gen_trunc_tl_i32(t1, arg2);
    if (sign) {
        tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i32(t2, t2, t3);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i32(t2, t2, t3);
        tcg_gen_movi_i32(t3, 0);
        tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_div_i32(t3, t0, t1);
        tcg_gen_extu_i32_tl(ret, t3);
    } else {
        tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t1, 0);
        tcg_gen_movi_i32(t3, 0);
        tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_divu_i32(t3, t0, t1);
        tcg_gen_extu_i32_tl(ret, t3);
    }
    if (compute_ov) {
        tcg_gen_extu_i32_tl(cpu_ov, t2);
        if (is_isa300(ctx)) {
            tcg_gen_extu_i32_tl(cpu_ov32, t2);
        }
        tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
    }
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
    tcg_temp_free_i32(t3);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, ret);
    }
}
/* Div functions */
#define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov)                     \
static void glue(gen_, name)(DisasContext *ctx)                              \
{                                                                            \
    gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)],                         \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],    \
                      sign, compute_ov);                                     \
}
/* divwu divwu. divwuo divwuo. */
GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0);
GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1);
/* divw divw. divwo divwo. */
GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0);
GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1);

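/*
 * Illustrative sketch (the helper below is ours, not QEMU's): what the
 * guarded 32-bit signed division above computes.  The generated code detects
 * the two problem cases (INT32_MIN / -1 and division by zero), forces the
 * divisor to 1 so the host division can never trap, and for the
 * overflow-enabled forms copies the detection flag into OV (and OV32 on
 * ISA v3.00).  The architecture leaves the quotient undefined in those cases,
 * so returning the dividend is an acceptable choice; divwu only needs the
 * divide-by-zero check.
 *
 *   #include <stdint.h>
 *
 *   static int32_t model_divw(int32_t a, int32_t b, int *ov)
 *   {
 *       int bad = (a == INT32_MIN && b == -1) || (b == 0);
 *
 *       if (bad) {
 *           b = 1;             // mirrors the movcond on the divisor
 *       }
 *       *ov = bad;             // OV/OV32 for divwo, divwo.
 *       return a / b;          // architecturally undefined when bad
 *   }
 */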
/* div[wd]eu[o][.] */
#define GEN_DIVE(name, hlpr, compute_ov)                                      \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 t0 = tcg_const_i32(compute_ov);                                  \
    gen_helper_##hlpr(cpu_gpr[rD(ctx->opcode)], cpu_env,                      \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); \
    tcg_temp_free_i32(t0);                                                    \
    if (unlikely(Rc(ctx->opcode) != 0)) {                                     \
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);                           \
    }                                                                         \
}

GEN_DIVE(divweu, divweu, 0);
GEN_DIVE(divweuo, divweu, 1);
GEN_DIVE(divwe, divwe, 0);
GEN_DIVE(divweo, divwe, 1);

#if defined(TARGET_PPC64)
static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign, int compute_ov)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv_i64 t2 = tcg_temp_new_i64();
    TCGv_i64 t3 = tcg_temp_new_i64();

    tcg_gen_mov_i64(t0, arg1);
    tcg_gen_mov_i64(t1, arg2);
    if (sign) {
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i64(t2, t2, t3);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i64(t2, t2, t3);
        tcg_gen_movi_i64(t3, 0);
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_div_i64(ret, t0, t1);
    } else {
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t1, 0);
        tcg_gen_movi_i64(t3, 0);
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_divu_i64(ret, t0, t1);
    }
    if (compute_ov) {
        tcg_gen_mov_tl(cpu_ov, t2);
        if (is_isa300(ctx)) {
            tcg_gen_mov_tl(cpu_ov32, t2);
        }
        tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
    tcg_temp_free_i64(t3);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, ret);
    }
}

#define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov)                     \
static void glue(gen_, name)(DisasContext *ctx)                              \
{                                                                            \
    gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)],                         \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],    \
                      sign, compute_ov);                                     \
}
/* divdu divdu. divduo divduo. */
GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
/* divd divd. divdo divdo.
*/ 1865 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0); 1866 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1); 1867 1868 GEN_DIVE(divdeu, divdeu, 0); 1869 GEN_DIVE(divdeuo, divdeu, 1); 1870 GEN_DIVE(divde, divde, 0); 1871 GEN_DIVE(divdeo, divde, 1); 1872 #endif 1873 1874 static inline void gen_op_arith_modw(DisasContext *ctx, TCGv ret, TCGv arg1, 1875 TCGv arg2, int sign) 1876 { 1877 TCGv_i32 t0 = tcg_temp_new_i32(); 1878 TCGv_i32 t1 = tcg_temp_new_i32(); 1879 1880 tcg_gen_trunc_tl_i32(t0, arg1); 1881 tcg_gen_trunc_tl_i32(t1, arg2); 1882 if (sign) { 1883 TCGv_i32 t2 = tcg_temp_new_i32(); 1884 TCGv_i32 t3 = tcg_temp_new_i32(); 1885 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN); 1886 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1); 1887 tcg_gen_and_i32(t2, t2, t3); 1888 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0); 1889 tcg_gen_or_i32(t2, t2, t3); 1890 tcg_gen_movi_i32(t3, 0); 1891 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1); 1892 tcg_gen_rem_i32(t3, t0, t1); 1893 tcg_gen_ext_i32_tl(ret, t3); 1894 tcg_temp_free_i32(t2); 1895 tcg_temp_free_i32(t3); 1896 } else { 1897 TCGv_i32 t2 = tcg_const_i32(1); 1898 TCGv_i32 t3 = tcg_const_i32(0); 1899 tcg_gen_movcond_i32(TCG_COND_EQ, t1, t1, t3, t2, t1); 1900 tcg_gen_remu_i32(t3, t0, t1); 1901 tcg_gen_extu_i32_tl(ret, t3); 1902 tcg_temp_free_i32(t2); 1903 tcg_temp_free_i32(t3); 1904 } 1905 tcg_temp_free_i32(t0); 1906 tcg_temp_free_i32(t1); 1907 } 1908 1909 #define GEN_INT_ARITH_MODW(name, opc3, sign) \ 1910 static void glue(gen_, name)(DisasContext *ctx) \ 1911 { \ 1912 gen_op_arith_modw(ctx, cpu_gpr[rD(ctx->opcode)], \ 1913 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 1914 sign); \ 1915 } 1916 1917 GEN_INT_ARITH_MODW(moduw, 0x08, 0); 1918 GEN_INT_ARITH_MODW(modsw, 0x18, 1); 1919 1920 #if defined(TARGET_PPC64) 1921 static inline void gen_op_arith_modd(DisasContext *ctx, TCGv ret, TCGv arg1, 1922 TCGv arg2, int sign) 1923 { 1924 TCGv_i64 t0 = tcg_temp_new_i64(); 1925 TCGv_i64 t1 = tcg_temp_new_i64(); 1926 1927 tcg_gen_mov_i64(t0, arg1); 1928 tcg_gen_mov_i64(t1, arg2); 1929 if (sign) { 1930 TCGv_i64 t2 = tcg_temp_new_i64(); 1931 TCGv_i64 t3 = tcg_temp_new_i64(); 1932 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN); 1933 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1); 1934 tcg_gen_and_i64(t2, t2, t3); 1935 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0); 1936 tcg_gen_or_i64(t2, t2, t3); 1937 tcg_gen_movi_i64(t3, 0); 1938 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1); 1939 tcg_gen_rem_i64(ret, t0, t1); 1940 tcg_temp_free_i64(t2); 1941 tcg_temp_free_i64(t3); 1942 } else { 1943 TCGv_i64 t2 = tcg_const_i64(1); 1944 TCGv_i64 t3 = tcg_const_i64(0); 1945 tcg_gen_movcond_i64(TCG_COND_EQ, t1, t1, t3, t2, t1); 1946 tcg_gen_remu_i64(ret, t0, t1); 1947 tcg_temp_free_i64(t2); 1948 tcg_temp_free_i64(t3); 1949 } 1950 tcg_temp_free_i64(t0); 1951 tcg_temp_free_i64(t1); 1952 } 1953 1954 #define GEN_INT_ARITH_MODD(name, opc3, sign) \ 1955 static void glue(gen_, name)(DisasContext *ctx) \ 1956 { \ 1957 gen_op_arith_modd(ctx, cpu_gpr[rD(ctx->opcode)], \ 1958 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 1959 sign); \ 1960 } 1961 1962 GEN_INT_ARITH_MODD(modud, 0x08, 0); 1963 GEN_INT_ARITH_MODD(modsd, 0x18, 1); 1964 #endif 1965 1966 /* mulhw mulhw. 
*/ 1967 static void gen_mulhw(DisasContext *ctx) 1968 { 1969 TCGv_i32 t0 = tcg_temp_new_i32(); 1970 TCGv_i32 t1 = tcg_temp_new_i32(); 1971 1972 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); 1973 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); 1974 tcg_gen_muls2_i32(t0, t1, t0, t1); 1975 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1); 1976 tcg_temp_free_i32(t0); 1977 tcg_temp_free_i32(t1); 1978 if (unlikely(Rc(ctx->opcode) != 0)) { 1979 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1980 } 1981 } 1982 1983 /* mulhwu mulhwu. */ 1984 static void gen_mulhwu(DisasContext *ctx) 1985 { 1986 TCGv_i32 t0 = tcg_temp_new_i32(); 1987 TCGv_i32 t1 = tcg_temp_new_i32(); 1988 1989 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); 1990 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); 1991 tcg_gen_mulu2_i32(t0, t1, t0, t1); 1992 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1); 1993 tcg_temp_free_i32(t0); 1994 tcg_temp_free_i32(t1); 1995 if (unlikely(Rc(ctx->opcode) != 0)) { 1996 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1997 } 1998 } 1999 2000 /* mullw mullw. */ 2001 static void gen_mullw(DisasContext *ctx) 2002 { 2003 #if defined(TARGET_PPC64) 2004 TCGv_i64 t0, t1; 2005 t0 = tcg_temp_new_i64(); 2006 t1 = tcg_temp_new_i64(); 2007 tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]); 2008 tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]); 2009 tcg_gen_mul_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); 2010 tcg_temp_free(t0); 2011 tcg_temp_free(t1); 2012 #else 2013 tcg_gen_mul_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 2014 cpu_gpr[rB(ctx->opcode)]); 2015 #endif 2016 if (unlikely(Rc(ctx->opcode) != 0)) { 2017 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2018 } 2019 } 2020 2021 /* mullwo mullwo. */ 2022 static void gen_mullwo(DisasContext *ctx) 2023 { 2024 TCGv_i32 t0 = tcg_temp_new_i32(); 2025 TCGv_i32 t1 = tcg_temp_new_i32(); 2026 2027 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); 2028 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); 2029 tcg_gen_muls2_i32(t0, t1, t0, t1); 2030 #if defined(TARGET_PPC64) 2031 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); 2032 #else 2033 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], t0); 2034 #endif 2035 2036 tcg_gen_sari_i32(t0, t0, 31); 2037 tcg_gen_setcond_i32(TCG_COND_NE, t0, t0, t1); 2038 tcg_gen_extu_i32_tl(cpu_ov, t0); 2039 if (is_isa300(ctx)) { 2040 tcg_gen_mov_tl(cpu_ov32, cpu_ov); 2041 } 2042 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 2043 2044 tcg_temp_free_i32(t0); 2045 tcg_temp_free_i32(t1); 2046 if (unlikely(Rc(ctx->opcode) != 0)) { 2047 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2048 } 2049 } 2050 2051 /* mulli */ 2052 static void gen_mulli(DisasContext *ctx) 2053 { 2054 tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 2055 SIMM(ctx->opcode)); 2056 } 2057 2058 #if defined(TARGET_PPC64) 2059 /* mulhd mulhd. */ 2060 static void gen_mulhd(DisasContext *ctx) 2061 { 2062 TCGv lo = tcg_temp_new(); 2063 tcg_gen_muls2_tl(lo, cpu_gpr[rD(ctx->opcode)], 2064 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2065 tcg_temp_free(lo); 2066 if (unlikely(Rc(ctx->opcode) != 0)) { 2067 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2068 } 2069 } 2070 2071 /* mulhdu mulhdu. */ 2072 static void gen_mulhdu(DisasContext *ctx) 2073 { 2074 TCGv lo = tcg_temp_new(); 2075 tcg_gen_mulu2_tl(lo, cpu_gpr[rD(ctx->opcode)], 2076 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2077 tcg_temp_free(lo); 2078 if (unlikely(Rc(ctx->opcode) != 0)) { 2079 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2080 } 2081 } 2082 2083 /* mulld mulld. 
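 * mulld keeps only the low 64 bits of the 128-bit product.  The -o form
 * (gen_mulldo below) detects overflow by checking whether the high half
 * matches the sign-extension of the low half: if it does not, the product
 * did not fit in 64 bits and OV/OV32/SO are set accordingly.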
*/ 2084 static void gen_mulld(DisasContext *ctx) 2085 { 2086 tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 2087 cpu_gpr[rB(ctx->opcode)]); 2088 if (unlikely(Rc(ctx->opcode) != 0)) { 2089 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2090 } 2091 } 2092 2093 /* mulldo mulldo. */ 2094 static void gen_mulldo(DisasContext *ctx) 2095 { 2096 TCGv_i64 t0 = tcg_temp_new_i64(); 2097 TCGv_i64 t1 = tcg_temp_new_i64(); 2098 2099 tcg_gen_muls2_i64(t0, t1, cpu_gpr[rA(ctx->opcode)], 2100 cpu_gpr[rB(ctx->opcode)]); 2101 tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], t0); 2102 2103 tcg_gen_sari_i64(t0, t0, 63); 2104 tcg_gen_setcond_i64(TCG_COND_NE, cpu_ov, t0, t1); 2105 if (is_isa300(ctx)) { 2106 tcg_gen_mov_tl(cpu_ov32, cpu_ov); 2107 } 2108 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 2109 2110 tcg_temp_free_i64(t0); 2111 tcg_temp_free_i64(t1); 2112 2113 if (unlikely(Rc(ctx->opcode) != 0)) { 2114 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2115 } 2116 } 2117 #endif 2118 2119 /* Common subf function */ 2120 static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1, 2121 TCGv arg2, bool add_ca, bool compute_ca, 2122 bool compute_ov, bool compute_rc0) 2123 { 2124 TCGv t0 = ret; 2125 2126 if (compute_ca || compute_ov) { 2127 t0 = tcg_temp_new(); 2128 } 2129 2130 if (compute_ca) { 2131 /* dest = ~arg1 + arg2 [+ ca]. */ 2132 if (NARROW_MODE(ctx)) { 2133 /* 2134 * Caution: a non-obvious corner case of the spec is that 2135 * we must produce the *entire* 64-bit addition, but 2136 * produce the carry into bit 32. 2137 */ 2138 TCGv inv1 = tcg_temp_new(); 2139 TCGv t1 = tcg_temp_new(); 2140 tcg_gen_not_tl(inv1, arg1); 2141 if (add_ca) { 2142 tcg_gen_add_tl(t0, arg2, cpu_ca); 2143 } else { 2144 tcg_gen_addi_tl(t0, arg2, 1); 2145 } 2146 tcg_gen_xor_tl(t1, arg2, inv1); /* add without carry */ 2147 tcg_gen_add_tl(t0, t0, inv1); 2148 tcg_temp_free(inv1); 2149 tcg_gen_xor_tl(cpu_ca, t0, t1); /* bits changes w/ carry */ 2150 tcg_temp_free(t1); 2151 tcg_gen_extract_tl(cpu_ca, cpu_ca, 32, 1); 2152 if (is_isa300(ctx)) { 2153 tcg_gen_mov_tl(cpu_ca32, cpu_ca); 2154 } 2155 } else if (add_ca) { 2156 TCGv zero, inv1 = tcg_temp_new(); 2157 tcg_gen_not_tl(inv1, arg1); 2158 zero = tcg_const_tl(0); 2159 tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero); 2160 tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero); 2161 gen_op_arith_compute_ca32(ctx, t0, inv1, arg2, cpu_ca32, 0); 2162 tcg_temp_free(zero); 2163 tcg_temp_free(inv1); 2164 } else { 2165 tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1); 2166 tcg_gen_sub_tl(t0, arg2, arg1); 2167 gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, cpu_ca32, 1); 2168 } 2169 } else if (add_ca) { 2170 /* 2171 * Since we're ignoring carry-out, we can simplify the 2172 * standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1. 
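 * (In two's complement ~arg1 == -arg1 - 1, so
 * ~arg1 + arg2 + ca == arg2 - arg1 + ca - 1.)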
2173 */ 2174 tcg_gen_sub_tl(t0, arg2, arg1); 2175 tcg_gen_add_tl(t0, t0, cpu_ca); 2176 tcg_gen_subi_tl(t0, t0, 1); 2177 } else { 2178 tcg_gen_sub_tl(t0, arg2, arg1); 2179 } 2180 2181 if (compute_ov) { 2182 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1); 2183 } 2184 if (unlikely(compute_rc0)) { 2185 gen_set_Rc0(ctx, t0); 2186 } 2187 2188 if (t0 != ret) { 2189 tcg_gen_mov_tl(ret, t0); 2190 tcg_temp_free(t0); 2191 } 2192 } 2193 /* Sub functions with Two operands functions */ 2194 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \ 2195 static void glue(gen_, name)(DisasContext *ctx) \ 2196 { \ 2197 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \ 2198 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 2199 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 2200 } 2201 /* Sub functions with one operand and one immediate */ 2202 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \ 2203 add_ca, compute_ca, compute_ov) \ 2204 static void glue(gen_, name)(DisasContext *ctx) \ 2205 { \ 2206 TCGv t0 = tcg_const_tl(const_val); \ 2207 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \ 2208 cpu_gpr[rA(ctx->opcode)], t0, \ 2209 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 2210 tcg_temp_free(t0); \ 2211 } 2212 /* subf subf. subfo subfo. */ 2213 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0) 2214 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1) 2215 /* subfc subfc. subfco subfco. */ 2216 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0) 2217 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1) 2218 /* subfe subfe. subfeo subfo. */ 2219 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0) 2220 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1) 2221 /* subfme subfme. subfmeo subfmeo. */ 2222 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0) 2223 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1) 2224 /* subfze subfze. subfzeo subfzeo.*/ 2225 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0) 2226 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1) 2227 2228 /* subfic */ 2229 static void gen_subfic(DisasContext *ctx) 2230 { 2231 TCGv c = tcg_const_tl(SIMM(ctx->opcode)); 2232 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 2233 c, 0, 1, 0, 0); 2234 tcg_temp_free(c); 2235 } 2236 2237 /* neg neg. nego nego. */ 2238 static inline void gen_op_arith_neg(DisasContext *ctx, bool compute_ov) 2239 { 2240 TCGv zero = tcg_const_tl(0); 2241 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 2242 zero, 0, 0, compute_ov, Rc(ctx->opcode)); 2243 tcg_temp_free(zero); 2244 } 2245 2246 static void gen_neg(DisasContext *ctx) 2247 { 2248 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 2249 if (unlikely(Rc(ctx->opcode))) { 2250 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2251 } 2252 } 2253 2254 static void gen_nego(DisasContext *ctx) 2255 { 2256 gen_op_arith_neg(ctx, 1); 2257 } 2258 2259 /*** Integer logical ***/ 2260 #define GEN_LOGICAL2(name, tcg_op, opc, type) \ 2261 static void glue(gen_, name)(DisasContext *ctx) \ 2262 { \ 2263 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], \ 2264 cpu_gpr[rB(ctx->opcode)]); \ 2265 if (unlikely(Rc(ctx->opcode) != 0)) \ 2266 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \ 2267 } 2268 2269 #define GEN_LOGICAL1(name, tcg_op, opc, type) \ 2270 static void glue(gen_, name)(DisasContext *ctx) \ 2271 { \ 2272 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); \ 2273 if (unlikely(Rc(ctx->opcode) != 0)) \ 2274 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \ 2275 } 2276 2277 /* and & and. 
*/ 2278 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER); 2279 /* andc & andc. */ 2280 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER); 2281 2282 /* andi. */ 2283 static void gen_andi_(DisasContext *ctx) 2284 { 2285 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 2286 UIMM(ctx->opcode)); 2287 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2288 } 2289 2290 /* andis. */ 2291 static void gen_andis_(DisasContext *ctx) 2292 { 2293 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 2294 UIMM(ctx->opcode) << 16); 2295 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2296 } 2297 2298 /* cntlzw */ 2299 static void gen_cntlzw(DisasContext *ctx) 2300 { 2301 TCGv_i32 t = tcg_temp_new_i32(); 2302 2303 tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]); 2304 tcg_gen_clzi_i32(t, t, 32); 2305 tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t); 2306 tcg_temp_free_i32(t); 2307 2308 if (unlikely(Rc(ctx->opcode) != 0)) { 2309 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2310 } 2311 } 2312 2313 /* cnttzw */ 2314 static void gen_cnttzw(DisasContext *ctx) 2315 { 2316 TCGv_i32 t = tcg_temp_new_i32(); 2317 2318 tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]); 2319 tcg_gen_ctzi_i32(t, t, 32); 2320 tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t); 2321 tcg_temp_free_i32(t); 2322 2323 if (unlikely(Rc(ctx->opcode) != 0)) { 2324 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2325 } 2326 } 2327 2328 /* eqv & eqv. */ 2329 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER); 2330 /* extsb & extsb. */ 2331 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER); 2332 /* extsh & extsh. */ 2333 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER); 2334 /* nand & nand. */ 2335 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER); 2336 /* nor & nor. */ 2337 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER); 2338 2339 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) 2340 static void gen_pause(DisasContext *ctx) 2341 { 2342 TCGv_i32 t0 = tcg_const_i32(0); 2343 tcg_gen_st_i32(t0, cpu_env, 2344 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted)); 2345 tcg_temp_free_i32(t0); 2346 2347 /* Stop translation, this gives other CPUs a chance to run */ 2348 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 2349 } 2350 #endif /* defined(TARGET_PPC64) */ 2351 2352 /* or & or. */ 2353 static void gen_or(DisasContext *ctx) 2354 { 2355 int rs, ra, rb; 2356 2357 rs = rS(ctx->opcode); 2358 ra = rA(ctx->opcode); 2359 rb = rB(ctx->opcode); 2360 /* Optimisation for mr. 
ri case */ 2361 if (rs != ra || rs != rb) { 2362 if (rs != rb) { 2363 tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]); 2364 } else { 2365 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]); 2366 } 2367 if (unlikely(Rc(ctx->opcode) != 0)) { 2368 gen_set_Rc0(ctx, cpu_gpr[ra]); 2369 } 2370 } else if (unlikely(Rc(ctx->opcode) != 0)) { 2371 gen_set_Rc0(ctx, cpu_gpr[rs]); 2372 #if defined(TARGET_PPC64) 2373 } else if (rs != 0) { /* 0 is nop */ 2374 int prio = 0; 2375 2376 switch (rs) { 2377 case 1: 2378 /* Set process priority to low */ 2379 prio = 2; 2380 break; 2381 case 6: 2382 /* Set process priority to medium-low */ 2383 prio = 3; 2384 break; 2385 case 2: 2386 /* Set process priority to normal */ 2387 prio = 4; 2388 break; 2389 #if !defined(CONFIG_USER_ONLY) 2390 case 31: 2391 if (!ctx->pr) { 2392 /* Set process priority to very low */ 2393 prio = 1; 2394 } 2395 break; 2396 case 5: 2397 if (!ctx->pr) { 2398 /* Set process priority to medium-high */ 2399 prio = 5; 2400 } 2401 break; 2402 case 3: 2403 if (!ctx->pr) { 2404 /* Set process priority to high */ 2405 prio = 6; 2406 } 2407 break; 2408 case 7: 2409 if (ctx->hv && !ctx->pr) { 2410 /* Set process priority to very high */ 2411 prio = 7; 2412 } 2413 break; 2414 #endif 2415 default: 2416 break; 2417 } 2418 if (prio) { 2419 TCGv t0 = tcg_temp_new(); 2420 gen_load_spr(t0, SPR_PPR); 2421 tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL); 2422 tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50); 2423 gen_store_spr(SPR_PPR, t0); 2424 tcg_temp_free(t0); 2425 } 2426 #if !defined(CONFIG_USER_ONLY) 2427 /* 2428 * Pause out of TCG, otherwise spin loops with smt_low eat too 2429 * much CPU and the kernel hangs. This applies to all 2430 * encodings other than no-op, e.g., miso(rs=26), yield(27), 2431 * mdoio(29), mdoom(30), and all currently undefined. 2432 */ 2433 gen_pause(ctx); 2434 #endif 2435 #endif 2436 } 2437 } 2438 /* orc & orc. */ 2439 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER); 2440 2441 /* xor & xor.
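 * "xor rX,rX,rX" is a common idiom for clearing a register, which is why
 * gen_xor() below special-cases rS == rB into a plain move of zero.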
*/ 2442 static void gen_xor(DisasContext *ctx) 2443 { 2444 /* Optimisation for "set to zero" case */ 2445 if (rS(ctx->opcode) != rB(ctx->opcode)) { 2446 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 2447 cpu_gpr[rB(ctx->opcode)]); 2448 } else { 2449 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 2450 } 2451 if (unlikely(Rc(ctx->opcode) != 0)) { 2452 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2453 } 2454 } 2455 2456 /* ori */ 2457 static void gen_ori(DisasContext *ctx) 2458 { 2459 target_ulong uimm = UIMM(ctx->opcode); 2460 2461 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 2462 return; 2463 } 2464 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm); 2465 } 2466 2467 /* oris */ 2468 static void gen_oris(DisasContext *ctx) 2469 { 2470 target_ulong uimm = UIMM(ctx->opcode); 2471 2472 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 2473 /* NOP */ 2474 return; 2475 } 2476 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 2477 uimm << 16); 2478 } 2479 2480 /* xori */ 2481 static void gen_xori(DisasContext *ctx) 2482 { 2483 target_ulong uimm = UIMM(ctx->opcode); 2484 2485 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 2486 /* NOP */ 2487 return; 2488 } 2489 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm); 2490 } 2491 2492 /* xoris */ 2493 static void gen_xoris(DisasContext *ctx) 2494 { 2495 target_ulong uimm = UIMM(ctx->opcode); 2496 2497 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 2498 /* NOP */ 2499 return; 2500 } 2501 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 2502 uimm << 16); 2503 } 2504 2505 /* popcntb : PowerPC 2.03 specification */ 2506 static void gen_popcntb(DisasContext *ctx) 2507 { 2508 gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 2509 } 2510 2511 static void gen_popcntw(DisasContext *ctx) 2512 { 2513 #if defined(TARGET_PPC64) 2514 gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 2515 #else 2516 tcg_gen_ctpop_i32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 2517 #endif 2518 } 2519 2520 #if defined(TARGET_PPC64) 2521 /* popcntd: PowerPC 2.06 specification */ 2522 static void gen_popcntd(DisasContext *ctx) 2523 { 2524 tcg_gen_ctpop_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 2525 } 2526 #endif 2527 2528 /* prtyw: PowerPC 2.05 specification */ 2529 static void gen_prtyw(DisasContext *ctx) 2530 { 2531 TCGv ra = cpu_gpr[rA(ctx->opcode)]; 2532 TCGv rs = cpu_gpr[rS(ctx->opcode)]; 2533 TCGv t0 = tcg_temp_new(); 2534 tcg_gen_shri_tl(t0, rs, 16); 2535 tcg_gen_xor_tl(ra, rs, t0); 2536 tcg_gen_shri_tl(t0, ra, 8); 2537 tcg_gen_xor_tl(ra, ra, t0); 2538 tcg_gen_andi_tl(ra, ra, (target_ulong)0x100000001ULL); 2539 tcg_temp_free(t0); 2540 } 2541 2542 #if defined(TARGET_PPC64) 2543 /* prtyd: PowerPC 2.05 specification */ 2544 static void gen_prtyd(DisasContext *ctx) 2545 { 2546 TCGv ra = cpu_gpr[rA(ctx->opcode)]; 2547 TCGv rs = cpu_gpr[rS(ctx->opcode)]; 2548 TCGv t0 = tcg_temp_new(); 2549 tcg_gen_shri_tl(t0, rs, 32); 2550 tcg_gen_xor_tl(ra, rs, t0); 2551 tcg_gen_shri_tl(t0, ra, 16); 2552 tcg_gen_xor_tl(ra, ra, t0); 2553 tcg_gen_shri_tl(t0, ra, 8); 2554 tcg_gen_xor_tl(ra, ra, t0); 2555 tcg_gen_andi_tl(ra, ra, 1); 2556 tcg_temp_free(t0); 2557 } 2558 #endif 2559 2560 #if defined(TARGET_PPC64) 2561 /* bpermd */ 2562 static void gen_bpermd(DisasContext *ctx) 2563 { 2564 gen_helper_bpermd(cpu_gpr[rA(ctx->opcode)], 2565 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2566 } 2567 #endif 2568 
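/*
 * prtyw/prtyd above compute, per word (or doubleword), the XOR of the
 * least-significant bit of each byte by xor-folding, roughly the C
 * equivalent of:
 *     w ^= w >> 16; w ^= w >> 8; w &= 1;   (per 32-bit word)
 * prtyw keeps bit 0 of both words with the 0x100000001 mask, while prtyd
 * folds once more (by 32) and keeps a single bit.
 */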
2569 #if defined(TARGET_PPC64) 2570 /* extsw & extsw. */ 2571 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B); 2572 2573 /* cntlzd */ 2574 static void gen_cntlzd(DisasContext *ctx) 2575 { 2576 tcg_gen_clzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64); 2577 if (unlikely(Rc(ctx->opcode) != 0)) { 2578 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2579 } 2580 } 2581 2582 /* cnttzd */ 2583 static void gen_cnttzd(DisasContext *ctx) 2584 { 2585 tcg_gen_ctzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64); 2586 if (unlikely(Rc(ctx->opcode) != 0)) { 2587 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2588 } 2589 } 2590 2591 /* darn */ 2592 static void gen_darn(DisasContext *ctx) 2593 { 2594 int l = L(ctx->opcode); 2595 2596 if (l > 2) { 2597 tcg_gen_movi_i64(cpu_gpr[rD(ctx->opcode)], -1); 2598 } else { 2599 gen_icount_io_start(ctx); 2600 if (l == 0) { 2601 gen_helper_darn32(cpu_gpr[rD(ctx->opcode)]); 2602 } else { 2603 /* Return 64-bit random for both CRN and RRN */ 2604 gen_helper_darn64(cpu_gpr[rD(ctx->opcode)]); 2605 } 2606 } 2607 } 2608 #endif 2609 2610 /*** Integer rotate ***/ 2611 2612 /* rlwimi & rlwimi. */ 2613 static void gen_rlwimi(DisasContext *ctx) 2614 { 2615 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2616 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2617 uint32_t sh = SH(ctx->opcode); 2618 uint32_t mb = MB(ctx->opcode); 2619 uint32_t me = ME(ctx->opcode); 2620 2621 if (sh == (31 - me) && mb <= me) { 2622 tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1); 2623 } else { 2624 target_ulong mask; 2625 bool mask_in_32b = true; 2626 TCGv t1; 2627 2628 #if defined(TARGET_PPC64) 2629 mb += 32; 2630 me += 32; 2631 #endif 2632 mask = MASK(mb, me); 2633 2634 #if defined(TARGET_PPC64) 2635 if (mask > 0xffffffffu) { 2636 mask_in_32b = false; 2637 } 2638 #endif 2639 t1 = tcg_temp_new(); 2640 if (mask_in_32b) { 2641 TCGv_i32 t0 = tcg_temp_new_i32(); 2642 tcg_gen_trunc_tl_i32(t0, t_rs); 2643 tcg_gen_rotli_i32(t0, t0, sh); 2644 tcg_gen_extu_i32_tl(t1, t0); 2645 tcg_temp_free_i32(t0); 2646 } else { 2647 #if defined(TARGET_PPC64) 2648 tcg_gen_deposit_i64(t1, t_rs, t_rs, 32, 32); 2649 tcg_gen_rotli_i64(t1, t1, sh); 2650 #else 2651 g_assert_not_reached(); 2652 #endif 2653 } 2654 2655 tcg_gen_andi_tl(t1, t1, mask); 2656 tcg_gen_andi_tl(t_ra, t_ra, ~mask); 2657 tcg_gen_or_tl(t_ra, t_ra, t1); 2658 tcg_temp_free(t1); 2659 } 2660 if (unlikely(Rc(ctx->opcode) != 0)) { 2661 gen_set_Rc0(ctx, t_ra); 2662 } 2663 } 2664 2665 /* rlwinm & rlwinm. 
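 * rlwinm rotates the low 32 bits of RS left by SH and ANDs the result with
 * MASK(MB, ME); e.g. the "slwi rA,rS,n" mnemonic is just
 * rlwinm rA,rS,n,0,31-n.  The handler below recognizes the shapes that
 * reduce to a single deposit or extract and only falls back to an explicit
 * rotate-and-mask otherwise.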
*/ 2666 static void gen_rlwinm(DisasContext *ctx) 2667 { 2668 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2669 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2670 int sh = SH(ctx->opcode); 2671 int mb = MB(ctx->opcode); 2672 int me = ME(ctx->opcode); 2673 int len = me - mb + 1; 2674 int rsh = (32 - sh) & 31; 2675 2676 if (sh != 0 && len > 0 && me == (31 - sh)) { 2677 tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len); 2678 } else if (me == 31 && rsh + len <= 32) { 2679 tcg_gen_extract_tl(t_ra, t_rs, rsh, len); 2680 } else { 2681 target_ulong mask; 2682 bool mask_in_32b = true; 2683 #if defined(TARGET_PPC64) 2684 mb += 32; 2685 me += 32; 2686 #endif 2687 mask = MASK(mb, me); 2688 #if defined(TARGET_PPC64) 2689 if (mask > 0xffffffffu) { 2690 mask_in_32b = false; 2691 } 2692 #endif 2693 if (mask_in_32b) { 2694 if (sh == 0) { 2695 tcg_gen_andi_tl(t_ra, t_rs, mask); 2696 } else { 2697 TCGv_i32 t0 = tcg_temp_new_i32(); 2698 tcg_gen_trunc_tl_i32(t0, t_rs); 2699 tcg_gen_rotli_i32(t0, t0, sh); 2700 tcg_gen_andi_i32(t0, t0, mask); 2701 tcg_gen_extu_i32_tl(t_ra, t0); 2702 tcg_temp_free_i32(t0); 2703 } 2704 } else { 2705 #if defined(TARGET_PPC64) 2706 tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32); 2707 tcg_gen_rotli_i64(t_ra, t_ra, sh); 2708 tcg_gen_andi_i64(t_ra, t_ra, mask); 2709 #else 2710 g_assert_not_reached(); 2711 #endif 2712 } 2713 } 2714 if (unlikely(Rc(ctx->opcode) != 0)) { 2715 gen_set_Rc0(ctx, t_ra); 2716 } 2717 } 2718 2719 /* rlwnm & rlwnm. */ 2720 static void gen_rlwnm(DisasContext *ctx) 2721 { 2722 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2723 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2724 TCGv t_rb = cpu_gpr[rB(ctx->opcode)]; 2725 uint32_t mb = MB(ctx->opcode); 2726 uint32_t me = ME(ctx->opcode); 2727 target_ulong mask; 2728 bool mask_in_32b = true; 2729 2730 #if defined(TARGET_PPC64) 2731 mb += 32; 2732 me += 32; 2733 #endif 2734 mask = MASK(mb, me); 2735 2736 #if defined(TARGET_PPC64) 2737 if (mask > 0xffffffffu) { 2738 mask_in_32b = false; 2739 } 2740 #endif 2741 if (mask_in_32b) { 2742 TCGv_i32 t0 = tcg_temp_new_i32(); 2743 TCGv_i32 t1 = tcg_temp_new_i32(); 2744 tcg_gen_trunc_tl_i32(t0, t_rb); 2745 tcg_gen_trunc_tl_i32(t1, t_rs); 2746 tcg_gen_andi_i32(t0, t0, 0x1f); 2747 tcg_gen_rotl_i32(t1, t1, t0); 2748 tcg_gen_extu_i32_tl(t_ra, t1); 2749 tcg_temp_free_i32(t0); 2750 tcg_temp_free_i32(t1); 2751 } else { 2752 #if defined(TARGET_PPC64) 2753 TCGv_i64 t0 = tcg_temp_new_i64(); 2754 tcg_gen_andi_i64(t0, t_rb, 0x1f); 2755 tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32); 2756 tcg_gen_rotl_i64(t_ra, t_ra, t0); 2757 tcg_temp_free_i64(t0); 2758 #else 2759 g_assert_not_reached(); 2760 #endif 2761 } 2762 2763 tcg_gen_andi_tl(t_ra, t_ra, mask); 2764 2765 if (unlikely(Rc(ctx->opcode) != 0)) { 2766 gen_set_Rc0(ctx, t_ra); 2767 } 2768 } 2769 2770 #if defined(TARGET_PPC64) 2771 #define GEN_PPC64_R2(name, opc1, opc2) \ 2772 static void glue(gen_, name##0)(DisasContext *ctx) \ 2773 { \ 2774 gen_##name(ctx, 0); \ 2775 } \ 2776 \ 2777 static void glue(gen_, name##1)(DisasContext *ctx) \ 2778 { \ 2779 gen_##name(ctx, 1); \ 2780 } 2781 #define GEN_PPC64_R4(name, opc1, opc2) \ 2782 static void glue(gen_, name##0)(DisasContext *ctx) \ 2783 { \ 2784 gen_##name(ctx, 0, 0); \ 2785 } \ 2786 \ 2787 static void glue(gen_, name##1)(DisasContext *ctx) \ 2788 { \ 2789 gen_##name(ctx, 0, 1); \ 2790 } \ 2791 \ 2792 static void glue(gen_, name##2)(DisasContext *ctx) \ 2793 { \ 2794 gen_##name(ctx, 1, 0); \ 2795 } \ 2796 \ 2797 static void glue(gen_, name##3)(DisasContext *ctx) \ 2798 { \ 2799 gen_##name(ctx, 1, 1); \ 2800 } 2801 2802 static void 
gen_rldinm(DisasContext *ctx, int mb, int me, int sh) 2803 { 2804 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2805 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2806 int len = me - mb + 1; 2807 int rsh = (64 - sh) & 63; 2808 2809 if (sh != 0 && len > 0 && me == (63 - sh)) { 2810 tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len); 2811 } else if (me == 63 && rsh + len <= 64) { 2812 tcg_gen_extract_tl(t_ra, t_rs, rsh, len); 2813 } else { 2814 tcg_gen_rotli_tl(t_ra, t_rs, sh); 2815 tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me)); 2816 } 2817 if (unlikely(Rc(ctx->opcode) != 0)) { 2818 gen_set_Rc0(ctx, t_ra); 2819 } 2820 } 2821 2822 /* rldicl - rldicl. */ 2823 static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn) 2824 { 2825 uint32_t sh, mb; 2826 2827 sh = SH(ctx->opcode) | (shn << 5); 2828 mb = MB(ctx->opcode) | (mbn << 5); 2829 gen_rldinm(ctx, mb, 63, sh); 2830 } 2831 GEN_PPC64_R4(rldicl, 0x1E, 0x00); 2832 2833 /* rldicr - rldicr. */ 2834 static inline void gen_rldicr(DisasContext *ctx, int men, int shn) 2835 { 2836 uint32_t sh, me; 2837 2838 sh = SH(ctx->opcode) | (shn << 5); 2839 me = MB(ctx->opcode) | (men << 5); 2840 gen_rldinm(ctx, 0, me, sh); 2841 } 2842 GEN_PPC64_R4(rldicr, 0x1E, 0x02); 2843 2844 /* rldic - rldic. */ 2845 static inline void gen_rldic(DisasContext *ctx, int mbn, int shn) 2846 { 2847 uint32_t sh, mb; 2848 2849 sh = SH(ctx->opcode) | (shn << 5); 2850 mb = MB(ctx->opcode) | (mbn << 5); 2851 gen_rldinm(ctx, mb, 63 - sh, sh); 2852 } 2853 GEN_PPC64_R4(rldic, 0x1E, 0x04); 2854 2855 static void gen_rldnm(DisasContext *ctx, int mb, int me) 2856 { 2857 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2858 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2859 TCGv t_rb = cpu_gpr[rB(ctx->opcode)]; 2860 TCGv t0; 2861 2862 t0 = tcg_temp_new(); 2863 tcg_gen_andi_tl(t0, t_rb, 0x3f); 2864 tcg_gen_rotl_tl(t_ra, t_rs, t0); 2865 tcg_temp_free(t0); 2866 2867 tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me)); 2868 if (unlikely(Rc(ctx->opcode) != 0)) { 2869 gen_set_Rc0(ctx, t_ra); 2870 } 2871 } 2872 2873 /* rldcl - rldcl. */ 2874 static inline void gen_rldcl(DisasContext *ctx, int mbn) 2875 { 2876 uint32_t mb; 2877 2878 mb = MB(ctx->opcode) | (mbn << 5); 2879 gen_rldnm(ctx, mb, 63); 2880 } 2881 GEN_PPC64_R2(rldcl, 0x1E, 0x08); 2882 2883 /* rldcr - rldcr. */ 2884 static inline void gen_rldcr(DisasContext *ctx, int men) 2885 { 2886 uint32_t me; 2887 2888 me = MB(ctx->opcode) | (men << 5); 2889 gen_rldnm(ctx, 0, me); 2890 } 2891 GEN_PPC64_R2(rldcr, 0x1E, 0x09); 2892 2893 /* rldimi - rldimi. */ 2894 static void gen_rldimi(DisasContext *ctx, int mbn, int shn) 2895 { 2896 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2897 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2898 uint32_t sh = SH(ctx->opcode) | (shn << 5); 2899 uint32_t mb = MB(ctx->opcode) | (mbn << 5); 2900 uint32_t me = 63 - sh; 2901 2902 if (mb <= me) { 2903 tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1); 2904 } else { 2905 target_ulong mask = MASK(mb, me); 2906 TCGv t1 = tcg_temp_new(); 2907 2908 tcg_gen_rotli_tl(t1, t_rs, sh); 2909 tcg_gen_andi_tl(t1, t1, mask); 2910 tcg_gen_andi_tl(t_ra, t_ra, ~mask); 2911 tcg_gen_or_tl(t_ra, t_ra, t1); 2912 tcg_temp_free(t1); 2913 } 2914 if (unlikely(Rc(ctx->opcode) != 0)) { 2915 gen_set_Rc0(ctx, t_ra); 2916 } 2917 } 2918 GEN_PPC64_R4(rldimi, 0x1E, 0x06); 2919 #endif 2920 2921 /*** Integer shift ***/ 2922 2923 /* slw & slw. 
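 * The effective shift count for slw is the low 6 bits of RB, and counts of
 * 32..63 must yield zero.  The handler below avoids a branch by smearing
 * bit 5 of RB across a whole register (shift it up to the sign bit, then
 * arithmetic-shift it back down) and using andc to clear RS when that bit
 * is set; the actual shift then uses only the low 5 bits.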
*/ 2924 static void gen_slw(DisasContext *ctx) 2925 { 2926 TCGv t0, t1; 2927 2928 t0 = tcg_temp_new(); 2929 /* AND rS with a mask that is 0 when rB >= 0x20 */ 2930 #if defined(TARGET_PPC64) 2931 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a); 2932 tcg_gen_sari_tl(t0, t0, 0x3f); 2933 #else 2934 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a); 2935 tcg_gen_sari_tl(t0, t0, 0x1f); 2936 #endif 2937 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 2938 t1 = tcg_temp_new(); 2939 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f); 2940 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 2941 tcg_temp_free(t1); 2942 tcg_temp_free(t0); 2943 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 2944 if (unlikely(Rc(ctx->opcode) != 0)) { 2945 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2946 } 2947 } 2948 2949 /* sraw & sraw. */ 2950 static void gen_sraw(DisasContext *ctx) 2951 { 2952 gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], cpu_env, 2953 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2954 if (unlikely(Rc(ctx->opcode) != 0)) { 2955 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2956 } 2957 } 2958 2959 /* srawi & srawi. */ 2960 static void gen_srawi(DisasContext *ctx) 2961 { 2962 int sh = SH(ctx->opcode); 2963 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 2964 TCGv src = cpu_gpr[rS(ctx->opcode)]; 2965 if (sh == 0) { 2966 tcg_gen_ext32s_tl(dst, src); 2967 tcg_gen_movi_tl(cpu_ca, 0); 2968 if (is_isa300(ctx)) { 2969 tcg_gen_movi_tl(cpu_ca32, 0); 2970 } 2971 } else { 2972 TCGv t0; 2973 tcg_gen_ext32s_tl(dst, src); 2974 tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1); 2975 t0 = tcg_temp_new(); 2976 tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1); 2977 tcg_gen_and_tl(cpu_ca, cpu_ca, t0); 2978 tcg_temp_free(t0); 2979 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0); 2980 if (is_isa300(ctx)) { 2981 tcg_gen_mov_tl(cpu_ca32, cpu_ca); 2982 } 2983 tcg_gen_sari_tl(dst, dst, sh); 2984 } 2985 if (unlikely(Rc(ctx->opcode) != 0)) { 2986 gen_set_Rc0(ctx, dst); 2987 } 2988 } 2989 2990 /* srw & srw. */ 2991 static void gen_srw(DisasContext *ctx) 2992 { 2993 TCGv t0, t1; 2994 2995 t0 = tcg_temp_new(); 2996 /* AND rS with a mask that is 0 when rB >= 0x20 */ 2997 #if defined(TARGET_PPC64) 2998 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a); 2999 tcg_gen_sari_tl(t0, t0, 0x3f); 3000 #else 3001 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a); 3002 tcg_gen_sari_tl(t0, t0, 0x1f); 3003 #endif 3004 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 3005 tcg_gen_ext32u_tl(t0, t0); 3006 t1 = tcg_temp_new(); 3007 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f); 3008 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 3009 tcg_temp_free(t1); 3010 tcg_temp_free(t0); 3011 if (unlikely(Rc(ctx->opcode) != 0)) { 3012 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 3013 } 3014 } 3015 3016 #if defined(TARGET_PPC64) 3017 /* sld & sld. */ 3018 static void gen_sld(DisasContext *ctx) 3019 { 3020 TCGv t0, t1; 3021 3022 t0 = tcg_temp_new(); 3023 /* AND rS with a mask that is 0 when rB >= 0x40 */ 3024 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39); 3025 tcg_gen_sari_tl(t0, t0, 0x3f); 3026 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 3027 t1 = tcg_temp_new(); 3028 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f); 3029 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 3030 tcg_temp_free(t1); 3031 tcg_temp_free(t0); 3032 if (unlikely(Rc(ctx->opcode) != 0)) { 3033 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 3034 } 3035 } 3036 3037 /* srad & srad. 
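 * For the algebraic right shifts CA must be set only when 1-bits are
 * shifted out of a negative operand.  srawi/sradi compute that inline
 * (mask of the discarded bits ANDed with the sign, then a setcond), while
 * the variable-count sraw/srad forms are left to helpers.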
*/ 3038 static void gen_srad(DisasContext *ctx) 3039 { 3040 gen_helper_srad(cpu_gpr[rA(ctx->opcode)], cpu_env, 3041 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 3042 if (unlikely(Rc(ctx->opcode) != 0)) { 3043 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 3044 } 3045 } 3046 /* sradi & sradi. */ 3047 static inline void gen_sradi(DisasContext *ctx, int n) 3048 { 3049 int sh = SH(ctx->opcode) + (n << 5); 3050 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 3051 TCGv src = cpu_gpr[rS(ctx->opcode)]; 3052 if (sh == 0) { 3053 tcg_gen_mov_tl(dst, src); 3054 tcg_gen_movi_tl(cpu_ca, 0); 3055 if (is_isa300(ctx)) { 3056 tcg_gen_movi_tl(cpu_ca32, 0); 3057 } 3058 } else { 3059 TCGv t0; 3060 tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1); 3061 t0 = tcg_temp_new(); 3062 tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1); 3063 tcg_gen_and_tl(cpu_ca, cpu_ca, t0); 3064 tcg_temp_free(t0); 3065 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0); 3066 if (is_isa300(ctx)) { 3067 tcg_gen_mov_tl(cpu_ca32, cpu_ca); 3068 } 3069 tcg_gen_sari_tl(dst, src, sh); 3070 } 3071 if (unlikely(Rc(ctx->opcode) != 0)) { 3072 gen_set_Rc0(ctx, dst); 3073 } 3074 } 3075 3076 static void gen_sradi0(DisasContext *ctx) 3077 { 3078 gen_sradi(ctx, 0); 3079 } 3080 3081 static void gen_sradi1(DisasContext *ctx) 3082 { 3083 gen_sradi(ctx, 1); 3084 } 3085 3086 /* extswsli & extswsli. */ 3087 static inline void gen_extswsli(DisasContext *ctx, int n) 3088 { 3089 int sh = SH(ctx->opcode) + (n << 5); 3090 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 3091 TCGv src = cpu_gpr[rS(ctx->opcode)]; 3092 3093 tcg_gen_ext32s_tl(dst, src); 3094 tcg_gen_shli_tl(dst, dst, sh); 3095 if (unlikely(Rc(ctx->opcode) != 0)) { 3096 gen_set_Rc0(ctx, dst); 3097 } 3098 } 3099 3100 static void gen_extswsli0(DisasContext *ctx) 3101 { 3102 gen_extswsli(ctx, 0); 3103 } 3104 3105 static void gen_extswsli1(DisasContext *ctx) 3106 { 3107 gen_extswsli(ctx, 1); 3108 } 3109 3110 /* srd & srd. 
*/ 3111 static void gen_srd(DisasContext *ctx) 3112 { 3113 TCGv t0, t1; 3114 3115 t0 = tcg_temp_new(); 3116 /* AND rS with a mask that is 0 when rB >= 0x40 */ 3117 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39); 3118 tcg_gen_sari_tl(t0, t0, 0x3f); 3119 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 3120 t1 = tcg_temp_new(); 3121 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f); 3122 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 3123 tcg_temp_free(t1); 3124 tcg_temp_free(t0); 3125 if (unlikely(Rc(ctx->opcode) != 0)) { 3126 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 3127 } 3128 } 3129 #endif 3130 3131 /*** Addressing modes ***/ 3132 /* Register indirect with immediate index : EA = (rA|0) + SIMM */ 3133 static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA, 3134 target_long maskl) 3135 { 3136 target_long simm = SIMM(ctx->opcode); 3137 3138 simm &= ~maskl; 3139 if (rA(ctx->opcode) == 0) { 3140 if (NARROW_MODE(ctx)) { 3141 simm = (uint32_t)simm; 3142 } 3143 tcg_gen_movi_tl(EA, simm); 3144 } else if (likely(simm != 0)) { 3145 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm); 3146 if (NARROW_MODE(ctx)) { 3147 tcg_gen_ext32u_tl(EA, EA); 3148 } 3149 } else { 3150 if (NARROW_MODE(ctx)) { 3151 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]); 3152 } else { 3153 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]); 3154 } 3155 } 3156 } 3157 3158 static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA) 3159 { 3160 if (rA(ctx->opcode) == 0) { 3161 if (NARROW_MODE(ctx)) { 3162 tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]); 3163 } else { 3164 tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]); 3165 } 3166 } else { 3167 tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 3168 if (NARROW_MODE(ctx)) { 3169 tcg_gen_ext32u_tl(EA, EA); 3170 } 3171 } 3172 } 3173 3174 static inline void gen_addr_register(DisasContext *ctx, TCGv EA) 3175 { 3176 if (rA(ctx->opcode) == 0) { 3177 tcg_gen_movi_tl(EA, 0); 3178 } else if (NARROW_MODE(ctx)) { 3179 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]); 3180 } else { 3181 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]); 3182 } 3183 } 3184 3185 static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1, 3186 target_long val) 3187 { 3188 tcg_gen_addi_tl(ret, arg1, val); 3189 if (NARROW_MODE(ctx)) { 3190 tcg_gen_ext32u_tl(ret, ret); 3191 } 3192 } 3193 3194 static inline void gen_align_no_le(DisasContext *ctx) 3195 { 3196 gen_exception_err(ctx, POWERPC_EXCP_ALIGN, 3197 (ctx->opcode & 0x03FF0000) | POWERPC_EXCP_ALIGN_LE); 3198 } 3199 3200 /*** Integer load ***/ 3201 #define DEF_MEMOP(op) ((op) | ctx->default_tcg_memop_mask) 3202 #define BSWAP_MEMOP(op) ((op) | (ctx->default_tcg_memop_mask ^ MO_BSWAP)) 3203 3204 #define GEN_QEMU_LOAD_TL(ldop, op) \ 3205 static void glue(gen_qemu_, ldop)(DisasContext *ctx, \ 3206 TCGv val, \ 3207 TCGv addr) \ 3208 { \ 3209 tcg_gen_qemu_ld_tl(val, addr, ctx->mem_idx, op); \ 3210 } 3211 3212 GEN_QEMU_LOAD_TL(ld8u, DEF_MEMOP(MO_UB)) 3213 GEN_QEMU_LOAD_TL(ld16u, DEF_MEMOP(MO_UW)) 3214 GEN_QEMU_LOAD_TL(ld16s, DEF_MEMOP(MO_SW)) 3215 GEN_QEMU_LOAD_TL(ld32u, DEF_MEMOP(MO_UL)) 3216 GEN_QEMU_LOAD_TL(ld32s, DEF_MEMOP(MO_SL)) 3217 3218 GEN_QEMU_LOAD_TL(ld16ur, BSWAP_MEMOP(MO_UW)) 3219 GEN_QEMU_LOAD_TL(ld32ur, BSWAP_MEMOP(MO_UL)) 3220 3221 #define GEN_QEMU_LOAD_64(ldop, op) \ 3222 static void glue(gen_qemu_, glue(ldop, _i64))(DisasContext *ctx, \ 3223 TCGv_i64 val, \ 3224 TCGv addr) \ 3225 { \ 3226 tcg_gen_qemu_ld_i64(val, addr, ctx->mem_idx, op); \ 3227 } 3228 3229 GEN_QEMU_LOAD_64(ld8u, DEF_MEMOP(MO_UB)) 
3230 GEN_QEMU_LOAD_64(ld16u, DEF_MEMOP(MO_UW)) 3231 GEN_QEMU_LOAD_64(ld32u, DEF_MEMOP(MO_UL)) 3232 GEN_QEMU_LOAD_64(ld32s, DEF_MEMOP(MO_SL)) 3233 GEN_QEMU_LOAD_64(ld64, DEF_MEMOP(MO_Q)) 3234 3235 #if defined(TARGET_PPC64) 3236 GEN_QEMU_LOAD_64(ld64ur, BSWAP_MEMOP(MO_Q)) 3237 #endif 3238 3239 #define GEN_QEMU_STORE_TL(stop, op) \ 3240 static void glue(gen_qemu_, stop)(DisasContext *ctx, \ 3241 TCGv val, \ 3242 TCGv addr) \ 3243 { \ 3244 tcg_gen_qemu_st_tl(val, addr, ctx->mem_idx, op); \ 3245 } 3246 3247 #if defined(TARGET_PPC64) || !defined(CONFIG_USER_ONLY) 3248 GEN_QEMU_STORE_TL(st8, DEF_MEMOP(MO_UB)) 3249 #endif 3250 GEN_QEMU_STORE_TL(st16, DEF_MEMOP(MO_UW)) 3251 GEN_QEMU_STORE_TL(st32, DEF_MEMOP(MO_UL)) 3252 3253 GEN_QEMU_STORE_TL(st16r, BSWAP_MEMOP(MO_UW)) 3254 GEN_QEMU_STORE_TL(st32r, BSWAP_MEMOP(MO_UL)) 3255 3256 #define GEN_QEMU_STORE_64(stop, op) \ 3257 static void glue(gen_qemu_, glue(stop, _i64))(DisasContext *ctx, \ 3258 TCGv_i64 val, \ 3259 TCGv addr) \ 3260 { \ 3261 tcg_gen_qemu_st_i64(val, addr, ctx->mem_idx, op); \ 3262 } 3263 3264 GEN_QEMU_STORE_64(st8, DEF_MEMOP(MO_UB)) 3265 GEN_QEMU_STORE_64(st16, DEF_MEMOP(MO_UW)) 3266 GEN_QEMU_STORE_64(st32, DEF_MEMOP(MO_UL)) 3267 GEN_QEMU_STORE_64(st64, DEF_MEMOP(MO_Q)) 3268 3269 #if defined(TARGET_PPC64) 3270 GEN_QEMU_STORE_64(st64r, BSWAP_MEMOP(MO_Q)) 3271 #endif 3272 3273 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \ 3274 static void glue(gen_, name##x)(DisasContext *ctx) \ 3275 { \ 3276 TCGv EA; \ 3277 chk; \ 3278 gen_set_access_type(ctx, ACCESS_INT); \ 3279 EA = tcg_temp_new(); \ 3280 gen_addr_reg_index(ctx, EA); \ 3281 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 3282 tcg_temp_free(EA); \ 3283 } 3284 3285 #define GEN_LDX(name, ldop, opc2, opc3, type) \ 3286 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_NONE) 3287 3288 #define GEN_LDX_HVRM(name, ldop, opc2, opc3, type) \ 3289 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_HVRM) 3290 3291 #define GEN_LDEPX(name, ldop, opc2, opc3) \ 3292 static void glue(gen_, name##epx)(DisasContext *ctx) \ 3293 { \ 3294 TCGv EA; \ 3295 CHK_SV; \ 3296 gen_set_access_type(ctx, ACCESS_INT); \ 3297 EA = tcg_temp_new(); \ 3298 gen_addr_reg_index(ctx, EA); \ 3299 tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_LOAD, ldop);\ 3300 tcg_temp_free(EA); \ 3301 } 3302 3303 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02) 3304 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08) 3305 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00) 3306 #if defined(TARGET_PPC64) 3307 GEN_LDEPX(ld, DEF_MEMOP(MO_Q), 0x1D, 0x00) 3308 #endif 3309 3310 #if defined(TARGET_PPC64) 3311 /* CI load/store variants */ 3312 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST) 3313 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x15, PPC_CILDST) 3314 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST) 3315 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST) 3316 3317 /* lq */ 3318 static void gen_lq(DisasContext *ctx) 3319 { 3320 int ra, rd; 3321 TCGv EA, hi, lo; 3322 3323 /* lq is a legal user mode instruction starting in ISA 2.07 */ 3324 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 3325 bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 3326 3327 if (!legal_in_user_mode && ctx->pr) { 3328 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); 3329 return; 3330 } 3331 3332 if (!le_is_supported && ctx->le_mode) { 3333 gen_align_no_le(ctx); 3334 return; 3335 } 3336 ra = rA(ctx->opcode); 3337 rd = rD(ctx->opcode); 3338 if (unlikely((rd & 1) || rd == ra)) { 3339 gen_inval_exception(ctx, 
POWERPC_EXCP_INVAL_INVAL); 3340 return; 3341 } 3342 3343 gen_set_access_type(ctx, ACCESS_INT); 3344 EA = tcg_temp_new(); 3345 gen_addr_imm_index(ctx, EA, 0x0F); 3346 3347 /* Note that the low part is always in RD+1, even in LE mode. */ 3348 lo = cpu_gpr[rd + 1]; 3349 hi = cpu_gpr[rd]; 3350 3351 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3352 if (HAVE_ATOMIC128) { 3353 TCGv_i32 oi = tcg_temp_new_i32(); 3354 if (ctx->le_mode) { 3355 tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ, ctx->mem_idx)); 3356 gen_helper_lq_le_parallel(lo, cpu_env, EA, oi); 3357 } else { 3358 tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ, ctx->mem_idx)); 3359 gen_helper_lq_be_parallel(lo, cpu_env, EA, oi); 3360 } 3361 tcg_temp_free_i32(oi); 3362 tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh)); 3363 } else { 3364 /* Restart with exclusive lock. */ 3365 gen_helper_exit_atomic(cpu_env); 3366 ctx->base.is_jmp = DISAS_NORETURN; 3367 } 3368 } else if (ctx->le_mode) { 3369 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEQ); 3370 gen_addr_add(ctx, EA, EA, 8); 3371 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEQ); 3372 } else { 3373 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEQ); 3374 gen_addr_add(ctx, EA, EA, 8); 3375 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEQ); 3376 } 3377 tcg_temp_free(EA); 3378 } 3379 #endif 3380 3381 /*** Integer store ***/ 3382 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \ 3383 static void glue(gen_, name##x)(DisasContext *ctx) \ 3384 { \ 3385 TCGv EA; \ 3386 chk; \ 3387 gen_set_access_type(ctx, ACCESS_INT); \ 3388 EA = tcg_temp_new(); \ 3389 gen_addr_reg_index(ctx, EA); \ 3390 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 3391 tcg_temp_free(EA); \ 3392 } 3393 #define GEN_STX(name, stop, opc2, opc3, type) \ 3394 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_NONE) 3395 3396 #define GEN_STX_HVRM(name, stop, opc2, opc3, type) \ 3397 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_HVRM) 3398 3399 #define GEN_STEPX(name, stop, opc2, opc3) \ 3400 static void glue(gen_, name##epx)(DisasContext *ctx) \ 3401 { \ 3402 TCGv EA; \ 3403 CHK_SV; \ 3404 gen_set_access_type(ctx, ACCESS_INT); \ 3405 EA = tcg_temp_new(); \ 3406 gen_addr_reg_index(ctx, EA); \ 3407 tcg_gen_qemu_st_tl( \ 3408 cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_STORE, stop); \ 3409 tcg_temp_free(EA); \ 3410 } 3411 3412 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06) 3413 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C) 3414 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04) 3415 #if defined(TARGET_PPC64) 3416 GEN_STEPX(std, DEF_MEMOP(MO_Q), 0x1d, 0x04) 3417 #endif 3418 3419 #if defined(TARGET_PPC64) 3420 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST) 3421 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST) 3422 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST) 3423 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST) 3424 3425 static void gen_std(DisasContext *ctx) 3426 { 3427 int rs; 3428 TCGv EA; 3429 3430 rs = rS(ctx->opcode); 3431 if ((ctx->opcode & 0x3) == 0x2) { /* stq */ 3432 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 3433 bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 3434 TCGv hi, lo; 3435 3436 if (!(ctx->insns_flags & PPC_64BX)) { 3437 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3438 } 3439 3440 if (!legal_in_user_mode && ctx->pr) { 3441 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); 3442 return; 3443 } 3444 3445 if (!le_is_supported && ctx->le_mode) { 3446 gen_align_no_le(ctx); 3447 return; 3448 } 3449 3450 if (unlikely(rs & 1)) { 3451 
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3452 return; 3453 } 3454 gen_set_access_type(ctx, ACCESS_INT); 3455 EA = tcg_temp_new(); 3456 gen_addr_imm_index(ctx, EA, 0x03); 3457 3458 /* Note that the low part is always in RS+1, even in LE mode. */ 3459 lo = cpu_gpr[rs + 1]; 3460 hi = cpu_gpr[rs]; 3461 3462 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3463 if (HAVE_ATOMIC128) { 3464 TCGv_i32 oi = tcg_temp_new_i32(); 3465 if (ctx->le_mode) { 3466 tcg_gen_movi_i32(oi, make_memop_idx(MO_LE | MO_128, 3467 ctx->mem_idx)); 3468 gen_helper_stq_le_parallel(cpu_env, EA, lo, hi, oi); 3469 } else { 3470 tcg_gen_movi_i32(oi, make_memop_idx(MO_BE | MO_128, 3471 ctx->mem_idx)); 3472 gen_helper_stq_be_parallel(cpu_env, EA, lo, hi, oi); 3473 } 3474 tcg_temp_free_i32(oi); 3475 } else { 3476 /* Restart with exclusive lock. */ 3477 gen_helper_exit_atomic(cpu_env); 3478 ctx->base.is_jmp = DISAS_NORETURN; 3479 } 3480 } else if (ctx->le_mode) { 3481 tcg_gen_qemu_st_i64(lo, EA, ctx->mem_idx, MO_LEQ); 3482 gen_addr_add(ctx, EA, EA, 8); 3483 tcg_gen_qemu_st_i64(hi, EA, ctx->mem_idx, MO_LEQ); 3484 } else { 3485 tcg_gen_qemu_st_i64(hi, EA, ctx->mem_idx, MO_BEQ); 3486 gen_addr_add(ctx, EA, EA, 8); 3487 tcg_gen_qemu_st_i64(lo, EA, ctx->mem_idx, MO_BEQ); 3488 } 3489 tcg_temp_free(EA); 3490 } else { 3491 /* std / stdu */ 3492 if (Rc(ctx->opcode)) { 3493 if (unlikely(rA(ctx->opcode) == 0)) { 3494 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3495 return; 3496 } 3497 } 3498 gen_set_access_type(ctx, ACCESS_INT); 3499 EA = tcg_temp_new(); 3500 gen_addr_imm_index(ctx, EA, 0x03); 3501 gen_qemu_st64_i64(ctx, cpu_gpr[rs], EA); 3502 if (Rc(ctx->opcode)) { 3503 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); 3504 } 3505 tcg_temp_free(EA); 3506 } 3507 } 3508 #endif 3509 /*** Integer load and store with byte reverse ***/ 3510 3511 /* lhbrx */ 3512 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER); 3513 3514 /* lwbrx */ 3515 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER); 3516 3517 #if defined(TARGET_PPC64) 3518 /* ldbrx */ 3519 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE); 3520 /* stdbrx */ 3521 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE); 3522 #endif /* TARGET_PPC64 */ 3523 3524 /* sthbrx */ 3525 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER); 3526 /* stwbrx */ 3527 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER); 3528 3529 /*** Integer load and store multiple ***/ 3530 3531 /* lmw */ 3532 static void gen_lmw(DisasContext *ctx) 3533 { 3534 TCGv t0; 3535 TCGv_i32 t1; 3536 3537 if (ctx->le_mode) { 3538 gen_align_no_le(ctx); 3539 return; 3540 } 3541 gen_set_access_type(ctx, ACCESS_INT); 3542 t0 = tcg_temp_new(); 3543 t1 = tcg_const_i32(rD(ctx->opcode)); 3544 gen_addr_imm_index(ctx, t0, 0); 3545 gen_helper_lmw(cpu_env, t0, t1); 3546 tcg_temp_free(t0); 3547 tcg_temp_free_i32(t1); 3548 } 3549 3550 /* stmw */ 3551 static void gen_stmw(DisasContext *ctx) 3552 { 3553 TCGv t0; 3554 TCGv_i32 t1; 3555 3556 if (ctx->le_mode) { 3557 gen_align_no_le(ctx); 3558 return; 3559 } 3560 gen_set_access_type(ctx, ACCESS_INT); 3561 t0 = tcg_temp_new(); 3562 t1 = tcg_const_i32(rS(ctx->opcode)); 3563 gen_addr_imm_index(ctx, t0, 0); 3564 gen_helper_stmw(cpu_env, t0, t1); 3565 tcg_temp_free(t0); 3566 tcg_temp_free_i32(t1); 3567 } 3568 3569 /*** Integer load and store strings ***/ 3570 3571 /* lswi */ 3572 /* 3573 * PowerPC32 specification says we must generate an exception if rA is 3574 * in the range of registers to be loaded. In an other hand, IBM says 3575 * this is valid, but rA won't be loaded. 
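 * (That is what lsw_reg_in_range() below checks: whether rA lands inside
 * the wrap-around block of nr registers starting at rD.)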
For now, I'll follow the 3576 * spec... 3577 */ 3578 static void gen_lswi(DisasContext *ctx) 3579 { 3580 TCGv t0; 3581 TCGv_i32 t1, t2; 3582 int nb = NB(ctx->opcode); 3583 int start = rD(ctx->opcode); 3584 int ra = rA(ctx->opcode); 3585 int nr; 3586 3587 if (ctx->le_mode) { 3588 gen_align_no_le(ctx); 3589 return; 3590 } 3591 if (nb == 0) { 3592 nb = 32; 3593 } 3594 nr = DIV_ROUND_UP(nb, 4); 3595 if (unlikely(lsw_reg_in_range(start, nr, ra))) { 3596 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX); 3597 return; 3598 } 3599 gen_set_access_type(ctx, ACCESS_INT); 3600 t0 = tcg_temp_new(); 3601 gen_addr_register(ctx, t0); 3602 t1 = tcg_const_i32(nb); 3603 t2 = tcg_const_i32(start); 3604 gen_helper_lsw(cpu_env, t0, t1, t2); 3605 tcg_temp_free(t0); 3606 tcg_temp_free_i32(t1); 3607 tcg_temp_free_i32(t2); 3608 } 3609 3610 /* lswx */ 3611 static void gen_lswx(DisasContext *ctx) 3612 { 3613 TCGv t0; 3614 TCGv_i32 t1, t2, t3; 3615 3616 if (ctx->le_mode) { 3617 gen_align_no_le(ctx); 3618 return; 3619 } 3620 gen_set_access_type(ctx, ACCESS_INT); 3621 t0 = tcg_temp_new(); 3622 gen_addr_reg_index(ctx, t0); 3623 t1 = tcg_const_i32(rD(ctx->opcode)); 3624 t2 = tcg_const_i32(rA(ctx->opcode)); 3625 t3 = tcg_const_i32(rB(ctx->opcode)); 3626 gen_helper_lswx(cpu_env, t0, t1, t2, t3); 3627 tcg_temp_free(t0); 3628 tcg_temp_free_i32(t1); 3629 tcg_temp_free_i32(t2); 3630 tcg_temp_free_i32(t3); 3631 } 3632 3633 /* stswi */ 3634 static void gen_stswi(DisasContext *ctx) 3635 { 3636 TCGv t0; 3637 TCGv_i32 t1, t2; 3638 int nb = NB(ctx->opcode); 3639 3640 if (ctx->le_mode) { 3641 gen_align_no_le(ctx); 3642 return; 3643 } 3644 gen_set_access_type(ctx, ACCESS_INT); 3645 t0 = tcg_temp_new(); 3646 gen_addr_register(ctx, t0); 3647 if (nb == 0) { 3648 nb = 32; 3649 } 3650 t1 = tcg_const_i32(nb); 3651 t2 = tcg_const_i32(rS(ctx->opcode)); 3652 gen_helper_stsw(cpu_env, t0, t1, t2); 3653 tcg_temp_free(t0); 3654 tcg_temp_free_i32(t1); 3655 tcg_temp_free_i32(t2); 3656 } 3657 3658 /* stswx */ 3659 static void gen_stswx(DisasContext *ctx) 3660 { 3661 TCGv t0; 3662 TCGv_i32 t1, t2; 3663 3664 if (ctx->le_mode) { 3665 gen_align_no_le(ctx); 3666 return; 3667 } 3668 gen_set_access_type(ctx, ACCESS_INT); 3669 t0 = tcg_temp_new(); 3670 gen_addr_reg_index(ctx, t0); 3671 t1 = tcg_temp_new_i32(); 3672 tcg_gen_trunc_tl_i32(t1, cpu_xer); 3673 tcg_gen_andi_i32(t1, t1, 0x7F); 3674 t2 = tcg_const_i32(rS(ctx->opcode)); 3675 gen_helper_stsw(cpu_env, t0, t1, t2); 3676 tcg_temp_free(t0); 3677 tcg_temp_free_i32(t1); 3678 tcg_temp_free_i32(t2); 3679 } 3680 3681 /*** Memory synchronisation ***/ 3682 /* eieio */ 3683 static void gen_eieio(DisasContext *ctx) 3684 { 3685 TCGBar bar = TCG_MO_LD_ST; 3686 3687 /* 3688 * POWER9 has a eieio instruction variant using bit 6 as a hint to 3689 * tell the CPU it is a store-forwarding barrier. 3690 */ 3691 if (ctx->opcode & 0x2000000) { 3692 /* 3693 * ISA says that "Reserved fields in instructions are ignored 3694 * by the processor". So ignore the bit 6 on non-POWER9 CPU but 3695 * as this is not an instruction software should be using, 3696 * complain to the user. 
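 * (Bit 6 in the ISA's MSB-first numbering is the 0x2000000 bit of the
 * 32-bit instruction word, i.e. the bit tested just above.)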
3697 */ 3698 if (!(ctx->insns_flags2 & PPC2_ISA300)) { 3699 qemu_log_mask(LOG_GUEST_ERROR, "invalid eieio using bit 6 at @" 3700 TARGET_FMT_lx "\n", ctx->cia); 3701 } else { 3702 bar = TCG_MO_ST_LD; 3703 } 3704 } 3705 3706 tcg_gen_mb(bar | TCG_BAR_SC); 3707 } 3708 3709 #if !defined(CONFIG_USER_ONLY) 3710 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) 3711 { 3712 TCGv_i32 t; 3713 TCGLabel *l; 3714 3715 if (!ctx->lazy_tlb_flush) { 3716 return; 3717 } 3718 l = gen_new_label(); 3719 t = tcg_temp_new_i32(); 3720 tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, tlb_need_flush)); 3721 tcg_gen_brcondi_i32(TCG_COND_EQ, t, 0, l); 3722 if (global) { 3723 gen_helper_check_tlb_flush_global(cpu_env); 3724 } else { 3725 gen_helper_check_tlb_flush_local(cpu_env); 3726 } 3727 gen_set_label(l); 3728 tcg_temp_free_i32(t); 3729 } 3730 #else 3731 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) { } 3732 #endif 3733 3734 /* isync */ 3735 static void gen_isync(DisasContext *ctx) 3736 { 3737 /* 3738 * We need to check for a pending TLB flush. This can only happen in 3739 * kernel mode however so check MSR_PR 3740 */ 3741 if (!ctx->pr) { 3742 gen_check_tlb_flush(ctx, false); 3743 } 3744 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC); 3745 ctx->base.is_jmp = DISAS_EXIT_UPDATE; 3746 } 3747 3748 #define MEMOP_GET_SIZE(x) (1 << ((x) & MO_SIZE)) 3749 3750 static void gen_load_locked(DisasContext *ctx, MemOp memop) 3751 { 3752 TCGv gpr = cpu_gpr[rD(ctx->opcode)]; 3753 TCGv t0 = tcg_temp_new(); 3754 3755 gen_set_access_type(ctx, ACCESS_RES); 3756 gen_addr_reg_index(ctx, t0); 3757 tcg_gen_qemu_ld_tl(gpr, t0, ctx->mem_idx, memop | MO_ALIGN); 3758 tcg_gen_mov_tl(cpu_reserve, t0); 3759 tcg_gen_mov_tl(cpu_reserve_val, gpr); 3760 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_LDAQ); 3761 tcg_temp_free(t0); 3762 } 3763 3764 #define LARX(name, memop) \ 3765 static void gen_##name(DisasContext *ctx) \ 3766 { \ 3767 gen_load_locked(ctx, memop); \ 3768 } 3769 3770 /* lwarx */ 3771 LARX(lbarx, DEF_MEMOP(MO_UB)) 3772 LARX(lharx, DEF_MEMOP(MO_UW)) 3773 LARX(lwarx, DEF_MEMOP(MO_UL)) 3774 3775 static void gen_fetch_inc_conditional(DisasContext *ctx, MemOp memop, 3776 TCGv EA, TCGCond cond, int addend) 3777 { 3778 TCGv t = tcg_temp_new(); 3779 TCGv t2 = tcg_temp_new(); 3780 TCGv u = tcg_temp_new(); 3781 3782 tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop); 3783 tcg_gen_addi_tl(t2, EA, MEMOP_GET_SIZE(memop)); 3784 tcg_gen_qemu_ld_tl(t2, t2, ctx->mem_idx, memop); 3785 tcg_gen_addi_tl(u, t, addend); 3786 3787 /* E.g. for fetch and increment bounded... */ 3788 /* mem(EA,s) = (t != t2 ? u = t + 1 : t) */ 3789 tcg_gen_movcond_tl(cond, u, t, t2, u, t); 3790 tcg_gen_qemu_st_tl(u, EA, ctx->mem_idx, memop); 3791 3792 /* RT = (t != t2 ? 
t : u = 1<<(s*8-1)) */ 3793 tcg_gen_movi_tl(u, 1 << (MEMOP_GET_SIZE(memop) * 8 - 1)); 3794 tcg_gen_movcond_tl(cond, cpu_gpr[rD(ctx->opcode)], t, t2, t, u); 3795 3796 tcg_temp_free(t); 3797 tcg_temp_free(t2); 3798 tcg_temp_free(u); 3799 } 3800 3801 static void gen_ld_atomic(DisasContext *ctx, MemOp memop) 3802 { 3803 uint32_t gpr_FC = FC(ctx->opcode); 3804 TCGv EA = tcg_temp_new(); 3805 int rt = rD(ctx->opcode); 3806 bool need_serial; 3807 TCGv src, dst; 3808 3809 gen_addr_register(ctx, EA); 3810 dst = cpu_gpr[rt]; 3811 src = cpu_gpr[(rt + 1) & 31]; 3812 3813 need_serial = false; 3814 memop |= MO_ALIGN; 3815 switch (gpr_FC) { 3816 case 0: /* Fetch and add */ 3817 tcg_gen_atomic_fetch_add_tl(dst, EA, src, ctx->mem_idx, memop); 3818 break; 3819 case 1: /* Fetch and xor */ 3820 tcg_gen_atomic_fetch_xor_tl(dst, EA, src, ctx->mem_idx, memop); 3821 break; 3822 case 2: /* Fetch and or */ 3823 tcg_gen_atomic_fetch_or_tl(dst, EA, src, ctx->mem_idx, memop); 3824 break; 3825 case 3: /* Fetch and 'and' */ 3826 tcg_gen_atomic_fetch_and_tl(dst, EA, src, ctx->mem_idx, memop); 3827 break; 3828 case 4: /* Fetch and max unsigned */ 3829 tcg_gen_atomic_fetch_umax_tl(dst, EA, src, ctx->mem_idx, memop); 3830 break; 3831 case 5: /* Fetch and max signed */ 3832 tcg_gen_atomic_fetch_smax_tl(dst, EA, src, ctx->mem_idx, memop); 3833 break; 3834 case 6: /* Fetch and min unsigned */ 3835 tcg_gen_atomic_fetch_umin_tl(dst, EA, src, ctx->mem_idx, memop); 3836 break; 3837 case 7: /* Fetch and min signed */ 3838 tcg_gen_atomic_fetch_smin_tl(dst, EA, src, ctx->mem_idx, memop); 3839 break; 3840 case 8: /* Swap */ 3841 tcg_gen_atomic_xchg_tl(dst, EA, src, ctx->mem_idx, memop); 3842 break; 3843 3844 case 16: /* Compare and swap not equal */ 3845 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3846 need_serial = true; 3847 } else { 3848 TCGv t0 = tcg_temp_new(); 3849 TCGv t1 = tcg_temp_new(); 3850 3851 tcg_gen_qemu_ld_tl(t0, EA, ctx->mem_idx, memop); 3852 if ((memop & MO_SIZE) == MO_64 || TARGET_LONG_BITS == 32) { 3853 tcg_gen_mov_tl(t1, src); 3854 } else { 3855 tcg_gen_ext32u_tl(t1, src); 3856 } 3857 tcg_gen_movcond_tl(TCG_COND_NE, t1, t0, t1, 3858 cpu_gpr[(rt + 2) & 31], t0); 3859 tcg_gen_qemu_st_tl(t1, EA, ctx->mem_idx, memop); 3860 tcg_gen_mov_tl(dst, t0); 3861 3862 tcg_temp_free(t0); 3863 tcg_temp_free(t1); 3864 } 3865 break; 3866 3867 case 24: /* Fetch and increment bounded */ 3868 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3869 need_serial = true; 3870 } else { 3871 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, 1); 3872 } 3873 break; 3874 case 25: /* Fetch and increment equal */ 3875 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3876 need_serial = true; 3877 } else { 3878 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_EQ, 1); 3879 } 3880 break; 3881 case 28: /* Fetch and decrement bounded */ 3882 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3883 need_serial = true; 3884 } else { 3885 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, -1); 3886 } 3887 break; 3888 3889 default: 3890 /* invoke data storage error handler */ 3891 gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL); 3892 } 3893 tcg_temp_free(EA); 3894 3895 if (need_serial) { 3896 /* Restart with exclusive lock. 
*/ 3897 gen_helper_exit_atomic(cpu_env); 3898 ctx->base.is_jmp = DISAS_NORETURN; 3899 } 3900 } 3901 3902 static void gen_lwat(DisasContext *ctx) 3903 { 3904 gen_ld_atomic(ctx, DEF_MEMOP(MO_UL)); 3905 } 3906 3907 #ifdef TARGET_PPC64 3908 static void gen_ldat(DisasContext *ctx) 3909 { 3910 gen_ld_atomic(ctx, DEF_MEMOP(MO_Q)); 3911 } 3912 #endif 3913 3914 static void gen_st_atomic(DisasContext *ctx, MemOp memop) 3915 { 3916 uint32_t gpr_FC = FC(ctx->opcode); 3917 TCGv EA = tcg_temp_new(); 3918 TCGv src, discard; 3919 3920 gen_addr_register(ctx, EA); 3921 src = cpu_gpr[rD(ctx->opcode)]; 3922 discard = tcg_temp_new(); 3923 3924 memop |= MO_ALIGN; 3925 switch (gpr_FC) { 3926 case 0: /* add and Store */ 3927 tcg_gen_atomic_add_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3928 break; 3929 case 1: /* xor and Store */ 3930 tcg_gen_atomic_xor_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3931 break; 3932 case 2: /* Or and Store */ 3933 tcg_gen_atomic_or_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3934 break; 3935 case 3: /* 'and' and Store */ 3936 tcg_gen_atomic_and_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3937 break; 3938 case 4: /* Store max unsigned */ 3939 tcg_gen_atomic_umax_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3940 break; 3941 case 5: /* Store max signed */ 3942 tcg_gen_atomic_smax_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3943 break; 3944 case 6: /* Store min unsigned */ 3945 tcg_gen_atomic_umin_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3946 break; 3947 case 7: /* Store min signed */ 3948 tcg_gen_atomic_smin_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3949 break; 3950 case 24: /* Store twin */ 3951 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3952 /* Restart with exclusive lock. */ 3953 gen_helper_exit_atomic(cpu_env); 3954 ctx->base.is_jmp = DISAS_NORETURN; 3955 } else { 3956 TCGv t = tcg_temp_new(); 3957 TCGv t2 = tcg_temp_new(); 3958 TCGv s = tcg_temp_new(); 3959 TCGv s2 = tcg_temp_new(); 3960 TCGv ea_plus_s = tcg_temp_new(); 3961 3962 tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop); 3963 tcg_gen_addi_tl(ea_plus_s, EA, MEMOP_GET_SIZE(memop)); 3964 tcg_gen_qemu_ld_tl(t2, ea_plus_s, ctx->mem_idx, memop); 3965 tcg_gen_movcond_tl(TCG_COND_EQ, s, t, t2, src, t); 3966 tcg_gen_movcond_tl(TCG_COND_EQ, s2, t, t2, src, t2); 3967 tcg_gen_qemu_st_tl(s, EA, ctx->mem_idx, memop); 3968 tcg_gen_qemu_st_tl(s2, ea_plus_s, ctx->mem_idx, memop); 3969 3970 tcg_temp_free(ea_plus_s); 3971 tcg_temp_free(s2); 3972 tcg_temp_free(s); 3973 tcg_temp_free(t2); 3974 tcg_temp_free(t); 3975 } 3976 break; 3977 default: 3978 /* invoke data storage error handler */ 3979 gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL); 3980 } 3981 tcg_temp_free(discard); 3982 tcg_temp_free(EA); 3983 } 3984 3985 static void gen_stwat(DisasContext *ctx) 3986 { 3987 gen_st_atomic(ctx, DEF_MEMOP(MO_UL)); 3988 } 3989 3990 #ifdef TARGET_PPC64 3991 static void gen_stdat(DisasContext *ctx) 3992 { 3993 gen_st_atomic(ctx, DEF_MEMOP(MO_Q)); 3994 } 3995 #endif 3996 3997 static void gen_conditional_store(DisasContext *ctx, MemOp memop) 3998 { 3999 TCGLabel *l1 = gen_new_label(); 4000 TCGLabel *l2 = gen_new_label(); 4001 TCGv t0 = tcg_temp_new(); 4002 int reg = rS(ctx->opcode); 4003 4004 gen_set_access_type(ctx, ACCESS_RES); 4005 gen_addr_reg_index(ctx, t0); 4006 tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1); 4007 tcg_temp_free(t0); 4008 4009 t0 = tcg_temp_new(); 4010 tcg_gen_atomic_cmpxchg_tl(t0, cpu_reserve, cpu_reserve_val, 4011 cpu_gpr[reg], ctx->mem_idx, 4012 DEF_MEMOP(memop) | 
MO_ALIGN); 4013 tcg_gen_setcond_tl(TCG_COND_EQ, t0, t0, cpu_reserve_val); 4014 tcg_gen_shli_tl(t0, t0, CRF_EQ_BIT); 4015 tcg_gen_or_tl(t0, t0, cpu_so); 4016 tcg_gen_trunc_tl_i32(cpu_crf[0], t0); 4017 tcg_temp_free(t0); 4018 tcg_gen_br(l2); 4019 4020 gen_set_label(l1); 4021 4022 /* 4023 * Address mismatch implies failure. But we still need to provide 4024 * the memory barrier semantics of the instruction. 4025 */ 4026 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_STRL); 4027 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 4028 4029 gen_set_label(l2); 4030 tcg_gen_movi_tl(cpu_reserve, -1); 4031 } 4032 4033 #define STCX(name, memop) \ 4034 static void gen_##name(DisasContext *ctx) \ 4035 { \ 4036 gen_conditional_store(ctx, memop); \ 4037 } 4038 4039 STCX(stbcx_, DEF_MEMOP(MO_UB)) 4040 STCX(sthcx_, DEF_MEMOP(MO_UW)) 4041 STCX(stwcx_, DEF_MEMOP(MO_UL)) 4042 4043 #if defined(TARGET_PPC64) 4044 /* ldarx */ 4045 LARX(ldarx, DEF_MEMOP(MO_Q)) 4046 /* stdcx. */ 4047 STCX(stdcx_, DEF_MEMOP(MO_Q)) 4048 4049 /* lqarx */ 4050 static void gen_lqarx(DisasContext *ctx) 4051 { 4052 int rd = rD(ctx->opcode); 4053 TCGv EA, hi, lo; 4054 4055 if (unlikely((rd & 1) || (rd == rA(ctx->opcode)) || 4056 (rd == rB(ctx->opcode)))) { 4057 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 4058 return; 4059 } 4060 4061 gen_set_access_type(ctx, ACCESS_RES); 4062 EA = tcg_temp_new(); 4063 gen_addr_reg_index(ctx, EA); 4064 4065 /* Note that the low part is always in RD+1, even in LE mode. */ 4066 lo = cpu_gpr[rd + 1]; 4067 hi = cpu_gpr[rd]; 4068 4069 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 4070 if (HAVE_ATOMIC128) { 4071 TCGv_i32 oi = tcg_temp_new_i32(); 4072 if (ctx->le_mode) { 4073 tcg_gen_movi_i32(oi, make_memop_idx(MO_LE | MO_128 | MO_ALIGN, 4074 ctx->mem_idx)); 4075 gen_helper_lq_le_parallel(lo, cpu_env, EA, oi); 4076 } else { 4077 tcg_gen_movi_i32(oi, make_memop_idx(MO_BE | MO_128 | MO_ALIGN, 4078 ctx->mem_idx)); 4079 gen_helper_lq_be_parallel(lo, cpu_env, EA, oi); 4080 } 4081 tcg_temp_free_i32(oi); 4082 tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh)); 4083 } else { 4084 /* Restart with exclusive lock. */ 4085 gen_helper_exit_atomic(cpu_env); 4086 ctx->base.is_jmp = DISAS_NORETURN; 4087 tcg_temp_free(EA); 4088 return; 4089 } 4090 } else if (ctx->le_mode) { 4091 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEQ | MO_ALIGN_16); 4092 tcg_gen_mov_tl(cpu_reserve, EA); 4093 gen_addr_add(ctx, EA, EA, 8); 4094 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEQ); 4095 } else { 4096 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEQ | MO_ALIGN_16); 4097 tcg_gen_mov_tl(cpu_reserve, EA); 4098 gen_addr_add(ctx, EA, EA, 8); 4099 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEQ); 4100 } 4101 tcg_temp_free(EA); 4102 4103 tcg_gen_st_tl(hi, cpu_env, offsetof(CPUPPCState, reserve_val)); 4104 tcg_gen_st_tl(lo, cpu_env, offsetof(CPUPPCState, reserve_val2)); 4105 } 4106 4107 /* stqcx. */ 4108 static void gen_stqcx_(DisasContext *ctx) 4109 { 4110 int rs = rS(ctx->opcode); 4111 TCGv EA, hi, lo; 4112 4113 if (unlikely(rs & 1)) { 4114 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 4115 return; 4116 } 4117 4118 gen_set_access_type(ctx, ACCESS_RES); 4119 EA = tcg_temp_new(); 4120 gen_addr_reg_index(ctx, EA); 4121 4122 /* Note that the low part is always in RS+1, even in LE mode. 
*/ 4123 lo = cpu_gpr[rs + 1]; 4124 hi = cpu_gpr[rs]; 4125
4126 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 4127 if (HAVE_CMPXCHG128) {
4128 TCGv_i32 oi = tcg_const_i32(DEF_MEMOP(MO_128) | MO_ALIGN); 4129 if (ctx->le_mode) {
4130 gen_helper_stqcx_le_parallel(cpu_crf[0], cpu_env, 4131 EA, lo, hi, oi); 4132 } else {
4133 gen_helper_stqcx_be_parallel(cpu_crf[0], cpu_env, 4134 EA, lo, hi, oi); 4135 }
4136 tcg_temp_free_i32(oi); 4137 } else { 4138 /* Restart with exclusive lock. */
4139 gen_helper_exit_atomic(cpu_env); 4140 ctx->base.is_jmp = DISAS_NORETURN; 4141 }
4142 tcg_temp_free(EA); 4143 } else { 4144 TCGLabel *lab_fail = gen_new_label();
4145 TCGLabel *lab_over = gen_new_label(); 4146 TCGv_i64 t0 = tcg_temp_new_i64();
4147 TCGv_i64 t1 = tcg_temp_new_i64(); 4148
4149 tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, lab_fail); 4150 tcg_temp_free(EA); 4151
4152 gen_qemu_ld64_i64(ctx, t0, cpu_reserve); 4153 tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode
4154 ? offsetof(CPUPPCState, reserve_val2) 4155 : offsetof(CPUPPCState, reserve_val)));
4156 tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail); 4157
4158 tcg_gen_addi_i64(t0, cpu_reserve, 8); 4159 gen_qemu_ld64_i64(ctx, t0, t0);
4160 tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode 4161 ? offsetof(CPUPPCState, reserve_val)
4162 : offsetof(CPUPPCState, reserve_val2))); 4163 tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail); 4164
4165 /* Success */ 4166 gen_qemu_st64_i64(ctx, ctx->le_mode ? lo : hi, cpu_reserve);
4167 tcg_gen_addi_i64(t0, cpu_reserve, 8); 4168 gen_qemu_st64_i64(ctx, ctx->le_mode ? hi : lo, t0); 4169
4170 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 4171 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ);
4172 tcg_gen_br(lab_over); 4173 4174 gen_set_label(lab_fail);
4175 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 4176 4177 gen_set_label(lab_over);
4178 tcg_gen_movi_tl(cpu_reserve, -1); 4179 tcg_temp_free_i64(t0); 4180 tcg_temp_free_i64(t1);
4181 } 4182 } 4183 #endif /* defined(TARGET_PPC64) */ 4184
4185 /* sync */ 4186 static void gen_sync(DisasContext *ctx) 4187 {
4188 uint32_t l = (ctx->opcode >> 21) & 3; 4189
4190 /* 4191 * We may need to check for a pending TLB flush. 4192 *
4193 * We do this on ptesync (l == 2) on ppc64 and any sync on ppc32. 4194 *
4195 * Additionally, this can only happen in kernel mode, so 4196 * check MSR_PR as well.
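 *
 * The "l" value extracted above is the L field of the sync
 * instruction; on 64-bit server CPUs L == 2 encodes ptesync, which is
 * why that value is singled out here.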
4197 */ 4198 if (((l == 2) || !(ctx->insns_flags & PPC_64B)) && !ctx->pr) { 4199 gen_check_tlb_flush(ctx, true); 4200 } 4201 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC); 4202 } 4203 4204 /* wait */ 4205 static void gen_wait(DisasContext *ctx) 4206 { 4207 TCGv_i32 t0 = tcg_const_i32(1); 4208 tcg_gen_st_i32(t0, cpu_env, 4209 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted)); 4210 tcg_temp_free_i32(t0); 4211 /* Stop translation, as the CPU is supposed to sleep from now */ 4212 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 4213 } 4214 4215 #if defined(TARGET_PPC64) 4216 static void gen_doze(DisasContext *ctx) 4217 { 4218 #if defined(CONFIG_USER_ONLY) 4219 GEN_PRIV; 4220 #else 4221 TCGv_i32 t; 4222 4223 CHK_HV; 4224 t = tcg_const_i32(PPC_PM_DOZE); 4225 gen_helper_pminsn(cpu_env, t); 4226 tcg_temp_free_i32(t); 4227 /* Stop translation, as the CPU is supposed to sleep from now */ 4228 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 4229 #endif /* defined(CONFIG_USER_ONLY) */ 4230 } 4231 4232 static void gen_nap(DisasContext *ctx) 4233 { 4234 #if defined(CONFIG_USER_ONLY) 4235 GEN_PRIV; 4236 #else 4237 TCGv_i32 t; 4238 4239 CHK_HV; 4240 t = tcg_const_i32(PPC_PM_NAP); 4241 gen_helper_pminsn(cpu_env, t); 4242 tcg_temp_free_i32(t); 4243 /* Stop translation, as the CPU is supposed to sleep from now */ 4244 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 4245 #endif /* defined(CONFIG_USER_ONLY) */ 4246 } 4247 4248 static void gen_stop(DisasContext *ctx) 4249 { 4250 #if defined(CONFIG_USER_ONLY) 4251 GEN_PRIV; 4252 #else 4253 TCGv_i32 t; 4254 4255 CHK_HV; 4256 t = tcg_const_i32(PPC_PM_STOP); 4257 gen_helper_pminsn(cpu_env, t); 4258 tcg_temp_free_i32(t); 4259 /* Stop translation, as the CPU is supposed to sleep from now */ 4260 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 4261 #endif /* defined(CONFIG_USER_ONLY) */ 4262 } 4263 4264 static void gen_sleep(DisasContext *ctx) 4265 { 4266 #if defined(CONFIG_USER_ONLY) 4267 GEN_PRIV; 4268 #else 4269 TCGv_i32 t; 4270 4271 CHK_HV; 4272 t = tcg_const_i32(PPC_PM_SLEEP); 4273 gen_helper_pminsn(cpu_env, t); 4274 tcg_temp_free_i32(t); 4275 /* Stop translation, as the CPU is supposed to sleep from now */ 4276 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 4277 #endif /* defined(CONFIG_USER_ONLY) */ 4278 } 4279 4280 static void gen_rvwinkle(DisasContext *ctx) 4281 { 4282 #if defined(CONFIG_USER_ONLY) 4283 GEN_PRIV; 4284 #else 4285 TCGv_i32 t; 4286 4287 CHK_HV; 4288 t = tcg_const_i32(PPC_PM_RVWINKLE); 4289 gen_helper_pminsn(cpu_env, t); 4290 tcg_temp_free_i32(t); 4291 /* Stop translation, as the CPU is supposed to sleep from now */ 4292 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 4293 #endif /* defined(CONFIG_USER_ONLY) */ 4294 } 4295 #endif /* #if defined(TARGET_PPC64) */ 4296 4297 static inline void gen_update_cfar(DisasContext *ctx, target_ulong nip) 4298 { 4299 #if defined(TARGET_PPC64) 4300 if (ctx->has_cfar) { 4301 tcg_gen_movi_tl(cpu_cfar, nip); 4302 } 4303 #endif 4304 } 4305 4306 static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest) 4307 { 4308 return translator_use_goto_tb(&ctx->base, dest); 4309 } 4310 4311 static void gen_lookup_and_goto_ptr(DisasContext *ctx) 4312 { 4313 if (unlikely(ctx->singlestep_enabled)) { 4314 gen_debug_exception(ctx); 4315 } else { 4316 tcg_gen_lookup_and_goto_ptr(); 4317 } 4318 } 4319 4320 /*** Branch ***/ 4321 static void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest) 4322 { 4323 if (NARROW_MODE(ctx)) { 4324 dest = (uint32_t) dest; 4325 } 4326 if (use_goto_tb(ctx, 
dest)) { 4327 tcg_gen_goto_tb(n); 4328 tcg_gen_movi_tl(cpu_nip, dest & ~3); 4329 tcg_gen_exit_tb(ctx->base.tb, n); 4330 } else { 4331 tcg_gen_movi_tl(cpu_nip, dest & ~3); 4332 gen_lookup_and_goto_ptr(ctx); 4333 } 4334 } 4335 4336 static inline void gen_setlr(DisasContext *ctx, target_ulong nip) 4337 { 4338 if (NARROW_MODE(ctx)) { 4339 nip = (uint32_t)nip; 4340 } 4341 tcg_gen_movi_tl(cpu_lr, nip); 4342 } 4343 4344 /* b ba bl bla */ 4345 static void gen_b(DisasContext *ctx) 4346 { 4347 target_ulong li, target; 4348 4349 /* sign extend LI */ 4350 li = LI(ctx->opcode); 4351 li = (li ^ 0x02000000) - 0x02000000; 4352 if (likely(AA(ctx->opcode) == 0)) { 4353 target = ctx->cia + li; 4354 } else { 4355 target = li; 4356 } 4357 if (LK(ctx->opcode)) { 4358 gen_setlr(ctx, ctx->base.pc_next); 4359 } 4360 gen_update_cfar(ctx, ctx->cia); 4361 gen_goto_tb(ctx, 0, target); 4362 ctx->base.is_jmp = DISAS_NORETURN; 4363 } 4364 4365 #define BCOND_IM 0 4366 #define BCOND_LR 1 4367 #define BCOND_CTR 2 4368 #define BCOND_TAR 3 4369 4370 static void gen_bcond(DisasContext *ctx, int type) 4371 { 4372 uint32_t bo = BO(ctx->opcode); 4373 TCGLabel *l1; 4374 TCGv target; 4375 4376 if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) { 4377 target = tcg_temp_local_new(); 4378 if (type == BCOND_CTR) { 4379 tcg_gen_mov_tl(target, cpu_ctr); 4380 } else if (type == BCOND_TAR) { 4381 gen_load_spr(target, SPR_TAR); 4382 } else { 4383 tcg_gen_mov_tl(target, cpu_lr); 4384 } 4385 } else { 4386 target = NULL; 4387 } 4388 if (LK(ctx->opcode)) { 4389 gen_setlr(ctx, ctx->base.pc_next); 4390 } 4391 l1 = gen_new_label(); 4392 if ((bo & 0x4) == 0) { 4393 /* Decrement and test CTR */ 4394 TCGv temp = tcg_temp_new(); 4395 4396 if (type == BCOND_CTR) { 4397 /* 4398 * All ISAs up to v3 describe this form of bcctr as invalid but 4399 * some processors, ie. 64-bit server processors compliant with 4400 * arch 2.x, do implement a "test and decrement" logic instead, 4401 * as described in their respective UMs. This logic involves CTR 4402 * to act as both the branch target and a counter, which makes 4403 * it basically useless and thus never used in real code. 4404 * 4405 * This form was hence chosen to trigger extra micro-architectural 4406 * side-effect on real HW needed for the Spectre v2 workaround. 4407 * It is up to guests that implement such workaround, ie. linux, to 4408 * use this form in a way it just triggers the side-effect without 4409 * doing anything else harmful. 
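 *
 * Note the resulting ordering below: this bcctr form tests CTR first
 * and decrements it afterwards, whereas the regular bc/bclr path
 * decrements CTR before testing it.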
4410 */ 4411 if (unlikely(!is_book3s_arch2x(ctx))) { 4412 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 4413 tcg_temp_free(temp); 4414 tcg_temp_free(target); 4415 return; 4416 } 4417 4418 if (NARROW_MODE(ctx)) { 4419 tcg_gen_ext32u_tl(temp, cpu_ctr); 4420 } else { 4421 tcg_gen_mov_tl(temp, cpu_ctr); 4422 } 4423 if (bo & 0x2) { 4424 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1); 4425 } else { 4426 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1); 4427 } 4428 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1); 4429 } else { 4430 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1); 4431 if (NARROW_MODE(ctx)) { 4432 tcg_gen_ext32u_tl(temp, cpu_ctr); 4433 } else { 4434 tcg_gen_mov_tl(temp, cpu_ctr); 4435 } 4436 if (bo & 0x2) { 4437 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1); 4438 } else { 4439 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1); 4440 } 4441 } 4442 tcg_temp_free(temp); 4443 } 4444 if ((bo & 0x10) == 0) { 4445 /* Test CR */ 4446 uint32_t bi = BI(ctx->opcode); 4447 uint32_t mask = 0x08 >> (bi & 0x03); 4448 TCGv_i32 temp = tcg_temp_new_i32(); 4449 4450 if (bo & 0x8) { 4451 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask); 4452 tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1); 4453 } else { 4454 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask); 4455 tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1); 4456 } 4457 tcg_temp_free_i32(temp); 4458 } 4459 gen_update_cfar(ctx, ctx->cia); 4460 if (type == BCOND_IM) { 4461 target_ulong li = (target_long)((int16_t)(BD(ctx->opcode))); 4462 if (likely(AA(ctx->opcode) == 0)) { 4463 gen_goto_tb(ctx, 0, ctx->cia + li); 4464 } else { 4465 gen_goto_tb(ctx, 0, li); 4466 } 4467 } else { 4468 if (NARROW_MODE(ctx)) { 4469 tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3); 4470 } else { 4471 tcg_gen_andi_tl(cpu_nip, target, ~3); 4472 } 4473 gen_lookup_and_goto_ptr(ctx); 4474 tcg_temp_free(target); 4475 } 4476 if ((bo & 0x14) != 0x14) { 4477 /* fallthrough case */ 4478 gen_set_label(l1); 4479 gen_goto_tb(ctx, 1, ctx->base.pc_next); 4480 } 4481 ctx->base.is_jmp = DISAS_NORETURN; 4482 } 4483 4484 static void gen_bc(DisasContext *ctx) 4485 { 4486 gen_bcond(ctx, BCOND_IM); 4487 } 4488 4489 static void gen_bcctr(DisasContext *ctx) 4490 { 4491 gen_bcond(ctx, BCOND_CTR); 4492 } 4493 4494 static void gen_bclr(DisasContext *ctx) 4495 { 4496 gen_bcond(ctx, BCOND_LR); 4497 } 4498 4499 static void gen_bctar(DisasContext *ctx) 4500 { 4501 gen_bcond(ctx, BCOND_TAR); 4502 } 4503 4504 /*** Condition register logical ***/ 4505 #define GEN_CRLOGIC(name, tcg_op, opc) \ 4506 static void glue(gen_, name)(DisasContext *ctx) \ 4507 { \ 4508 uint8_t bitmask; \ 4509 int sh; \ 4510 TCGv_i32 t0, t1; \ 4511 sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03); \ 4512 t0 = tcg_temp_new_i32(); \ 4513 if (sh > 0) \ 4514 tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh); \ 4515 else if (sh < 0) \ 4516 tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh); \ 4517 else \ 4518 tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]); \ 4519 t1 = tcg_temp_new_i32(); \ 4520 sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03); \ 4521 if (sh > 0) \ 4522 tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh); \ 4523 else if (sh < 0) \ 4524 tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh); \ 4525 else \ 4526 tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]); \ 4527 tcg_op(t0, t0, t1); \ 4528 bitmask = 0x08 >> (crbD(ctx->opcode) & 0x03); \ 4529 tcg_gen_andi_i32(t0, t0, bitmask); \ 4530 tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask); \ 4531 tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], 
t0, t1); \ 4532 tcg_temp_free_i32(t0); \ 4533 tcg_temp_free_i32(t1); \ 4534 } 4535 4536 /* crand */ 4537 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08); 4538 /* crandc */ 4539 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04); 4540 /* creqv */ 4541 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09); 4542 /* crnand */ 4543 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07); 4544 /* crnor */ 4545 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01); 4546 /* cror */ 4547 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E); 4548 /* crorc */ 4549 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D); 4550 /* crxor */ 4551 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06); 4552 4553 /* mcrf */ 4554 static void gen_mcrf(DisasContext *ctx) 4555 { 4556 tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]); 4557 } 4558 4559 /*** System linkage ***/ 4560 4561 /* rfi (supervisor only) */ 4562 static void gen_rfi(DisasContext *ctx) 4563 { 4564 #if defined(CONFIG_USER_ONLY) 4565 GEN_PRIV; 4566 #else 4567 /* 4568 * This instruction doesn't exist anymore on 64-bit server 4569 * processors compliant with arch 2.x 4570 */ 4571 if (is_book3s_arch2x(ctx)) { 4572 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 4573 return; 4574 } 4575 /* Restore CPU state */ 4576 CHK_SV; 4577 gen_icount_io_start(ctx); 4578 gen_update_cfar(ctx, ctx->cia); 4579 gen_helper_rfi(cpu_env); 4580 ctx->base.is_jmp = DISAS_EXIT; 4581 #endif 4582 } 4583 4584 #if defined(TARGET_PPC64) 4585 static void gen_rfid(DisasContext *ctx) 4586 { 4587 #if defined(CONFIG_USER_ONLY) 4588 GEN_PRIV; 4589 #else 4590 /* Restore CPU state */ 4591 CHK_SV; 4592 gen_icount_io_start(ctx); 4593 gen_update_cfar(ctx, ctx->cia); 4594 gen_helper_rfid(cpu_env); 4595 ctx->base.is_jmp = DISAS_EXIT; 4596 #endif 4597 } 4598 4599 #if !defined(CONFIG_USER_ONLY) 4600 static void gen_rfscv(DisasContext *ctx) 4601 { 4602 #if defined(CONFIG_USER_ONLY) 4603 GEN_PRIV; 4604 #else 4605 /* Restore CPU state */ 4606 CHK_SV; 4607 gen_icount_io_start(ctx); 4608 gen_update_cfar(ctx, ctx->cia); 4609 gen_helper_rfscv(cpu_env); 4610 ctx->base.is_jmp = DISAS_EXIT; 4611 #endif 4612 } 4613 #endif 4614 4615 static void gen_hrfid(DisasContext *ctx) 4616 { 4617 #if defined(CONFIG_USER_ONLY) 4618 GEN_PRIV; 4619 #else 4620 /* Restore CPU state */ 4621 CHK_HV; 4622 gen_helper_hrfid(cpu_env); 4623 ctx->base.is_jmp = DISAS_EXIT; 4624 #endif 4625 } 4626 #endif 4627 4628 /* sc */ 4629 #if defined(CONFIG_USER_ONLY) 4630 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER 4631 #else 4632 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL 4633 #define POWERPC_SYSCALL_VECTORED POWERPC_EXCP_SYSCALL_VECTORED 4634 #endif 4635 static void gen_sc(DisasContext *ctx) 4636 { 4637 uint32_t lev; 4638 4639 lev = (ctx->opcode >> 5) & 0x7F; 4640 gen_exception_err(ctx, POWERPC_SYSCALL, lev); 4641 } 4642 4643 #if defined(TARGET_PPC64) 4644 #if !defined(CONFIG_USER_ONLY) 4645 static void gen_scv(DisasContext *ctx) 4646 { 4647 uint32_t lev = (ctx->opcode >> 5) & 0x7F; 4648 4649 /* Set the PC back to the faulting instruction. 
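 * The helper may raise an interrupt (for instance a facility
 * unavailable interrupt when scv is disabled), and that interrupt has
 * to see the address of the scv itself, hence NIP is synchronised
 * before the call.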
*/ 4650 gen_update_nip(ctx, ctx->cia); 4651 gen_helper_scv(cpu_env, tcg_constant_i32(lev)); 4652 4653 ctx->base.is_jmp = DISAS_NORETURN; 4654 } 4655 #endif 4656 #endif 4657 4658 /*** Trap ***/ 4659 4660 /* Check for unconditional traps (always or never) */ 4661 static bool check_unconditional_trap(DisasContext *ctx) 4662 { 4663 /* Trap never */ 4664 if (TO(ctx->opcode) == 0) { 4665 return true; 4666 } 4667 /* Trap always */ 4668 if (TO(ctx->opcode) == 31) { 4669 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_TRAP); 4670 return true; 4671 } 4672 return false; 4673 } 4674 4675 /* tw */ 4676 static void gen_tw(DisasContext *ctx) 4677 { 4678 TCGv_i32 t0; 4679 4680 if (check_unconditional_trap(ctx)) { 4681 return; 4682 } 4683 t0 = tcg_const_i32(TO(ctx->opcode)); 4684 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 4685 t0); 4686 tcg_temp_free_i32(t0); 4687 } 4688 4689 /* twi */ 4690 static void gen_twi(DisasContext *ctx) 4691 { 4692 TCGv t0; 4693 TCGv_i32 t1; 4694 4695 if (check_unconditional_trap(ctx)) { 4696 return; 4697 } 4698 t0 = tcg_const_tl(SIMM(ctx->opcode)); 4699 t1 = tcg_const_i32(TO(ctx->opcode)); 4700 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1); 4701 tcg_temp_free(t0); 4702 tcg_temp_free_i32(t1); 4703 } 4704 4705 #if defined(TARGET_PPC64) 4706 /* td */ 4707 static void gen_td(DisasContext *ctx) 4708 { 4709 TCGv_i32 t0; 4710 4711 if (check_unconditional_trap(ctx)) { 4712 return; 4713 } 4714 t0 = tcg_const_i32(TO(ctx->opcode)); 4715 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 4716 t0); 4717 tcg_temp_free_i32(t0); 4718 } 4719 4720 /* tdi */ 4721 static void gen_tdi(DisasContext *ctx) 4722 { 4723 TCGv t0; 4724 TCGv_i32 t1; 4725 4726 if (check_unconditional_trap(ctx)) { 4727 return; 4728 } 4729 t0 = tcg_const_tl(SIMM(ctx->opcode)); 4730 t1 = tcg_const_i32(TO(ctx->opcode)); 4731 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1); 4732 tcg_temp_free(t0); 4733 tcg_temp_free_i32(t1); 4734 } 4735 #endif 4736 4737 /*** Processor control ***/ 4738 4739 /* mcrxr */ 4740 static void gen_mcrxr(DisasContext *ctx) 4741 { 4742 TCGv_i32 t0 = tcg_temp_new_i32(); 4743 TCGv_i32 t1 = tcg_temp_new_i32(); 4744 TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)]; 4745 4746 tcg_gen_trunc_tl_i32(t0, cpu_so); 4747 tcg_gen_trunc_tl_i32(t1, cpu_ov); 4748 tcg_gen_trunc_tl_i32(dst, cpu_ca); 4749 tcg_gen_shli_i32(t0, t0, 3); 4750 tcg_gen_shli_i32(t1, t1, 2); 4751 tcg_gen_shli_i32(dst, dst, 1); 4752 tcg_gen_or_i32(dst, dst, t0); 4753 tcg_gen_or_i32(dst, dst, t1); 4754 tcg_temp_free_i32(t0); 4755 tcg_temp_free_i32(t1); 4756 4757 tcg_gen_movi_tl(cpu_so, 0); 4758 tcg_gen_movi_tl(cpu_ov, 0); 4759 tcg_gen_movi_tl(cpu_ca, 0); 4760 } 4761 4762 #ifdef TARGET_PPC64 4763 /* mcrxrx */ 4764 static void gen_mcrxrx(DisasContext *ctx) 4765 { 4766 TCGv t0 = tcg_temp_new(); 4767 TCGv t1 = tcg_temp_new(); 4768 TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)]; 4769 4770 /* copy OV and OV32 */ 4771 tcg_gen_shli_tl(t0, cpu_ov, 1); 4772 tcg_gen_or_tl(t0, t0, cpu_ov32); 4773 tcg_gen_shli_tl(t0, t0, 2); 4774 /* copy CA and CA32 */ 4775 tcg_gen_shli_tl(t1, cpu_ca, 1); 4776 tcg_gen_or_tl(t1, t1, cpu_ca32); 4777 tcg_gen_or_tl(t0, t0, t1); 4778 tcg_gen_trunc_tl_i32(dst, t0); 4779 tcg_temp_free(t0); 4780 tcg_temp_free(t1); 4781 } 4782 #endif 4783 4784 /* mfcr mfocrf */ 4785 static void gen_mfcr(DisasContext *ctx) 4786 { 4787 uint32_t crm, crn; 4788 4789 if (likely(ctx->opcode & 0x00100000)) { 4790 crm = CRM(ctx->opcode); 4791 if (likely(crm && ((crm & (crm - 1)) == 0))) { 
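            /*
             * mfocrf form: CRM has exactly one bit set (checked just
             * above), crn is that bit's index, and CR field 7 - crn is
             * copied into the matching nibble of rD. For example,
             * CRM == 0x80 selects CR0 and places it in bits 31:28.
             */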
4792 crn = ctz32(crm); 4793 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]); 4794 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], 4795 cpu_gpr[rD(ctx->opcode)], crn * 4); 4796 } 4797 } else { 4798 TCGv_i32 t0 = tcg_temp_new_i32(); 4799 tcg_gen_mov_i32(t0, cpu_crf[0]); 4800 tcg_gen_shli_i32(t0, t0, 4); 4801 tcg_gen_or_i32(t0, t0, cpu_crf[1]); 4802 tcg_gen_shli_i32(t0, t0, 4); 4803 tcg_gen_or_i32(t0, t0, cpu_crf[2]); 4804 tcg_gen_shli_i32(t0, t0, 4); 4805 tcg_gen_or_i32(t0, t0, cpu_crf[3]); 4806 tcg_gen_shli_i32(t0, t0, 4); 4807 tcg_gen_or_i32(t0, t0, cpu_crf[4]); 4808 tcg_gen_shli_i32(t0, t0, 4); 4809 tcg_gen_or_i32(t0, t0, cpu_crf[5]); 4810 tcg_gen_shli_i32(t0, t0, 4); 4811 tcg_gen_or_i32(t0, t0, cpu_crf[6]); 4812 tcg_gen_shli_i32(t0, t0, 4); 4813 tcg_gen_or_i32(t0, t0, cpu_crf[7]); 4814 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0); 4815 tcg_temp_free_i32(t0); 4816 } 4817 } 4818 4819 /* mfmsr */ 4820 static void gen_mfmsr(DisasContext *ctx) 4821 { 4822 CHK_SV; 4823 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr); 4824 } 4825 4826 /* mfspr */ 4827 static inline void gen_op_mfspr(DisasContext *ctx) 4828 { 4829 void (*read_cb)(DisasContext *ctx, int gprn, int sprn); 4830 uint32_t sprn = SPR(ctx->opcode); 4831 4832 #if defined(CONFIG_USER_ONLY) 4833 read_cb = ctx->spr_cb[sprn].uea_read; 4834 #else 4835 if (ctx->pr) { 4836 read_cb = ctx->spr_cb[sprn].uea_read; 4837 } else if (ctx->hv) { 4838 read_cb = ctx->spr_cb[sprn].hea_read; 4839 } else { 4840 read_cb = ctx->spr_cb[sprn].oea_read; 4841 } 4842 #endif 4843 if (likely(read_cb != NULL)) { 4844 if (likely(read_cb != SPR_NOACCESS)) { 4845 (*read_cb)(ctx, rD(ctx->opcode), sprn); 4846 } else { 4847 /* Privilege exception */ 4848 /* 4849 * This is a hack to avoid warnings when running Linux: 4850 * this OS breaks the PowerPC virtualisation model, 4851 * allowing userland application to read the PVR 4852 */ 4853 if (sprn != SPR_PVR) { 4854 qemu_log_mask(LOG_GUEST_ERROR, "Trying to read privileged spr " 4855 "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn, 4856 ctx->cia); 4857 } 4858 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG); 4859 } 4860 } else { 4861 /* ISA 2.07 defines these as no-ops */ 4862 if ((ctx->insns_flags2 & PPC2_ISA207S) && 4863 (sprn >= 808 && sprn <= 811)) { 4864 /* This is a nop */ 4865 return; 4866 } 4867 /* Not defined */ 4868 qemu_log_mask(LOG_GUEST_ERROR, 4869 "Trying to read invalid spr %d (0x%03x) at " 4870 TARGET_FMT_lx "\n", sprn, sprn, ctx->cia); 4871 4872 /* 4873 * The behaviour depends on MSR:PR and SPR# bit 0x10, it can 4874 * generate a priv, a hv emu or a no-op 4875 */ 4876 if (sprn & 0x10) { 4877 if (ctx->pr) { 4878 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 4879 } 4880 } else { 4881 if (ctx->pr || sprn == 0 || sprn == 4 || sprn == 5 || sprn == 6) { 4882 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 4883 } 4884 } 4885 } 4886 } 4887 4888 static void gen_mfspr(DisasContext *ctx) 4889 { 4890 gen_op_mfspr(ctx); 4891 } 4892 4893 /* mftb */ 4894 static void gen_mftb(DisasContext *ctx) 4895 { 4896 gen_op_mfspr(ctx); 4897 } 4898 4899 /* mtcrf mtocrf*/ 4900 static void gen_mtcrf(DisasContext *ctx) 4901 { 4902 uint32_t crm, crn; 4903 4904 crm = CRM(ctx->opcode); 4905 if (likely((ctx->opcode & 0x00100000))) { 4906 if (crm && ((crm & (crm - 1)) == 0)) { 4907 TCGv_i32 temp = tcg_temp_new_i32(); 4908 crn = ctz32(crm); 4909 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]); 4910 tcg_gen_shri_i32(temp, temp, crn * 4); 4911 tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf); 4912 tcg_temp_free_i32(temp); 
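            /*
             * The code above is the mtocrf form, mirroring gen_mfcr: the
             * nibble of rS at position crn * 4 is extracted and written
             * to CR field 7 - crn (CR fields are numbered from the most
             * significant nibble, hence the reversed index).
             */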
4913 } 4914 } else { 4915 TCGv_i32 temp = tcg_temp_new_i32(); 4916 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]); 4917 for (crn = 0 ; crn < 8 ; crn++) { 4918 if (crm & (1 << crn)) { 4919 tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4); 4920 tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf); 4921 } 4922 } 4923 tcg_temp_free_i32(temp); 4924 } 4925 } 4926 4927 /* mtmsr */ 4928 #if defined(TARGET_PPC64) 4929 static void gen_mtmsrd(DisasContext *ctx) 4930 { 4931 if (unlikely(!is_book3s_arch2x(ctx))) { 4932 gen_invalid(ctx); 4933 return; 4934 } 4935 4936 CHK_SV; 4937 4938 #if !defined(CONFIG_USER_ONLY) 4939 TCGv t0, t1; 4940 target_ulong mask; 4941 4942 t0 = tcg_temp_new(); 4943 t1 = tcg_temp_new(); 4944 4945 gen_icount_io_start(ctx); 4946 4947 if (ctx->opcode & 0x00010000) { 4948 /* L=1 form only updates EE and RI */ 4949 mask = (1ULL << MSR_RI) | (1ULL << MSR_EE); 4950 } else { 4951 /* mtmsrd does not alter HV, S, ME, or LE */ 4952 mask = ~((1ULL << MSR_LE) | (1ULL << MSR_ME) | (1ULL << MSR_S) | 4953 (1ULL << MSR_HV)); 4954 /* 4955 * XXX: we need to update nip before the store if we enter 4956 * power saving mode, we will exit the loop directly from 4957 * ppc_store_msr 4958 */ 4959 gen_update_nip(ctx, ctx->base.pc_next); 4960 } 4961 4962 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], mask); 4963 tcg_gen_andi_tl(t1, cpu_msr, ~mask); 4964 tcg_gen_or_tl(t0, t0, t1); 4965 4966 gen_helper_store_msr(cpu_env, t0); 4967 4968 /* Must stop the translation as machine state (may have) changed */ 4969 ctx->base.is_jmp = DISAS_EXIT_UPDATE; 4970 4971 tcg_temp_free(t0); 4972 tcg_temp_free(t1); 4973 #endif /* !defined(CONFIG_USER_ONLY) */ 4974 } 4975 #endif /* defined(TARGET_PPC64) */ 4976 4977 static void gen_mtmsr(DisasContext *ctx) 4978 { 4979 CHK_SV; 4980 4981 #if !defined(CONFIG_USER_ONLY) 4982 TCGv t0, t1; 4983 target_ulong mask = 0xFFFFFFFF; 4984 4985 t0 = tcg_temp_new(); 4986 t1 = tcg_temp_new(); 4987 4988 gen_icount_io_start(ctx); 4989 if (ctx->opcode & 0x00010000) { 4990 /* L=1 form only updates EE and RI */ 4991 mask &= (1ULL << MSR_RI) | (1ULL << MSR_EE); 4992 } else { 4993 /* mtmsr does not alter S, ME, or LE */ 4994 mask &= ~((1ULL << MSR_LE) | (1ULL << MSR_ME) | (1ULL << MSR_S)); 4995 4996 /* 4997 * XXX: we need to update nip before the store if we enter 4998 * power saving mode, we will exit the loop directly from 4999 * ppc_store_msr 5000 */ 5001 gen_update_nip(ctx, ctx->base.pc_next); 5002 } 5003 5004 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], mask); 5005 tcg_gen_andi_tl(t1, cpu_msr, ~mask); 5006 tcg_gen_or_tl(t0, t0, t1); 5007 5008 gen_helper_store_msr(cpu_env, t0); 5009 5010 /* Must stop the translation as machine state (may have) changed */ 5011 ctx->base.is_jmp = DISAS_EXIT_UPDATE; 5012 5013 tcg_temp_free(t0); 5014 tcg_temp_free(t1); 5015 #endif 5016 } 5017 5018 /* mtspr */ 5019 static void gen_mtspr(DisasContext *ctx) 5020 { 5021 void (*write_cb)(DisasContext *ctx, int sprn, int gprn); 5022 uint32_t sprn = SPR(ctx->opcode); 5023 5024 #if defined(CONFIG_USER_ONLY) 5025 write_cb = ctx->spr_cb[sprn].uea_write; 5026 #else 5027 if (ctx->pr) { 5028 write_cb = ctx->spr_cb[sprn].uea_write; 5029 } else if (ctx->hv) { 5030 write_cb = ctx->spr_cb[sprn].hea_write; 5031 } else { 5032 write_cb = ctx->spr_cb[sprn].oea_write; 5033 } 5034 #endif 5035 if (likely(write_cb != NULL)) { 5036 if (likely(write_cb != SPR_NOACCESS)) { 5037 (*write_cb)(ctx, sprn, rS(ctx->opcode)); 5038 } else { 5039 /* Privilege exception */ 5040 qemu_log_mask(LOG_GUEST_ERROR, "Trying to write privileged spr " 
5041 "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn, 5042 ctx->cia); 5043 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG); 5044 } 5045 } else { 5046 /* ISA 2.07 defines these as no-ops */ 5047 if ((ctx->insns_flags2 & PPC2_ISA207S) && 5048 (sprn >= 808 && sprn <= 811)) { 5049 /* This is a nop */ 5050 return; 5051 } 5052 5053 /* Not defined */ 5054 qemu_log_mask(LOG_GUEST_ERROR, 5055 "Trying to write invalid spr %d (0x%03x) at " 5056 TARGET_FMT_lx "\n", sprn, sprn, ctx->cia); 5057 5058 5059 /* 5060 * The behaviour depends on MSR:PR and SPR# bit 0x10, it can 5061 * generate a priv, a hv emu or a no-op 5062 */ 5063 if (sprn & 0x10) { 5064 if (ctx->pr) { 5065 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 5066 } 5067 } else { 5068 if (ctx->pr || sprn == 0) { 5069 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 5070 } 5071 } 5072 } 5073 } 5074 5075 #if defined(TARGET_PPC64) 5076 /* setb */ 5077 static void gen_setb(DisasContext *ctx) 5078 { 5079 TCGv_i32 t0 = tcg_temp_new_i32(); 5080 TCGv_i32 t8 = tcg_constant_i32(8); 5081 TCGv_i32 tm1 = tcg_constant_i32(-1); 5082 int crf = crfS(ctx->opcode); 5083 5084 tcg_gen_setcondi_i32(TCG_COND_GEU, t0, cpu_crf[crf], 4); 5085 tcg_gen_movcond_i32(TCG_COND_GEU, t0, cpu_crf[crf], t8, tm1, t0); 5086 tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0); 5087 5088 tcg_temp_free_i32(t0); 5089 } 5090 #endif 5091 5092 /*** Cache management ***/ 5093 5094 /* dcbf */ 5095 static void gen_dcbf(DisasContext *ctx) 5096 { 5097 /* XXX: specification says this is treated as a load by the MMU */ 5098 TCGv t0; 5099 gen_set_access_type(ctx, ACCESS_CACHE); 5100 t0 = tcg_temp_new(); 5101 gen_addr_reg_index(ctx, t0); 5102 gen_qemu_ld8u(ctx, t0, t0); 5103 tcg_temp_free(t0); 5104 } 5105 5106 /* dcbfep (external PID dcbf) */ 5107 static void gen_dcbfep(DisasContext *ctx) 5108 { 5109 /* XXX: specification says this is treated as a load by the MMU */ 5110 TCGv t0; 5111 CHK_SV; 5112 gen_set_access_type(ctx, ACCESS_CACHE); 5113 t0 = tcg_temp_new(); 5114 gen_addr_reg_index(ctx, t0); 5115 tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB)); 5116 tcg_temp_free(t0); 5117 } 5118 5119 /* dcbi (Supervisor only) */ 5120 static void gen_dcbi(DisasContext *ctx) 5121 { 5122 #if defined(CONFIG_USER_ONLY) 5123 GEN_PRIV; 5124 #else 5125 TCGv EA, val; 5126 5127 CHK_SV; 5128 EA = tcg_temp_new(); 5129 gen_set_access_type(ctx, ACCESS_CACHE); 5130 gen_addr_reg_index(ctx, EA); 5131 val = tcg_temp_new(); 5132 /* XXX: specification says this should be treated as a store by the MMU */ 5133 gen_qemu_ld8u(ctx, val, EA); 5134 gen_qemu_st8(ctx, val, EA); 5135 tcg_temp_free(val); 5136 tcg_temp_free(EA); 5137 #endif /* defined(CONFIG_USER_ONLY) */ 5138 } 5139 5140 /* dcdst */ 5141 static void gen_dcbst(DisasContext *ctx) 5142 { 5143 /* XXX: specification say this is treated as a load by the MMU */ 5144 TCGv t0; 5145 gen_set_access_type(ctx, ACCESS_CACHE); 5146 t0 = tcg_temp_new(); 5147 gen_addr_reg_index(ctx, t0); 5148 gen_qemu_ld8u(ctx, t0, t0); 5149 tcg_temp_free(t0); 5150 } 5151 5152 /* dcbstep (dcbstep External PID version) */ 5153 static void gen_dcbstep(DisasContext *ctx) 5154 { 5155 /* XXX: specification say this is treated as a load by the MMU */ 5156 TCGv t0; 5157 gen_set_access_type(ctx, ACCESS_CACHE); 5158 t0 = tcg_temp_new(); 5159 gen_addr_reg_index(ctx, t0); 5160 tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB)); 5161 tcg_temp_free(t0); 5162 } 5163 5164 /* dcbt */ 5165 static void gen_dcbt(DisasContext *ctx) 5166 { 5167 /* 5168 * interpreted as no-op 5169 * XXX: 
specification says this is treated as a load by the MMU but
5170 * does not generate any exception 5171 */ 5172 } 5173
5174 /* dcbtep */ 5175 static void gen_dcbtep(DisasContext *ctx) 5176 { 5177 /*
5178 * interpreted as no-op 5179 * XXX: specification says this is treated as a load by the MMU but
5180 * does not generate any exception 5181 */ 5182 } 5183
5184 /* dcbtst */ 5185 static void gen_dcbtst(DisasContext *ctx) 5186 { 5187 /*
5188 * interpreted as no-op 5189 * XXX: specification says this is treated as a load by the MMU but
5190 * does not generate any exception 5191 */ 5192 } 5193
5194 /* dcbtstep */ 5195 static void gen_dcbtstep(DisasContext *ctx) 5196 { 5197 /*
5198 * interpreted as no-op 5199 * XXX: specification says this is treated as a load by the MMU but
5200 * does not generate any exception 5201 */ 5202 } 5203
5204 /* dcbtls */ 5205 static void gen_dcbtls(DisasContext *ctx) 5206 {
5207 /* Always fails locking the cache */ 5208 TCGv t0 = tcg_temp_new();
5209 gen_load_spr(t0, SPR_Exxx_L1CSR0); 5210 tcg_gen_ori_tl(t0, t0, L1CSR0_CUL);
5211 gen_store_spr(SPR_Exxx_L1CSR0, t0); 5212 tcg_temp_free(t0); 5213 } 5214
5215 /* dcbz */ 5216 static void gen_dcbz(DisasContext *ctx) 5217 { 5218 TCGv tcgv_addr;
5219 TCGv_i32 tcgv_op; 5220 5221 gen_set_access_type(ctx, ACCESS_CACHE);
5222 tcgv_addr = tcg_temp_new(); 5223 tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000);
5224 gen_addr_reg_index(ctx, tcgv_addr); 5225 gen_helper_dcbz(cpu_env, tcgv_addr, tcgv_op);
5226 tcg_temp_free(tcgv_addr); 5227 tcg_temp_free_i32(tcgv_op); 5228 } 5229
5230 /* dcbzep */ 5231 static void gen_dcbzep(DisasContext *ctx) 5232 { 5233 TCGv tcgv_addr;
5234 TCGv_i32 tcgv_op; 5235 5236 gen_set_access_type(ctx, ACCESS_CACHE);
5237 tcgv_addr = tcg_temp_new(); 5238 tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000);
5239 gen_addr_reg_index(ctx, tcgv_addr); 5240 gen_helper_dcbzep(cpu_env, tcgv_addr, tcgv_op);
5241 tcg_temp_free(tcgv_addr); 5242 tcg_temp_free_i32(tcgv_op); 5243 } 5244
5245 /* dst / dstt */ 5246 static void gen_dst(DisasContext *ctx) 5247 {
5248 if (rA(ctx->opcode) == 0) { 5249 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5250 } else { 5251 /* interpreted as no-op */ 5252 } 5253 } 5254
5255 /* dstst / dststt */ 5256 static void gen_dstst(DisasContext *ctx) 5257 {
5258 if (rA(ctx->opcode) == 0) { 5259 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5260 } else { 5261 /* interpreted as no-op */ 5262 } 5263 5264 } 5265
5266 /* dss / dssall */ 5267 static void gen_dss(DisasContext *ctx) 5268 {
5269 /* interpreted as no-op */ 5270 } 5271
5272 /* icbi */ 5273 static void gen_icbi(DisasContext *ctx) 5274 { 5275 TCGv t0;
5276 gen_set_access_type(ctx, ACCESS_CACHE); 5277 t0 = tcg_temp_new();
5278 gen_addr_reg_index(ctx, t0); 5279 gen_helper_icbi(cpu_env, t0); 5280 tcg_temp_free(t0); 5281 } 5282
5283 /* icbiep */ 5284 static void gen_icbiep(DisasContext *ctx) 5285 { 5286 TCGv t0;
5287 gen_set_access_type(ctx, ACCESS_CACHE); 5288 t0 = tcg_temp_new();
5289 gen_addr_reg_index(ctx, t0); 5290 gen_helper_icbiep(cpu_env, t0); 5291 tcg_temp_free(t0); 5292 } 5293
5294 /* Optional: */ 5295 /* dcba */ 5296 static void gen_dcba(DisasContext *ctx) 5297 { 5298 /*
5299 * interpreted as no-op 5300 * XXX: specification says this is treated as a store by the MMU
5301 * but does not generate any exception 5302 */ 5303 } 5304
5305 /*** Segment register manipulation ***/ 5306 /* Supervisor only: */ 5307
5308 /* mfsr */ 5309 static void gen_mfsr(DisasContext *ctx) 5310 {
5311 #if defined(CONFIG_USER_ONLY) 5312 GEN_PRIV; 5313 #else
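    /*
     * SR(opcode) below extracts the 4-bit segment register number from
     * the instruction; the load_sr helper then copies that segment
     * register into rD.
     */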
5314 TCGv t0; 5315 5316 CHK_SV; 5317 t0 = tcg_const_tl(SR(ctx->opcode)); 5318 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5319 tcg_temp_free(t0); 5320 #endif /* defined(CONFIG_USER_ONLY) */ 5321 } 5322 5323 /* mfsrin */ 5324 static void gen_mfsrin(DisasContext *ctx) 5325 { 5326 #if defined(CONFIG_USER_ONLY) 5327 GEN_PRIV; 5328 #else 5329 TCGv t0; 5330 5331 CHK_SV; 5332 t0 = tcg_temp_new(); 5333 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 5334 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5335 tcg_temp_free(t0); 5336 #endif /* defined(CONFIG_USER_ONLY) */ 5337 } 5338 5339 /* mtsr */ 5340 static void gen_mtsr(DisasContext *ctx) 5341 { 5342 #if defined(CONFIG_USER_ONLY) 5343 GEN_PRIV; 5344 #else 5345 TCGv t0; 5346 5347 CHK_SV; 5348 t0 = tcg_const_tl(SR(ctx->opcode)); 5349 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 5350 tcg_temp_free(t0); 5351 #endif /* defined(CONFIG_USER_ONLY) */ 5352 } 5353 5354 /* mtsrin */ 5355 static void gen_mtsrin(DisasContext *ctx) 5356 { 5357 #if defined(CONFIG_USER_ONLY) 5358 GEN_PRIV; 5359 #else 5360 TCGv t0; 5361 CHK_SV; 5362 5363 t0 = tcg_temp_new(); 5364 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 5365 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rD(ctx->opcode)]); 5366 tcg_temp_free(t0); 5367 #endif /* defined(CONFIG_USER_ONLY) */ 5368 } 5369 5370 #if defined(TARGET_PPC64) 5371 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */ 5372 5373 /* mfsr */ 5374 static void gen_mfsr_64b(DisasContext *ctx) 5375 { 5376 #if defined(CONFIG_USER_ONLY) 5377 GEN_PRIV; 5378 #else 5379 TCGv t0; 5380 5381 CHK_SV; 5382 t0 = tcg_const_tl(SR(ctx->opcode)); 5383 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5384 tcg_temp_free(t0); 5385 #endif /* defined(CONFIG_USER_ONLY) */ 5386 } 5387 5388 /* mfsrin */ 5389 static void gen_mfsrin_64b(DisasContext *ctx) 5390 { 5391 #if defined(CONFIG_USER_ONLY) 5392 GEN_PRIV; 5393 #else 5394 TCGv t0; 5395 5396 CHK_SV; 5397 t0 = tcg_temp_new(); 5398 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 5399 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5400 tcg_temp_free(t0); 5401 #endif /* defined(CONFIG_USER_ONLY) */ 5402 } 5403 5404 /* mtsr */ 5405 static void gen_mtsr_64b(DisasContext *ctx) 5406 { 5407 #if defined(CONFIG_USER_ONLY) 5408 GEN_PRIV; 5409 #else 5410 TCGv t0; 5411 5412 CHK_SV; 5413 t0 = tcg_const_tl(SR(ctx->opcode)); 5414 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 5415 tcg_temp_free(t0); 5416 #endif /* defined(CONFIG_USER_ONLY) */ 5417 } 5418 5419 /* mtsrin */ 5420 static void gen_mtsrin_64b(DisasContext *ctx) 5421 { 5422 #if defined(CONFIG_USER_ONLY) 5423 GEN_PRIV; 5424 #else 5425 TCGv t0; 5426 5427 CHK_SV; 5428 t0 = tcg_temp_new(); 5429 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 5430 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 5431 tcg_temp_free(t0); 5432 #endif /* defined(CONFIG_USER_ONLY) */ 5433 } 5434 5435 /* slbmte */ 5436 static void gen_slbmte(DisasContext *ctx) 5437 { 5438 #if defined(CONFIG_USER_ONLY) 5439 GEN_PRIV; 5440 #else 5441 CHK_SV; 5442 5443 gen_helper_store_slb(cpu_env, cpu_gpr[rB(ctx->opcode)], 5444 cpu_gpr[rS(ctx->opcode)]); 5445 #endif /* defined(CONFIG_USER_ONLY) */ 5446 } 5447 5448 static void gen_slbmfee(DisasContext *ctx) 5449 { 5450 #if defined(CONFIG_USER_ONLY) 5451 GEN_PRIV; 5452 #else 5453 CHK_SV; 5454 5455 gen_helper_load_slb_esid(cpu_gpr[rS(ctx->opcode)], cpu_env, 5456 cpu_gpr[rB(ctx->opcode)]); 5457 #endif /* defined(CONFIG_USER_ONLY) */ 
5458 } 5459 5460 static void gen_slbmfev(DisasContext *ctx) 5461 { 5462 #if defined(CONFIG_USER_ONLY) 5463 GEN_PRIV; 5464 #else 5465 CHK_SV; 5466 5467 gen_helper_load_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env, 5468 cpu_gpr[rB(ctx->opcode)]); 5469 #endif /* defined(CONFIG_USER_ONLY) */ 5470 } 5471 5472 static void gen_slbfee_(DisasContext *ctx) 5473 { 5474 #if defined(CONFIG_USER_ONLY) 5475 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); 5476 #else 5477 TCGLabel *l1, *l2; 5478 5479 if (unlikely(ctx->pr)) { 5480 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); 5481 return; 5482 } 5483 gen_helper_find_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env, 5484 cpu_gpr[rB(ctx->opcode)]); 5485 l1 = gen_new_label(); 5486 l2 = gen_new_label(); 5487 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 5488 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rS(ctx->opcode)], -1, l1); 5489 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ); 5490 tcg_gen_br(l2); 5491 gen_set_label(l1); 5492 tcg_gen_movi_tl(cpu_gpr[rS(ctx->opcode)], 0); 5493 gen_set_label(l2); 5494 #endif 5495 } 5496 #endif /* defined(TARGET_PPC64) */ 5497 5498 /*** Lookaside buffer management ***/ 5499 /* Optional & supervisor only: */ 5500 5501 /* tlbia */ 5502 static void gen_tlbia(DisasContext *ctx) 5503 { 5504 #if defined(CONFIG_USER_ONLY) 5505 GEN_PRIV; 5506 #else 5507 CHK_HV; 5508 5509 gen_helper_tlbia(cpu_env); 5510 #endif /* defined(CONFIG_USER_ONLY) */ 5511 } 5512 5513 /* tlbiel */ 5514 static void gen_tlbiel(DisasContext *ctx) 5515 { 5516 #if defined(CONFIG_USER_ONLY) 5517 GEN_PRIV; 5518 #else 5519 bool psr = (ctx->opcode >> 17) & 0x1; 5520 5521 if (ctx->pr || (!ctx->hv && !psr && ctx->hr)) { 5522 /* 5523 * tlbiel is privileged except when PSR=0 and HR=1, making it 5524 * hypervisor privileged. 5525 */ 5526 GEN_PRIV; 5527 } 5528 5529 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5530 #endif /* defined(CONFIG_USER_ONLY) */ 5531 } 5532 5533 /* tlbie */ 5534 static void gen_tlbie(DisasContext *ctx) 5535 { 5536 #if defined(CONFIG_USER_ONLY) 5537 GEN_PRIV; 5538 #else 5539 bool psr = (ctx->opcode >> 17) & 0x1; 5540 TCGv_i32 t1; 5541 5542 if (ctx->pr) { 5543 /* tlbie is privileged... */ 5544 GEN_PRIV; 5545 } else if (!ctx->hv) { 5546 if (!ctx->gtse || (!psr && ctx->hr)) { 5547 /* 5548 * ... except when GTSE=0 or when PSR=0 and HR=1, making it 5549 * hypervisor privileged. 
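 *
 * (GTSE is the LPCR "Guest Translation Shootdown Enable" bit: when it
 * is set a supervisor-state guest may issue tlbie itself; when it is
 * clear the instruction becomes hypervisor privileged.)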
5550 */ 5551 GEN_PRIV; 5552 } 5553 } 5554
5555 if (NARROW_MODE(ctx)) { 5556 TCGv t0 = tcg_temp_new();
5557 tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]); 5558 gen_helper_tlbie(cpu_env, t0);
5559 tcg_temp_free(t0); 5560 } else { 5561 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5562 }
5563 t1 = tcg_temp_new_i32(); 5564 tcg_gen_ld_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
5565 tcg_gen_ori_i32(t1, t1, TLB_NEED_GLOBAL_FLUSH);
5566 tcg_gen_st_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
5567 tcg_temp_free_i32(t1); 5568 #endif /* defined(CONFIG_USER_ONLY) */ 5569 } 5570
5571 /* tlbsync */ 5572 static void gen_tlbsync(DisasContext *ctx) 5573 {
5574 #if defined(CONFIG_USER_ONLY) 5575 GEN_PRIV; 5576 #else 5577
5578 if (ctx->gtse) { 5579 CHK_SV; /* If gtse is set then tlbsync is supervisor privileged */
5580 } else { 5581 CHK_HV; /* Else hypervisor privileged */ 5582 } 5583
5584 /* BookS does both ptesync and tlbsync; the flush is already handled on ptesync (see gen_sync), so tlbsync is a nop for server and only BookE needs the check here */
5585 if (ctx->insns_flags & PPC_BOOKE) { 5586 gen_check_tlb_flush(ctx, true); 5587 }
5588 #endif /* defined(CONFIG_USER_ONLY) */ 5589 } 5590
5591 #if defined(TARGET_PPC64) 5592 /* slbia */ 5593 static void gen_slbia(DisasContext *ctx) 5594 {
5595 #if defined(CONFIG_USER_ONLY) 5596 GEN_PRIV; 5597 #else
5598 uint32_t ih = (ctx->opcode >> 21) & 0x7; 5599 TCGv_i32 t0 = tcg_const_i32(ih); 5600
5601 CHK_SV; 5602 5603 gen_helper_slbia(cpu_env, t0); 5604 tcg_temp_free_i32(t0);
5605 #endif /* defined(CONFIG_USER_ONLY) */ 5606 } 5607
5608 /* slbie */ 5609 static void gen_slbie(DisasContext *ctx) 5610 {
5611 #if defined(CONFIG_USER_ONLY) 5612 GEN_PRIV; 5613 #else 5614 CHK_SV; 5615
5616 gen_helper_slbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5617 #endif /* defined(CONFIG_USER_ONLY) */ 5618 } 5619
5620 /* slbieg */ 5621 static void gen_slbieg(DisasContext *ctx) 5622 {
5623 #if defined(CONFIG_USER_ONLY) 5624 GEN_PRIV; 5625 #else 5626 CHK_SV; 5627
5628 gen_helper_slbieg(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5629 #endif /* defined(CONFIG_USER_ONLY) */ 5630 } 5631
5632 /* slbsync */ 5633 static void gen_slbsync(DisasContext *ctx) 5634 {
5635 #if defined(CONFIG_USER_ONLY) 5636 GEN_PRIV; 5637 #else 5638 CHK_SV;
5639 gen_check_tlb_flush(ctx, true); 5640 #endif /* defined(CONFIG_USER_ONLY) */ 5641 } 5642
5643 #endif /* defined(TARGET_PPC64) */ 5644
5645 /*** External control ***/ 5646 /* Optional: */ 5647
5648 /* eciwx */ 5649 static void gen_eciwx(DisasContext *ctx) 5650 { 5651 TCGv t0;
5652 /* Should check EAR[E] ! */ 5653 gen_set_access_type(ctx, ACCESS_EXT);
5654 t0 = tcg_temp_new(); 5655 gen_addr_reg_index(ctx, t0);
5656 tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
5657 DEF_MEMOP(MO_UL | MO_ALIGN)); 5658 tcg_temp_free(t0); 5659 } 5660
5661 /* ecowx */ 5662 static void gen_ecowx(DisasContext *ctx) 5663 { 5664 TCGv t0;
5665 /* Should check EAR[E] ! */ 5666 gen_set_access_type(ctx, ACCESS_EXT);
5667 t0 = tcg_temp_new(); 5668 gen_addr_reg_index(ctx, t0);
5669 tcg_gen_qemu_st_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
5670 DEF_MEMOP(MO_UL | MO_ALIGN)); 5671 tcg_temp_free(t0); 5672 } 5673
5674 /* PowerPC 601 specific instructions */ 5675
5676 /* abs - abs. */ 5677 static void gen_abs(DisasContext *ctx) 5678 {
5679 TCGv d = cpu_gpr[rD(ctx->opcode)]; 5680 TCGv a = cpu_gpr[rA(ctx->opcode)]; 5681
5682 tcg_gen_abs_tl(d, a); 5683 if (unlikely(Rc(ctx->opcode) != 0)) { 5684 gen_set_Rc0(ctx, d); 5685 } 5686 } 5687
5688 /* abso - abso.
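 * 601 absolute value with overflow reporting: as the code below shows,
 * OV is set exactly when rA is 0x80000000, the one value whose
 * negation overflows.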
*/ 5689 static void gen_abso(DisasContext *ctx) 5690 { 5691 TCGv d = cpu_gpr[rD(ctx->opcode)]; 5692 TCGv a = cpu_gpr[rA(ctx->opcode)]; 5693 5694 tcg_gen_setcondi_tl(TCG_COND_EQ, cpu_ov, a, 0x80000000); 5695 tcg_gen_abs_tl(d, a); 5696 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 5697 if (unlikely(Rc(ctx->opcode) != 0)) { 5698 gen_set_Rc0(ctx, d); 5699 } 5700 } 5701 5702 /* clcs */ 5703 static void gen_clcs(DisasContext *ctx) 5704 { 5705 TCGv_i32 t0 = tcg_const_i32(rA(ctx->opcode)); 5706 gen_helper_clcs(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5707 tcg_temp_free_i32(t0); 5708 /* Rc=1 sets CR0 to an undefined state */ 5709 } 5710 5711 /* div - div. */ 5712 static void gen_div(DisasContext *ctx) 5713 { 5714 gen_helper_div(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 5715 cpu_gpr[rB(ctx->opcode)]); 5716 if (unlikely(Rc(ctx->opcode) != 0)) { 5717 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5718 } 5719 } 5720 5721 /* divo - divo. */ 5722 static void gen_divo(DisasContext *ctx) 5723 { 5724 gen_helper_divo(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 5725 cpu_gpr[rB(ctx->opcode)]); 5726 if (unlikely(Rc(ctx->opcode) != 0)) { 5727 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5728 } 5729 } 5730 5731 /* divs - divs. */ 5732 static void gen_divs(DisasContext *ctx) 5733 { 5734 gen_helper_divs(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 5735 cpu_gpr[rB(ctx->opcode)]); 5736 if (unlikely(Rc(ctx->opcode) != 0)) { 5737 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5738 } 5739 } 5740 5741 /* divso - divso. */ 5742 static void gen_divso(DisasContext *ctx) 5743 { 5744 gen_helper_divso(cpu_gpr[rD(ctx->opcode)], cpu_env, 5745 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 5746 if (unlikely(Rc(ctx->opcode) != 0)) { 5747 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5748 } 5749 } 5750 5751 /* doz - doz. */ 5752 static void gen_doz(DisasContext *ctx) 5753 { 5754 TCGLabel *l1 = gen_new_label(); 5755 TCGLabel *l2 = gen_new_label(); 5756 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], 5757 cpu_gpr[rA(ctx->opcode)], l1); 5758 tcg_gen_sub_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 5759 cpu_gpr[rA(ctx->opcode)]); 5760 tcg_gen_br(l2); 5761 gen_set_label(l1); 5762 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 5763 gen_set_label(l2); 5764 if (unlikely(Rc(ctx->opcode) != 0)) { 5765 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5766 } 5767 } 5768 5769 /* dozo - dozo. 
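 * doz with XER overflow reporting: the xor/andc sequence below is the
 * usual sign-bit test for signed overflow of the subtraction rB - rA,
 * and OV/SO are only set when that test fires.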
*/ 5770 static void gen_dozo(DisasContext *ctx) 5771 { 5772 TCGLabel *l1 = gen_new_label(); 5773 TCGLabel *l2 = gen_new_label(); 5774 TCGv t0 = tcg_temp_new(); 5775 TCGv t1 = tcg_temp_new(); 5776 TCGv t2 = tcg_temp_new(); 5777 /* Start with XER OV disabled, the most likely case */ 5778 tcg_gen_movi_tl(cpu_ov, 0); 5779 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], 5780 cpu_gpr[rA(ctx->opcode)], l1); 5781 tcg_gen_sub_tl(t0, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5782 tcg_gen_xor_tl(t1, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5783 tcg_gen_xor_tl(t2, cpu_gpr[rA(ctx->opcode)], t0); 5784 tcg_gen_andc_tl(t1, t1, t2); 5785 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0); 5786 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2); 5787 tcg_gen_movi_tl(cpu_ov, 1); 5788 tcg_gen_movi_tl(cpu_so, 1); 5789 tcg_gen_br(l2); 5790 gen_set_label(l1); 5791 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 5792 gen_set_label(l2); 5793 tcg_temp_free(t0); 5794 tcg_temp_free(t1); 5795 tcg_temp_free(t2); 5796 if (unlikely(Rc(ctx->opcode) != 0)) { 5797 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5798 } 5799 } 5800 5801 /* dozi */ 5802 static void gen_dozi(DisasContext *ctx) 5803 { 5804 target_long simm = SIMM(ctx->opcode); 5805 TCGLabel *l1 = gen_new_label(); 5806 TCGLabel *l2 = gen_new_label(); 5807 tcg_gen_brcondi_tl(TCG_COND_LT, cpu_gpr[rA(ctx->opcode)], simm, l1); 5808 tcg_gen_subfi_tl(cpu_gpr[rD(ctx->opcode)], simm, cpu_gpr[rA(ctx->opcode)]); 5809 tcg_gen_br(l2); 5810 gen_set_label(l1); 5811 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 5812 gen_set_label(l2); 5813 if (unlikely(Rc(ctx->opcode) != 0)) { 5814 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5815 } 5816 } 5817 5818 /* lscbx - lscbx. */ 5819 static void gen_lscbx(DisasContext *ctx) 5820 { 5821 TCGv t0 = tcg_temp_new(); 5822 TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode)); 5823 TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode)); 5824 TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode)); 5825 5826 gen_addr_reg_index(ctx, t0); 5827 gen_helper_lscbx(t0, cpu_env, t0, t1, t2, t3); 5828 tcg_temp_free_i32(t1); 5829 tcg_temp_free_i32(t2); 5830 tcg_temp_free_i32(t3); 5831 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F); 5832 tcg_gen_or_tl(cpu_xer, cpu_xer, t0); 5833 if (unlikely(Rc(ctx->opcode) != 0)) { 5834 gen_set_Rc0(ctx, t0); 5835 } 5836 tcg_temp_free(t0); 5837 } 5838 5839 /* maskg - maskg. */ 5840 static void gen_maskg(DisasContext *ctx) 5841 { 5842 TCGLabel *l1 = gen_new_label(); 5843 TCGv t0 = tcg_temp_new(); 5844 TCGv t1 = tcg_temp_new(); 5845 TCGv t2 = tcg_temp_new(); 5846 TCGv t3 = tcg_temp_new(); 5847 tcg_gen_movi_tl(t3, 0xFFFFFFFF); 5848 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5849 tcg_gen_andi_tl(t1, cpu_gpr[rS(ctx->opcode)], 0x1F); 5850 tcg_gen_addi_tl(t2, t0, 1); 5851 tcg_gen_shr_tl(t2, t3, t2); 5852 tcg_gen_shr_tl(t3, t3, t1); 5853 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], t2, t3); 5854 tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1); 5855 tcg_gen_neg_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5856 gen_set_label(l1); 5857 tcg_temp_free(t0); 5858 tcg_temp_free(t1); 5859 tcg_temp_free(t2); 5860 tcg_temp_free(t3); 5861 if (unlikely(Rc(ctx->opcode) != 0)) { 5862 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5863 } 5864 } 5865 5866 /* maskir - maskir. 
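 * Mask insert from register: rA = (rS & rB) | (rA & ~rB), i.e. the
 * bits of rS selected by the mask in rB are inserted into rA.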
*/ 5867 static void gen_maskir(DisasContext *ctx) 5868 { 5869 TCGv t0 = tcg_temp_new(); 5870 TCGv t1 = tcg_temp_new(); 5871 tcg_gen_and_tl(t0, cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 5872 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 5873 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5874 tcg_temp_free(t0); 5875 tcg_temp_free(t1); 5876 if (unlikely(Rc(ctx->opcode) != 0)) { 5877 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5878 } 5879 } 5880 5881 /* mul - mul. */ 5882 static void gen_mul(DisasContext *ctx) 5883 { 5884 TCGv_i64 t0 = tcg_temp_new_i64(); 5885 TCGv_i64 t1 = tcg_temp_new_i64(); 5886 TCGv t2 = tcg_temp_new(); 5887 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]); 5888 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]); 5889 tcg_gen_mul_i64(t0, t0, t1); 5890 tcg_gen_trunc_i64_tl(t2, t0); 5891 gen_store_spr(SPR_MQ, t2); 5892 tcg_gen_shri_i64(t1, t0, 32); 5893 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1); 5894 tcg_temp_free_i64(t0); 5895 tcg_temp_free_i64(t1); 5896 tcg_temp_free(t2); 5897 if (unlikely(Rc(ctx->opcode) != 0)) { 5898 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5899 } 5900 } 5901 5902 /* mulo - mulo. */ 5903 static void gen_mulo(DisasContext *ctx) 5904 { 5905 TCGLabel *l1 = gen_new_label(); 5906 TCGv_i64 t0 = tcg_temp_new_i64(); 5907 TCGv_i64 t1 = tcg_temp_new_i64(); 5908 TCGv t2 = tcg_temp_new(); 5909 /* Start with XER OV disabled, the most likely case */ 5910 tcg_gen_movi_tl(cpu_ov, 0); 5911 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]); 5912 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]); 5913 tcg_gen_mul_i64(t0, t0, t1); 5914 tcg_gen_trunc_i64_tl(t2, t0); 5915 gen_store_spr(SPR_MQ, t2); 5916 tcg_gen_shri_i64(t1, t0, 32); 5917 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1); 5918 tcg_gen_ext32s_i64(t1, t0); 5919 tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1); 5920 tcg_gen_movi_tl(cpu_ov, 1); 5921 tcg_gen_movi_tl(cpu_so, 1); 5922 gen_set_label(l1); 5923 tcg_temp_free_i64(t0); 5924 tcg_temp_free_i64(t1); 5925 tcg_temp_free(t2); 5926 if (unlikely(Rc(ctx->opcode) != 0)) { 5927 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5928 } 5929 } 5930 5931 /* nabs - nabs. */ 5932 static void gen_nabs(DisasContext *ctx) 5933 { 5934 TCGv d = cpu_gpr[rD(ctx->opcode)]; 5935 TCGv a = cpu_gpr[rA(ctx->opcode)]; 5936 5937 tcg_gen_abs_tl(d, a); 5938 tcg_gen_neg_tl(d, d); 5939 if (unlikely(Rc(ctx->opcode) != 0)) { 5940 gen_set_Rc0(ctx, d); 5941 } 5942 } 5943 5944 /* nabso - nabso. */ 5945 static void gen_nabso(DisasContext *ctx) 5946 { 5947 TCGv d = cpu_gpr[rD(ctx->opcode)]; 5948 TCGv a = cpu_gpr[rA(ctx->opcode)]; 5949 5950 tcg_gen_abs_tl(d, a); 5951 tcg_gen_neg_tl(d, d); 5952 /* nabs never overflows */ 5953 tcg_gen_movi_tl(cpu_ov, 0); 5954 if (unlikely(Rc(ctx->opcode) != 0)) { 5955 gen_set_Rc0(ctx, d); 5956 } 5957 } 5958 5959 /* rlmi - rlmi. */ 5960 static void gen_rlmi(DisasContext *ctx) 5961 { 5962 uint32_t mb = MB(ctx->opcode); 5963 uint32_t me = ME(ctx->opcode); 5964 TCGv t0 = tcg_temp_new(); 5965 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5966 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 5967 tcg_gen_andi_tl(t0, t0, MASK(mb, me)); 5968 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 5969 ~MASK(mb, me)); 5970 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], t0); 5971 tcg_temp_free(t0); 5972 if (unlikely(Rc(ctx->opcode) != 0)) { 5973 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5974 } 5975 } 5976 5977 /* rrib - rrib. 
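 * Rotate right and insert bit: the most significant bit of rS is
 * shifted right by rB[27:31] and inserted into rA under the matching
 * single-bit mask (the shifted 0x80000000 constant below).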
*/ 5978 static void gen_rrib(DisasContext *ctx) 5979 { 5980 TCGv t0 = tcg_temp_new(); 5981 TCGv t1 = tcg_temp_new(); 5982 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5983 tcg_gen_movi_tl(t1, 0x80000000); 5984 tcg_gen_shr_tl(t1, t1, t0); 5985 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 5986 tcg_gen_and_tl(t0, t0, t1); 5987 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], t1); 5988 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5989 tcg_temp_free(t0); 5990 tcg_temp_free(t1); 5991 if (unlikely(Rc(ctx->opcode) != 0)) { 5992 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5993 } 5994 } 5995 5996 /* sle - sle. */ 5997 static void gen_sle(DisasContext *ctx) 5998 { 5999 TCGv t0 = tcg_temp_new(); 6000 TCGv t1 = tcg_temp_new(); 6001 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 6002 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 6003 tcg_gen_subfi_tl(t1, 32, t1); 6004 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 6005 tcg_gen_or_tl(t1, t0, t1); 6006 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 6007 gen_store_spr(SPR_MQ, t1); 6008 tcg_temp_free(t0); 6009 tcg_temp_free(t1); 6010 if (unlikely(Rc(ctx->opcode) != 0)) { 6011 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6012 } 6013 } 6014 6015 /* sleq - sleq. */ 6016 static void gen_sleq(DisasContext *ctx) 6017 { 6018 TCGv t0 = tcg_temp_new(); 6019 TCGv t1 = tcg_temp_new(); 6020 TCGv t2 = tcg_temp_new(); 6021 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 6022 tcg_gen_movi_tl(t2, 0xFFFFFFFF); 6023 tcg_gen_shl_tl(t2, t2, t0); 6024 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 6025 gen_load_spr(t1, SPR_MQ); 6026 gen_store_spr(SPR_MQ, t0); 6027 tcg_gen_and_tl(t0, t0, t2); 6028 tcg_gen_andc_tl(t1, t1, t2); 6029 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 6030 tcg_temp_free(t0); 6031 tcg_temp_free(t1); 6032 tcg_temp_free(t2); 6033 if (unlikely(Rc(ctx->opcode) != 0)) { 6034 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6035 } 6036 } 6037 6038 /* sliq - sliq. */ 6039 static void gen_sliq(DisasContext *ctx) 6040 { 6041 int sh = SH(ctx->opcode); 6042 TCGv t0 = tcg_temp_new(); 6043 TCGv t1 = tcg_temp_new(); 6044 tcg_gen_shli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 6045 tcg_gen_shri_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 6046 tcg_gen_or_tl(t1, t0, t1); 6047 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 6048 gen_store_spr(SPR_MQ, t1); 6049 tcg_temp_free(t0); 6050 tcg_temp_free(t1); 6051 if (unlikely(Rc(ctx->opcode) != 0)) { 6052 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6053 } 6054 } 6055 6056 /* slliq - slliq. */ 6057 static void gen_slliq(DisasContext *ctx) 6058 { 6059 int sh = SH(ctx->opcode); 6060 TCGv t0 = tcg_temp_new(); 6061 TCGv t1 = tcg_temp_new(); 6062 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 6063 gen_load_spr(t1, SPR_MQ); 6064 gen_store_spr(SPR_MQ, t0); 6065 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU << sh)); 6066 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU << sh)); 6067 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 6068 tcg_temp_free(t0); 6069 tcg_temp_free(t1); 6070 if (unlikely(Rc(ctx->opcode) != 0)) { 6071 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6072 } 6073 } 6074 6075 /* sllq - sllq. 
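 * Shift left long with MQ: for shift amounts below 32 the vacated
 * low-order bits are filled from MQ, while for amounts of 32 and above
 * the result is taken from MQ under the shifted mask.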
*/ 6076 static void gen_sllq(DisasContext *ctx) 6077 { 6078 TCGLabel *l1 = gen_new_label(); 6079 TCGLabel *l2 = gen_new_label(); 6080 TCGv t0 = tcg_temp_local_new(); 6081 TCGv t1 = tcg_temp_local_new(); 6082 TCGv t2 = tcg_temp_local_new(); 6083 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 6084 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 6085 tcg_gen_shl_tl(t1, t1, t2); 6086 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 6087 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 6088 gen_load_spr(t0, SPR_MQ); 6089 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 6090 tcg_gen_br(l2); 6091 gen_set_label(l1); 6092 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 6093 gen_load_spr(t2, SPR_MQ); 6094 tcg_gen_andc_tl(t1, t2, t1); 6095 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 6096 gen_set_label(l2); 6097 tcg_temp_free(t0); 6098 tcg_temp_free(t1); 6099 tcg_temp_free(t2); 6100 if (unlikely(Rc(ctx->opcode) != 0)) { 6101 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6102 } 6103 } 6104 6105 /* slq - slq. */ 6106 static void gen_slq(DisasContext *ctx) 6107 { 6108 TCGLabel *l1 = gen_new_label(); 6109 TCGv t0 = tcg_temp_new(); 6110 TCGv t1 = tcg_temp_new(); 6111 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 6112 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 6113 tcg_gen_subfi_tl(t1, 32, t1); 6114 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 6115 tcg_gen_or_tl(t1, t0, t1); 6116 gen_store_spr(SPR_MQ, t1); 6117 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20); 6118 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 6119 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1); 6120 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 6121 gen_set_label(l1); 6122 tcg_temp_free(t0); 6123 tcg_temp_free(t1); 6124 if (unlikely(Rc(ctx->opcode) != 0)) { 6125 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6126 } 6127 } 6128 6129 /* sraiq - sraiq. */ 6130 static void gen_sraiq(DisasContext *ctx) 6131 { 6132 int sh = SH(ctx->opcode); 6133 TCGLabel *l1 = gen_new_label(); 6134 TCGv t0 = tcg_temp_new(); 6135 TCGv t1 = tcg_temp_new(); 6136 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 6137 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 6138 tcg_gen_or_tl(t0, t0, t1); 6139 gen_store_spr(SPR_MQ, t0); 6140 tcg_gen_movi_tl(cpu_ca, 0); 6141 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1); 6142 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1); 6143 tcg_gen_movi_tl(cpu_ca, 1); 6144 gen_set_label(l1); 6145 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh); 6146 tcg_temp_free(t0); 6147 tcg_temp_free(t1); 6148 if (unlikely(Rc(ctx->opcode) != 0)) { 6149 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6150 } 6151 } 6152 6153 /* sraq - sraq. 
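 *
 * Shift Right Algebraic with MQ: MQ receives the rotated value of rS,
 * rA receives the algebraic (sign-extending) shift, and CA is set only
 * when rS is negative and nonzero bits are shifted out.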
*/ 6154 static void gen_sraq(DisasContext *ctx) 6155 { 6156 TCGLabel *l1 = gen_new_label(); 6157 TCGLabel *l2 = gen_new_label(); 6158 TCGv t0 = tcg_temp_new(); 6159 TCGv t1 = tcg_temp_local_new(); 6160 TCGv t2 = tcg_temp_local_new(); 6161 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 6162 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 6163 tcg_gen_sar_tl(t1, cpu_gpr[rS(ctx->opcode)], t2); 6164 tcg_gen_subfi_tl(t2, 32, t2); 6165 tcg_gen_shl_tl(t2, cpu_gpr[rS(ctx->opcode)], t2); 6166 tcg_gen_or_tl(t0, t0, t2); 6167 gen_store_spr(SPR_MQ, t0); 6168 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 6169 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l1); 6170 tcg_gen_mov_tl(t2, cpu_gpr[rS(ctx->opcode)]); 6171 tcg_gen_sari_tl(t1, cpu_gpr[rS(ctx->opcode)], 31); 6172 gen_set_label(l1); 6173 tcg_temp_free(t0); 6174 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t1); 6175 tcg_gen_movi_tl(cpu_ca, 0); 6176 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2); 6177 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l2); 6178 tcg_gen_movi_tl(cpu_ca, 1); 6179 gen_set_label(l2); 6180 tcg_temp_free(t1); 6181 tcg_temp_free(t2); 6182 if (unlikely(Rc(ctx->opcode) != 0)) { 6183 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6184 } 6185 } 6186 6187 /* sre - sre. */ 6188 static void gen_sre(DisasContext *ctx) 6189 { 6190 TCGv t0 = tcg_temp_new(); 6191 TCGv t1 = tcg_temp_new(); 6192 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 6193 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 6194 tcg_gen_subfi_tl(t1, 32, t1); 6195 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 6196 tcg_gen_or_tl(t1, t0, t1); 6197 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 6198 gen_store_spr(SPR_MQ, t1); 6199 tcg_temp_free(t0); 6200 tcg_temp_free(t1); 6201 if (unlikely(Rc(ctx->opcode) != 0)) { 6202 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6203 } 6204 } 6205 6206 /* srea - srea. 
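 *
 * Shift Right Extended Algebraic: MQ receives rS rotated right by the
 * shift count from rB, while rA receives the algebraic shift of rS.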
*/ 6207 static void gen_srea(DisasContext *ctx) 6208 { 6209 TCGv t0 = tcg_temp_new(); 6210 TCGv t1 = tcg_temp_new(); 6211 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 6212 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 6213 gen_store_spr(SPR_MQ, t0); 6214 tcg_gen_sar_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t1); 6215 tcg_temp_free(t0); 6216 tcg_temp_free(t1); 6217 if (unlikely(Rc(ctx->opcode) != 0)) { 6218 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6219 } 6220 } 6221 6222 /* sreq */ 6223 static void gen_sreq(DisasContext *ctx) 6224 { 6225 TCGv t0 = tcg_temp_new(); 6226 TCGv t1 = tcg_temp_new(); 6227 TCGv t2 = tcg_temp_new(); 6228 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 6229 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 6230 tcg_gen_shr_tl(t1, t1, t0); 6231 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 6232 gen_load_spr(t2, SPR_MQ); 6233 gen_store_spr(SPR_MQ, t0); 6234 tcg_gen_and_tl(t0, t0, t1); 6235 tcg_gen_andc_tl(t2, t2, t1); 6236 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t2); 6237 tcg_temp_free(t0); 6238 tcg_temp_free(t1); 6239 tcg_temp_free(t2); 6240 if (unlikely(Rc(ctx->opcode) != 0)) { 6241 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6242 } 6243 } 6244 6245 /* sriq */ 6246 static void gen_sriq(DisasContext *ctx) 6247 { 6248 int sh = SH(ctx->opcode); 6249 TCGv t0 = tcg_temp_new(); 6250 TCGv t1 = tcg_temp_new(); 6251 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 6252 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 6253 tcg_gen_or_tl(t1, t0, t1); 6254 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 6255 gen_store_spr(SPR_MQ, t1); 6256 tcg_temp_free(t0); 6257 tcg_temp_free(t1); 6258 if (unlikely(Rc(ctx->opcode) != 0)) { 6259 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6260 } 6261 } 6262 6263 /* srliq */ 6264 static void gen_srliq(DisasContext *ctx) 6265 { 6266 int sh = SH(ctx->opcode); 6267 TCGv t0 = tcg_temp_new(); 6268 TCGv t1 = tcg_temp_new(); 6269 tcg_gen_rotri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 6270 gen_load_spr(t1, SPR_MQ); 6271 gen_store_spr(SPR_MQ, t0); 6272 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU >> sh)); 6273 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU >> sh)); 6274 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 6275 tcg_temp_free(t0); 6276 tcg_temp_free(t1); 6277 if (unlikely(Rc(ctx->opcode) != 0)) { 6278 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6279 } 6280 } 6281 6282 /* srlq */ 6283 static void gen_srlq(DisasContext *ctx) 6284 { 6285 TCGLabel *l1 = gen_new_label(); 6286 TCGLabel *l2 = gen_new_label(); 6287 TCGv t0 = tcg_temp_local_new(); 6288 TCGv t1 = tcg_temp_local_new(); 6289 TCGv t2 = tcg_temp_local_new(); 6290 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 6291 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 6292 tcg_gen_shr_tl(t2, t1, t2); 6293 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 6294 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 6295 gen_load_spr(t0, SPR_MQ); 6296 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t2); 6297 tcg_gen_br(l2); 6298 gen_set_label(l1); 6299 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 6300 tcg_gen_and_tl(t0, t0, t2); 6301 gen_load_spr(t1, SPR_MQ); 6302 tcg_gen_andc_tl(t1, t1, t2); 6303 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 6304 gen_set_label(l2); 6305 tcg_temp_free(t0); 6306 tcg_temp_free(t1); 6307 tcg_temp_free(t2); 6308 if (unlikely(Rc(ctx->opcode) != 0)) { 6309 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6310 } 6311 } 6312 6313 /* srq */ 6314 static void gen_srq(DisasContext *ctx) 6315 { 6316 TCGLabel *l1 = gen_new_label(); 6317 TCGv t0 = tcg_temp_new(); 6318 TCGv t1 = 
tcg_temp_new(); 6319 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 6320 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 6321 tcg_gen_subfi_tl(t1, 32, t1); 6322 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 6323 tcg_gen_or_tl(t1, t0, t1); 6324 gen_store_spr(SPR_MQ, t1); 6325 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20); 6326 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 6327 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 6328 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 6329 gen_set_label(l1); 6330 tcg_temp_free(t0); 6331 tcg_temp_free(t1); 6332 if (unlikely(Rc(ctx->opcode) != 0)) { 6333 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6334 } 6335 } 6336 6337 /* PowerPC 602 specific instructions */ 6338 6339 /* dsa */ 6340 static void gen_dsa(DisasContext *ctx) 6341 { 6342 /* XXX: TODO */ 6343 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6344 } 6345 6346 /* esa */ 6347 static void gen_esa(DisasContext *ctx) 6348 { 6349 /* XXX: TODO */ 6350 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6351 } 6352 6353 /* mfrom */ 6354 static void gen_mfrom(DisasContext *ctx) 6355 { 6356 #if defined(CONFIG_USER_ONLY) 6357 GEN_PRIV; 6358 #else 6359 CHK_SV; 6360 gen_helper_602_mfrom(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 6361 #endif /* defined(CONFIG_USER_ONLY) */ 6362 } 6363 6364 /* 602 - 603 - G2 TLB management */ 6365 6366 /* tlbld */ 6367 static void gen_tlbld_6xx(DisasContext *ctx) 6368 { 6369 #if defined(CONFIG_USER_ONLY) 6370 GEN_PRIV; 6371 #else 6372 CHK_SV; 6373 gen_helper_6xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]); 6374 #endif /* defined(CONFIG_USER_ONLY) */ 6375 } 6376 6377 /* tlbli */ 6378 static void gen_tlbli_6xx(DisasContext *ctx) 6379 { 6380 #if defined(CONFIG_USER_ONLY) 6381 GEN_PRIV; 6382 #else 6383 CHK_SV; 6384 gen_helper_6xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]); 6385 #endif /* defined(CONFIG_USER_ONLY) */ 6386 } 6387 6388 /* 74xx TLB management */ 6389 6390 /* tlbld */ 6391 static void gen_tlbld_74xx(DisasContext *ctx) 6392 { 6393 #if defined(CONFIG_USER_ONLY) 6394 GEN_PRIV; 6395 #else 6396 CHK_SV; 6397 gen_helper_74xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]); 6398 #endif /* defined(CONFIG_USER_ONLY) */ 6399 } 6400 6401 /* tlbli */ 6402 static void gen_tlbli_74xx(DisasContext *ctx) 6403 { 6404 #if defined(CONFIG_USER_ONLY) 6405 GEN_PRIV; 6406 #else 6407 CHK_SV; 6408 gen_helper_74xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]); 6409 #endif /* defined(CONFIG_USER_ONLY) */ 6410 } 6411 6412 /* POWER instructions not in PowerPC 601 */ 6413 6414 /* clf */ 6415 static void gen_clf(DisasContext *ctx) 6416 { 6417 /* Cache line flush: implemented as no-op */ 6418 } 6419 6420 /* cli */ 6421 static void gen_cli(DisasContext *ctx) 6422 { 6423 #if defined(CONFIG_USER_ONLY) 6424 GEN_PRIV; 6425 #else 6426 /* Cache line invalidate: privileged and treated as no-op */ 6427 CHK_SV; 6428 #endif /* defined(CONFIG_USER_ONLY) */ 6429 } 6430 6431 /* dclst */ 6432 static void gen_dclst(DisasContext *ctx) 6433 { 6434 /* Data cache line store: treated as no-op */ 6435 } 6436 6437 static void gen_mfsri(DisasContext *ctx) 6438 { 6439 #if defined(CONFIG_USER_ONLY) 6440 GEN_PRIV; 6441 #else 6442 int ra = rA(ctx->opcode); 6443 int rd = rD(ctx->opcode); 6444 TCGv t0; 6445 6446 CHK_SV; 6447 t0 = tcg_temp_new(); 6448 gen_addr_reg_index(ctx, t0); 6449 tcg_gen_extract_tl(t0, t0, 28, 4); 6450 gen_helper_load_sr(cpu_gpr[rd], cpu_env, t0); 6451 tcg_temp_free(t0); 6452 if (ra != 0 && ra != rd) { 6453 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rd]); 6454 } 6455 #endif /* defined(CONFIG_USER_ONLY) */ 
6456 } 6457 6458 static void gen_rac(DisasContext *ctx) 6459 { 6460 #if defined(CONFIG_USER_ONLY) 6461 GEN_PRIV; 6462 #else 6463 TCGv t0; 6464 6465 CHK_SV; 6466 t0 = tcg_temp_new(); 6467 gen_addr_reg_index(ctx, t0); 6468 gen_helper_rac(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 6469 tcg_temp_free(t0); 6470 #endif /* defined(CONFIG_USER_ONLY) */ 6471 } 6472 6473 static void gen_rfsvc(DisasContext *ctx) 6474 { 6475 #if defined(CONFIG_USER_ONLY) 6476 GEN_PRIV; 6477 #else 6478 CHK_SV; 6479 6480 gen_helper_rfsvc(cpu_env); 6481 ctx->base.is_jmp = DISAS_EXIT; 6482 #endif /* defined(CONFIG_USER_ONLY) */ 6483 } 6484 6485 /* svc is not implemented for now */ 6486 6487 /* BookE specific instructions */ 6488 6489 /* XXX: not implemented on 440 ? */ 6490 static void gen_mfapidi(DisasContext *ctx) 6491 { 6492 /* XXX: TODO */ 6493 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6494 } 6495 6496 /* XXX: not implemented on 440 ? */ 6497 static void gen_tlbiva(DisasContext *ctx) 6498 { 6499 #if defined(CONFIG_USER_ONLY) 6500 GEN_PRIV; 6501 #else 6502 TCGv t0; 6503 6504 CHK_SV; 6505 t0 = tcg_temp_new(); 6506 gen_addr_reg_index(ctx, t0); 6507 gen_helper_tlbiva(cpu_env, cpu_gpr[rB(ctx->opcode)]); 6508 tcg_temp_free(t0); 6509 #endif /* defined(CONFIG_USER_ONLY) */ 6510 } 6511 6512 /* All 405 MAC instructions are translated here */ 6513 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3, 6514 int ra, int rb, int rt, int Rc) 6515 { 6516 TCGv t0, t1; 6517 6518 t0 = tcg_temp_local_new(); 6519 t1 = tcg_temp_local_new(); 6520 6521 switch (opc3 & 0x0D) { 6522 case 0x05: 6523 /* macchw - macchw. - macchwo - macchwo. */ 6524 /* macchws - macchws. - macchwso - macchwso. */ 6525 /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */ 6526 /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */ 6527 /* mulchw - mulchw. */ 6528 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]); 6529 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16); 6530 tcg_gen_ext16s_tl(t1, t1); 6531 break; 6532 case 0x04: 6533 /* macchwu - macchwu. - macchwuo - macchwuo. */ 6534 /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */ 6535 /* mulchwu - mulchwu. */ 6536 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]); 6537 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16); 6538 tcg_gen_ext16u_tl(t1, t1); 6539 break; 6540 case 0x01: 6541 /* machhw - machhw. - machhwo - machhwo. */ 6542 /* machhws - machhws. - machhwso - machhwso. */ 6543 /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */ 6544 /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */ 6545 /* mulhhw - mulhhw. */ 6546 tcg_gen_sari_tl(t0, cpu_gpr[ra], 16); 6547 tcg_gen_ext16s_tl(t0, t0); 6548 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16); 6549 tcg_gen_ext16s_tl(t1, t1); 6550 break; 6551 case 0x00: 6552 /* machhwu - machhwu. - machhwuo - machhwuo. */ 6553 /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */ 6554 /* mulhhwu - mulhhwu. */ 6555 tcg_gen_shri_tl(t0, cpu_gpr[ra], 16); 6556 tcg_gen_ext16u_tl(t0, t0); 6557 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16); 6558 tcg_gen_ext16u_tl(t1, t1); 6559 break; 6560 case 0x0D: 6561 /* maclhw - maclhw. - maclhwo - maclhwo. */ 6562 /* maclhws - maclhws. - maclhwso - maclhwso. */ 6563 /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */ 6564 /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */ 6565 /* mullhw - mullhw. */ 6566 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]); 6567 tcg_gen_ext16s_tl(t1, cpu_gpr[rb]); 6568 break; 6569 case 0x0C: 6570 /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */ 6571 /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */ 6572 /* mullhwu - mullhwu. 
*/ 6573 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]); 6574 tcg_gen_ext16u_tl(t1, cpu_gpr[rb]); 6575 break; 6576 } 6577 if (opc2 & 0x04) { 6578 /* (n)multiply-and-accumulate (0x0C / 0x0E) */ 6579 tcg_gen_mul_tl(t1, t0, t1); 6580 if (opc2 & 0x02) { 6581 /* nmultiply-and-accumulate (0x0E) */ 6582 tcg_gen_sub_tl(t0, cpu_gpr[rt], t1); 6583 } else { 6584 /* multiply-and-accumulate (0x0C) */ 6585 tcg_gen_add_tl(t0, cpu_gpr[rt], t1); 6586 } 6587 6588 if (opc3 & 0x12) { 6589 /* Check overflow and/or saturate */ 6590 TCGLabel *l1 = gen_new_label(); 6591 6592 if (opc3 & 0x10) { 6593 /* Start with XER OV disabled, the most likely case */ 6594 tcg_gen_movi_tl(cpu_ov, 0); 6595 } 6596 if (opc3 & 0x01) { 6597 /* Signed */ 6598 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1); 6599 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1); 6600 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0); 6601 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1); 6602 if (opc3 & 0x02) { 6603 /* Saturate */ 6604 tcg_gen_sari_tl(t0, cpu_gpr[rt], 31); 6605 tcg_gen_xori_tl(t0, t0, 0x7fffffff); 6606 } 6607 } else { 6608 /* Unsigned */ 6609 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1); 6610 if (opc3 & 0x02) { 6611 /* Saturate */ 6612 tcg_gen_movi_tl(t0, UINT32_MAX); 6613 } 6614 } 6615 if (opc3 & 0x10) { 6616 /* Check overflow */ 6617 tcg_gen_movi_tl(cpu_ov, 1); 6618 tcg_gen_movi_tl(cpu_so, 1); 6619 } 6620 gen_set_label(l1); 6621 tcg_gen_mov_tl(cpu_gpr[rt], t0); 6622 } 6623 } else { 6624 tcg_gen_mul_tl(cpu_gpr[rt], t0, t1); 6625 } 6626 tcg_temp_free(t0); 6627 tcg_temp_free(t1); 6628 if (unlikely(Rc) != 0) { 6629 /* Update Rc0 */ 6630 gen_set_Rc0(ctx, cpu_gpr[rt]); 6631 } 6632 } 6633 6634 #define GEN_MAC_HANDLER(name, opc2, opc3) \ 6635 static void glue(gen_, name)(DisasContext *ctx) \ 6636 { \ 6637 gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode), \ 6638 rD(ctx->opcode), Rc(ctx->opcode)); \ 6639 } 6640 6641 /* macchw - macchw. */ 6642 GEN_MAC_HANDLER(macchw, 0x0C, 0x05); 6643 /* macchwo - macchwo. */ 6644 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15); 6645 /* macchws - macchws. */ 6646 GEN_MAC_HANDLER(macchws, 0x0C, 0x07); 6647 /* macchwso - macchwso. */ 6648 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17); 6649 /* macchwsu - macchwsu. */ 6650 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06); 6651 /* macchwsuo - macchwsuo. */ 6652 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16); 6653 /* macchwu - macchwu. */ 6654 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04); 6655 /* macchwuo - macchwuo. */ 6656 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14); 6657 /* machhw - machhw. */ 6658 GEN_MAC_HANDLER(machhw, 0x0C, 0x01); 6659 /* machhwo - machhwo. */ 6660 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11); 6661 /* machhws - machhws. */ 6662 GEN_MAC_HANDLER(machhws, 0x0C, 0x03); 6663 /* machhwso - machhwso. */ 6664 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13); 6665 /* machhwsu - machhwsu. */ 6666 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02); 6667 /* machhwsuo - machhwsuo. */ 6668 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12); 6669 /* machhwu - machhwu. */ 6670 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00); 6671 /* machhwuo - machhwuo. */ 6672 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10); 6673 /* maclhw - maclhw. */ 6674 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D); 6675 /* maclhwo - maclhwo. */ 6676 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D); 6677 /* maclhws - maclhws. */ 6678 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F); 6679 /* maclhwso - maclhwso. */ 6680 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F); 6681 /* maclhwu - maclhwu. */ 6682 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C); 6683 /* maclhwuo - maclhwuo. */ 6684 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C); 6685 /* maclhwsu - maclhwsu. 
*/ 6686 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E); 6687 /* maclhwsuo - maclhwsuo. */ 6688 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E); 6689 /* nmacchw - nmacchw. */ 6690 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05); 6691 /* nmacchwo - nmacchwo. */ 6692 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15); 6693 /* nmacchws - nmacchws. */ 6694 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07); 6695 /* nmacchwso - nmacchwso. */ 6696 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17); 6697 /* nmachhw - nmachhw. */ 6698 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01); 6699 /* nmachhwo - nmachhwo. */ 6700 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11); 6701 /* nmachhws - nmachhws. */ 6702 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03); 6703 /* nmachhwso - nmachhwso. */ 6704 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13); 6705 /* nmaclhw - nmaclhw. */ 6706 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D); 6707 /* nmaclhwo - nmaclhwo. */ 6708 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D); 6709 /* nmaclhws - nmaclhws. */ 6710 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F); 6711 /* nmaclhwso - nmaclhwso. */ 6712 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F); 6713 6714 /* mulchw - mulchw. */ 6715 GEN_MAC_HANDLER(mulchw, 0x08, 0x05); 6716 /* mulchwu - mulchwu. */ 6717 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04); 6718 /* mulhhw - mulhhw. */ 6719 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01); 6720 /* mulhhwu - mulhhwu. */ 6721 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00); 6722 /* mullhw - mullhw. */ 6723 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D); 6724 /* mullhwu - mullhwu. */ 6725 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C); 6726 6727 /* mfdcr */ 6728 static void gen_mfdcr(DisasContext *ctx) 6729 { 6730 #if defined(CONFIG_USER_ONLY) 6731 GEN_PRIV; 6732 #else 6733 TCGv dcrn; 6734 6735 CHK_SV; 6736 dcrn = tcg_const_tl(SPR(ctx->opcode)); 6737 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, dcrn); 6738 tcg_temp_free(dcrn); 6739 #endif /* defined(CONFIG_USER_ONLY) */ 6740 } 6741 6742 /* mtdcr */ 6743 static void gen_mtdcr(DisasContext *ctx) 6744 { 6745 #if defined(CONFIG_USER_ONLY) 6746 GEN_PRIV; 6747 #else 6748 TCGv dcrn; 6749 6750 CHK_SV; 6751 dcrn = tcg_const_tl(SPR(ctx->opcode)); 6752 gen_helper_store_dcr(cpu_env, dcrn, cpu_gpr[rS(ctx->opcode)]); 6753 tcg_temp_free(dcrn); 6754 #endif /* defined(CONFIG_USER_ONLY) */ 6755 } 6756 6757 /* mfdcrx */ 6758 /* XXX: not implemented on 440 ? */ 6759 static void gen_mfdcrx(DisasContext *ctx) 6760 { 6761 #if defined(CONFIG_USER_ONLY) 6762 GEN_PRIV; 6763 #else 6764 CHK_SV; 6765 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, 6766 cpu_gpr[rA(ctx->opcode)]); 6767 /* Note: Rc update flag set leads to undefined state of Rc0 */ 6768 #endif /* defined(CONFIG_USER_ONLY) */ 6769 } 6770 6771 /* mtdcrx */ 6772 /* XXX: not implemented on 440 ? 
*/ 6773 static void gen_mtdcrx(DisasContext *ctx) 6774 { 6775 #if defined(CONFIG_USER_ONLY) 6776 GEN_PRIV; 6777 #else 6778 CHK_SV; 6779 gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)], 6780 cpu_gpr[rS(ctx->opcode)]); 6781 /* Note: Rc update flag set leads to undefined state of Rc0 */ 6782 #endif /* defined(CONFIG_USER_ONLY) */ 6783 } 6784 6785 /* mfdcrux (PPC 460) : user-mode access to DCR */ 6786 static void gen_mfdcrux(DisasContext *ctx) 6787 { 6788 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, 6789 cpu_gpr[rA(ctx->opcode)]); 6790 /* Note: Rc update flag set leads to undefined state of Rc0 */ 6791 } 6792 6793 /* mtdcrux (PPC 460) : user-mode access to DCR */ 6794 static void gen_mtdcrux(DisasContext *ctx) 6795 { 6796 gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)], 6797 cpu_gpr[rS(ctx->opcode)]); 6798 /* Note: Rc update flag set leads to undefined state of Rc0 */ 6799 } 6800 6801 /* dccci */ 6802 static void gen_dccci(DisasContext *ctx) 6803 { 6804 CHK_SV; 6805 /* interpreted as no-op */ 6806 } 6807 6808 /* dcread */ 6809 static void gen_dcread(DisasContext *ctx) 6810 { 6811 #if defined(CONFIG_USER_ONLY) 6812 GEN_PRIV; 6813 #else 6814 TCGv EA, val; 6815 6816 CHK_SV; 6817 gen_set_access_type(ctx, ACCESS_CACHE); 6818 EA = tcg_temp_new(); 6819 gen_addr_reg_index(ctx, EA); 6820 val = tcg_temp_new(); 6821 gen_qemu_ld32u(ctx, val, EA); 6822 tcg_temp_free(val); 6823 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA); 6824 tcg_temp_free(EA); 6825 #endif /* defined(CONFIG_USER_ONLY) */ 6826 } 6827 6828 /* icbt */ 6829 static void gen_icbt_40x(DisasContext *ctx) 6830 { 6831 /* 6832 * interpreted as no-op 6833 * XXX: specification say this is treated as a load by the MMU but 6834 * does not generate any exception 6835 */ 6836 } 6837 6838 /* iccci */ 6839 static void gen_iccci(DisasContext *ctx) 6840 { 6841 CHK_SV; 6842 /* interpreted as no-op */ 6843 } 6844 6845 /* icread */ 6846 static void gen_icread(DisasContext *ctx) 6847 { 6848 CHK_SV; 6849 /* interpreted as no-op */ 6850 } 6851 6852 /* rfci (supervisor only) */ 6853 static void gen_rfci_40x(DisasContext *ctx) 6854 { 6855 #if defined(CONFIG_USER_ONLY) 6856 GEN_PRIV; 6857 #else 6858 CHK_SV; 6859 /* Restore CPU state */ 6860 gen_helper_40x_rfci(cpu_env); 6861 ctx->base.is_jmp = DISAS_EXIT; 6862 #endif /* defined(CONFIG_USER_ONLY) */ 6863 } 6864 6865 static void gen_rfci(DisasContext *ctx) 6866 { 6867 #if defined(CONFIG_USER_ONLY) 6868 GEN_PRIV; 6869 #else 6870 CHK_SV; 6871 /* Restore CPU state */ 6872 gen_helper_rfci(cpu_env); 6873 ctx->base.is_jmp = DISAS_EXIT; 6874 #endif /* defined(CONFIG_USER_ONLY) */ 6875 } 6876 6877 /* BookE specific */ 6878 6879 /* XXX: not implemented on 440 ? */ 6880 static void gen_rfdi(DisasContext *ctx) 6881 { 6882 #if defined(CONFIG_USER_ONLY) 6883 GEN_PRIV; 6884 #else 6885 CHK_SV; 6886 /* Restore CPU state */ 6887 gen_helper_rfdi(cpu_env); 6888 ctx->base.is_jmp = DISAS_EXIT; 6889 #endif /* defined(CONFIG_USER_ONLY) */ 6890 } 6891 6892 /* XXX: not implemented on 440 ? 
*/ 6893 static void gen_rfmci(DisasContext *ctx) 6894 { 6895 #if defined(CONFIG_USER_ONLY) 6896 GEN_PRIV; 6897 #else 6898 CHK_SV; 6899 /* Restore CPU state */ 6900 gen_helper_rfmci(cpu_env); 6901 ctx->base.is_jmp = DISAS_EXIT; 6902 #endif /* defined(CONFIG_USER_ONLY) */ 6903 } 6904 6905 /* TLB management - PowerPC 405 implementation */ 6906 6907 /* tlbre */ 6908 static void gen_tlbre_40x(DisasContext *ctx) 6909 { 6910 #if defined(CONFIG_USER_ONLY) 6911 GEN_PRIV; 6912 #else 6913 CHK_SV; 6914 switch (rB(ctx->opcode)) { 6915 case 0: 6916 gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_env, 6917 cpu_gpr[rA(ctx->opcode)]); 6918 break; 6919 case 1: 6920 gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_env, 6921 cpu_gpr[rA(ctx->opcode)]); 6922 break; 6923 default: 6924 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6925 break; 6926 } 6927 #endif /* defined(CONFIG_USER_ONLY) */ 6928 } 6929 6930 /* tlbsx - tlbsx. */ 6931 static void gen_tlbsx_40x(DisasContext *ctx) 6932 { 6933 #if defined(CONFIG_USER_ONLY) 6934 GEN_PRIV; 6935 #else 6936 TCGv t0; 6937 6938 CHK_SV; 6939 t0 = tcg_temp_new(); 6940 gen_addr_reg_index(ctx, t0); 6941 gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 6942 tcg_temp_free(t0); 6943 if (Rc(ctx->opcode)) { 6944 TCGLabel *l1 = gen_new_label(); 6945 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 6946 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1); 6947 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02); 6948 gen_set_label(l1); 6949 } 6950 #endif /* defined(CONFIG_USER_ONLY) */ 6951 } 6952 6953 /* tlbwe */ 6954 static void gen_tlbwe_40x(DisasContext *ctx) 6955 { 6956 #if defined(CONFIG_USER_ONLY) 6957 GEN_PRIV; 6958 #else 6959 CHK_SV; 6960 6961 switch (rB(ctx->opcode)) { 6962 case 0: 6963 gen_helper_4xx_tlbwe_hi(cpu_env, cpu_gpr[rA(ctx->opcode)], 6964 cpu_gpr[rS(ctx->opcode)]); 6965 break; 6966 case 1: 6967 gen_helper_4xx_tlbwe_lo(cpu_env, cpu_gpr[rA(ctx->opcode)], 6968 cpu_gpr[rS(ctx->opcode)]); 6969 break; 6970 default: 6971 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6972 break; 6973 } 6974 #endif /* defined(CONFIG_USER_ONLY) */ 6975 } 6976 6977 /* TLB management - PowerPC 440 implementation */ 6978 6979 /* tlbre */ 6980 static void gen_tlbre_440(DisasContext *ctx) 6981 { 6982 #if defined(CONFIG_USER_ONLY) 6983 GEN_PRIV; 6984 #else 6985 CHK_SV; 6986 6987 switch (rB(ctx->opcode)) { 6988 case 0: 6989 case 1: 6990 case 2: 6991 { 6992 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode)); 6993 gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], cpu_env, 6994 t0, cpu_gpr[rA(ctx->opcode)]); 6995 tcg_temp_free_i32(t0); 6996 } 6997 break; 6998 default: 6999 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 7000 break; 7001 } 7002 #endif /* defined(CONFIG_USER_ONLY) */ 7003 } 7004 7005 /* tlbsx - tlbsx. 
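 *
 * For the Rc=1 form, the code below seeds CR0 from XER[SO] and then sets
 * the EQ bit of CR0 whenever the helper's result is not -1.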
*/ 7006 static void gen_tlbsx_440(DisasContext *ctx) 7007 { 7008 #if defined(CONFIG_USER_ONLY) 7009 GEN_PRIV; 7010 #else 7011 TCGv t0; 7012 7013 CHK_SV; 7014 t0 = tcg_temp_new(); 7015 gen_addr_reg_index(ctx, t0); 7016 gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 7017 tcg_temp_free(t0); 7018 if (Rc(ctx->opcode)) { 7019 TCGLabel *l1 = gen_new_label(); 7020 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 7021 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1); 7022 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02); 7023 gen_set_label(l1); 7024 } 7025 #endif /* defined(CONFIG_USER_ONLY) */ 7026 } 7027 7028 /* tlbwe */ 7029 static void gen_tlbwe_440(DisasContext *ctx) 7030 { 7031 #if defined(CONFIG_USER_ONLY) 7032 GEN_PRIV; 7033 #else 7034 CHK_SV; 7035 switch (rB(ctx->opcode)) { 7036 case 0: 7037 case 1: 7038 case 2: 7039 { 7040 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode)); 7041 gen_helper_440_tlbwe(cpu_env, t0, cpu_gpr[rA(ctx->opcode)], 7042 cpu_gpr[rS(ctx->opcode)]); 7043 tcg_temp_free_i32(t0); 7044 } 7045 break; 7046 default: 7047 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 7048 break; 7049 } 7050 #endif /* defined(CONFIG_USER_ONLY) */ 7051 } 7052 7053 /* TLB management - PowerPC BookE 2.06 implementation */ 7054 7055 /* tlbre */ 7056 static void gen_tlbre_booke206(DisasContext *ctx) 7057 { 7058 #if defined(CONFIG_USER_ONLY) 7059 GEN_PRIV; 7060 #else 7061 CHK_SV; 7062 gen_helper_booke206_tlbre(cpu_env); 7063 #endif /* defined(CONFIG_USER_ONLY) */ 7064 } 7065 7066 /* tlbsx - tlbsx. */ 7067 static void gen_tlbsx_booke206(DisasContext *ctx) 7068 { 7069 #if defined(CONFIG_USER_ONLY) 7070 GEN_PRIV; 7071 #else 7072 TCGv t0; 7073 7074 CHK_SV; 7075 if (rA(ctx->opcode)) { 7076 t0 = tcg_temp_new(); 7077 tcg_gen_mov_tl(t0, cpu_gpr[rD(ctx->opcode)]); 7078 } else { 7079 t0 = tcg_const_tl(0); 7080 } 7081 7082 tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]); 7083 gen_helper_booke206_tlbsx(cpu_env, t0); 7084 tcg_temp_free(t0); 7085 #endif /* defined(CONFIG_USER_ONLY) */ 7086 } 7087 7088 /* tlbwe */ 7089 static void gen_tlbwe_booke206(DisasContext *ctx) 7090 { 7091 #if defined(CONFIG_USER_ONLY) 7092 GEN_PRIV; 7093 #else 7094 CHK_SV; 7095 gen_helper_booke206_tlbwe(cpu_env); 7096 #endif /* defined(CONFIG_USER_ONLY) */ 7097 } 7098 7099 static void gen_tlbivax_booke206(DisasContext *ctx) 7100 { 7101 #if defined(CONFIG_USER_ONLY) 7102 GEN_PRIV; 7103 #else 7104 TCGv t0; 7105 7106 CHK_SV; 7107 t0 = tcg_temp_new(); 7108 gen_addr_reg_index(ctx, t0); 7109 gen_helper_booke206_tlbivax(cpu_env, t0); 7110 tcg_temp_free(t0); 7111 #endif /* defined(CONFIG_USER_ONLY) */ 7112 } 7113 7114 static void gen_tlbilx_booke206(DisasContext *ctx) 7115 { 7116 #if defined(CONFIG_USER_ONLY) 7117 GEN_PRIV; 7118 #else 7119 TCGv t0; 7120 7121 CHK_SV; 7122 t0 = tcg_temp_new(); 7123 gen_addr_reg_index(ctx, t0); 7124 7125 switch ((ctx->opcode >> 21) & 0x3) { 7126 case 0: 7127 gen_helper_booke206_tlbilx0(cpu_env, t0); 7128 break; 7129 case 1: 7130 gen_helper_booke206_tlbilx1(cpu_env, t0); 7131 break; 7132 case 3: 7133 gen_helper_booke206_tlbilx3(cpu_env, t0); 7134 break; 7135 default: 7136 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 7137 break; 7138 } 7139 7140 tcg_temp_free(t0); 7141 #endif /* defined(CONFIG_USER_ONLY) */ 7142 } 7143 7144 7145 /* wrtee */ 7146 static void gen_wrtee(DisasContext *ctx) 7147 { 7148 #if defined(CONFIG_USER_ONLY) 7149 GEN_PRIV; 7150 #else 7151 TCGv t0; 7152 7153 CHK_SV; 7154 t0 = tcg_temp_new(); 7155 tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE)); 7156 
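    /* Clear MSR[EE], then OR in the EE bit extracted from rD above */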
tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE)); 7157 tcg_gen_or_tl(cpu_msr, cpu_msr, t0); 7158 tcg_temp_free(t0); 7159 /* 7160 * Stop translation to have a chance to raise an exception if we 7161 * just set msr_ee to 1 7162 */ 7163 ctx->base.is_jmp = DISAS_EXIT_UPDATE; 7164 #endif /* defined(CONFIG_USER_ONLY) */ 7165 } 7166 7167 /* wrteei */ 7168 static void gen_wrteei(DisasContext *ctx) 7169 { 7170 #if defined(CONFIG_USER_ONLY) 7171 GEN_PRIV; 7172 #else 7173 CHK_SV; 7174 if (ctx->opcode & 0x00008000) { 7175 tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE)); 7176 /* Stop translation to have a chance to raise an exception */ 7177 ctx->base.is_jmp = DISAS_EXIT_UPDATE; 7178 } else { 7179 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE)); 7180 } 7181 #endif /* defined(CONFIG_USER_ONLY) */ 7182 } 7183 7184 /* PowerPC 440 specific instructions */ 7185 7186 /* dlmzb */ 7187 static void gen_dlmzb(DisasContext *ctx) 7188 { 7189 TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode)); 7190 gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_env, 7191 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); 7192 tcg_temp_free_i32(t0); 7193 } 7194 7195 /* mbar replaces eieio on 440 */ 7196 static void gen_mbar(DisasContext *ctx) 7197 { 7198 /* interpreted as no-op */ 7199 } 7200 7201 /* msync replaces sync on 440 */ 7202 static void gen_msync_4xx(DisasContext *ctx) 7203 { 7204 /* Only e500 seems to treat reserved bits as invalid */ 7205 if ((ctx->insns_flags2 & PPC2_BOOKE206) && 7206 (ctx->opcode & 0x03FFF801)) { 7207 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 7208 } 7209 /* otherwise interpreted as no-op */ 7210 } 7211 7212 /* icbt */ 7213 static void gen_icbt_440(DisasContext *ctx) 7214 { 7215 /* 7216 * interpreted as no-op 7217 * XXX: specification say this is treated as a load by the MMU but 7218 * does not generate any exception 7219 */ 7220 } 7221 7222 /* Embedded.Processor Control */ 7223 7224 static void gen_msgclr(DisasContext *ctx) 7225 { 7226 #if defined(CONFIG_USER_ONLY) 7227 GEN_PRIV; 7228 #else 7229 CHK_HV; 7230 if (is_book3s_arch2x(ctx)) { 7231 gen_helper_book3s_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]); 7232 } else { 7233 gen_helper_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]); 7234 } 7235 #endif /* defined(CONFIG_USER_ONLY) */ 7236 } 7237 7238 static void gen_msgsnd(DisasContext *ctx) 7239 { 7240 #if defined(CONFIG_USER_ONLY) 7241 GEN_PRIV; 7242 #else 7243 CHK_HV; 7244 if (is_book3s_arch2x(ctx)) { 7245 gen_helper_book3s_msgsnd(cpu_gpr[rB(ctx->opcode)]); 7246 } else { 7247 gen_helper_msgsnd(cpu_gpr[rB(ctx->opcode)]); 7248 } 7249 #endif /* defined(CONFIG_USER_ONLY) */ 7250 } 7251 7252 #if defined(TARGET_PPC64) 7253 static void gen_msgclrp(DisasContext *ctx) 7254 { 7255 #if defined(CONFIG_USER_ONLY) 7256 GEN_PRIV; 7257 #else 7258 CHK_SV; 7259 gen_helper_book3s_msgclrp(cpu_env, cpu_gpr[rB(ctx->opcode)]); 7260 #endif /* defined(CONFIG_USER_ONLY) */ 7261 } 7262 7263 static void gen_msgsndp(DisasContext *ctx) 7264 { 7265 #if defined(CONFIG_USER_ONLY) 7266 GEN_PRIV; 7267 #else 7268 CHK_SV; 7269 gen_helper_book3s_msgsndp(cpu_env, cpu_gpr[rB(ctx->opcode)]); 7270 #endif /* defined(CONFIG_USER_ONLY) */ 7271 } 7272 #endif 7273 7274 static void gen_msgsync(DisasContext *ctx) 7275 { 7276 #if defined(CONFIG_USER_ONLY) 7277 GEN_PRIV; 7278 #else 7279 CHK_HV; 7280 #endif /* defined(CONFIG_USER_ONLY) */ 7281 /* interpreted as no-op */ 7282 } 7283 7284 #if defined(TARGET_PPC64) 7285 static void gen_maddld(DisasContext *ctx) 7286 { 7287 TCGv_i64 t1 = tcg_temp_new_i64(); 7288 7289 tcg_gen_mul_i64(t1, 
cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 7290 tcg_gen_add_i64(cpu_gpr[rD(ctx->opcode)], t1, cpu_gpr[rC(ctx->opcode)]); 7291 tcg_temp_free_i64(t1); 7292 } 7293 7294 /* maddhd maddhdu */ 7295 static void gen_maddhd_maddhdu(DisasContext *ctx) 7296 { 7297 TCGv_i64 lo = tcg_temp_new_i64(); 7298 TCGv_i64 hi = tcg_temp_new_i64(); 7299 TCGv_i64 t1 = tcg_temp_new_i64(); 7300 7301 if (Rc(ctx->opcode)) { 7302 tcg_gen_mulu2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)], 7303 cpu_gpr[rB(ctx->opcode)]); 7304 tcg_gen_movi_i64(t1, 0); 7305 } else { 7306 tcg_gen_muls2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)], 7307 cpu_gpr[rB(ctx->opcode)]); 7308 tcg_gen_sari_i64(t1, cpu_gpr[rC(ctx->opcode)], 63); 7309 } 7310 tcg_gen_add2_i64(t1, cpu_gpr[rD(ctx->opcode)], lo, hi, 7311 cpu_gpr[rC(ctx->opcode)], t1); 7312 tcg_temp_free_i64(lo); 7313 tcg_temp_free_i64(hi); 7314 tcg_temp_free_i64(t1); 7315 } 7316 #endif /* defined(TARGET_PPC64) */ 7317 7318 static void gen_tbegin(DisasContext *ctx) 7319 { 7320 if (unlikely(!ctx->tm_enabled)) { 7321 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); 7322 return; 7323 } 7324 gen_helper_tbegin(cpu_env); 7325 } 7326 7327 #define GEN_TM_NOOP(name) \ 7328 static inline void gen_##name(DisasContext *ctx) \ 7329 { \ 7330 if (unlikely(!ctx->tm_enabled)) { \ 7331 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \ 7332 return; \ 7333 } \ 7334 /* \ 7335 * Because tbegin always fails in QEMU, these user \ 7336 * space instructions all have a simple implementation: \ 7337 * \ 7338 * CR[0] = 0b0 || MSR[TS] || 0b0 \ 7339 * = 0b0 || 0b00 || 0b0 \ 7340 */ \ 7341 tcg_gen_movi_i32(cpu_crf[0], 0); \ 7342 } 7343 7344 GEN_TM_NOOP(tend); 7345 GEN_TM_NOOP(tabort); 7346 GEN_TM_NOOP(tabortwc); 7347 GEN_TM_NOOP(tabortwci); 7348 GEN_TM_NOOP(tabortdc); 7349 GEN_TM_NOOP(tabortdci); 7350 GEN_TM_NOOP(tsr); 7351 7352 static inline void gen_cp_abort(DisasContext *ctx) 7353 { 7354 /* Do Nothing */ 7355 } 7356 7357 #define GEN_CP_PASTE_NOOP(name) \ 7358 static inline void gen_##name(DisasContext *ctx) \ 7359 { \ 7360 /* \ 7361 * Generate invalid exception until we have an \ 7362 * implementation of the copy paste facility \ 7363 */ \ 7364 gen_invalid(ctx); \ 7365 } 7366 7367 GEN_CP_PASTE_NOOP(copy) 7368 GEN_CP_PASTE_NOOP(paste) 7369 7370 static void gen_tcheck(DisasContext *ctx) 7371 { 7372 if (unlikely(!ctx->tm_enabled)) { 7373 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); 7374 return; 7375 } 7376 /* 7377 * Because tbegin always fails, the tcheck implementation is 7378 * simple: 7379 * 7380 * CR[CRF] = TDOOMED || MSR[TS] || 0b0 7381 * = 0b1 || 0b00 || 0b0 7382 */ 7383 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0x8); 7384 } 7385 7386 #if defined(CONFIG_USER_ONLY) 7387 #define GEN_TM_PRIV_NOOP(name) \ 7388 static inline void gen_##name(DisasContext *ctx) \ 7389 { \ 7390 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); \ 7391 } 7392 7393 #else 7394 7395 #define GEN_TM_PRIV_NOOP(name) \ 7396 static inline void gen_##name(DisasContext *ctx) \ 7397 { \ 7398 CHK_SV; \ 7399 if (unlikely(!ctx->tm_enabled)) { \ 7400 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \ 7401 return; \ 7402 } \ 7403 /* \ 7404 * Because tbegin always fails, the implementation is \ 7405 * simple: \ 7406 * \ 7407 * CR[0] = 0b0 || MSR[TS] || 0b0 \ 7408 * = 0b0 || 0b00 | 0b0 \ 7409 */ \ 7410 tcg_gen_movi_i32(cpu_crf[0], 0); \ 7411 } 7412 7413 #endif 7414 7415 GEN_TM_PRIV_NOOP(treclaim); 7416 GEN_TM_PRIV_NOOP(trechkpt); 7417 7418 static inline void get_fpr(TCGv_i64 dst, int regno) 7419 { 7420 tcg_gen_ld_i64(dst, cpu_env, 
fpr_offset(regno)); 7421 } 7422 7423 static inline void set_fpr(int regno, TCGv_i64 src) 7424 { 7425 tcg_gen_st_i64(src, cpu_env, fpr_offset(regno)); 7426 } 7427 7428 static inline void get_avr64(TCGv_i64 dst, int regno, bool high) 7429 { 7430 tcg_gen_ld_i64(dst, cpu_env, avr64_offset(regno, high)); 7431 } 7432 7433 static inline void set_avr64(int regno, TCGv_i64 src, bool high) 7434 { 7435 tcg_gen_st_i64(src, cpu_env, avr64_offset(regno, high)); 7436 } 7437 7438 /* 7439 * Helpers for decodetree used by !function for decoding arguments. 7440 */ 7441 static int times_4(DisasContext *ctx, int x) 7442 { 7443 return x * 4; 7444 } 7445 7446 /* 7447 * Helpers for trans_* functions to check for specific insns flags. 7448 * Use token pasting to ensure that we use the proper flag with the 7449 * proper variable. 7450 */ 7451 #define REQUIRE_INSNS_FLAGS(CTX, NAME) \ 7452 do { \ 7453 if (((CTX)->insns_flags & PPC_##NAME) == 0) { \ 7454 return false; \ 7455 } \ 7456 } while (0) 7457 7458 #define REQUIRE_INSNS_FLAGS2(CTX, NAME) \ 7459 do { \ 7460 if (((CTX)->insns_flags2 & PPC2_##NAME) == 0) { \ 7461 return false; \ 7462 } \ 7463 } while (0) 7464 7465 /* Then special-case the check for 64-bit so that we elide code for ppc32. */ 7466 #if TARGET_LONG_BITS == 32 7467 # define REQUIRE_64BIT(CTX) return false 7468 #else 7469 # define REQUIRE_64BIT(CTX) REQUIRE_INSNS_FLAGS(CTX, 64B) 7470 #endif 7471 7472 /* 7473 * Helpers for implementing sets of trans_* functions. 7474 * Defer the implementation of NAME to FUNC, with optional extra arguments. 7475 */ 7476 #define TRANS(NAME, FUNC, ...) \ 7477 static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \ 7478 { return FUNC(ctx, a, __VA_ARGS__); } 7479 7480 #define TRANS64(NAME, FUNC, ...) \ 7481 static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \ 7482 { REQUIRE_64BIT(ctx); return FUNC(ctx, a, __VA_ARGS__); } 7483 7484 /* TODO: More TRANS* helpers for extra insn_flags checks. 
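 *
 * As an illustration (FOO and do_foo are hypothetical names),
 *
 *     TRANS64(FOO, do_foo, true)
 *
 * expands to a trans_FOO() that returns false on 32-bit targets, or when
 * the 64B instruction flag is absent, and otherwise tail-calls
 * do_foo(ctx, a, true).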
*/ 7485 7486 7487 #include "decode-insn32.c.inc" 7488 #include "decode-insn64.c.inc" 7489 #include "power8-pmu-regs.c.inc" 7490 7491 #include "translate/fixedpoint-impl.c.inc" 7492 7493 #include "translate/fp-impl.c.inc" 7494 7495 #include "translate/vmx-impl.c.inc" 7496 7497 #include "translate/vsx-impl.c.inc" 7498 #include "translate/vector-impl.c.inc" 7499 7500 #include "translate/dfp-impl.c.inc" 7501 7502 #include "translate/spe-impl.c.inc" 7503 7504 /* Handles lfdp, lxsd, lxssp */ 7505 static void gen_dform39(DisasContext *ctx) 7506 { 7507 switch (ctx->opcode & 0x3) { 7508 case 0: /* lfdp */ 7509 if (ctx->insns_flags2 & PPC2_ISA205) { 7510 return gen_lfdp(ctx); 7511 } 7512 break; 7513 case 2: /* lxsd */ 7514 if (ctx->insns_flags2 & PPC2_ISA300) { 7515 return gen_lxsd(ctx); 7516 } 7517 break; 7518 case 3: /* lxssp */ 7519 if (ctx->insns_flags2 & PPC2_ISA300) { 7520 return gen_lxssp(ctx); 7521 } 7522 break; 7523 } 7524 return gen_invalid(ctx); 7525 } 7526 7527 /* handles stfdp, lxv, stxsd, stxssp lxvx */ 7528 static void gen_dform3D(DisasContext *ctx) 7529 { 7530 if ((ctx->opcode & 3) == 1) { /* DQ-FORM */ 7531 switch (ctx->opcode & 0x7) { 7532 case 1: /* lxv */ 7533 if (ctx->insns_flags2 & PPC2_ISA300) { 7534 return gen_lxv(ctx); 7535 } 7536 break; 7537 case 5: /* stxv */ 7538 if (ctx->insns_flags2 & PPC2_ISA300) { 7539 return gen_stxv(ctx); 7540 } 7541 break; 7542 } 7543 } else { /* DS-FORM */ 7544 switch (ctx->opcode & 0x3) { 7545 case 0: /* stfdp */ 7546 if (ctx->insns_flags2 & PPC2_ISA205) { 7547 return gen_stfdp(ctx); 7548 } 7549 break; 7550 case 2: /* stxsd */ 7551 if (ctx->insns_flags2 & PPC2_ISA300) { 7552 return gen_stxsd(ctx); 7553 } 7554 break; 7555 case 3: /* stxssp */ 7556 if (ctx->insns_flags2 & PPC2_ISA300) { 7557 return gen_stxssp(ctx); 7558 } 7559 break; 7560 } 7561 } 7562 return gen_invalid(ctx); 7563 } 7564 7565 #if defined(TARGET_PPC64) 7566 /* brd */ 7567 static void gen_brd(DisasContext *ctx) 7568 { 7569 tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 7570 } 7571 7572 /* brw */ 7573 static void gen_brw(DisasContext *ctx) 7574 { 7575 tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 7576 tcg_gen_rotli_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 32); 7577 7578 } 7579 7580 /* brh */ 7581 static void gen_brh(DisasContext *ctx) 7582 { 7583 TCGv_i64 mask = tcg_constant_i64(0x00ff00ff00ff00ffull); 7584 TCGv_i64 t1 = tcg_temp_new_i64(); 7585 TCGv_i64 t2 = tcg_temp_new_i64(); 7586 7587 tcg_gen_shri_i64(t1, cpu_gpr[rS(ctx->opcode)], 8); 7588 tcg_gen_and_i64(t2, t1, mask); 7589 tcg_gen_and_i64(t1, cpu_gpr[rS(ctx->opcode)], mask); 7590 tcg_gen_shli_i64(t1, t1, 8); 7591 tcg_gen_or_i64(cpu_gpr[rA(ctx->opcode)], t1, t2); 7592 7593 tcg_temp_free_i64(t1); 7594 tcg_temp_free_i64(t2); 7595 } 7596 #endif 7597 7598 static opcode_t opcodes[] = { 7599 #if defined(TARGET_PPC64) 7600 GEN_HANDLER_E(brd, 0x1F, 0x1B, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA310), 7601 GEN_HANDLER_E(brw, 0x1F, 0x1B, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA310), 7602 GEN_HANDLER_E(brh, 0x1F, 0x1B, 0x06, 0x0000F801, PPC_NONE, PPC2_ISA310), 7603 #endif 7604 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE), 7605 #if defined(TARGET_PPC64) 7606 GEN_HANDLER_E(cmpeqb, 0x1F, 0x00, 0x07, 0x00600000, PPC_NONE, PPC2_ISA300), 7607 #endif 7608 GEN_HANDLER_E(cmpb, 0x1F, 0x1C, 0x0F, 0x00000001, PPC_NONE, PPC2_ISA205), 7609 GEN_HANDLER_E(cmprb, 0x1F, 0x00, 0x06, 0x00400001, PPC_NONE, PPC2_ISA300), 7610 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL), 
7611 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7612 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7613 GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER), 7614 GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER), 7615 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER), 7616 GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER), 7617 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7618 #if defined(TARGET_PPC64) 7619 GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B), 7620 #endif 7621 GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER), 7622 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER), 7623 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7624 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7625 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7626 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER), 7627 GEN_HANDLER_E(cnttzw, 0x1F, 0x1A, 0x10, 0x00000000, PPC_NONE, PPC2_ISA300), 7628 GEN_HANDLER_E(copy, 0x1F, 0x06, 0x18, 0x03C00001, PPC_NONE, PPC2_ISA300), 7629 GEN_HANDLER_E(cp_abort, 0x1F, 0x06, 0x1A, 0x03FFF801, PPC_NONE, PPC2_ISA300), 7630 GEN_HANDLER_E(paste, 0x1F, 0x06, 0x1C, 0x03C00000, PPC_NONE, PPC2_ISA300), 7631 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER), 7632 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER), 7633 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7634 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7635 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7636 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7637 GEN_HANDLER(popcntb, 0x1F, 0x1A, 0x03, 0x0000F801, PPC_POPCNTB), 7638 GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD), 7639 GEN_HANDLER_E(prtyw, 0x1F, 0x1A, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA205), 7640 #if defined(TARGET_PPC64) 7641 GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD), 7642 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B), 7643 GEN_HANDLER_E(cnttzd, 0x1F, 0x1A, 0x11, 0x00000000, PPC_NONE, PPC2_ISA300), 7644 GEN_HANDLER_E(darn, 0x1F, 0x13, 0x17, 0x001CF801, PPC_NONE, PPC2_ISA300), 7645 GEN_HANDLER_E(prtyd, 0x1F, 0x1A, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA205), 7646 GEN_HANDLER_E(bpermd, 0x1F, 0x1C, 0x07, 0x00000001, PPC_NONE, PPC2_PERM_ISA206), 7647 #endif 7648 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7649 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7650 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7651 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER), 7652 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER), 7653 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER), 7654 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER), 7655 #if defined(TARGET_PPC64) 7656 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B), 7657 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B), 7658 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B), 7659 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B), 7660 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B), 7661 GEN_HANDLER2_E(extswsli0, "extswsli", 0x1F, 0x1A, 0x1B, 0x00000000, 7662 PPC_NONE, PPC2_ISA300), 7663 GEN_HANDLER2_E(extswsli1, "extswsli", 0x1F, 0x1B, 0x1B, 0x00000000, 7664 PPC_NONE, PPC2_ISA300), 7665 #endif 7666 #if 
defined(TARGET_PPC64) 7667 GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX), 7668 GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B), 7669 #endif 7670 /* handles lfdp, lxsd, lxssp */ 7671 GEN_HANDLER_E(dform39, 0x39, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205), 7672 /* handles stfdp, lxv, stxsd, stxssp, stxv */ 7673 GEN_HANDLER_E(dform3D, 0x3D, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205), 7674 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7675 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7676 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING), 7677 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING), 7678 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING), 7679 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING), 7680 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x01FFF801, PPC_MEM_EIEIO), 7681 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM), 7682 GEN_HANDLER_E(lbarx, 0x1F, 0x14, 0x01, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 7683 GEN_HANDLER_E(lharx, 0x1F, 0x14, 0x03, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 7684 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES), 7685 GEN_HANDLER_E(lwat, 0x1F, 0x06, 0x12, 0x00000001, PPC_NONE, PPC2_ISA300), 7686 GEN_HANDLER_E(stwat, 0x1F, 0x06, 0x16, 0x00000001, PPC_NONE, PPC2_ISA300), 7687 GEN_HANDLER_E(stbcx_, 0x1F, 0x16, 0x15, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 7688 GEN_HANDLER_E(sthcx_, 0x1F, 0x16, 0x16, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 7689 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES), 7690 #if defined(TARGET_PPC64) 7691 GEN_HANDLER_E(ldat, 0x1F, 0x06, 0x13, 0x00000001, PPC_NONE, PPC2_ISA300), 7692 GEN_HANDLER_E(stdat, 0x1F, 0x06, 0x17, 0x00000001, PPC_NONE, PPC2_ISA300), 7693 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B), 7694 GEN_HANDLER_E(lqarx, 0x1F, 0x14, 0x08, 0, PPC_NONE, PPC2_LSQ_ISA207), 7695 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B), 7696 GEN_HANDLER_E(stqcx_, 0x1F, 0x16, 0x05, 0, PPC_NONE, PPC2_LSQ_ISA207), 7697 #endif 7698 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC), 7699 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT), 7700 GEN_HANDLER_E(wait, 0x1F, 0x1E, 0x00, 0x039FF801, PPC_NONE, PPC2_ISA300), 7701 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 7702 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 7703 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW), 7704 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW), 7705 GEN_HANDLER_E(bctar, 0x13, 0x10, 0x11, 0x0000E000, PPC_NONE, PPC2_BCTAR_ISA207), 7706 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER), 7707 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW), 7708 #if defined(TARGET_PPC64) 7709 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B), 7710 #if !defined(CONFIG_USER_ONLY) 7711 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */ 7712 GEN_HANDLER_E(scv, 0x11, 0x10, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300), 7713 GEN_HANDLER_E(scv, 0x11, 0x00, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300), 7714 GEN_HANDLER_E(rfscv, 0x13, 0x12, 0x02, 0x03FF8001, PPC_NONE, PPC2_ISA300), 7715 #endif 7716 GEN_HANDLER_E(stop, 0x13, 0x12, 0x0b, 0x03FFF801, PPC_NONE, PPC2_ISA300), 7717 GEN_HANDLER_E(doze, 0x13, 0x12, 0x0c, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 7718 GEN_HANDLER_E(nap, 0x13, 0x12, 0x0d, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 7719 GEN_HANDLER_E(sleep, 0x13, 0x12, 0x0e, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 7720 GEN_HANDLER_E(rvwinkle, 0x13, 0x12, 
0x0f, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 7721 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H), 7722 #endif 7723 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */ 7724 GEN_HANDLER(sc, 0x11, 0x11, 0xFF, 0x03FFF01D, PPC_FLOW), 7725 GEN_HANDLER(sc, 0x11, 0x01, 0xFF, 0x03FFF01D, PPC_FLOW), 7726 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW), 7727 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 7728 #if defined(TARGET_PPC64) 7729 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B), 7730 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B), 7731 #endif 7732 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC), 7733 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC), 7734 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC), 7735 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC), 7736 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB), 7737 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC), 7738 #if defined(TARGET_PPC64) 7739 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B), 7740 GEN_HANDLER_E(setb, 0x1F, 0x00, 0x04, 0x0003F801, PPC_NONE, PPC2_ISA300), 7741 GEN_HANDLER_E(mcrxrx, 0x1F, 0x00, 0x12, 0x007FF801, PPC_NONE, PPC2_ISA300), 7742 #endif 7743 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001EF801, PPC_MISC), 7744 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000000, PPC_MISC), 7745 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE), 7746 GEN_HANDLER_E(dcbfep, 0x1F, 0x1F, 0x03, 0x03C00001, PPC_NONE, PPC2_BOOKE206), 7747 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE), 7748 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE), 7749 GEN_HANDLER_E(dcbstep, 0x1F, 0x1F, 0x01, 0x03E00001, PPC_NONE, PPC2_BOOKE206), 7750 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x00000001, PPC_CACHE), 7751 GEN_HANDLER_E(dcbtep, 0x1F, 0x1F, 0x09, 0x00000001, PPC_NONE, PPC2_BOOKE206), 7752 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x00000001, PPC_CACHE), 7753 GEN_HANDLER_E(dcbtstep, 0x1F, 0x1F, 0x07, 0x00000001, PPC_NONE, PPC2_BOOKE206), 7754 GEN_HANDLER_E(dcbtls, 0x1F, 0x06, 0x05, 0x02000001, PPC_BOOKE, PPC2_BOOKE206), 7755 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ), 7756 GEN_HANDLER_E(dcbzep, 0x1F, 0x1F, 0x1F, 0x03C00001, PPC_NONE, PPC2_BOOKE206), 7757 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC), 7758 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x01800001, PPC_ALTIVEC), 7759 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC), 7760 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI), 7761 GEN_HANDLER_E(icbiep, 0x1F, 0x1F, 0x1E, 0x03E00001, PPC_NONE, PPC2_BOOKE206), 7762 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA), 7763 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT), 7764 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT), 7765 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT), 7766 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT), 7767 #if defined(TARGET_PPC64) 7768 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B), 7769 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001, 7770 PPC_SEGMENT_64B), 7771 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B), 7772 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001, 7773 PPC_SEGMENT_64B), 7774 GEN_HANDLER2(slbmte, "slbmte", 0x1F, 0x12, 0x0C, 0x001F0001, PPC_SEGMENT_64B), 7775 GEN_HANDLER2(slbmfee, "slbmfee", 0x1F, 0x13, 0x1C, 0x001F0001, PPC_SEGMENT_64B), 
7776 GEN_HANDLER2(slbmfev, "slbmfev", 0x1F, 0x13, 0x1A, 0x001F0001, PPC_SEGMENT_64B), 7777 GEN_HANDLER2(slbfee_, "slbfee.", 0x1F, 0x13, 0x1E, 0x001F0000, PPC_SEGMENT_64B), 7778 #endif 7779 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA), 7780 /* 7781 * XXX Those instructions will need to be handled differently for 7782 * different ISA versions 7783 */ 7784 GEN_HANDLER(tlbiel, 0x1F, 0x12, 0x08, 0x001F0001, PPC_MEM_TLBIE), 7785 GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x001F0001, PPC_MEM_TLBIE), 7786 GEN_HANDLER_E(tlbiel, 0x1F, 0x12, 0x08, 0x00100001, PPC_NONE, PPC2_ISA300), 7787 GEN_HANDLER_E(tlbie, 0x1F, 0x12, 0x09, 0x00100001, PPC_NONE, PPC2_ISA300), 7788 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC), 7789 #if defined(TARGET_PPC64) 7790 GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x031FFC01, PPC_SLBI), 7791 GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI), 7792 GEN_HANDLER_E(slbieg, 0x1F, 0x12, 0x0E, 0x001F0001, PPC_NONE, PPC2_ISA300), 7793 GEN_HANDLER_E(slbsync, 0x1F, 0x12, 0x0A, 0x03FFF801, PPC_NONE, PPC2_ISA300), 7794 #endif 7795 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN), 7796 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN), 7797 GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR), 7798 GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR), 7799 GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR), 7800 GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR), 7801 GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR), 7802 GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR), 7803 GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR), 7804 GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR), 7805 GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR), 7806 GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR), 7807 GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR), 7808 GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR), 7809 GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR), 7810 GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR), 7811 GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR), 7812 GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR), 7813 GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR), 7814 GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR), 7815 GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR), 7816 GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR), 7817 GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR), 7818 GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR), 7819 GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR), 7820 GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR), 7821 GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR), 7822 GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR), 7823 GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR), 7824 GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR), 7825 GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR), 7826 GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR), 7827 GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR), 7828 GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR), 7829 GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR), 7830 GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR), 7831 GEN_HANDLER(dsa, 
0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC), 7832 GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC), 7833 GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC), 7834 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB), 7835 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB), 7836 GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB), 7837 GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB), 7838 GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER), 7839 GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER), 7840 GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER), 7841 GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER), 7842 GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER), 7843 GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER), 7844 GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 7845 GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 7846 GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2), 7847 GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2), 7848 GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 7849 GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 7850 GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2), 7851 GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2), 7852 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI), 7853 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA), 7854 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR), 7855 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR), 7856 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX), 7857 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX), 7858 GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX), 7859 GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX), 7860 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON), 7861 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON), 7862 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT), 7863 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON), 7864 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON), 7865 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP), 7866 GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206), 7867 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI), 7868 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI), 7869 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB), 7870 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB), 7871 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB), 7872 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE), 7873 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE), 7874 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE), 7875 GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, 7876 PPC_NONE, PPC2_BOOKE206), 7877 GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, 7878 PPC_NONE, PPC2_BOOKE206), 7879 GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, 7880 PPC_NONE, PPC2_BOOKE206), 7881 GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001, 7882 PPC_NONE, PPC2_BOOKE206), 7883 
GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001, 7884 PPC_NONE, PPC2_BOOKE206), 7885 GEN_HANDLER2_E(msgsnd, "msgsnd", 0x1F, 0x0E, 0x06, 0x03ff0001, 7886 PPC_NONE, PPC2_PRCNTL), 7887 GEN_HANDLER2_E(msgclr, "msgclr", 0x1F, 0x0E, 0x07, 0x03ff0001, 7888 PPC_NONE, PPC2_PRCNTL), 7889 GEN_HANDLER2_E(msgsync, "msgsync", 0x1F, 0x16, 0x1B, 0x00000000, 7890 PPC_NONE, PPC2_PRCNTL), 7891 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE), 7892 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE), 7893 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC), 7894 GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801, 7895 PPC_BOOKE, PPC2_BOOKE206), 7896 GEN_HANDLER(msync_4xx, 0x1F, 0x16, 0x12, 0x039FF801, PPC_BOOKE), 7897 GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001, 7898 PPC_BOOKE, PPC2_BOOKE206), 7899 GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, 7900 PPC_440_SPEC), 7901 GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC), 7902 GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC), 7903 GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC), 7904 GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC), 7905 GEN_HANDLER(vmladduhm, 0x04, 0x11, 0xFF, 0x00000000, PPC_ALTIVEC), 7906 #if defined(TARGET_PPC64) 7907 GEN_HANDLER_E(maddhd_maddhdu, 0x04, 0x18, 0xFF, 0x00000000, PPC_NONE, 7908 PPC2_ISA300), 7909 GEN_HANDLER_E(maddld, 0x04, 0x19, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300), 7910 GEN_HANDLER2_E(msgsndp, "msgsndp", 0x1F, 0x0E, 0x04, 0x03ff0001, 7911 PPC_NONE, PPC2_ISA207S), 7912 GEN_HANDLER2_E(msgclrp, "msgclrp", 0x1F, 0x0E, 0x05, 0x03ff0001, 7913 PPC_NONE, PPC2_ISA207S), 7914 #endif 7915 7916 #undef GEN_INT_ARITH_ADD 7917 #undef GEN_INT_ARITH_ADD_CONST 7918 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \ 7919 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER), 7920 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \ 7921 add_ca, compute_ca, compute_ov) \ 7922 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER), 7923 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0) 7924 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1) 7925 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0) 7926 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1) 7927 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0) 7928 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1) 7929 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0) 7930 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1) 7931 GEN_HANDLER_E(addex, 0x1F, 0x0A, 0x05, 0x00000000, PPC_NONE, PPC2_ISA300), 7932 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0) 7933 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1) 7934 7935 #undef GEN_INT_ARITH_DIVW 7936 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \ 7937 GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER) 7938 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0), 7939 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1), 7940 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0), 7941 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1), 7942 GEN_HANDLER_E(divwe, 0x1F, 0x0B, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206), 7943 GEN_HANDLER_E(divweo, 0x1F, 0x0B, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206), 7944 GEN_HANDLER_E(divweu, 0x1F, 0x0B, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206), 7945 GEN_HANDLER_E(divweuo, 0x1F, 0x0B, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206), 7946 GEN_HANDLER_E(modsw, 0x1F, 0x0B, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300), 7947 GEN_HANDLER_E(moduw, 0x1F, 0x0B, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300), 7948 7949 #if defined(TARGET_PPC64) 7950 #undef GEN_INT_ARITH_DIVD 7951 #define 
GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \ 7952 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B) 7953 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0), 7954 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1), 7955 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0), 7956 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1), 7957 7958 GEN_HANDLER_E(divdeu, 0x1F, 0x09, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206), 7959 GEN_HANDLER_E(divdeuo, 0x1F, 0x09, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206), 7960 GEN_HANDLER_E(divde, 0x1F, 0x09, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206), 7961 GEN_HANDLER_E(divdeo, 0x1F, 0x09, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206), 7962 GEN_HANDLER_E(modsd, 0x1F, 0x09, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300), 7963 GEN_HANDLER_E(modud, 0x1F, 0x09, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300), 7964 7965 #undef GEN_INT_ARITH_MUL_HELPER 7966 #define GEN_INT_ARITH_MUL_HELPER(name, opc3) \ 7967 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B) 7968 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00), 7969 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02), 7970 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17), 7971 #endif 7972 7973 #undef GEN_INT_ARITH_SUBF 7974 #undef GEN_INT_ARITH_SUBF_CONST 7975 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \ 7976 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER), 7977 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \ 7978 add_ca, compute_ca, compute_ov) \ 7979 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER), 7980 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0) 7981 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1) 7982 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0) 7983 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1) 7984 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0) 7985 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1) 7986 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0) 7987 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1) 7988 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0) 7989 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1) 7990 7991 #undef GEN_LOGICAL1 7992 #undef GEN_LOGICAL2 7993 #define GEN_LOGICAL2(name, tcg_op, opc, type) \ 7994 GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type) 7995 #define GEN_LOGICAL1(name, tcg_op, opc, type) \ 7996 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type) 7997 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER), 7998 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER), 7999 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER), 8000 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER), 8001 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER), 8002 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER), 8003 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER), 8004 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER), 8005 #if defined(TARGET_PPC64) 8006 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B), 8007 #endif 8008 8009 #if defined(TARGET_PPC64) 8010 #undef GEN_PPC64_R2 8011 #undef GEN_PPC64_R4 8012 #define GEN_PPC64_R2(name, opc1, opc2) \ 8013 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\ 8014 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \ 8015 PPC_64B) 8016 #define GEN_PPC64_R4(name, opc1, opc2) \ 8017 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\ 8018 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \ 8019 PPC_64B), \ 8020 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \ 8021 PPC_64B), \ 8022 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \ 
8023 PPC_64B) 8024 GEN_PPC64_R4(rldicl, 0x1E, 0x00), 8025 GEN_PPC64_R4(rldicr, 0x1E, 0x02), 8026 GEN_PPC64_R4(rldic, 0x1E, 0x04), 8027 GEN_PPC64_R2(rldcl, 0x1E, 0x08), 8028 GEN_PPC64_R2(rldcr, 0x1E, 0x09), 8029 GEN_PPC64_R4(rldimi, 0x1E, 0x06), 8030 #endif 8031 8032 #undef GEN_LDX_E 8033 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \ 8034 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2), 8035 8036 #if defined(TARGET_PPC64) 8037 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE) 8038 8039 /* HV/P7 and later only */ 8040 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST) 8041 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x18, PPC_CILDST) 8042 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST) 8043 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST) 8044 #endif 8045 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER) 8046 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER) 8047 8048 /* External PID based load */ 8049 #undef GEN_LDEPX 8050 #define GEN_LDEPX(name, ldop, opc2, opc3) \ 8051 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3, \ 8052 0x00000001, PPC_NONE, PPC2_BOOKE206), 8053 8054 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02) 8055 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08) 8056 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00) 8057 #if defined(TARGET_PPC64) 8058 GEN_LDEPX(ld, DEF_MEMOP(MO_Q), 0x1D, 0x00) 8059 #endif 8060 8061 #undef GEN_STX_E 8062 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \ 8063 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000000, type, type2), 8064 8065 #if defined(TARGET_PPC64) 8066 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE) 8067 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST) 8068 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST) 8069 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST) 8070 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST) 8071 #endif 8072 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER) 8073 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER) 8074 8075 #undef GEN_STEPX 8076 #define GEN_STEPX(name, ldop, opc2, opc3) \ 8077 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3, \ 8078 0x00000001, PPC_NONE, PPC2_BOOKE206), 8079 8080 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06) 8081 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C) 8082 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04) 8083 #if defined(TARGET_PPC64) 8084 GEN_STEPX(std, DEF_MEMOP(MO_Q), 0x1D, 0x04) 8085 #endif 8086 8087 #undef GEN_CRLOGIC 8088 #define GEN_CRLOGIC(name, tcg_op, opc) \ 8089 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER) 8090 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08), 8091 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04), 8092 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09), 8093 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07), 8094 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01), 8095 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E), 8096 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D), 8097 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06), 8098 8099 #undef GEN_MAC_HANDLER 8100 #define GEN_MAC_HANDLER(name, opc2, opc3) \ 8101 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC) 8102 GEN_MAC_HANDLER(macchw, 0x0C, 0x05), 8103 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15), 8104 GEN_MAC_HANDLER(macchws, 0x0C, 0x07), 8105 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17), 8106 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06), 8107 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16), 8108 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04), 8109 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14), 8110 GEN_MAC_HANDLER(machhw, 0x0C, 0x01), 8111 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11), 8112 GEN_MAC_HANDLER(machhws, 0x0C, 0x03), 
GEN_MAC_HANDLER(machhwso, 0x0C, 0x13),
GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02),
GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12),
GEN_MAC_HANDLER(machhwu, 0x0C, 0x00),
GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10),
GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D),
GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D),
GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F),
GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F),
GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C),
GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C),
GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E),
GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E),
GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05),
GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15),
GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07),
GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17),
GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01),
GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11),
GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03),
GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13),
GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D),
GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D),
GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F),
GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F),
GEN_MAC_HANDLER(mulchw, 0x08, 0x05),
GEN_MAC_HANDLER(mulchwu, 0x08, 0x04),
GEN_MAC_HANDLER(mulhhw, 0x08, 0x01),
GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00),
GEN_MAC_HANDLER(mullhw, 0x08, 0x0D),
GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C),

GEN_HANDLER2_E(tbegin, "tbegin", 0x1F, 0x0E, 0x14, 0x01DFF800, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tend, "tend", 0x1F, 0x0E, 0x15, 0x01FFF800, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tabort, "tabort", 0x1F, 0x0E, 0x1C, 0x03E0F800, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tabortwc, "tabortwc", 0x1F, 0x0E, 0x18, 0x00000000, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tabortwci, "tabortwci", 0x1F, 0x0E, 0x1A, 0x00000000, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tabortdc, "tabortdc", 0x1F, 0x0E, 0x19, 0x00000000, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tabortdci, "tabortdci", 0x1F, 0x0E, 0x1B, 0x00000000, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tsr, "tsr", 0x1F, 0x0E, 0x17, 0x03DFF800, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tcheck, "tcheck", 0x1F, 0x0E, 0x16, 0x007FF800, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(treclaim, "treclaim", 0x1F, 0x0E, 0x1D, 0x03E0F800, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(trechkpt, "trechkpt", 0x1F, 0x0E, 0x1F, 0x03FFF800, \
               PPC_NONE, PPC2_TM),

#include "translate/fp-ops.c.inc"

#include "translate/vmx-ops.c.inc"

#include "translate/vsx-ops.c.inc"

#include "translate/dfp-ops.c.inc"

#include "translate/spe-ops.c.inc"
};

/*****************************************************************************/
/* Opcode types */
enum {
    PPC_DIRECT = 0, /* Opcode routine */
    PPC_INDIRECT = 1, /* Indirect opcode table */
};

#define PPC_OPCODE_MASK 0x3

static inline int is_indirect_opcode(void *handler)
{
    return ((uintptr_t)handler & PPC_OPCODE_MASK) == PPC_INDIRECT;
}

static inline opc_handler_t **ind_table(void *handler)
{
    return (opc_handler_t **)((uintptr_t)handler & ~PPC_OPCODE_MASK);
}

/* Instruction table creation */
/* Opcodes tables creation */
static void fill_new_table(opc_handler_t **table, int len)
{
    int i;

    for (i = 0; i < len; i++) {
        table[i] = &invalid_handler;
    }
}

static int create_new_table(opc_handler_t **table, unsigned char idx)
{
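    /*
     * Allocate a fresh sub-table with every entry pointing at
     * invalid_handler, then store it in table[idx] with the low bit set
     * (PPC_INDIRECT) so is_indirect_opcode() can tell it apart from a
     * direct opc_handler_t pointer.
     */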
    opc_handler_t **tmp;

    tmp = g_new(opc_handler_t *, PPC_CPU_INDIRECT_OPCODES_LEN);
    fill_new_table(tmp, PPC_CPU_INDIRECT_OPCODES_LEN);
    table[idx] = (opc_handler_t *)((uintptr_t)tmp | PPC_INDIRECT);

    return 0;
}

static int insert_in_table(opc_handler_t **table, unsigned char idx,
                           opc_handler_t *handler)
{
    if (table[idx] != &invalid_handler) {
        return -1;
    }
    table[idx] = handler;

    return 0;
}

static int register_direct_insn(opc_handler_t **ppc_opcodes,
                                unsigned char idx, opc_handler_t *handler)
{
    if (insert_in_table(ppc_opcodes, idx, handler) < 0) {
        printf("*** ERROR: opcode %02x already assigned in main "
               "opcode table\n", idx);
        return -1;
    }

    return 0;
}

static int register_ind_in_table(opc_handler_t **table,
                                 unsigned char idx1, unsigned char idx2,
                                 opc_handler_t *handler)
{
    if (table[idx1] == &invalid_handler) {
        if (create_new_table(table, idx1) < 0) {
            printf("*** ERROR: unable to create indirect table "
                   "idx=%02x\n", idx1);
            return -1;
        }
    } else {
        if (!is_indirect_opcode(table[idx1])) {
            printf("*** ERROR: idx %02x already assigned to a direct "
                   "opcode\n", idx1);
            return -1;
        }
    }
    if (handler != NULL &&
        insert_in_table(ind_table(table[idx1]), idx2, handler) < 0) {
        printf("*** ERROR: opcode %02x already assigned in "
               "opcode table %02x\n", idx2, idx1);
        return -1;
    }

    return 0;
}

static int register_ind_insn(opc_handler_t **ppc_opcodes,
                             unsigned char idx1, unsigned char idx2,
                             opc_handler_t *handler)
{
    return register_ind_in_table(ppc_opcodes, idx1, idx2, handler);
}

static int register_dblind_insn(opc_handler_t **ppc_opcodes,
                                unsigned char idx1, unsigned char idx2,
                                unsigned char idx3, opc_handler_t *handler)
{
    if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
        printf("*** ERROR: unable to join indirect table idx "
               "[%02x-%02x]\n", idx1, idx2);
        return -1;
    }
    if (register_ind_in_table(ind_table(ppc_opcodes[idx1]), idx2, idx3,
                              handler) < 0) {
        printf("*** ERROR: unable to insert opcode "
               "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
        return -1;
    }

    return 0;
}

static int register_trplind_insn(opc_handler_t **ppc_opcodes,
                                 unsigned char idx1, unsigned char idx2,
                                 unsigned char idx3, unsigned char idx4,
                                 opc_handler_t *handler)
{
    opc_handler_t **table;

    if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
        printf("*** ERROR: unable to join indirect table idx "
               "[%02x-%02x]\n", idx1, idx2);
        return -1;
    }
    table = ind_table(ppc_opcodes[idx1]);
    if (register_ind_in_table(table, idx2, idx3, NULL) < 0) {
        printf("*** ERROR: unable to join 2nd-level indirect table idx "
               "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
        return -1;
    }
    table = ind_table(table[idx2]);
    if (register_ind_in_table(table, idx3, idx4, handler) < 0) {
        printf("*** ERROR: unable to insert opcode "
               "[%02x-%02x-%02x-%02x]\n", idx1, idx2, idx3, idx4);
        return -1;
    }
    return 0;
}

static int register_insn(opc_handler_t **ppc_opcodes, opcode_t *insn)
{
    if (insn->opc2 != 0xFF) {
        if (insn->opc3 != 0xFF) {
            if (insn->opc4 != 0xFF) {
                if (register_trplind_insn(ppc_opcodes, insn->opc1, insn->opc2,
                                          insn->opc3, insn->opc4,
                                          &insn->handler) < 0) {
                    return -1;
                }
            } else {
                if (register_dblind_insn(ppc_opcodes, insn->opc1, insn->opc2,
                                         insn->opc3, &insn->handler) < 0) {
                    return -1;
                }
            }
        } else {
            if (register_ind_insn(ppc_opcodes, insn->opc1,
                                  insn->opc2, &insn->handler) < 0) {
                return -1;
            }
        }
    } else {
        if (register_direct_insn(ppc_opcodes, insn->opc1, &insn->handler) < 0) {
            return -1;
        }
    }

    return 0;
}

static int test_opcode_table(opc_handler_t **table, int len)
{
    int i, count, tmp;

    for (i = 0, count = 0; i < len; i++) {
        /* Consistency fixup */
        if (table[i] == NULL) {
            table[i] = &invalid_handler;
        }
        if (table[i] != &invalid_handler) {
            if (is_indirect_opcode(table[i])) {
                tmp = test_opcode_table(ind_table(table[i]),
                                        PPC_CPU_INDIRECT_OPCODES_LEN);
                if (tmp == 0) {
                    /* Free the untagged sub-table pointer, not the tagged one */
                    g_free(ind_table(table[i]));
                    table[i] = &invalid_handler;
                } else {
                    count++;
                }
            } else {
                count++;
            }
        }
    }

    return count;
}

static void fix_opcode_tables(opc_handler_t **ppc_opcodes)
{
    if (test_opcode_table(ppc_opcodes, PPC_CPU_OPCODES_LEN) == 0) {
        printf("*** WARNING: no opcode defined!\n");
    }
}

/*****************************************************************************/
void create_ppc_opcodes(PowerPCCPU *cpu, Error **errp)
{
    PowerPCCPUClass *pcc = POWERPC_CPU_GET_CLASS(cpu);
    opcode_t *opc;

    fill_new_table(cpu->opcodes, PPC_CPU_OPCODES_LEN);
    for (opc = opcodes; opc < &opcodes[ARRAY_SIZE(opcodes)]; opc++) {
        if (((opc->handler.type & pcc->insns_flags) != 0) ||
            ((opc->handler.type2 & pcc->insns_flags2) != 0)) {
            if (register_insn(cpu->opcodes, opc) < 0) {
                error_setg(errp, "ERROR initializing PowerPC instruction "
                           "0x%02x 0x%02x 0x%02x", opc->opc1, opc->opc2,
                           opc->opc3);
                return;
            }
        }
    }
    fix_opcode_tables(cpu->opcodes);
    fflush(stdout);
    fflush(stderr);
}

void destroy_ppc_opcodes(PowerPCCPU *cpu)
{
    opc_handler_t **table, **table_2;
    int i, j, k;

    for (i = 0; i < PPC_CPU_OPCODES_LEN; i++) {
        if (cpu->opcodes[i] == &invalid_handler) {
            continue;
        }
        if (is_indirect_opcode(cpu->opcodes[i])) {
            table = ind_table(cpu->opcodes[i]);
            for (j = 0; j < PPC_CPU_INDIRECT_OPCODES_LEN; j++) {
                if (table[j] == &invalid_handler) {
                    continue;
                }
                if (is_indirect_opcode(table[j])) {
                    table_2 = ind_table(table[j]);
                    for (k = 0; k < PPC_CPU_INDIRECT_OPCODES_LEN; k++) {
                        if (table_2[k] != &invalid_handler &&
                            is_indirect_opcode(table_2[k])) {
                            g_free((opc_handler_t *)((uintptr_t)table_2[k] &
                                                     ~PPC_INDIRECT));
                        }
                    }
                    g_free((opc_handler_t *)((uintptr_t)table[j] &
                                             ~PPC_INDIRECT));
                }
            }
            g_free((opc_handler_t *)((uintptr_t)cpu->opcodes[i] &
                                     ~PPC_INDIRECT));
        }
    }
}

int ppc_fixup_cpu(PowerPCCPU *cpu)
{
    CPUPPCState *env = &cpu->env;

    /*
     * TCG doesn't (yet) emulate some groups of instructions that are
     * implemented on some otherwise supported CPUs (e.g. VSX and
     * decimal floating point instructions on POWER7). We remove
     * unsupported instruction groups from the cpu state's instruction
     * masks and hope the guest can cope.
     * For at least the pseries machine, the unavailability of these
     * instructions can be advertised to the guest via the device tree.
     */
    if ((env->insns_flags & ~PPC_TCG_INSNS)
        || (env->insns_flags2 & ~PPC_TCG_INSNS2)) {
        warn_report("Disabling some instructions which are not "
                    "emulated by TCG (0x%" PRIx64 ", 0x%" PRIx64 ")",
                    env->insns_flags & ~PPC_TCG_INSNS,
                    env->insns_flags2 & ~PPC_TCG_INSNS2);
    }
    env->insns_flags &= PPC_TCG_INSNS;
    env->insns_flags2 &= PPC_TCG_INSNS2;
    return 0;
}

static bool decode_legacy(PowerPCCPU *cpu, DisasContext *ctx, uint32_t insn)
{
    opc_handler_t **table, *handler;
    uint32_t inval;

    ctx->opcode = insn;

    LOG_DISAS("translate opcode %08x (%02x %02x %02x %02x) (%s)\n",
              insn, opc1(insn), opc2(insn), opc3(insn), opc4(insn),
              ctx->le_mode ? "little" : "big");

    table = cpu->opcodes;
    handler = table[opc1(insn)];
    if (is_indirect_opcode(handler)) {
        table = ind_table(handler);
        handler = table[opc2(insn)];
        if (is_indirect_opcode(handler)) {
            table = ind_table(handler);
            handler = table[opc3(insn)];
            if (is_indirect_opcode(handler)) {
                table = ind_table(handler);
                handler = table[opc4(insn)];
            }
        }
    }

    /* Is opcode *REALLY* valid ? */
    if (unlikely(handler->handler == &gen_invalid)) {
        qemu_log_mask(LOG_GUEST_ERROR, "invalid/unsupported opcode: "
                      "%02x - %02x - %02x - %02x (%08x) "
                      TARGET_FMT_lx "\n",
                      opc1(insn), opc2(insn), opc3(insn), opc4(insn),
                      insn, ctx->cia);
        return false;
    }

    if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE)
                 && Rc(insn))) {
        inval = handler->inval2;
    } else {
        inval = handler->inval1;
    }

    if (unlikely((insn & inval) != 0)) {
        qemu_log_mask(LOG_GUEST_ERROR, "invalid bits: %08x for opcode: "
                      "%02x - %02x - %02x - %02x (%08x) "
                      TARGET_FMT_lx "\n", insn & inval,
                      opc1(insn), opc2(insn), opc3(insn), opc4(insn),
                      insn, ctx->cia);
        return false;
    }

    handler->handler(ctx);
    return true;
}

static void ppc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    CPUPPCState *env = cs->env_ptr;
    uint32_t hflags = ctx->base.tb->flags;

    ctx->spr_cb = env->spr_cb;
    ctx->pr = (hflags >> HFLAGS_PR) & 1;
    ctx->mem_idx = (hflags >> HFLAGS_DMMU_IDX) & 7;
    ctx->dr = (hflags >> HFLAGS_DR) & 1;
    ctx->hv = (hflags >> HFLAGS_HV) & 1;
    ctx->insns_flags = env->insns_flags;
    ctx->insns_flags2 = env->insns_flags2;
    ctx->access_type = -1;
    ctx->need_access_type = !mmu_is_64bit(env->mmu_model);
    ctx->le_mode = (hflags >> HFLAGS_LE) & 1;
    ctx->default_tcg_memop_mask = ctx->le_mode ? MO_LE : MO_BE;
    ctx->flags = env->flags;
#if defined(TARGET_PPC64)
    ctx->sf_mode = (hflags >> HFLAGS_64) & 1;
    ctx->has_cfar = !!(env->flags & POWERPC_FLAG_CFAR);
#endif
    ctx->lazy_tlb_flush = env->mmu_model == POWERPC_MMU_32B
        || env->mmu_model == POWERPC_MMU_601
        || env->mmu_model & POWERPC_MMU_64;

    ctx->fpu_enabled = (hflags >> HFLAGS_FP) & 1;
    ctx->spe_enabled = (hflags >> HFLAGS_SPE) & 1;
    ctx->altivec_enabled = (hflags >> HFLAGS_VR) & 1;
    ctx->vsx_enabled = (hflags >> HFLAGS_VSX) & 1;
    ctx->tm_enabled = (hflags >> HFLAGS_TM) & 1;
    ctx->gtse = (hflags >> HFLAGS_GTSE) & 1;
    ctx->hr = (hflags >> HFLAGS_HR) & 1;
    ctx->mmcr0_pmcc0 = (hflags >> HFLAGS_PMCC0) & 1;
    ctx->mmcr0_pmcc1 = (hflags >> HFLAGS_PMCC1) & 1;

    ctx->singlestep_enabled = 0;
    if ((hflags >> HFLAGS_SE) & 1) {
        ctx->singlestep_enabled |= CPU_SINGLE_STEP;
        ctx->base.max_insns = 1;
    }
    if ((hflags >> HFLAGS_BE) & 1) {
        ctx->singlestep_enabled |= CPU_BRANCH_STEP;
    }
}

static void ppc_tr_tb_start(DisasContextBase *db, CPUState *cs)
{
}

static void ppc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
{
    tcg_gen_insn_start(dcbase->pc_next);
}

static bool is_prefix_insn(DisasContext *ctx, uint32_t insn)
{
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    return opc1(insn) == 1;
}

static void ppc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    PowerPCCPU *cpu = POWERPC_CPU(cs);
    CPUPPCState *env = cs->env_ptr;
    target_ulong pc;
    uint32_t insn;
    bool ok;

    LOG_DISAS("----------------\n");
    LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
              ctx->base.pc_next, ctx->mem_idx, (int)msr_ir);

    ctx->cia = pc = ctx->base.pc_next;
    insn = translator_ldl_swap(env, dcbase, pc, need_byteswap(ctx));
    ctx->base.pc_next = pc += 4;

    if (!is_prefix_insn(ctx, insn)) {
        ok = (decode_insn32(ctx, insn) ||
              decode_legacy(cpu, ctx, insn));
    } else if ((pc & 63) == 0) {
        /*
         * Power v3.1, section 1.9 Exceptions:
         * attempt to execute a prefixed instruction that crosses a
         * 64-byte address boundary (system alignment error).
         */
        gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_INSN);
        ok = true;
    } else {
        uint32_t insn2 = translator_ldl_swap(env, dcbase, pc,
                                             need_byteswap(ctx));
        ctx->base.pc_next = pc += 4;
        ok = decode_insn64(ctx, deposit64(insn2, 32, 32, insn));
    }
    if (!ok) {
        gen_invalid(ctx);
    }

    /* End the TB when crossing a page boundary. */
    if (ctx->base.is_jmp == DISAS_NEXT && !(pc & ~TARGET_PAGE_MASK)) {
        ctx->base.is_jmp = DISAS_TOO_MANY;
    }

    translator_loop_temp_check(&ctx->base);
}

static void ppc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    DisasJumpType is_jmp = ctx->base.is_jmp;
    target_ulong nip = ctx->base.pc_next;

    if (is_jmp == DISAS_NORETURN) {
        /* We have already exited the TB. */
        return;
    }

    /*
     * Honor single stepping.
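     * The debug exception is only raised when nip <= 0x100 or nip > 0xf00,
     * i.e. not while executing within the low exception-vector region.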
     */
    if (unlikely(ctx->singlestep_enabled & CPU_SINGLE_STEP)
        && (nip <= 0x100 || nip > 0xf00)) {
        switch (is_jmp) {
        case DISAS_TOO_MANY:
        case DISAS_EXIT_UPDATE:
        case DISAS_CHAIN_UPDATE:
            gen_update_nip(ctx, nip);
            break;
        case DISAS_EXIT:
        case DISAS_CHAIN:
            break;
        default:
            g_assert_not_reached();
        }

        gen_debug_exception(ctx);
        return;
    }

    switch (is_jmp) {
    case DISAS_TOO_MANY:
        if (use_goto_tb(ctx, nip)) {
            tcg_gen_goto_tb(0);
            gen_update_nip(ctx, nip);
            tcg_gen_exit_tb(ctx->base.tb, 0);
            break;
        }
        /* fall through */
    case DISAS_CHAIN_UPDATE:
        gen_update_nip(ctx, nip);
        /* fall through */
    case DISAS_CHAIN:
        tcg_gen_lookup_and_goto_ptr();
        break;

    case DISAS_EXIT_UPDATE:
        gen_update_nip(ctx, nip);
        /* fall through */
    case DISAS_EXIT:
        tcg_gen_exit_tb(NULL, 0);
        break;

    default:
        g_assert_not_reached();
    }
}

static void ppc_tr_disas_log(const DisasContextBase *dcbase, CPUState *cs)
{
    qemu_log("IN: %s\n", lookup_symbol(dcbase->pc_first));
    log_target_disas(cs, dcbase->pc_first, dcbase->tb->size);
}

static const TranslatorOps ppc_tr_ops = {
    .init_disas_context = ppc_tr_init_disas_context,
    .tb_start           = ppc_tr_tb_start,
    .insn_start         = ppc_tr_insn_start,
    .translate_insn     = ppc_tr_translate_insn,
    .tb_stop            = ppc_tr_tb_stop,
    .disas_log          = ppc_tr_disas_log,
};

void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int max_insns)
{
    DisasContext ctx;

    translator_loop(&ppc_tr_ops, &ctx.base, cs, tb, max_insns);
}

void restore_state_to_opc(CPUPPCState *env, TranslationBlock *tb,
                          target_ulong *data)
{
    env->nip = data[0];
}
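
/*
 * Worked example of the table dispatch above (illustration only): on a
 * TARGET_PPC64 build, divd is registered via GEN_INT_ARITH_DIVD(divd, 0x0F,
 * ...) as opc1 = 0x1F, opc2 = 0x09, opc3 = 0x0F, so decode_legacy() reaches
 * its handler with three lookups:
 *
 *     handler = cpu->opcodes[0x1F];          (indirect entry)
 *     handler = ind_table(handler)[0x09];    (indirect entry)
 *     handler = ind_table(handler)[0x0F];    (direct handler for divd)
 *
 * Entries whose opc4 is not 0xFF (registered by register_trplind_insn())
 * take one more ind_table() level.
 */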