/*
 * PowerPC emulation for qemu: main translation routines.
 *
 * Copyright (c) 2003-2007 Jocelyn Mayer
 * Copyright (C) 2011 Freescale Semiconductor, Inc.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */

#include "qemu/osdep.h"
#include "cpu.h"
#include "internal.h"
#include "disas/disas.h"
#include "exec/exec-all.h"
#include "tcg/tcg-op.h"
#include "tcg/tcg-op-gvec.h"
#include "qemu/host-utils.h"
#include "qemu/main-loop.h"
#include "exec/cpu_ldst.h"

#include "exec/helper-proto.h"
#include "exec/helper-gen.h"

#include "exec/translator.h"
#include "exec/log.h"
#include "qemu/atomic128.h"
#include "spr_tcg.h"

#include "qemu/qemu-print.h"
#include "qapi/error.h"

#define CPU_SINGLE_STEP 0x1
#define CPU_BRANCH_STEP 0x2
#define GDBSTUB_SINGLE_STEP 0x4

/* Include definitions for instructions classes and implementations flags */
/* #define PPC_DEBUG_DISAS */

#ifdef PPC_DEBUG_DISAS
# define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
#else
# define LOG_DISAS(...) do { } while (0)
#endif
/*****************************************************************************/
/* Code translation helpers */

/* global register indexes */
static char cpu_reg_names[10 * 3 + 22 * 4   /* GPR */
                          + 10 * 4 + 22 * 5 /* SPE GPRh */
                          + 8 * 5           /* CRF */];
static TCGv cpu_gpr[32];
static TCGv cpu_gprh[32];
static TCGv_i32 cpu_crf[8];
static TCGv cpu_nip;
static TCGv cpu_msr;
static TCGv cpu_ctr;
static TCGv cpu_lr;
#if defined(TARGET_PPC64)
static TCGv cpu_cfar;
#endif
static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca, cpu_ov32, cpu_ca32;
static TCGv cpu_reserve;
static TCGv cpu_reserve_val;
static TCGv cpu_fpscr;
static TCGv_i32 cpu_access_type;

#include "exec/gen-icount.h"

void ppc_translate_init(void)
{
    int i;
    char *p;
    size_t cpu_reg_names_size;

    p = cpu_reg_names;
    cpu_reg_names_size = sizeof(cpu_reg_names);

    for (i = 0; i < 8; i++) {
        snprintf(p, cpu_reg_names_size, "crf%d", i);
        cpu_crf[i] = tcg_global_mem_new_i32(cpu_env,
                                            offsetof(CPUPPCState, crf[i]), p);
        p += 5;
        cpu_reg_names_size -= 5;
    }

    for (i = 0; i < 32; i++) {
        snprintf(p, cpu_reg_names_size, "r%d", i);
        cpu_gpr[i] = tcg_global_mem_new(cpu_env,
                                        offsetof(CPUPPCState, gpr[i]), p);
        p += (i < 10) ? 3 : 4;
        cpu_reg_names_size -= (i < 10) ? 3 : 4;
        snprintf(p, cpu_reg_names_size, "r%dH", i);
        cpu_gprh[i] = tcg_global_mem_new(cpu_env,
                                         offsetof(CPUPPCState, gprh[i]), p);
        p += (i < 10) ? 4 : 5;
        cpu_reg_names_size -= (i < 10) ? 4 : 5;
    }

    cpu_nip = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, nip), "nip");

    cpu_msr = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, msr), "msr");

    cpu_ctr = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, ctr), "ctr");

    cpu_lr = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, lr), "lr");

#if defined(TARGET_PPC64)
    cpu_cfar = tcg_global_mem_new(cpu_env,
                                  offsetof(CPUPPCState, cfar), "cfar");
#endif

    cpu_xer = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, xer), "xer");
    cpu_so = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, so), "SO");
    cpu_ov = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, ov), "OV");
    cpu_ca = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, ca), "CA");
    cpu_ov32 = tcg_global_mem_new(cpu_env,
                                  offsetof(CPUPPCState, ov32), "OV32");
    cpu_ca32 = tcg_global_mem_new(cpu_env,
                                  offsetof(CPUPPCState, ca32), "CA32");

    cpu_reserve = tcg_global_mem_new(cpu_env,
                                     offsetof(CPUPPCState, reserve_addr),
                                     "reserve_addr");
    cpu_reserve_val = tcg_global_mem_new(cpu_env,
                                         offsetof(CPUPPCState, reserve_val),
                                         "reserve_val");

    cpu_fpscr = tcg_global_mem_new(cpu_env,
                                   offsetof(CPUPPCState, fpscr), "fpscr");

    cpu_access_type = tcg_global_mem_new_i32(cpu_env,
                                             offsetof(CPUPPCState, access_type),
                                             "access_type");
}

/* internal defines */
struct DisasContext {
    DisasContextBase base;
    target_ulong cia;  /* current instruction address */
    uint32_t opcode;
    /* Routine used to access memory */
    bool pr, hv, dr, le_mode;
    bool lazy_tlb_flush;
    bool need_access_type;
    int mem_idx;
    int access_type;
    /* Translation flags */
    MemOp default_tcg_memop_mask;
#if defined(TARGET_PPC64)
    bool sf_mode;
    bool has_cfar;
#endif
    bool fpu_enabled;
    bool altivec_enabled;
    bool vsx_enabled;
    bool spe_enabled;
    bool tm_enabled;
    bool gtse;
    ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
    int singlestep_enabled;
    uint32_t flags;
    uint64_t insns_flags;
    uint64_t insns_flags2;
};

#define DISAS_EXIT         DISAS_TARGET_0  /* exit to main loop, pc updated */
#define DISAS_EXIT_UPDATE  DISAS_TARGET_1  /* exit to main loop, pc stale */
#define DISAS_CHAIN        DISAS_TARGET_2  /* lookup next tb, pc updated */
#define DISAS_CHAIN_UPDATE DISAS_TARGET_3  /* lookup next tb, pc stale */
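/*
 * In the "pc updated" states cpu_nip already points at the next
 * instruction, while the *_UPDATE states leave it stale; the code that
 * ends the TB is then expected to write cpu_nip back before exiting to
 * the main loop or chaining to the next TB.
 */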
/* Return true iff byteswap is needed in a scalar memop */
static inline bool need_byteswap(const DisasContext *ctx)
{
#if defined(TARGET_WORDS_BIGENDIAN)
    return ctx->le_mode;
#else
    return !ctx->le_mode;
#endif
}

/* True when active word size < size of target_long. */
#ifdef TARGET_PPC64
# define NARROW_MODE(C)  (!(C)->sf_mode)
#else
# define NARROW_MODE(C)  0
#endif

struct opc_handler_t {
    /* invalid bits for instruction 1 (Rc(opcode) == 0) */
    uint32_t inval1;
    /* invalid bits for instruction 2 (Rc(opcode) == 1) */
    uint32_t inval2;
    /* instruction type */
    uint64_t type;
    /* extended instruction type */
    uint64_t type2;
    /* handler */
    void (*handler)(DisasContext *ctx);
};

/* SPR load/store helpers */
static inline void gen_load_spr(TCGv t, int reg)
{
    tcg_gen_ld_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
}

static inline void gen_store_spr(int reg, TCGv t)
{
    tcg_gen_st_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
}

static inline void gen_set_access_type(DisasContext *ctx, int access_type)
{
    if (ctx->need_access_type && ctx->access_type != access_type) {
        tcg_gen_movi_i32(cpu_access_type, access_type);
        ctx->access_type = access_type;
    }
}

static inline void gen_update_nip(DisasContext *ctx, target_ulong nip)
{
    if (NARROW_MODE(ctx)) {
        nip = (uint32_t)nip;
    }
    tcg_gen_movi_tl(cpu_nip, nip);
}

static void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error)
{
    TCGv_i32 t0, t1;

    /*
     * These are all synchronous exceptions; we set the PC back to the
     * faulting instruction.
     */
    gen_update_nip(ctx, ctx->cia);
    t0 = tcg_const_i32(excp);
    t1 = tcg_const_i32(error);
    gen_helper_raise_exception_err(cpu_env, t0, t1);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    ctx->base.is_jmp = DISAS_NORETURN;
}

static void gen_exception(DisasContext *ctx, uint32_t excp)
{
    TCGv_i32 t0;

    /*
     * These are all synchronous exceptions; we set the PC back to the
     * faulting instruction.
     */
    gen_update_nip(ctx, ctx->cia);
    t0 = tcg_const_i32(excp);
    gen_helper_raise_exception(cpu_env, t0);
    tcg_temp_free_i32(t0);
    ctx->base.is_jmp = DISAS_NORETURN;
}

static void gen_exception_nip(DisasContext *ctx, uint32_t excp,
                              target_ulong nip)
{
    TCGv_i32 t0;

    gen_update_nip(ctx, nip);
    t0 = tcg_const_i32(excp);
    gen_helper_raise_exception(cpu_env, t0);
    tcg_temp_free_i32(t0);
    ctx->base.is_jmp = DISAS_NORETURN;
}

static void gen_icount_io_start(DisasContext *ctx)
{
    if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
        gen_io_start();
        /*
         * An I/O instruction must be last in the TB.
         * Chain to the next TB, and let the code from gen_tb_start
         * decide if we need to return to the main loop.
         * Doing this first also allows this value to be overridden.
         */
        ctx->base.is_jmp = DISAS_TOO_MANY;
    }
}
/*
 * Tells the caller which exception is appropriate to generate and prepares
 * the SPR registers for it.
 *
 * The exception can be either POWERPC_EXCP_TRACE (on most PowerPCs) or
 * POWERPC_EXCP_DEBUG (on BookE).
 */
static uint32_t gen_prep_dbgex(DisasContext *ctx)
{
    if (ctx->flags & POWERPC_FLAG_DE) {
        target_ulong dbsr = 0;
        if (ctx->singlestep_enabled & CPU_SINGLE_STEP) {
            dbsr = DBCR0_ICMP;
        } else {
            /* Must have been branch */
            dbsr = DBCR0_BRT;
        }
        TCGv t0 = tcg_temp_new();
        gen_load_spr(t0, SPR_BOOKE_DBSR);
        tcg_gen_ori_tl(t0, t0, dbsr);
        gen_store_spr(SPR_BOOKE_DBSR, t0);
        tcg_temp_free(t0);
        return POWERPC_EXCP_DEBUG;
    } else {
        return POWERPC_EXCP_TRACE;
    }
}

static void gen_debug_exception(DisasContext *ctx)
{
    gen_helper_raise_exception(cpu_env, tcg_constant_i32(EXCP_DEBUG));
    ctx->base.is_jmp = DISAS_NORETURN;
}

static inline void gen_inval_exception(DisasContext *ctx, uint32_t error)
{
    /* Will be converted to program check if needed */
    gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_INVAL | error);
}

static inline void gen_priv_exception(DisasContext *ctx, uint32_t error)
{
    gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_PRIV | error);
}

static inline void gen_hvpriv_exception(DisasContext *ctx, uint32_t error)
{
    /* Will be converted to program check if needed */
    gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_PRIV | error);
}

/*****************************************************************************/
/* SPR READ/WRITE CALLBACKS */

void spr_noaccess(DisasContext *ctx, int gprn, int sprn)
{
#if 0
    sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
    printf("ERROR: try to access SPR %d !\n", sprn);
#endif
}

/* #define PPC_DUMP_SPR_ACCESSES */

/*
 * Generic callbacks:
 * do nothing but store/retrieve spr value
 */
static void spr_load_dump_spr(int sprn)
{
#ifdef PPC_DUMP_SPR_ACCESSES
    TCGv_i32 t0 = tcg_const_i32(sprn);
    gen_helper_load_dump_spr(cpu_env, t0);
    tcg_temp_free_i32(t0);
#endif
}

void spr_read_generic(DisasContext *ctx, int gprn, int sprn)
{
    gen_load_spr(cpu_gpr[gprn], sprn);
    spr_load_dump_spr(sprn);
}

static void spr_store_dump_spr(int sprn)
{
#ifdef PPC_DUMP_SPR_ACCESSES
    TCGv_i32 t0 = tcg_const_i32(sprn);
    gen_helper_store_dump_spr(cpu_env, t0);
    tcg_temp_free_i32(t0);
#endif
}

void spr_write_generic(DisasContext *ctx, int sprn, int gprn)
{
    gen_store_spr(sprn, cpu_gpr[gprn]);
    spr_store_dump_spr(sprn);
}

#if !defined(CONFIG_USER_ONLY)
void spr_write_generic32(DisasContext *ctx, int sprn, int gprn)
{
#ifdef TARGET_PPC64
    TCGv t0 = tcg_temp_new();
    tcg_gen_ext32u_tl(t0, cpu_gpr[gprn]);
    gen_store_spr(sprn, t0);
    tcg_temp_free(t0);
    spr_store_dump_spr(sprn);
#else
    spr_write_generic(ctx, sprn, gprn);
#endif
}

void spr_write_clear(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    gen_load_spr(t0, sprn);
    tcg_gen_neg_tl(t1, cpu_gpr[gprn]);
    tcg_gen_and_tl(t0, t0, t1);
    gen_store_spr(sprn, t0);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}

void spr_access_nop(DisasContext *ctx, int sprn, int gprn)
{
}

#endif

/* SPR common to all PowerPC */
/* XER */
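/*
 * XER is split across several TCG globals: cpu_xer holds the bits that
 * are not tracked separately, while SO, OV, CA (and, on ISA v3.00, OV32
 * and CA32) live in their own variables so that flag updates stay cheap.
 * A read therefore reassembles, roughly,
 *   xer | (so << XER_SO) | (ov << XER_OV) | (ca << XER_CA)  [| OV32/CA32]
 * and a write scatters the incoming value back out into those fields.
 */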
void spr_read_xer(DisasContext *ctx, int gprn, int sprn)
{
    TCGv dst = cpu_gpr[gprn];
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    tcg_gen_mov_tl(dst, cpu_xer);
    tcg_gen_shli_tl(t0, cpu_so, XER_SO);
    tcg_gen_shli_tl(t1, cpu_ov, XER_OV);
    tcg_gen_shli_tl(t2, cpu_ca, XER_CA);
    tcg_gen_or_tl(t0, t0, t1);
    tcg_gen_or_tl(dst, dst, t2);
    tcg_gen_or_tl(dst, dst, t0);
    if (is_isa300(ctx)) {
        tcg_gen_shli_tl(t0, cpu_ov32, XER_OV32);
        tcg_gen_or_tl(dst, dst, t0);
        tcg_gen_shli_tl(t0, cpu_ca32, XER_CA32);
        tcg_gen_or_tl(dst, dst, t0);
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}

void spr_write_xer(DisasContext *ctx, int sprn, int gprn)
{
    TCGv src = cpu_gpr[gprn];
    /* Write all flags, while reading back check for isa300 */
    tcg_gen_andi_tl(cpu_xer, src,
                    ~((1u << XER_SO) |
                      (1u << XER_OV) | (1u << XER_OV32) |
                      (1u << XER_CA) | (1u << XER_CA32)));
    tcg_gen_extract_tl(cpu_ov32, src, XER_OV32, 1);
    tcg_gen_extract_tl(cpu_ca32, src, XER_CA32, 1);
    tcg_gen_extract_tl(cpu_so, src, XER_SO, 1);
    tcg_gen_extract_tl(cpu_ov, src, XER_OV, 1);
    tcg_gen_extract_tl(cpu_ca, src, XER_CA, 1);
}

/* LR */
void spr_read_lr(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_mov_tl(cpu_gpr[gprn], cpu_lr);
}

void spr_write_lr(DisasContext *ctx, int sprn, int gprn)
{
    tcg_gen_mov_tl(cpu_lr, cpu_gpr[gprn]);
}

/* CFAR */
#if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
void spr_read_cfar(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_mov_tl(cpu_gpr[gprn], cpu_cfar);
}

void spr_write_cfar(DisasContext *ctx, int sprn, int gprn)
{
    tcg_gen_mov_tl(cpu_cfar, cpu_gpr[gprn]);
}
#endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */

/* CTR */
void spr_read_ctr(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_mov_tl(cpu_gpr[gprn], cpu_ctr);
}

void spr_write_ctr(DisasContext *ctx, int sprn, int gprn)
{
    tcg_gen_mov_tl(cpu_ctr, cpu_gpr[gprn]);
}

/* User read access to SPR */
/* USPRx */
/* UMMCRx */
/* UPMCx */
/* USIA */
/* UDECR */
void spr_read_ureg(DisasContext *ctx, int gprn, int sprn)
{
    gen_load_spr(cpu_gpr[gprn], sprn + 0x10);
}

#if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
void spr_write_ureg(DisasContext *ctx, int sprn, int gprn)
{
    gen_store_spr(sprn + 0x10, cpu_gpr[gprn]);
}
#endif

/* SPR common to all non-embedded PowerPC */
/* DECR */
#if !defined(CONFIG_USER_ONLY)
void spr_read_decr(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_decr(cpu_gpr[gprn], cpu_env);
}

void spr_write_decr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_decr(cpu_env, cpu_gpr[gprn]);
}
#endif

/* SPR common to all non-embedded PowerPC, except 601 */
/* Time base */
void spr_read_tbl(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_tbl(cpu_gpr[gprn], cpu_env);
}

void spr_read_tbu(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_tbu(cpu_gpr[gprn], cpu_env);
}

void spr_read_atbl(DisasContext *ctx, int gprn, int sprn)
{
    gen_helper_load_atbl(cpu_gpr[gprn], cpu_env);
}

void spr_read_atbu(DisasContext *ctx, int gprn, int sprn)
{
    gen_helper_load_atbu(cpu_gpr[gprn], cpu_env);
}
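/*
 * Most of the time-base and decrementer style accessors above and below
 * go through helpers and call gen_icount_io_start() first: with icount
 * enabled, such an access must be the last instruction in the TB so the
 * virtual clock the helper reads stays consistent.
 */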
#if !defined(CONFIG_USER_ONLY)
void spr_write_tbl(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_tbl(cpu_env, cpu_gpr[gprn]);
}

void spr_write_tbu(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_tbu(cpu_env, cpu_gpr[gprn]);
}

void spr_write_atbl(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_atbl(cpu_env, cpu_gpr[gprn]);
}

void spr_write_atbu(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_atbu(cpu_env, cpu_gpr[gprn]);
}

#if defined(TARGET_PPC64)
void spr_read_purr(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_purr(cpu_gpr[gprn], cpu_env);
}

void spr_write_purr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_purr(cpu_env, cpu_gpr[gprn]);
}

/* HDECR */
void spr_read_hdecr(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_hdecr(cpu_gpr[gprn], cpu_env);
}

void spr_write_hdecr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_hdecr(cpu_env, cpu_gpr[gprn]);
}

void spr_read_vtb(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_vtb(cpu_gpr[gprn], cpu_env);
}

void spr_write_vtb(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_vtb(cpu_env, cpu_gpr[gprn]);
}

void spr_write_tbu40(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_tbu40(cpu_env, cpu_gpr[gprn]);
}

#endif
#endif

#if !defined(CONFIG_USER_ONLY)
/* IBAT0U...IBAT7U */
/* IBAT0L...IBAT7L */
void spr_read_ibat(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
                  offsetof(CPUPPCState,
                           IBAT[sprn & 1][(sprn - SPR_IBAT0U) / 2]));
}

void spr_read_ibat_h(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
                  offsetof(CPUPPCState,
                           IBAT[sprn & 1][((sprn - SPR_IBAT4U) / 2) + 4]));
}

void spr_write_ibatu(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2);
    gen_helper_store_ibatu(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_ibatu_h(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_IBAT4U) / 2) + 4);
    gen_helper_store_ibatu(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_ibatl(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0L) / 2);
    gen_helper_store_ibatl(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_ibatl_h(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_IBAT4L) / 2) + 4);
    gen_helper_store_ibatl(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

/* DBAT0U...DBAT7U */
/* DBAT0L...DBAT7L */
void spr_read_dbat(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
                  offsetof(CPUPPCState,
                           DBAT[sprn & 1][(sprn - SPR_DBAT0U) / 2]));
}
void spr_read_dbat_h(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
                  offsetof(CPUPPCState,
                           DBAT[sprn & 1][((sprn - SPR_DBAT4U) / 2) + 4]));
}

void spr_write_dbatu(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32((sprn - SPR_DBAT0U) / 2);
    gen_helper_store_dbatu(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_dbatu_h(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_DBAT4U) / 2) + 4);
    gen_helper_store_dbatu(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_dbatl(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32((sprn - SPR_DBAT0L) / 2);
    gen_helper_store_dbatl(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_dbatl_h(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_DBAT4L) / 2) + 4);
    gen_helper_store_dbatl(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

/* SDR1 */
void spr_write_sdr1(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_sdr1(cpu_env, cpu_gpr[gprn]);
}

#if defined(TARGET_PPC64)
/* 64 bits PowerPC specific SPRs */
/* PIDR */
void spr_write_pidr(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_pidr(cpu_env, cpu_gpr[gprn]);
}

void spr_write_lpidr(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_lpidr(cpu_env, cpu_gpr[gprn]);
}

void spr_read_hior(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env, offsetof(CPUPPCState, excp_prefix));
}

void spr_write_hior(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0x3FFFFF00000ULL);
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_prefix));
    tcg_temp_free(t0);
}

void spr_write_ptcr(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_ptcr(cpu_env, cpu_gpr[gprn]);
}

void spr_write_pcr(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_pcr(cpu_env, cpu_gpr[gprn]);
}

/* DPDES */
void spr_read_dpdes(DisasContext *ctx, int gprn, int sprn)
{
    gen_helper_load_dpdes(cpu_gpr[gprn], cpu_env);
}

void spr_write_dpdes(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_dpdes(cpu_env, cpu_gpr[gprn]);
}
#endif
#endif

/* PowerPC 601 specific registers */
/* RTC */
void spr_read_601_rtcl(DisasContext *ctx, int gprn, int sprn)
{
    gen_helper_load_601_rtcl(cpu_gpr[gprn], cpu_env);
}

void spr_read_601_rtcu(DisasContext *ctx, int gprn, int sprn)
{
    gen_helper_load_601_rtcu(cpu_gpr[gprn], cpu_env);
}

#if !defined(CONFIG_USER_ONLY)
void spr_write_601_rtcu(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_601_rtcu(cpu_env, cpu_gpr[gprn]);
}

void spr_write_601_rtcl(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_601_rtcl(cpu_env, cpu_gpr[gprn]);
}

void spr_write_hid0_601(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_hid0_601(cpu_env, cpu_gpr[gprn]);
    /* Must stop the translation as endianness may have changed */
    ctx->base.is_jmp = DISAS_EXIT_UPDATE;
}
#endif

/* Unified bats */
#if !defined(CONFIG_USER_ONLY)
void spr_read_601_ubat(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
                  offsetof(CPUPPCState,
                           IBAT[sprn & 1][(sprn - SPR_IBAT0U) / 2]));
}
void spr_write_601_ubatu(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2);
    gen_helper_store_601_batl(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_601_ubatl(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2);
    gen_helper_store_601_batu(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}
#endif

/* PowerPC 40x specific registers */
#if !defined(CONFIG_USER_ONLY)
void spr_read_40x_pit(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_40x_pit(cpu_gpr[gprn], cpu_env);
}

void spr_write_40x_pit(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_40x_pit(cpu_env, cpu_gpr[gprn]);
}

void spr_write_40x_dbcr0(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_store_spr(sprn, cpu_gpr[gprn]);
    gen_helper_store_40x_dbcr0(cpu_env, cpu_gpr[gprn]);
    /* We must stop translation as we may have rebooted */
    ctx->base.is_jmp = DISAS_EXIT_UPDATE;
}

void spr_write_40x_sler(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_40x_sler(cpu_env, cpu_gpr[gprn]);
}

void spr_write_booke_tcr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_booke_tcr(cpu_env, cpu_gpr[gprn]);
}

void spr_write_booke_tsr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_booke_tsr(cpu_env, cpu_gpr[gprn]);
}
#endif

/* PowerPC 403 specific registers */
/* PBL1 / PBU1 / PBL2 / PBU2 */
#if !defined(CONFIG_USER_ONLY)
void spr_read_403_pbr(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
                  offsetof(CPUPPCState, pb[sprn - SPR_403_PBL1]));
}

void spr_write_403_pbr(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32(sprn - SPR_403_PBL1);
    gen_helper_store_403_pbr(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_pir(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0xF);
    gen_store_spr(SPR_PIR, t0);
    tcg_temp_free(t0);
}
#endif

/* SPE specific registers */
void spr_read_spefscr(DisasContext *ctx, int gprn, int sprn)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    tcg_gen_ld_i32(t0, cpu_env, offsetof(CPUPPCState, spe_fscr));
    tcg_gen_extu_i32_tl(cpu_gpr[gprn], t0);
    tcg_temp_free_i32(t0);
}

void spr_write_spefscr(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    tcg_gen_trunc_tl_i32(t0, cpu_gpr[gprn]);
    tcg_gen_st_i32(t0, cpu_env, offsetof(CPUPPCState, spe_fscr));
    tcg_temp_free_i32(t0);
}

#if !defined(CONFIG_USER_ONLY)
/* Callback used to write the exception vector base */
void spr_write_excp_prefix(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUPPCState, ivpr_mask));
    tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]);
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_prefix));
    gen_store_spr(sprn, t0);
    tcg_temp_free(t0);
}
void spr_write_excp_vector(DisasContext *ctx, int sprn, int gprn)
{
    int sprn_offs;

    if (sprn >= SPR_BOOKE_IVOR0 && sprn <= SPR_BOOKE_IVOR15) {
        sprn_offs = sprn - SPR_BOOKE_IVOR0;
    } else if (sprn >= SPR_BOOKE_IVOR32 && sprn <= SPR_BOOKE_IVOR37) {
        sprn_offs = sprn - SPR_BOOKE_IVOR32 + 32;
    } else if (sprn >= SPR_BOOKE_IVOR38 && sprn <= SPR_BOOKE_IVOR42) {
        sprn_offs = sprn - SPR_BOOKE_IVOR38 + 38;
    } else {
        printf("Trying to write an unknown exception vector %d %03x\n",
               sprn, sprn);
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }

    TCGv t0 = tcg_temp_new();
    tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUPPCState, ivor_mask));
    tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]);
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_vectors[sprn_offs]));
    gen_store_spr(sprn, t0);
    tcg_temp_free(t0);
}
#endif

#ifdef TARGET_PPC64
#ifndef CONFIG_USER_ONLY
void spr_write_amr(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();

    /*
     * Note, the HV=1 PR=0 case is handled earlier by simply using
     * spr_write_generic for HV mode in the SPR table
     */

    /* Build insertion mask into t1 based on context */
    if (ctx->pr) {
        gen_load_spr(t1, SPR_UAMOR);
    } else {
        gen_load_spr(t1, SPR_AMOR);
    }

    /* Mask new bits into t2 */
    tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);

    /* Load AMR and clear new bits in t0 */
    gen_load_spr(t0, SPR_AMR);
    tcg_gen_andc_tl(t0, t0, t1);

    /* OR in new bits and write it out */
    tcg_gen_or_tl(t0, t0, t2);
    gen_store_spr(SPR_AMR, t0);
    spr_store_dump_spr(SPR_AMR);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}

void spr_write_uamor(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();

    /*
     * Note, the HV=1 case is handled earlier by simply using
     * spr_write_generic for HV mode in the SPR table
     */

    /* Build insertion mask into t1 based on context */
    gen_load_spr(t1, SPR_AMOR);

    /* Mask new bits into t2 */
    tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);

    /* Load UAMOR and clear new bits in t0 */
    gen_load_spr(t0, SPR_UAMOR);
    tcg_gen_andc_tl(t0, t0, t1);

    /* OR in new bits and write it out */
    tcg_gen_or_tl(t0, t0, t2);
    gen_store_spr(SPR_UAMOR, t0);
    spr_store_dump_spr(SPR_UAMOR);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}

void spr_write_iamr(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();

    /*
     * Note, the HV=1 case is handled earlier by simply using
     * spr_write_generic for HV mode in the SPR table
     */

    /* Build insertion mask into t1 based on context */
    gen_load_spr(t1, SPR_AMOR);

    /* Mask new bits into t2 */
    tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);

    /* Load IAMR and clear new bits in t0 */
    gen_load_spr(t0, SPR_IAMR);
    tcg_gen_andc_tl(t0, t0, t1);

    /* OR in new bits and write it out */
    tcg_gen_or_tl(t0, t0, t2);
    gen_store_spr(SPR_IAMR, t0);
    spr_store_dump_spr(SPR_IAMR);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}
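/*
 * The three callbacks above implement the same masked insert: only the
 * bits enabled in the relevant authority mask (AMOR, or UAMOR when in
 * problem state) are taken from the new value, i.e. in effect
 *   reg = (reg & ~mask) | (val & mask);
 * so less-privileged code cannot change bits reserved by the OS or
 * hypervisor.
 */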
#endif
#endif

#ifndef CONFIG_USER_ONLY
void spr_read_thrm(DisasContext *ctx, int gprn, int sprn)
{
    gen_helper_fixup_thrm(cpu_env);
    gen_load_spr(cpu_gpr[gprn], sprn);
    spr_load_dump_spr(sprn);
}
#endif /* !CONFIG_USER_ONLY */

#if !defined(CONFIG_USER_ONLY)
void spr_write_e500_l1csr0(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();

    tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR0_DCE | L1CSR0_CPE);
    gen_store_spr(sprn, t0);
    tcg_temp_free(t0);
}

void spr_write_e500_l1csr1(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();

    tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR1_ICE | L1CSR1_CPE);
    gen_store_spr(sprn, t0);
    tcg_temp_free(t0);
}

void spr_write_e500_l2csr0(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();

    tcg_gen_andi_tl(t0, cpu_gpr[gprn],
                    ~(E500_L2CSR0_L2FI | E500_L2CSR0_L2FL | E500_L2CSR0_L2LFC));
    gen_store_spr(sprn, t0);
    tcg_temp_free(t0);
}

void spr_write_booke206_mmucsr0(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_booke206_tlbflush(cpu_env, cpu_gpr[gprn]);
}

void spr_write_booke_pid(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32(sprn);
    gen_helper_booke_setpid(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_eplc(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_booke_set_eplc(cpu_env, cpu_gpr[gprn]);
}

void spr_write_epsc(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_booke_set_epsc(cpu_env, cpu_gpr[gprn]);
}

#endif

#if !defined(CONFIG_USER_ONLY)
void spr_write_mas73(DisasContext *ctx, int sprn, int gprn)
{
    TCGv val = tcg_temp_new();
    tcg_gen_ext32u_tl(val, cpu_gpr[gprn]);
    gen_store_spr(SPR_BOOKE_MAS3, val);
    tcg_gen_shri_tl(val, cpu_gpr[gprn], 32);
    gen_store_spr(SPR_BOOKE_MAS7, val);
    tcg_temp_free(val);
}

void spr_read_mas73(DisasContext *ctx, int gprn, int sprn)
{
    TCGv mas7 = tcg_temp_new();
    TCGv mas3 = tcg_temp_new();
    gen_load_spr(mas7, SPR_BOOKE_MAS7);
    tcg_gen_shli_tl(mas7, mas7, 32);
    gen_load_spr(mas3, SPR_BOOKE_MAS3);
    tcg_gen_or_tl(cpu_gpr[gprn], mas3, mas7);
    tcg_temp_free(mas3);
    tcg_temp_free(mas7);
}

#endif

#ifdef TARGET_PPC64
static void gen_fscr_facility_check(DisasContext *ctx, int facility_sprn,
                                    int bit, int sprn, int cause)
{
    TCGv_i32 t1 = tcg_const_i32(bit);
    TCGv_i32 t2 = tcg_const_i32(sprn);
    TCGv_i32 t3 = tcg_const_i32(cause);

    gen_helper_fscr_facility_check(cpu_env, t1, t2, t3);

    tcg_temp_free_i32(t3);
    tcg_temp_free_i32(t2);
    tcg_temp_free_i32(t1);
}

static void gen_msr_facility_check(DisasContext *ctx, int facility_sprn,
                                   int bit, int sprn, int cause)
{
    TCGv_i32 t1 = tcg_const_i32(bit);
    TCGv_i32 t2 = tcg_const_i32(sprn);
    TCGv_i32 t3 = tcg_const_i32(cause);

    gen_helper_msr_facility_check(cpu_env, t1, t2, t3);

    tcg_temp_free_i32(t3);
    tcg_temp_free_i32(t2);
    tcg_temp_free_i32(t1);
}

void spr_read_prev_upper32(DisasContext *ctx, int gprn, int sprn)
{
    TCGv spr_up = tcg_temp_new();
    TCGv spr = tcg_temp_new();

    gen_load_spr(spr, sprn - 1);
    tcg_gen_shri_tl(spr_up, spr, 32);
    tcg_gen_ext32u_tl(cpu_gpr[gprn], spr_up);

    tcg_temp_free(spr);
    tcg_temp_free(spr_up);
}
void spr_write_prev_upper32(DisasContext *ctx, int sprn, int gprn)
{
    TCGv spr = tcg_temp_new();

    gen_load_spr(spr, sprn - 1);
    tcg_gen_deposit_tl(spr, spr, cpu_gpr[gprn], 32, 32);
    gen_store_spr(sprn - 1, spr);

    tcg_temp_free(spr);
}

#if !defined(CONFIG_USER_ONLY)
void spr_write_hmer(DisasContext *ctx, int sprn, int gprn)
{
    TCGv hmer = tcg_temp_new();

    gen_load_spr(hmer, sprn);
    tcg_gen_and_tl(hmer, cpu_gpr[gprn], hmer);
    gen_store_spr(sprn, hmer);
    spr_store_dump_spr(sprn);
    tcg_temp_free(hmer);
}

void spr_write_lpcr(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_lpcr(cpu_env, cpu_gpr[gprn]);
}
#endif /* !defined(CONFIG_USER_ONLY) */

void spr_read_tar(DisasContext *ctx, int gprn, int sprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
    spr_read_generic(ctx, gprn, sprn);
}

void spr_write_tar(DisasContext *ctx, int sprn, int gprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
    spr_write_generic(ctx, sprn, gprn);
}

void spr_read_tm(DisasContext *ctx, int gprn, int sprn)
{
    gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
    spr_read_generic(ctx, gprn, sprn);
}

void spr_write_tm(DisasContext *ctx, int sprn, int gprn)
{
    gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
    spr_write_generic(ctx, sprn, gprn);
}

void spr_read_tm_upper32(DisasContext *ctx, int gprn, int sprn)
{
    gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
    spr_read_prev_upper32(ctx, gprn, sprn);
}

void spr_write_tm_upper32(DisasContext *ctx, int sprn, int gprn)
{
    gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
    spr_write_prev_upper32(ctx, sprn, gprn);
}

void spr_read_ebb(DisasContext *ctx, int gprn, int sprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
    spr_read_generic(ctx, gprn, sprn);
}

void spr_write_ebb(DisasContext *ctx, int sprn, int gprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
    spr_write_generic(ctx, sprn, gprn);
}

void spr_read_ebb_upper32(DisasContext *ctx, int gprn, int sprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
    spr_read_prev_upper32(ctx, gprn, sprn);
}

void spr_write_ebb_upper32(DisasContext *ctx, int sprn, int gprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
    spr_write_prev_upper32(ctx, sprn, gprn);
}
#endif
#define GEN_HANDLER(name, opc1, opc2, opc3, inval, type)                      \
GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE)

#define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2)             \
GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2)

#define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type)               \
GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE)

#define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2)      \
GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2)

#define GEN_HANDLER_E_2(name, opc1, opc2, opc3, opc4, inval, type, type2)     \
GEN_OPCODE3(name, opc1, opc2, opc3, opc4, inval, type, type2)

#define GEN_HANDLER2_E_2(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) \
GEN_OPCODE4(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2)

typedef struct opcode_t {
    unsigned char opc1, opc2, opc3, opc4;
#if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
    unsigned char pad[4];
#endif
    opc_handler_t handler;
    const char *oname;
} opcode_t;

/* Helpers for priv. check */
#define GEN_PRIV                                                \
    do {                                                        \
        gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); return; \
    } while (0)

#if defined(CONFIG_USER_ONLY)
#define CHK_HV GEN_PRIV
#define CHK_SV GEN_PRIV
#define CHK_HVRM GEN_PRIV
#else
#define CHK_HV                                       \
    do {                                             \
        if (unlikely(ctx->pr || !ctx->hv)) {         \
            GEN_PRIV;                                \
        }                                            \
    } while (0)
#define CHK_SV                                       \
    do {                                             \
        if (unlikely(ctx->pr)) {                     \
            GEN_PRIV;                                \
        }                                            \
    } while (0)
#define CHK_HVRM                                         \
    do {                                                 \
        if (unlikely(ctx->pr || !ctx->hv || ctx->dr)) {  \
            GEN_PRIV;                                    \
        }                                                \
    } while (0)
#endif

#define CHK_NONE

/*****************************************************************************/
/* PowerPC instructions table */

#define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2)                    \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = 0xff,                                                             \
    .handler = {                                                              \
        .inval1  = invl,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = stringify(name),                                                 \
}
#define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2)       \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = 0xff,                                                             \
    .handler = {                                                              \
        .inval1  = invl1,                                                     \
        .inval2  = invl2,                                                     \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = stringify(name),                                                 \
}
#define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2)             \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = 0xff,                                                             \
    .handler = {                                                              \
        .inval1  = invl,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = onam,                                                            \
}
#define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2)              \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = op4,                                                              \
    .handler = {                                                              \
        .inval1  = invl,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = stringify(name),                                                 \
}
#define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2)        \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = op4,                                                              \
    .handler = {                                                              \
        .inval1  = invl,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = onam,                                                            \
}

/* Invalid instruction */
static void gen_invalid(DisasContext *ctx)
{
    gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
}

static opc_handler_t invalid_handler = {
    .inval1  = 0xFFFFFFFF,
    .inval2  = 0xFFFFFFFF,
    .type    = PPC_NONE,
    .type2   = PPC_NONE,
    .handler = gen_invalid,
};

/*** Integer comparison ***/
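/*
 * Each CR field is kept as a 4-bit value in cpu_crf[], with the usual
 * PowerPC layout LT|GT|EQ|SO from most- to least-significant bit.
 * gen_op_cmp() below starts from EQ, uses movcond to replace it with LT
 * or GT as appropriate, and finally ORs in the current copy of XER.SO.
 */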
static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv_i32 t = tcg_temp_new_i32();

    tcg_gen_movi_tl(t0, CRF_EQ);
    tcg_gen_movi_tl(t1, CRF_LT);
    tcg_gen_movcond_tl((s ? TCG_COND_LT : TCG_COND_LTU),
                       t0, arg0, arg1, t1, t0);
    tcg_gen_movi_tl(t1, CRF_GT);
    tcg_gen_movcond_tl((s ? TCG_COND_GT : TCG_COND_GTU),
                       t0, arg0, arg1, t1, t0);

    tcg_gen_trunc_tl_i32(t, t0);
    tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so);
    tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free_i32(t);
}

static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
{
    TCGv t0 = tcg_const_tl(arg1);
    gen_op_cmp(arg0, t0, s, crf);
    tcg_temp_free(t0);
}

static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
{
    TCGv t0, t1;
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    if (s) {
        tcg_gen_ext32s_tl(t0, arg0);
        tcg_gen_ext32s_tl(t1, arg1);
    } else {
        tcg_gen_ext32u_tl(t0, arg0);
        tcg_gen_ext32u_tl(t1, arg1);
    }
    gen_op_cmp(t0, t1, s, crf);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
}

static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
{
    TCGv t0 = tcg_const_tl(arg1);
    gen_op_cmp32(arg0, t0, s, crf);
    tcg_temp_free(t0);
}

static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
{
    if (NARROW_MODE(ctx)) {
        gen_op_cmpi32(reg, 0, 1, 0);
    } else {
        gen_op_cmpi(reg, 0, 1, 0);
    }
}

/* cmprb - range comparison: isupper, isalpha, islower */
static void gen_cmprb(DisasContext *ctx)
{
    TCGv_i32 src1 = tcg_temp_new_i32();
    TCGv_i32 src2 = tcg_temp_new_i32();
    TCGv_i32 src2lo = tcg_temp_new_i32();
    TCGv_i32 src2hi = tcg_temp_new_i32();
    TCGv_i32 crf = cpu_crf[crfD(ctx->opcode)];

    tcg_gen_trunc_tl_i32(src1, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_trunc_tl_i32(src2, cpu_gpr[rB(ctx->opcode)]);

    tcg_gen_andi_i32(src1, src1, 0xFF);
    tcg_gen_ext8u_i32(src2lo, src2);
    tcg_gen_shri_i32(src2, src2, 8);
    tcg_gen_ext8u_i32(src2hi, src2);

    tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
    tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
    tcg_gen_and_i32(crf, src2lo, src2hi);

    if (ctx->opcode & 0x00200000) {
        tcg_gen_shri_i32(src2, src2, 8);
        tcg_gen_ext8u_i32(src2lo, src2);
        tcg_gen_shri_i32(src2, src2, 8);
        tcg_gen_ext8u_i32(src2hi, src2);
        tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
        tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
        tcg_gen_and_i32(src2lo, src2lo, src2hi);
        tcg_gen_or_i32(crf, crf, src2lo);
    }
    tcg_gen_shli_i32(crf, crf, CRF_GT_BIT);
    tcg_temp_free_i32(src1);
    tcg_temp_free_i32(src2);
    tcg_temp_free_i32(src2lo);
    tcg_temp_free_i32(src2hi);
}

#if defined(TARGET_PPC64)
/* cmpeqb */
static void gen_cmpeqb(DisasContext *ctx)
{
    gen_helper_cmpeqb(cpu_crf[crfD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                      cpu_gpr[rB(ctx->opcode)]);
}
#endif
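/*
 * isel RT,RA,RB,BC selects (RA|0) when CR bit BC is set and RB otherwise,
 * where (RA|0) reads as the literal value 0 if the RA field is 0.  The
 * implementation below isolates the chosen CR bit and feeds it to a
 * single movcond.
 */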
/* isel (PowerPC 2.03 specification) */
static void gen_isel(DisasContext *ctx)
{
    uint32_t bi = rC(ctx->opcode);
    uint32_t mask = 0x08 >> (bi & 0x03);
    TCGv t0 = tcg_temp_new();
    TCGv zr;

    tcg_gen_extu_i32_tl(t0, cpu_crf[bi >> 2]);
    tcg_gen_andi_tl(t0, t0, mask);

    zr = tcg_const_tl(0);
    tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rD(ctx->opcode)], t0, zr,
                       rA(ctx->opcode) ? cpu_gpr[rA(ctx->opcode)] : zr,
                       cpu_gpr[rB(ctx->opcode)]);
    tcg_temp_free(zr);
    tcg_temp_free(t0);
}

/* cmpb: PowerPC 2.05 specification */
static void gen_cmpb(DisasContext *ctx)
{
    gen_helper_cmpb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                    cpu_gpr[rB(ctx->opcode)]);
}

/*** Integer arithmetic ***/

static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0,
                                           TCGv arg1, TCGv arg2, int sub)
{
    TCGv t0 = tcg_temp_new();

    tcg_gen_xor_tl(cpu_ov, arg0, arg2);
    tcg_gen_xor_tl(t0, arg1, arg2);
    if (sub) {
        tcg_gen_and_tl(cpu_ov, cpu_ov, t0);
    } else {
        tcg_gen_andc_tl(cpu_ov, cpu_ov, t0);
    }
    tcg_temp_free(t0);
    if (NARROW_MODE(ctx)) {
        tcg_gen_extract_tl(cpu_ov, cpu_ov, 31, 1);
        if (is_isa300(ctx)) {
            tcg_gen_mov_tl(cpu_ov32, cpu_ov);
        }
    } else {
        if (is_isa300(ctx)) {
            tcg_gen_extract_tl(cpu_ov32, cpu_ov, 31, 1);
        }
        tcg_gen_extract_tl(cpu_ov, cpu_ov, TARGET_LONG_BITS - 1, 1);
    }
    tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
}

static inline void gen_op_arith_compute_ca32(DisasContext *ctx,
                                             TCGv res, TCGv arg0, TCGv arg1,
                                             TCGv ca32, int sub)
{
    TCGv t0;

    if (!is_isa300(ctx)) {
        return;
    }

    t0 = tcg_temp_new();
    if (sub) {
        tcg_gen_eqv_tl(t0, arg0, arg1);
    } else {
        tcg_gen_xor_tl(t0, arg0, arg1);
    }
    tcg_gen_xor_tl(t0, t0, res);
    tcg_gen_extract_tl(ca32, t0, 32, 1);
    tcg_temp_free(t0);
}
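/*
 * For reference: with the operands held in 64-bit registers, the carry
 * out of bit 31 of an addition can be recovered as bit 32 of
 *   (a + b) ^ a ^ b
 * while the carry out of bit 63 falls directly out of the double-word
 * add2 ops.  The add and subf helpers below rely on both identities.
 */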
1632 */ 1633 TCGv t1 = tcg_temp_new(); 1634 tcg_gen_xor_tl(t1, arg1, arg2); /* add without carry */ 1635 tcg_gen_add_tl(t0, arg1, arg2); 1636 if (add_ca) { 1637 tcg_gen_add_tl(t0, t0, ca); 1638 } 1639 tcg_gen_xor_tl(ca, t0, t1); /* bits changed w/ carry */ 1640 tcg_temp_free(t1); 1641 tcg_gen_extract_tl(ca, ca, 32, 1); 1642 if (is_isa300(ctx)) { 1643 tcg_gen_mov_tl(ca32, ca); 1644 } 1645 } else { 1646 TCGv zero = tcg_const_tl(0); 1647 if (add_ca) { 1648 tcg_gen_add2_tl(t0, ca, arg1, zero, ca, zero); 1649 tcg_gen_add2_tl(t0, ca, t0, ca, arg2, zero); 1650 } else { 1651 tcg_gen_add2_tl(t0, ca, arg1, zero, arg2, zero); 1652 } 1653 gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, ca32, 0); 1654 tcg_temp_free(zero); 1655 } 1656 } else { 1657 tcg_gen_add_tl(t0, arg1, arg2); 1658 if (add_ca) { 1659 tcg_gen_add_tl(t0, t0, ca); 1660 } 1661 } 1662 1663 if (compute_ov) { 1664 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0); 1665 } 1666 if (unlikely(compute_rc0)) { 1667 gen_set_Rc0(ctx, t0); 1668 } 1669 1670 if (t0 != ret) { 1671 tcg_gen_mov_tl(ret, t0); 1672 tcg_temp_free(t0); 1673 } 1674 } 1675 /* Add functions with two operands */ 1676 #define GEN_INT_ARITH_ADD(name, opc3, ca, add_ca, compute_ca, compute_ov) \ 1677 static void glue(gen_, name)(DisasContext *ctx) \ 1678 { \ 1679 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \ 1680 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 1681 ca, glue(ca, 32), \ 1682 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 1683 } 1684 /* Add functions with one operand and one immediate */ 1685 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, ca, \ 1686 add_ca, compute_ca, compute_ov) \ 1687 static void glue(gen_, name)(DisasContext *ctx) \ 1688 { \ 1689 TCGv t0 = tcg_const_tl(const_val); \ 1690 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \ 1691 cpu_gpr[rA(ctx->opcode)], t0, \ 1692 ca, glue(ca, 32), \ 1693 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 1694 tcg_temp_free(t0); \ 1695 } 1696 1697 /* add add. addo addo. */ 1698 GEN_INT_ARITH_ADD(add, 0x08, cpu_ca, 0, 0, 0) 1699 GEN_INT_ARITH_ADD(addo, 0x18, cpu_ca, 0, 0, 1) 1700 /* addc addc. addco addco. */ 1701 GEN_INT_ARITH_ADD(addc, 0x00, cpu_ca, 0, 1, 0) 1702 GEN_INT_ARITH_ADD(addco, 0x10, cpu_ca, 0, 1, 1) 1703 /* adde adde. addeo addeo. */ 1704 GEN_INT_ARITH_ADD(adde, 0x04, cpu_ca, 1, 1, 0) 1705 GEN_INT_ARITH_ADD(addeo, 0x14, cpu_ca, 1, 1, 1) 1706 /* addme addme. addmeo addmeo. */ 1707 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, cpu_ca, 1, 1, 0) 1708 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, cpu_ca, 1, 1, 1) 1709 /* addex */ 1710 GEN_INT_ARITH_ADD(addex, 0x05, cpu_ov, 1, 1, 0); 1711 /* addze addze. 
/* addze addze. addzeo addzeo. */
GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, cpu_ca, 1, 1, 0)
GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, cpu_ca, 1, 1, 1)

/* addic addic. */
static inline void gen_op_addic(DisasContext *ctx, bool compute_rc0)
{
    TCGv c = tcg_const_tl(SIMM(ctx->opcode));
    gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                     c, cpu_ca, cpu_ca32, 0, 1, 0, compute_rc0);
    tcg_temp_free(c);
}

static void gen_addic(DisasContext *ctx)
{
    gen_op_addic(ctx, 0);
}

static void gen_addic_(DisasContext *ctx)
{
    gen_op_addic(ctx, 1);
}

static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign, int compute_ov)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();
    TCGv_i32 t2 = tcg_temp_new_i32();
    TCGv_i32 t3 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg1);
    tcg_gen_trunc_tl_i32(t1, arg2);
    if (sign) {
        tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i32(t2, t2, t3);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i32(t2, t2, t3);
        tcg_gen_movi_i32(t3, 0);
        tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_div_i32(t3, t0, t1);
        tcg_gen_extu_i32_tl(ret, t3);
    } else {
        tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t1, 0);
        tcg_gen_movi_i32(t3, 0);
        tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_divu_i32(t3, t0, t1);
        tcg_gen_extu_i32_tl(ret, t3);
    }
    if (compute_ov) {
        tcg_gen_extu_i32_tl(cpu_ov, t2);
        if (is_isa300(ctx)) {
            tcg_gen_extu_i32_tl(cpu_ov32, t2);
        }
        tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
    }
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
    tcg_temp_free_i32(t3);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, ret);
    }
}

/* Div functions */
#define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov)                      \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      sign, compute_ov);                                      \
}
/* divwu divwu. divwuo divwuo. */
GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0);
GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1);
/* divw divw. divwo divwo. */
GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0);
GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1);
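/*
 * In gen_op_arith_divw/divd above and below, t2 flags the cases the ISA
 * leaves undefined (division by zero, and INT_MIN / -1 for the signed
 * forms).  In those cases the movcond silently replaces the divisor with
 * t2 (i.e. 1) so the host never executes a trapping division, and t2 is
 * what ends up in OV when the overflow-enabled form is used.
 */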
/* div[wd]eu[o][.] */
#define GEN_DIVE(name, hlpr, compute_ov)                                      \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 t0 = tcg_const_i32(compute_ov);                                  \
    gen_helper_##hlpr(cpu_gpr[rD(ctx->opcode)], cpu_env,                      \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); \
    tcg_temp_free_i32(t0);                                                    \
    if (unlikely(Rc(ctx->opcode) != 0)) {                                     \
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);                           \
    }                                                                         \
}

GEN_DIVE(divweu, divweu, 0);
GEN_DIVE(divweuo, divweu, 1);
GEN_DIVE(divwe, divwe, 0);
GEN_DIVE(divweo, divwe, 1);

#if defined(TARGET_PPC64)
static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign, int compute_ov)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv_i64 t2 = tcg_temp_new_i64();
    TCGv_i64 t3 = tcg_temp_new_i64();

    tcg_gen_mov_i64(t0, arg1);
    tcg_gen_mov_i64(t1, arg2);
    if (sign) {
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i64(t2, t2, t3);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i64(t2, t2, t3);
        tcg_gen_movi_i64(t3, 0);
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_div_i64(ret, t0, t1);
    } else {
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t1, 0);
        tcg_gen_movi_i64(t3, 0);
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_divu_i64(ret, t0, t1);
    }
    if (compute_ov) {
        tcg_gen_mov_tl(cpu_ov, t2);
        if (is_isa300(ctx)) {
            tcg_gen_mov_tl(cpu_ov32, t2);
        }
        tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
    tcg_temp_free_i64(t3);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, ret);
    }
}

#define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov)                      \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      sign, compute_ov);                                      \
}
/* divdu divdu. divduo divduo. */
GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
/* divd divd. divdo divdo. */
GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0);
GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1);

GEN_DIVE(divdeu, divdeu, 0);
GEN_DIVE(divdeuo, divdeu, 1);
GEN_DIVE(divde, divde, 0);
GEN_DIVE(divdeo, divde, 1);
#endif

static inline void gen_op_arith_modw(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg1);
    tcg_gen_trunc_tl_i32(t1, arg2);
    if (sign) {
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_i32 t3 = tcg_temp_new_i32();
        tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i32(t2, t2, t3);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i32(t2, t2, t3);
        tcg_gen_movi_i32(t3, 0);
        tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_rem_i32(t3, t0, t1);
        tcg_gen_ext_i32_tl(ret, t3);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        TCGv_i32 t2 = tcg_const_i32(1);
        TCGv_i32 t3 = tcg_const_i32(0);
        tcg_gen_movcond_i32(TCG_COND_EQ, t1, t1, t3, t2, t1);
        tcg_gen_remu_i32(t3, t0, t1);
        tcg_gen_extu_i32_tl(ret, t3);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    }
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
}

#define GEN_INT_ARITH_MODW(name, opc3, sign)                                  \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_modw(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      sign);                                                  \
}

GEN_INT_ARITH_MODW(moduw, 0x08, 0);
GEN_INT_ARITH_MODW(modsw, 0x18, 1);

#if defined(TARGET_PPC64)
static inline void gen_op_arith_modd(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_mov_i64(t0, arg1);
    tcg_gen_mov_i64(t1, arg2);
    if (sign) {
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i64 t3 = tcg_temp_new_i64();
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i64(t2, t2, t3);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i64(t2, t2, t3);
        tcg_gen_movi_i64(t3, 0);
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_rem_i64(ret, t0, t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        TCGv_i64 t2 = tcg_const_i64(1);
        TCGv_i64 t3 = tcg_const_i64(0);
        tcg_gen_movcond_i64(TCG_COND_EQ, t1, t1, t3, t2, t1);
        tcg_gen_remu_i64(ret, t0, t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

#define GEN_INT_ARITH_MODD(name, opc3, sign)                                  \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_modd(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      sign);                                                  \
}

GEN_INT_ARITH_MODD(modud, 0x08, 0);
GEN_INT_ARITH_MODD(modsd, 0x18, 1);
#endif
*/ 1965 static void gen_mulhw(DisasContext *ctx) 1966 { 1967 TCGv_i32 t0 = tcg_temp_new_i32(); 1968 TCGv_i32 t1 = tcg_temp_new_i32(); 1969 1970 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); 1971 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); 1972 tcg_gen_muls2_i32(t0, t1, t0, t1); 1973 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1); 1974 tcg_temp_free_i32(t0); 1975 tcg_temp_free_i32(t1); 1976 if (unlikely(Rc(ctx->opcode) != 0)) { 1977 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1978 } 1979 } 1980 1981 /* mulhwu mulhwu. */ 1982 static void gen_mulhwu(DisasContext *ctx) 1983 { 1984 TCGv_i32 t0 = tcg_temp_new_i32(); 1985 TCGv_i32 t1 = tcg_temp_new_i32(); 1986 1987 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); 1988 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); 1989 tcg_gen_mulu2_i32(t0, t1, t0, t1); 1990 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1); 1991 tcg_temp_free_i32(t0); 1992 tcg_temp_free_i32(t1); 1993 if (unlikely(Rc(ctx->opcode) != 0)) { 1994 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1995 } 1996 } 1997 1998 /* mullw mullw. */ 1999 static void gen_mullw(DisasContext *ctx) 2000 { 2001 #if defined(TARGET_PPC64) 2002 TCGv_i64 t0, t1; 2003 t0 = tcg_temp_new_i64(); 2004 t1 = tcg_temp_new_i64(); 2005 tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]); 2006 tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]); 2007 tcg_gen_mul_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); 2008 tcg_temp_free(t0); 2009 tcg_temp_free(t1); 2010 #else 2011 tcg_gen_mul_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 2012 cpu_gpr[rB(ctx->opcode)]); 2013 #endif 2014 if (unlikely(Rc(ctx->opcode) != 0)) { 2015 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2016 } 2017 } 2018 2019 /* mullwo mullwo. */ 2020 static void gen_mullwo(DisasContext *ctx) 2021 { 2022 TCGv_i32 t0 = tcg_temp_new_i32(); 2023 TCGv_i32 t1 = tcg_temp_new_i32(); 2024 2025 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); 2026 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); 2027 tcg_gen_muls2_i32(t0, t1, t0, t1); 2028 #if defined(TARGET_PPC64) 2029 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); 2030 #else 2031 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], t0); 2032 #endif 2033 2034 tcg_gen_sari_i32(t0, t0, 31); 2035 tcg_gen_setcond_i32(TCG_COND_NE, t0, t0, t1); 2036 tcg_gen_extu_i32_tl(cpu_ov, t0); 2037 if (is_isa300(ctx)) { 2038 tcg_gen_mov_tl(cpu_ov32, cpu_ov); 2039 } 2040 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 2041 2042 tcg_temp_free_i32(t0); 2043 tcg_temp_free_i32(t1); 2044 if (unlikely(Rc(ctx->opcode) != 0)) { 2045 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2046 } 2047 } 2048 2049 /* mulli */ 2050 static void gen_mulli(DisasContext *ctx) 2051 { 2052 tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 2053 SIMM(ctx->opcode)); 2054 } 2055 2056 #if defined(TARGET_PPC64) 2057 /* mulhd mulhd. */ 2058 static void gen_mulhd(DisasContext *ctx) 2059 { 2060 TCGv lo = tcg_temp_new(); 2061 tcg_gen_muls2_tl(lo, cpu_gpr[rD(ctx->opcode)], 2062 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2063 tcg_temp_free(lo); 2064 if (unlikely(Rc(ctx->opcode) != 0)) { 2065 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2066 } 2067 } 2068 2069 /* mulhdu mulhdu. */ 2070 static void gen_mulhdu(DisasContext *ctx) 2071 { 2072 TCGv lo = tcg_temp_new(); 2073 tcg_gen_mulu2_tl(lo, cpu_gpr[rD(ctx->opcode)], 2074 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2075 tcg_temp_free(lo); 2076 if (unlikely(Rc(ctx->opcode) != 0)) { 2077 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2078 } 2079 } 2080 2081 /* mulld mulld. 
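(mulld keeps only the low 64 bits of the product; mulldo additionally compares the high doubleword from muls2 with the sign of the low doubleword to detect 64-bit overflow and set OV.)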
*/ 2082 static void gen_mulld(DisasContext *ctx) 2083 { 2084 tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 2085 cpu_gpr[rB(ctx->opcode)]); 2086 if (unlikely(Rc(ctx->opcode) != 0)) { 2087 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2088 } 2089 } 2090 2091 /* mulldo mulldo. */ 2092 static void gen_mulldo(DisasContext *ctx) 2093 { 2094 TCGv_i64 t0 = tcg_temp_new_i64(); 2095 TCGv_i64 t1 = tcg_temp_new_i64(); 2096 2097 tcg_gen_muls2_i64(t0, t1, cpu_gpr[rA(ctx->opcode)], 2098 cpu_gpr[rB(ctx->opcode)]); 2099 tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], t0); 2100 2101 tcg_gen_sari_i64(t0, t0, 63); 2102 tcg_gen_setcond_i64(TCG_COND_NE, cpu_ov, t0, t1); 2103 if (is_isa300(ctx)) { 2104 tcg_gen_mov_tl(cpu_ov32, cpu_ov); 2105 } 2106 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 2107 2108 tcg_temp_free_i64(t0); 2109 tcg_temp_free_i64(t1); 2110 2111 if (unlikely(Rc(ctx->opcode) != 0)) { 2112 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2113 } 2114 } 2115 #endif 2116 2117 /* Common subf function */ 2118 static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1, 2119 TCGv arg2, bool add_ca, bool compute_ca, 2120 bool compute_ov, bool compute_rc0) 2121 { 2122 TCGv t0 = ret; 2123 2124 if (compute_ca || compute_ov) { 2125 t0 = tcg_temp_new(); 2126 } 2127 2128 if (compute_ca) { 2129 /* dest = ~arg1 + arg2 [+ ca]. */ 2130 if (NARROW_MODE(ctx)) { 2131 /* 2132 * Caution: a non-obvious corner case of the spec is that 2133 * we must produce the *entire* 64-bit addition, but 2134 * produce the carry into bit 32. 2135 */ 2136 TCGv inv1 = tcg_temp_new(); 2137 TCGv t1 = tcg_temp_new(); 2138 tcg_gen_not_tl(inv1, arg1); 2139 if (add_ca) { 2140 tcg_gen_add_tl(t0, arg2, cpu_ca); 2141 } else { 2142 tcg_gen_addi_tl(t0, arg2, 1); 2143 } 2144 tcg_gen_xor_tl(t1, arg2, inv1); /* add without carry */ 2145 tcg_gen_add_tl(t0, t0, inv1); 2146 tcg_temp_free(inv1); 2147 tcg_gen_xor_tl(cpu_ca, t0, t1); /* bits changes w/ carry */ 2148 tcg_temp_free(t1); 2149 tcg_gen_extract_tl(cpu_ca, cpu_ca, 32, 1); 2150 if (is_isa300(ctx)) { 2151 tcg_gen_mov_tl(cpu_ca32, cpu_ca); 2152 } 2153 } else if (add_ca) { 2154 TCGv zero, inv1 = tcg_temp_new(); 2155 tcg_gen_not_tl(inv1, arg1); 2156 zero = tcg_const_tl(0); 2157 tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero); 2158 tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero); 2159 gen_op_arith_compute_ca32(ctx, t0, inv1, arg2, cpu_ca32, 0); 2160 tcg_temp_free(zero); 2161 tcg_temp_free(inv1); 2162 } else { 2163 tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1); 2164 tcg_gen_sub_tl(t0, arg2, arg1); 2165 gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, cpu_ca32, 1); 2166 } 2167 } else if (add_ca) { 2168 /* 2169 * Since we're ignoring carry-out, we can simplify the 2170 * standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1. 
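* (In two's complement ~arg1 = -arg1 - 1, so ~arg1 + arg2 + ca = arg2 - arg1 + ca - 1.)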
2171 */ 2172 tcg_gen_sub_tl(t0, arg2, arg1); 2173 tcg_gen_add_tl(t0, t0, cpu_ca); 2174 tcg_gen_subi_tl(t0, t0, 1); 2175 } else { 2176 tcg_gen_sub_tl(t0, arg2, arg1); 2177 } 2178 2179 if (compute_ov) { 2180 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1); 2181 } 2182 if (unlikely(compute_rc0)) { 2183 gen_set_Rc0(ctx, t0); 2184 } 2185 2186 if (t0 != ret) { 2187 tcg_gen_mov_tl(ret, t0); 2188 tcg_temp_free(t0); 2189 } 2190 } 2191 /* Subtract functions with two register operands */ 2192 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \ 2193 static void glue(gen_, name)(DisasContext *ctx) \ 2194 { \ 2195 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \ 2196 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 2197 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 2198 } 2199 /* Subtract functions with one register operand and one immediate */ 2200 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \ 2201 add_ca, compute_ca, compute_ov) \ 2202 static void glue(gen_, name)(DisasContext *ctx) \ 2203 { \ 2204 TCGv t0 = tcg_const_tl(const_val); \ 2205 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \ 2206 cpu_gpr[rA(ctx->opcode)], t0, \ 2207 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 2208 tcg_temp_free(t0); \ 2209 } 2210 /* subf subf. subfo subfo. */ 2211 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0) 2212 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1) 2213 /* subfc subfc. subfco subfco. */ 2214 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0) 2215 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1) 2216 /* subfe subfe. subfeo subfeo. */ 2217 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0) 2218 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1) 2219 /* subfme subfme. subfmeo subfmeo. */ 2220 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0) 2221 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1) 2222 /* subfze subfze. subfzeo subfzeo. */ 2223 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0) 2224 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1) 2225 2226 /* subfic */ 2227 static void gen_subfic(DisasContext *ctx) 2228 { 2229 TCGv c = tcg_const_tl(SIMM(ctx->opcode)); 2230 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 2231 c, 0, 1, 0, 0); 2232 tcg_temp_free(c); 2233 } 2234 2235 /* neg neg. nego nego. */ 2236 static inline void gen_op_arith_neg(DisasContext *ctx, bool compute_ov) 2237 { 2238 TCGv zero = tcg_const_tl(0); 2239 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 2240 zero, 0, 0, compute_ov, Rc(ctx->opcode)); 2241 tcg_temp_free(zero); 2242 } 2243 2244 static void gen_neg(DisasContext *ctx) 2245 { 2246 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 2247 if (unlikely(Rc(ctx->opcode))) { 2248 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2249 } 2250 } 2251 2252 static void gen_nego(DisasContext *ctx) 2253 { 2254 gen_op_arith_neg(ctx, 1); 2255 } 2256 2257 /*** Integer logical ***/ 2258 #define GEN_LOGICAL2(name, tcg_op, opc, type) \ 2259 static void glue(gen_, name)(DisasContext *ctx) \ 2260 { \ 2261 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], \ 2262 cpu_gpr[rB(ctx->opcode)]); \ 2263 if (unlikely(Rc(ctx->opcode) != 0)) \ 2264 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \ 2265 } 2266 2267 #define GEN_LOGICAL1(name, tcg_op, opc, type) \ 2268 static void glue(gen_, name)(DisasContext *ctx) \ 2269 { \ 2270 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); \ 2271 if (unlikely(Rc(ctx->opcode) != 0)) \ 2272 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \ 2273 } 2274 2275 /* and & and.
*/ 2276 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER); 2277 /* andc & andc. */ 2278 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER); 2279 2280 /* andi. */ 2281 static void gen_andi_(DisasContext *ctx) 2282 { 2283 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 2284 UIMM(ctx->opcode)); 2285 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2286 } 2287 2288 /* andis. */ 2289 static void gen_andis_(DisasContext *ctx) 2290 { 2291 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 2292 UIMM(ctx->opcode) << 16); 2293 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2294 } 2295 2296 /* cntlzw */ 2297 static void gen_cntlzw(DisasContext *ctx) 2298 { 2299 TCGv_i32 t = tcg_temp_new_i32(); 2300 2301 tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]); 2302 tcg_gen_clzi_i32(t, t, 32); 2303 tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t); 2304 tcg_temp_free_i32(t); 2305 2306 if (unlikely(Rc(ctx->opcode) != 0)) { 2307 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2308 } 2309 } 2310 2311 /* cnttzw */ 2312 static void gen_cnttzw(DisasContext *ctx) 2313 { 2314 TCGv_i32 t = tcg_temp_new_i32(); 2315 2316 tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]); 2317 tcg_gen_ctzi_i32(t, t, 32); 2318 tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t); 2319 tcg_temp_free_i32(t); 2320 2321 if (unlikely(Rc(ctx->opcode) != 0)) { 2322 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2323 } 2324 } 2325 2326 /* eqv & eqv. */ 2327 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER); 2328 /* extsb & extsb. */ 2329 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER); 2330 /* extsh & extsh. */ 2331 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER); 2332 /* nand & nand. */ 2333 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER); 2334 /* nor & nor. */ 2335 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER); 2336 2337 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) 2338 static void gen_pause(DisasContext *ctx) 2339 { 2340 TCGv_i32 t0 = tcg_const_i32(0); 2341 tcg_gen_st_i32(t0, cpu_env, 2342 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted)); 2343 tcg_temp_free_i32(t0); 2344 2345 /* Stop translation, this gives other CPUs a chance to run */ 2346 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 2347 } 2348 #endif /* defined(TARGET_PPC64) */ 2349 2350 /* or & or. */ 2351 static void gen_or(DisasContext *ctx) 2352 { 2353 int rs, ra, rb; 2354 2355 rs = rS(ctx->opcode); 2356 ra = rA(ctx->opcode); 2357 rb = rB(ctx->opcode); 2358 /* Optimisation for mr. 
ri case */ 2359 if (rs != ra || rs != rb) { 2360 if (rs != rb) { 2361 tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]); 2362 } else { 2363 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]); 2364 } 2365 if (unlikely(Rc(ctx->opcode) != 0)) { 2366 gen_set_Rc0(ctx, cpu_gpr[ra]); 2367 } 2368 } else if (unlikely(Rc(ctx->opcode) != 0)) { 2369 gen_set_Rc0(ctx, cpu_gpr[rs]); 2370 #if defined(TARGET_PPC64) 2371 } else if (rs != 0) { /* 0 is nop */ 2372 int prio = 0; 2373 2374 switch (rs) { 2375 case 1: 2376 /* Set process priority to low */ 2377 prio = 2; 2378 break; 2379 case 6: 2380 /* Set process priority to medium-low */ 2381 prio = 3; 2382 break; 2383 case 2: 2384 /* Set process priority to normal */ 2385 prio = 4; 2386 break; 2387 #if !defined(CONFIG_USER_ONLY) 2388 case 31: 2389 if (!ctx->pr) { 2390 /* Set process priority to very low */ 2391 prio = 1; 2392 } 2393 break; 2394 case 5: 2395 if (!ctx->pr) { 2396 /* Set process priority to medium-high */ 2397 prio = 5; 2398 } 2399 break; 2400 case 3: 2401 if (!ctx->pr) { 2402 /* Set process priority to high */ 2403 prio = 6; 2404 } 2405 break; 2406 case 7: 2407 if (ctx->hv && !ctx->pr) { 2408 /* Set process priority to very high */ 2409 prio = 7; 2410 } 2411 break; 2412 #endif 2413 default: 2414 break; 2415 } 2416 if (prio) { 2417 TCGv t0 = tcg_temp_new(); 2418 gen_load_spr(t0, SPR_PPR); 2419 tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL); 2420 tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50); 2421 gen_store_spr(SPR_PPR, t0); 2422 tcg_temp_free(t0); 2423 } 2424 #if !defined(CONFIG_USER_ONLY) 2425 /* 2426 * Pause out of TCG, otherwise spin loops with smt_low eat too 2427 * much CPU and the kernel hangs. This applies to all 2428 * encodings other than no-op, e.g., miso(rs=26), yield(27), 2429 * mdoio(29), mdoom(30), and all currently undefined. 2430 */ 2431 gen_pause(ctx); 2432 #endif 2433 #endif 2434 } 2435 } 2436 /* orc & orc. */ 2437 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER); 2438 2439 /* xor & xor.
*/ 2440 static void gen_xor(DisasContext *ctx) 2441 { 2442 /* Optimisation for "set to zero" case */ 2443 if (rS(ctx->opcode) != rB(ctx->opcode)) { 2444 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 2445 cpu_gpr[rB(ctx->opcode)]); 2446 } else { 2447 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 2448 } 2449 if (unlikely(Rc(ctx->opcode) != 0)) { 2450 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2451 } 2452 } 2453 2454 /* ori */ 2455 static void gen_ori(DisasContext *ctx) 2456 { 2457 target_ulong uimm = UIMM(ctx->opcode); 2458 2459 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 2460 return; 2461 } 2462 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm); 2463 } 2464 2465 /* oris */ 2466 static void gen_oris(DisasContext *ctx) 2467 { 2468 target_ulong uimm = UIMM(ctx->opcode); 2469 2470 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 2471 /* NOP */ 2472 return; 2473 } 2474 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 2475 uimm << 16); 2476 } 2477 2478 /* xori */ 2479 static void gen_xori(DisasContext *ctx) 2480 { 2481 target_ulong uimm = UIMM(ctx->opcode); 2482 2483 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 2484 /* NOP */ 2485 return; 2486 } 2487 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm); 2488 } 2489 2490 /* xoris */ 2491 static void gen_xoris(DisasContext *ctx) 2492 { 2493 target_ulong uimm = UIMM(ctx->opcode); 2494 2495 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 2496 /* NOP */ 2497 return; 2498 } 2499 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 2500 uimm << 16); 2501 } 2502 2503 /* popcntb : PowerPC 2.03 specification */ 2504 static void gen_popcntb(DisasContext *ctx) 2505 { 2506 gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 2507 } 2508 2509 static void gen_popcntw(DisasContext *ctx) 2510 { 2511 #if defined(TARGET_PPC64) 2512 gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 2513 #else 2514 tcg_gen_ctpop_i32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 2515 #endif 2516 } 2517 2518 #if defined(TARGET_PPC64) 2519 /* popcntd: PowerPC 2.06 specification */ 2520 static void gen_popcntd(DisasContext *ctx) 2521 { 2522 tcg_gen_ctpop_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 2523 } 2524 #endif 2525 2526 /* prtyw: PowerPC 2.05 specification */ 2527 static void gen_prtyw(DisasContext *ctx) 2528 { 2529 TCGv ra = cpu_gpr[rA(ctx->opcode)]; 2530 TCGv rs = cpu_gpr[rS(ctx->opcode)]; 2531 TCGv t0 = tcg_temp_new(); 2532 tcg_gen_shri_tl(t0, rs, 16); 2533 tcg_gen_xor_tl(ra, rs, t0); 2534 tcg_gen_shri_tl(t0, ra, 8); 2535 tcg_gen_xor_tl(ra, ra, t0); 2536 tcg_gen_andi_tl(ra, ra, (target_ulong)0x100000001ULL); 2537 tcg_temp_free(t0); 2538 } 2539 2540 #if defined(TARGET_PPC64) 2541 /* prtyd: PowerPC 2.05 specification */ 2542 static void gen_prtyd(DisasContext *ctx) 2543 { 2544 TCGv ra = cpu_gpr[rA(ctx->opcode)]; 2545 TCGv rs = cpu_gpr[rS(ctx->opcode)]; 2546 TCGv t0 = tcg_temp_new(); 2547 tcg_gen_shri_tl(t0, rs, 32); 2548 tcg_gen_xor_tl(ra, rs, t0); 2549 tcg_gen_shri_tl(t0, ra, 16); 2550 tcg_gen_xor_tl(ra, ra, t0); 2551 tcg_gen_shri_tl(t0, ra, 8); 2552 tcg_gen_xor_tl(ra, ra, t0); 2553 tcg_gen_andi_tl(ra, ra, 1); 2554 tcg_temp_free(t0); 2555 } 2556 #endif 2557 2558 #if defined(TARGET_PPC64) 2559 /* bpermd */ 2560 static void gen_bpermd(DisasContext *ctx) 2561 { 2562 gen_helper_bpermd(cpu_gpr[rA(ctx->opcode)], 2563 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2564 } 2565 #endif 2566 
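/* A note on gen_prtyw()/gen_prtyd() above: the shift-and-XOR folds (x ^= x >> 32 for prtyd only, then x ^= x >> 16 and x ^= x >> 8) accumulate, in bit 0 of each word (prtyw) or of the doubleword (prtyd), the XOR of the least-significant bits of the bytes folded into it; the final AND keeps just those parity bits. */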
2567 #if defined(TARGET_PPC64) 2568 /* extsw & extsw. */ 2569 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B); 2570 2571 /* cntlzd */ 2572 static void gen_cntlzd(DisasContext *ctx) 2573 { 2574 tcg_gen_clzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64); 2575 if (unlikely(Rc(ctx->opcode) != 0)) { 2576 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2577 } 2578 } 2579 2580 /* cnttzd */ 2581 static void gen_cnttzd(DisasContext *ctx) 2582 { 2583 tcg_gen_ctzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64); 2584 if (unlikely(Rc(ctx->opcode) != 0)) { 2585 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2586 } 2587 } 2588 2589 /* darn */ 2590 static void gen_darn(DisasContext *ctx) 2591 { 2592 int l = L(ctx->opcode); 2593 2594 if (l > 2) { 2595 tcg_gen_movi_i64(cpu_gpr[rD(ctx->opcode)], -1); 2596 } else { 2597 gen_icount_io_start(ctx); 2598 if (l == 0) { 2599 gen_helper_darn32(cpu_gpr[rD(ctx->opcode)]); 2600 } else { 2601 /* Return 64-bit random for both CRN and RRN */ 2602 gen_helper_darn64(cpu_gpr[rD(ctx->opcode)]); 2603 } 2604 } 2605 } 2606 #endif 2607 2608 /*** Integer rotate ***/ 2609 2610 /* rlwimi & rlwimi. */ 2611 static void gen_rlwimi(DisasContext *ctx) 2612 { 2613 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2614 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2615 uint32_t sh = SH(ctx->opcode); 2616 uint32_t mb = MB(ctx->opcode); 2617 uint32_t me = ME(ctx->opcode); 2618 2619 if (sh == (31 - me) && mb <= me) { 2620 tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1); 2621 } else { 2622 target_ulong mask; 2623 bool mask_in_32b = true; 2624 TCGv t1; 2625 2626 #if defined(TARGET_PPC64) 2627 mb += 32; 2628 me += 32; 2629 #endif 2630 mask = MASK(mb, me); 2631 2632 #if defined(TARGET_PPC64) 2633 if (mask > 0xffffffffu) { 2634 mask_in_32b = false; 2635 } 2636 #endif 2637 t1 = tcg_temp_new(); 2638 if (mask_in_32b) { 2639 TCGv_i32 t0 = tcg_temp_new_i32(); 2640 tcg_gen_trunc_tl_i32(t0, t_rs); 2641 tcg_gen_rotli_i32(t0, t0, sh); 2642 tcg_gen_extu_i32_tl(t1, t0); 2643 tcg_temp_free_i32(t0); 2644 } else { 2645 #if defined(TARGET_PPC64) 2646 tcg_gen_deposit_i64(t1, t_rs, t_rs, 32, 32); 2647 tcg_gen_rotli_i64(t1, t1, sh); 2648 #else 2649 g_assert_not_reached(); 2650 #endif 2651 } 2652 2653 tcg_gen_andi_tl(t1, t1, mask); 2654 tcg_gen_andi_tl(t_ra, t_ra, ~mask); 2655 tcg_gen_or_tl(t_ra, t_ra, t1); 2656 tcg_temp_free(t1); 2657 } 2658 if (unlikely(Rc(ctx->opcode) != 0)) { 2659 gen_set_Rc0(ctx, t_ra); 2660 } 2661 } 2662 2663 /* rlwinm & rlwinm. 
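(rotate left word immediate then AND with mask; e.g. rlwinm rA,rS,0,16,31, the extended mnemonic clrlwi rA,rS,16, keeps only the low halfword and is handled by the extract fast path below.)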
*/ 2664 static void gen_rlwinm(DisasContext *ctx) 2665 { 2666 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2667 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2668 int sh = SH(ctx->opcode); 2669 int mb = MB(ctx->opcode); 2670 int me = ME(ctx->opcode); 2671 int len = me - mb + 1; 2672 int rsh = (32 - sh) & 31; 2673 2674 if (sh != 0 && len > 0 && me == (31 - sh)) { 2675 tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len); 2676 } else if (me == 31 && rsh + len <= 32) { 2677 tcg_gen_extract_tl(t_ra, t_rs, rsh, len); 2678 } else { 2679 target_ulong mask; 2680 bool mask_in_32b = true; 2681 #if defined(TARGET_PPC64) 2682 mb += 32; 2683 me += 32; 2684 #endif 2685 mask = MASK(mb, me); 2686 #if defined(TARGET_PPC64) 2687 if (mask > 0xffffffffu) { 2688 mask_in_32b = false; 2689 } 2690 #endif 2691 if (mask_in_32b) { 2692 if (sh == 0) { 2693 tcg_gen_andi_tl(t_ra, t_rs, mask); 2694 } else { 2695 TCGv_i32 t0 = tcg_temp_new_i32(); 2696 tcg_gen_trunc_tl_i32(t0, t_rs); 2697 tcg_gen_rotli_i32(t0, t0, sh); 2698 tcg_gen_andi_i32(t0, t0, mask); 2699 tcg_gen_extu_i32_tl(t_ra, t0); 2700 tcg_temp_free_i32(t0); 2701 } 2702 } else { 2703 #if defined(TARGET_PPC64) 2704 tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32); 2705 tcg_gen_rotli_i64(t_ra, t_ra, sh); 2706 tcg_gen_andi_i64(t_ra, t_ra, mask); 2707 #else 2708 g_assert_not_reached(); 2709 #endif 2710 } 2711 } 2712 if (unlikely(Rc(ctx->opcode) != 0)) { 2713 gen_set_Rc0(ctx, t_ra); 2714 } 2715 } 2716 2717 /* rlwnm & rlwnm. */ 2718 static void gen_rlwnm(DisasContext *ctx) 2719 { 2720 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2721 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2722 TCGv t_rb = cpu_gpr[rB(ctx->opcode)]; 2723 uint32_t mb = MB(ctx->opcode); 2724 uint32_t me = ME(ctx->opcode); 2725 target_ulong mask; 2726 bool mask_in_32b = true; 2727 2728 #if defined(TARGET_PPC64) 2729 mb += 32; 2730 me += 32; 2731 #endif 2732 mask = MASK(mb, me); 2733 2734 #if defined(TARGET_PPC64) 2735 if (mask > 0xffffffffu) { 2736 mask_in_32b = false; 2737 } 2738 #endif 2739 if (mask_in_32b) { 2740 TCGv_i32 t0 = tcg_temp_new_i32(); 2741 TCGv_i32 t1 = tcg_temp_new_i32(); 2742 tcg_gen_trunc_tl_i32(t0, t_rb); 2743 tcg_gen_trunc_tl_i32(t1, t_rs); 2744 tcg_gen_andi_i32(t0, t0, 0x1f); 2745 tcg_gen_rotl_i32(t1, t1, t0); 2746 tcg_gen_extu_i32_tl(t_ra, t1); 2747 tcg_temp_free_i32(t0); 2748 tcg_temp_free_i32(t1); 2749 } else { 2750 #if defined(TARGET_PPC64) 2751 TCGv_i64 t0 = tcg_temp_new_i64(); 2752 tcg_gen_andi_i64(t0, t_rb, 0x1f); 2753 tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32); 2754 tcg_gen_rotl_i64(t_ra, t_ra, t0); 2755 tcg_temp_free_i64(t0); 2756 #else 2757 g_assert_not_reached(); 2758 #endif 2759 } 2760 2761 tcg_gen_andi_tl(t_ra, t_ra, mask); 2762 2763 if (unlikely(Rc(ctx->opcode) != 0)) { 2764 gen_set_Rc0(ctx, t_ra); 2765 } 2766 } 2767 2768 #if defined(TARGET_PPC64) 2769 #define GEN_PPC64_R2(name, opc1, opc2) \ 2770 static void glue(gen_, name##0)(DisasContext *ctx) \ 2771 { \ 2772 gen_##name(ctx, 0); \ 2773 } \ 2774 \ 2775 static void glue(gen_, name##1)(DisasContext *ctx) \ 2776 { \ 2777 gen_##name(ctx, 1); \ 2778 } 2779 #define GEN_PPC64_R4(name, opc1, opc2) \ 2780 static void glue(gen_, name##0)(DisasContext *ctx) \ 2781 { \ 2782 gen_##name(ctx, 0, 0); \ 2783 } \ 2784 \ 2785 static void glue(gen_, name##1)(DisasContext *ctx) \ 2786 { \ 2787 gen_##name(ctx, 0, 1); \ 2788 } \ 2789 \ 2790 static void glue(gen_, name##2)(DisasContext *ctx) \ 2791 { \ 2792 gen_##name(ctx, 1, 0); \ 2793 } \ 2794 \ 2795 static void glue(gen_, name##3)(DisasContext *ctx) \ 2796 { \ 2797 gen_##name(ctx, 1, 1); \ 2798 } 2799 2800 static void 
gen_rldinm(DisasContext *ctx, int mb, int me, int sh) 2801 { 2802 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2803 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2804 int len = me - mb + 1; 2805 int rsh = (64 - sh) & 63; 2806 2807 if (sh != 0 && len > 0 && me == (63 - sh)) { 2808 tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len); 2809 } else if (me == 63 && rsh + len <= 64) { 2810 tcg_gen_extract_tl(t_ra, t_rs, rsh, len); 2811 } else { 2812 tcg_gen_rotli_tl(t_ra, t_rs, sh); 2813 tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me)); 2814 } 2815 if (unlikely(Rc(ctx->opcode) != 0)) { 2816 gen_set_Rc0(ctx, t_ra); 2817 } 2818 } 2819 2820 /* rldicl - rldicl. */ 2821 static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn) 2822 { 2823 uint32_t sh, mb; 2824 2825 sh = SH(ctx->opcode) | (shn << 5); 2826 mb = MB(ctx->opcode) | (mbn << 5); 2827 gen_rldinm(ctx, mb, 63, sh); 2828 } 2829 GEN_PPC64_R4(rldicl, 0x1E, 0x00); 2830 2831 /* rldicr - rldicr. */ 2832 static inline void gen_rldicr(DisasContext *ctx, int men, int shn) 2833 { 2834 uint32_t sh, me; 2835 2836 sh = SH(ctx->opcode) | (shn << 5); 2837 me = MB(ctx->opcode) | (men << 5); 2838 gen_rldinm(ctx, 0, me, sh); 2839 } 2840 GEN_PPC64_R4(rldicr, 0x1E, 0x02); 2841 2842 /* rldic - rldic. */ 2843 static inline void gen_rldic(DisasContext *ctx, int mbn, int shn) 2844 { 2845 uint32_t sh, mb; 2846 2847 sh = SH(ctx->opcode) | (shn << 5); 2848 mb = MB(ctx->opcode) | (mbn << 5); 2849 gen_rldinm(ctx, mb, 63 - sh, sh); 2850 } 2851 GEN_PPC64_R4(rldic, 0x1E, 0x04); 2852 2853 static void gen_rldnm(DisasContext *ctx, int mb, int me) 2854 { 2855 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2856 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2857 TCGv t_rb = cpu_gpr[rB(ctx->opcode)]; 2858 TCGv t0; 2859 2860 t0 = tcg_temp_new(); 2861 tcg_gen_andi_tl(t0, t_rb, 0x3f); 2862 tcg_gen_rotl_tl(t_ra, t_rs, t0); 2863 tcg_temp_free(t0); 2864 2865 tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me)); 2866 if (unlikely(Rc(ctx->opcode) != 0)) { 2867 gen_set_Rc0(ctx, t_ra); 2868 } 2869 } 2870 2871 /* rldcl - rldcl. */ 2872 static inline void gen_rldcl(DisasContext *ctx, int mbn) 2873 { 2874 uint32_t mb; 2875 2876 mb = MB(ctx->opcode) | (mbn << 5); 2877 gen_rldnm(ctx, mb, 63); 2878 } 2879 GEN_PPC64_R2(rldcl, 0x1E, 0x08); 2880 2881 /* rldcr - rldcr. */ 2882 static inline void gen_rldcr(DisasContext *ctx, int men) 2883 { 2884 uint32_t me; 2885 2886 me = MB(ctx->opcode) | (men << 5); 2887 gen_rldnm(ctx, 0, me); 2888 } 2889 GEN_PPC64_R2(rldcr, 0x1E, 0x09); 2890 2891 /* rldimi - rldimi. */ 2892 static void gen_rldimi(DisasContext *ctx, int mbn, int shn) 2893 { 2894 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2895 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2896 uint32_t sh = SH(ctx->opcode) | (shn << 5); 2897 uint32_t mb = MB(ctx->opcode) | (mbn << 5); 2898 uint32_t me = 63 - sh; 2899 2900 if (mb <= me) { 2901 tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1); 2902 } else { 2903 target_ulong mask = MASK(mb, me); 2904 TCGv t1 = tcg_temp_new(); 2905 2906 tcg_gen_rotli_tl(t1, t_rs, sh); 2907 tcg_gen_andi_tl(t1, t1, mask); 2908 tcg_gen_andi_tl(t_ra, t_ra, ~mask); 2909 tcg_gen_or_tl(t_ra, t_ra, t1); 2910 tcg_temp_free(t1); 2911 } 2912 if (unlikely(Rc(ctx->opcode) != 0)) { 2913 gen_set_Rc0(ctx, t_ra); 2914 } 2915 } 2916 GEN_PPC64_R4(rldimi, 0x1E, 0x06); 2917 #endif 2918 2919 /*** Integer shift ***/ 2920 2921 /* slw & slw. 
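(Only the low six bits of rB matter: when rB & 0x20 is set the result must be 0, which the sign-extended mask built below implements, and the low five bits give the actual shift count.)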
*/ 2922 static void gen_slw(DisasContext *ctx) 2923 { 2924 TCGv t0, t1; 2925 2926 t0 = tcg_temp_new(); 2927 /* AND rS with a mask that is 0 when rB >= 0x20 */ 2928 #if defined(TARGET_PPC64) 2929 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a); 2930 tcg_gen_sari_tl(t0, t0, 0x3f); 2931 #else 2932 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a); 2933 tcg_gen_sari_tl(t0, t0, 0x1f); 2934 #endif 2935 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 2936 t1 = tcg_temp_new(); 2937 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f); 2938 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 2939 tcg_temp_free(t1); 2940 tcg_temp_free(t0); 2941 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 2942 if (unlikely(Rc(ctx->opcode) != 0)) { 2943 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2944 } 2945 } 2946 2947 /* sraw & sraw. */ 2948 static void gen_sraw(DisasContext *ctx) 2949 { 2950 gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], cpu_env, 2951 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2952 if (unlikely(Rc(ctx->opcode) != 0)) { 2953 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2954 } 2955 } 2956 2957 /* srawi & srawi. */ 2958 static void gen_srawi(DisasContext *ctx) 2959 { 2960 int sh = SH(ctx->opcode); 2961 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 2962 TCGv src = cpu_gpr[rS(ctx->opcode)]; 2963 if (sh == 0) { 2964 tcg_gen_ext32s_tl(dst, src); 2965 tcg_gen_movi_tl(cpu_ca, 0); 2966 if (is_isa300(ctx)) { 2967 tcg_gen_movi_tl(cpu_ca32, 0); 2968 } 2969 } else { 2970 TCGv t0; 2971 tcg_gen_ext32s_tl(dst, src); 2972 tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1); 2973 t0 = tcg_temp_new(); 2974 tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1); 2975 tcg_gen_and_tl(cpu_ca, cpu_ca, t0); 2976 tcg_temp_free(t0); 2977 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0); 2978 if (is_isa300(ctx)) { 2979 tcg_gen_mov_tl(cpu_ca32, cpu_ca); 2980 } 2981 tcg_gen_sari_tl(dst, dst, sh); 2982 } 2983 if (unlikely(Rc(ctx->opcode) != 0)) { 2984 gen_set_Rc0(ctx, dst); 2985 } 2986 } 2987 2988 /* srw & srw. */ 2989 static void gen_srw(DisasContext *ctx) 2990 { 2991 TCGv t0, t1; 2992 2993 t0 = tcg_temp_new(); 2994 /* AND rS with a mask that is 0 when rB >= 0x20 */ 2995 #if defined(TARGET_PPC64) 2996 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a); 2997 tcg_gen_sari_tl(t0, t0, 0x3f); 2998 #else 2999 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a); 3000 tcg_gen_sari_tl(t0, t0, 0x1f); 3001 #endif 3002 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 3003 tcg_gen_ext32u_tl(t0, t0); 3004 t1 = tcg_temp_new(); 3005 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f); 3006 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 3007 tcg_temp_free(t1); 3008 tcg_temp_free(t0); 3009 if (unlikely(Rc(ctx->opcode) != 0)) { 3010 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 3011 } 3012 } 3013 3014 #if defined(TARGET_PPC64) 3015 /* sld & sld. */ 3016 static void gen_sld(DisasContext *ctx) 3017 { 3018 TCGv t0, t1; 3019 3020 t0 = tcg_temp_new(); 3021 /* AND rS with a mask that is 0 when rB >= 0x40 */ 3022 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39); 3023 tcg_gen_sari_tl(t0, t0, 0x3f); 3024 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 3025 t1 = tcg_temp_new(); 3026 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f); 3027 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 3028 tcg_temp_free(t1); 3029 tcg_temp_free(t0); 3030 if (unlikely(Rc(ctx->opcode) != 0)) { 3031 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 3032 } 3033 } 3034 3035 /* srad & srad. 
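(srad goes through a helper because, besides the shift, CA must be set when the source is negative and any 1 bits are shifted out.)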
*/ 3036 static void gen_srad(DisasContext *ctx) 3037 { 3038 gen_helper_srad(cpu_gpr[rA(ctx->opcode)], cpu_env, 3039 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 3040 if (unlikely(Rc(ctx->opcode) != 0)) { 3041 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 3042 } 3043 } 3044 /* sradi & sradi. */ 3045 static inline void gen_sradi(DisasContext *ctx, int n) 3046 { 3047 int sh = SH(ctx->opcode) + (n << 5); 3048 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 3049 TCGv src = cpu_gpr[rS(ctx->opcode)]; 3050 if (sh == 0) { 3051 tcg_gen_mov_tl(dst, src); 3052 tcg_gen_movi_tl(cpu_ca, 0); 3053 if (is_isa300(ctx)) { 3054 tcg_gen_movi_tl(cpu_ca32, 0); 3055 } 3056 } else { 3057 TCGv t0; 3058 tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1); 3059 t0 = tcg_temp_new(); 3060 tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1); 3061 tcg_gen_and_tl(cpu_ca, cpu_ca, t0); 3062 tcg_temp_free(t0); 3063 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0); 3064 if (is_isa300(ctx)) { 3065 tcg_gen_mov_tl(cpu_ca32, cpu_ca); 3066 } 3067 tcg_gen_sari_tl(dst, src, sh); 3068 } 3069 if (unlikely(Rc(ctx->opcode) != 0)) { 3070 gen_set_Rc0(ctx, dst); 3071 } 3072 } 3073 3074 static void gen_sradi0(DisasContext *ctx) 3075 { 3076 gen_sradi(ctx, 0); 3077 } 3078 3079 static void gen_sradi1(DisasContext *ctx) 3080 { 3081 gen_sradi(ctx, 1); 3082 } 3083 3084 /* extswsli & extswsli. */ 3085 static inline void gen_extswsli(DisasContext *ctx, int n) 3086 { 3087 int sh = SH(ctx->opcode) + (n << 5); 3088 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 3089 TCGv src = cpu_gpr[rS(ctx->opcode)]; 3090 3091 tcg_gen_ext32s_tl(dst, src); 3092 tcg_gen_shli_tl(dst, dst, sh); 3093 if (unlikely(Rc(ctx->opcode) != 0)) { 3094 gen_set_Rc0(ctx, dst); 3095 } 3096 } 3097 3098 static void gen_extswsli0(DisasContext *ctx) 3099 { 3100 gen_extswsli(ctx, 0); 3101 } 3102 3103 static void gen_extswsli1(DisasContext *ctx) 3104 { 3105 gen_extswsli(ctx, 1); 3106 } 3107 3108 /* srd & srd. 
*/ 3109 static void gen_srd(DisasContext *ctx) 3110 { 3111 TCGv t0, t1; 3112 3113 t0 = tcg_temp_new(); 3114 /* AND rS with a mask that is 0 when rB >= 0x40 */ 3115 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39); 3116 tcg_gen_sari_tl(t0, t0, 0x3f); 3117 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 3118 t1 = tcg_temp_new(); 3119 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f); 3120 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 3121 tcg_temp_free(t1); 3122 tcg_temp_free(t0); 3123 if (unlikely(Rc(ctx->opcode) != 0)) { 3124 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 3125 } 3126 } 3127 #endif 3128 3129 /*** Addressing modes ***/ 3130 /* Register indirect with immediate index : EA = (rA|0) + SIMM */ 3131 static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA, 3132 target_long maskl) 3133 { 3134 target_long simm = SIMM(ctx->opcode); 3135 3136 simm &= ~maskl; 3137 if (rA(ctx->opcode) == 0) { 3138 if (NARROW_MODE(ctx)) { 3139 simm = (uint32_t)simm; 3140 } 3141 tcg_gen_movi_tl(EA, simm); 3142 } else if (likely(simm != 0)) { 3143 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm); 3144 if (NARROW_MODE(ctx)) { 3145 tcg_gen_ext32u_tl(EA, EA); 3146 } 3147 } else { 3148 if (NARROW_MODE(ctx)) { 3149 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]); 3150 } else { 3151 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]); 3152 } 3153 } 3154 } 3155 3156 static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA) 3157 { 3158 if (rA(ctx->opcode) == 0) { 3159 if (NARROW_MODE(ctx)) { 3160 tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]); 3161 } else { 3162 tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]); 3163 } 3164 } else { 3165 tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 3166 if (NARROW_MODE(ctx)) { 3167 tcg_gen_ext32u_tl(EA, EA); 3168 } 3169 } 3170 } 3171 3172 static inline void gen_addr_register(DisasContext *ctx, TCGv EA) 3173 { 3174 if (rA(ctx->opcode) == 0) { 3175 tcg_gen_movi_tl(EA, 0); 3176 } else if (NARROW_MODE(ctx)) { 3177 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]); 3178 } else { 3179 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]); 3180 } 3181 } 3182 3183 static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1, 3184 target_long val) 3185 { 3186 tcg_gen_addi_tl(ret, arg1, val); 3187 if (NARROW_MODE(ctx)) { 3188 tcg_gen_ext32u_tl(ret, ret); 3189 } 3190 } 3191 3192 static inline void gen_align_no_le(DisasContext *ctx) 3193 { 3194 gen_exception_err(ctx, POWERPC_EXCP_ALIGN, 3195 (ctx->opcode & 0x03FF0000) | POWERPC_EXCP_ALIGN_LE); 3196 } 3197 3198 /*** Integer load ***/ 3199 #define DEF_MEMOP(op) ((op) | ctx->default_tcg_memop_mask) 3200 #define BSWAP_MEMOP(op) ((op) | (ctx->default_tcg_memop_mask ^ MO_BSWAP)) 3201 3202 #define GEN_QEMU_LOAD_TL(ldop, op) \ 3203 static void glue(gen_qemu_, ldop)(DisasContext *ctx, \ 3204 TCGv val, \ 3205 TCGv addr) \ 3206 { \ 3207 tcg_gen_qemu_ld_tl(val, addr, ctx->mem_idx, op); \ 3208 } 3209 3210 GEN_QEMU_LOAD_TL(ld8u, DEF_MEMOP(MO_UB)) 3211 GEN_QEMU_LOAD_TL(ld16u, DEF_MEMOP(MO_UW)) 3212 GEN_QEMU_LOAD_TL(ld16s, DEF_MEMOP(MO_SW)) 3213 GEN_QEMU_LOAD_TL(ld32u, DEF_MEMOP(MO_UL)) 3214 GEN_QEMU_LOAD_TL(ld32s, DEF_MEMOP(MO_SL)) 3215 3216 GEN_QEMU_LOAD_TL(ld16ur, BSWAP_MEMOP(MO_UW)) 3217 GEN_QEMU_LOAD_TL(ld32ur, BSWAP_MEMOP(MO_UL)) 3218 3219 #define GEN_QEMU_LOAD_64(ldop, op) \ 3220 static void glue(gen_qemu_, glue(ldop, _i64))(DisasContext *ctx, \ 3221 TCGv_i64 val, \ 3222 TCGv addr) \ 3223 { \ 3224 tcg_gen_qemu_ld_i64(val, addr, ctx->mem_idx, op); \ 3225 } 3226 3227 GEN_QEMU_LOAD_64(ld8u, DEF_MEMOP(MO_UB)) 
3228 GEN_QEMU_LOAD_64(ld16u, DEF_MEMOP(MO_UW)) 3229 GEN_QEMU_LOAD_64(ld32u, DEF_MEMOP(MO_UL)) 3230 GEN_QEMU_LOAD_64(ld32s, DEF_MEMOP(MO_SL)) 3231 GEN_QEMU_LOAD_64(ld64, DEF_MEMOP(MO_Q)) 3232 3233 #if defined(TARGET_PPC64) 3234 GEN_QEMU_LOAD_64(ld64ur, BSWAP_MEMOP(MO_Q)) 3235 #endif 3236 3237 #define GEN_QEMU_STORE_TL(stop, op) \ 3238 static void glue(gen_qemu_, stop)(DisasContext *ctx, \ 3239 TCGv val, \ 3240 TCGv addr) \ 3241 { \ 3242 tcg_gen_qemu_st_tl(val, addr, ctx->mem_idx, op); \ 3243 } 3244 3245 #if defined(TARGET_PPC64) || !defined(CONFIG_USER_ONLY) 3246 GEN_QEMU_STORE_TL(st8, DEF_MEMOP(MO_UB)) 3247 #endif 3248 GEN_QEMU_STORE_TL(st16, DEF_MEMOP(MO_UW)) 3249 GEN_QEMU_STORE_TL(st32, DEF_MEMOP(MO_UL)) 3250 3251 GEN_QEMU_STORE_TL(st16r, BSWAP_MEMOP(MO_UW)) 3252 GEN_QEMU_STORE_TL(st32r, BSWAP_MEMOP(MO_UL)) 3253 3254 #define GEN_QEMU_STORE_64(stop, op) \ 3255 static void glue(gen_qemu_, glue(stop, _i64))(DisasContext *ctx, \ 3256 TCGv_i64 val, \ 3257 TCGv addr) \ 3258 { \ 3259 tcg_gen_qemu_st_i64(val, addr, ctx->mem_idx, op); \ 3260 } 3261 3262 GEN_QEMU_STORE_64(st8, DEF_MEMOP(MO_UB)) 3263 GEN_QEMU_STORE_64(st16, DEF_MEMOP(MO_UW)) 3264 GEN_QEMU_STORE_64(st32, DEF_MEMOP(MO_UL)) 3265 GEN_QEMU_STORE_64(st64, DEF_MEMOP(MO_Q)) 3266 3267 #if defined(TARGET_PPC64) 3268 GEN_QEMU_STORE_64(st64r, BSWAP_MEMOP(MO_Q)) 3269 #endif 3270 3271 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \ 3272 static void glue(gen_, name##x)(DisasContext *ctx) \ 3273 { \ 3274 TCGv EA; \ 3275 chk; \ 3276 gen_set_access_type(ctx, ACCESS_INT); \ 3277 EA = tcg_temp_new(); \ 3278 gen_addr_reg_index(ctx, EA); \ 3279 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 3280 tcg_temp_free(EA); \ 3281 } 3282 3283 #define GEN_LDX(name, ldop, opc2, opc3, type) \ 3284 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_NONE) 3285 3286 #define GEN_LDX_HVRM(name, ldop, opc2, opc3, type) \ 3287 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_HVRM) 3288 3289 #define GEN_LDEPX(name, ldop, opc2, opc3) \ 3290 static void glue(gen_, name##epx)(DisasContext *ctx) \ 3291 { \ 3292 TCGv EA; \ 3293 CHK_SV; \ 3294 gen_set_access_type(ctx, ACCESS_INT); \ 3295 EA = tcg_temp_new(); \ 3296 gen_addr_reg_index(ctx, EA); \ 3297 tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_LOAD, ldop);\ 3298 tcg_temp_free(EA); \ 3299 } 3300 3301 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02) 3302 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08) 3303 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00) 3304 #if defined(TARGET_PPC64) 3305 GEN_LDEPX(ld, DEF_MEMOP(MO_Q), 0x1D, 0x00) 3306 #endif 3307 3308 #if defined(TARGET_PPC64) 3309 /* CI load/store variants */ 3310 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST) 3311 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x15, PPC_CILDST) 3312 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST) 3313 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST) 3314 3315 /* lq */ 3316 static void gen_lq(DisasContext *ctx) 3317 { 3318 int ra, rd; 3319 TCGv EA, hi, lo; 3320 3321 /* lq is a legal user mode instruction starting in ISA 2.07 */ 3322 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 3323 bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 3324 3325 if (!legal_in_user_mode && ctx->pr) { 3326 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); 3327 return; 3328 } 3329 3330 if (!le_is_supported && ctx->le_mode) { 3331 gen_align_no_le(ctx); 3332 return; 3333 } 3334 ra = rA(ctx->opcode); 3335 rd = rD(ctx->opcode); 3336 if (unlikely((rd & 1) || rd == ra)) { 3337 gen_inval_exception(ctx, 
POWERPC_EXCP_INVAL_INVAL); 3338 return; 3339 } 3340 3341 gen_set_access_type(ctx, ACCESS_INT); 3342 EA = tcg_temp_new(); 3343 gen_addr_imm_index(ctx, EA, 0x0F); 3344 3345 /* Note that the low part is always in RD+1, even in LE mode. */ 3346 lo = cpu_gpr[rd + 1]; 3347 hi = cpu_gpr[rd]; 3348 3349 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3350 if (HAVE_ATOMIC128) { 3351 TCGv_i32 oi = tcg_temp_new_i32(); 3352 if (ctx->le_mode) { 3353 tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ, ctx->mem_idx)); 3354 gen_helper_lq_le_parallel(lo, cpu_env, EA, oi); 3355 } else { 3356 tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ, ctx->mem_idx)); 3357 gen_helper_lq_be_parallel(lo, cpu_env, EA, oi); 3358 } 3359 tcg_temp_free_i32(oi); 3360 tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh)); 3361 } else { 3362 /* Restart with exclusive lock. */ 3363 gen_helper_exit_atomic(cpu_env); 3364 ctx->base.is_jmp = DISAS_NORETURN; 3365 } 3366 } else if (ctx->le_mode) { 3367 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEQ); 3368 gen_addr_add(ctx, EA, EA, 8); 3369 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEQ); 3370 } else { 3371 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEQ); 3372 gen_addr_add(ctx, EA, EA, 8); 3373 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEQ); 3374 } 3375 tcg_temp_free(EA); 3376 } 3377 #endif 3378 3379 /*** Integer store ***/ 3380 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \ 3381 static void glue(gen_, name##x)(DisasContext *ctx) \ 3382 { \ 3383 TCGv EA; \ 3384 chk; \ 3385 gen_set_access_type(ctx, ACCESS_INT); \ 3386 EA = tcg_temp_new(); \ 3387 gen_addr_reg_index(ctx, EA); \ 3388 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 3389 tcg_temp_free(EA); \ 3390 } 3391 #define GEN_STX(name, stop, opc2, opc3, type) \ 3392 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_NONE) 3393 3394 #define GEN_STX_HVRM(name, stop, opc2, opc3, type) \ 3395 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_HVRM) 3396 3397 #define GEN_STEPX(name, stop, opc2, opc3) \ 3398 static void glue(gen_, name##epx)(DisasContext *ctx) \ 3399 { \ 3400 TCGv EA; \ 3401 CHK_SV; \ 3402 gen_set_access_type(ctx, ACCESS_INT); \ 3403 EA = tcg_temp_new(); \ 3404 gen_addr_reg_index(ctx, EA); \ 3405 tcg_gen_qemu_st_tl( \ 3406 cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_STORE, stop); \ 3407 tcg_temp_free(EA); \ 3408 } 3409 3410 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06) 3411 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C) 3412 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04) 3413 #if defined(TARGET_PPC64) 3414 GEN_STEPX(std, DEF_MEMOP(MO_Q), 0x1d, 0x04) 3415 #endif 3416 3417 #if defined(TARGET_PPC64) 3418 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST) 3419 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST) 3420 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST) 3421 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST) 3422 3423 static void gen_std(DisasContext *ctx) 3424 { 3425 int rs; 3426 TCGv EA; 3427 3428 rs = rS(ctx->opcode); 3429 if ((ctx->opcode & 0x3) == 0x2) { /* stq */ 3430 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 3431 bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 3432 TCGv hi, lo; 3433 3434 if (!(ctx->insns_flags & PPC_64BX)) { 3435 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3436 } 3437 3438 if (!legal_in_user_mode && ctx->pr) { 3439 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); 3440 return; 3441 } 3442 3443 if (!le_is_supported && ctx->le_mode) { 3444 gen_align_no_le(ctx); 3445 return; 3446 } 3447 3448 if (unlikely(rs & 1)) { 3449 
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3450 return; 3451 } 3452 gen_set_access_type(ctx, ACCESS_INT); 3453 EA = tcg_temp_new(); 3454 gen_addr_imm_index(ctx, EA, 0x03); 3455 3456 /* Note that the low part is always in RS+1, even in LE mode. */ 3457 lo = cpu_gpr[rs + 1]; 3458 hi = cpu_gpr[rs]; 3459 3460 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3461 if (HAVE_ATOMIC128) { 3462 TCGv_i32 oi = tcg_temp_new_i32(); 3463 if (ctx->le_mode) { 3464 tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ, ctx->mem_idx)); 3465 gen_helper_stq_le_parallel(cpu_env, EA, lo, hi, oi); 3466 } else { 3467 tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ, ctx->mem_idx)); 3468 gen_helper_stq_be_parallel(cpu_env, EA, lo, hi, oi); 3469 } 3470 tcg_temp_free_i32(oi); 3471 } else { 3472 /* Restart with exclusive lock. */ 3473 gen_helper_exit_atomic(cpu_env); 3474 ctx->base.is_jmp = DISAS_NORETURN; 3475 } 3476 } else if (ctx->le_mode) { 3477 tcg_gen_qemu_st_i64(lo, EA, ctx->mem_idx, MO_LEQ); 3478 gen_addr_add(ctx, EA, EA, 8); 3479 tcg_gen_qemu_st_i64(hi, EA, ctx->mem_idx, MO_LEQ); 3480 } else { 3481 tcg_gen_qemu_st_i64(hi, EA, ctx->mem_idx, MO_BEQ); 3482 gen_addr_add(ctx, EA, EA, 8); 3483 tcg_gen_qemu_st_i64(lo, EA, ctx->mem_idx, MO_BEQ); 3484 } 3485 tcg_temp_free(EA); 3486 } else { 3487 /* std / stdu */ 3488 if (Rc(ctx->opcode)) { 3489 if (unlikely(rA(ctx->opcode) == 0)) { 3490 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3491 return; 3492 } 3493 } 3494 gen_set_access_type(ctx, ACCESS_INT); 3495 EA = tcg_temp_new(); 3496 gen_addr_imm_index(ctx, EA, 0x03); 3497 gen_qemu_st64_i64(ctx, cpu_gpr[rs], EA); 3498 if (Rc(ctx->opcode)) { 3499 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); 3500 } 3501 tcg_temp_free(EA); 3502 } 3503 } 3504 #endif 3505 /*** Integer load and store with byte reverse ***/ 3506 3507 /* lhbrx */ 3508 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER); 3509 3510 /* lwbrx */ 3511 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER); 3512 3513 #if defined(TARGET_PPC64) 3514 /* ldbrx */ 3515 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE); 3516 /* stdbrx */ 3517 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE); 3518 #endif /* TARGET_PPC64 */ 3519 3520 /* sthbrx */ 3521 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER); 3522 /* stwbrx */ 3523 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER); 3524 3525 /*** Integer load and store multiple ***/ 3526 3527 /* lmw */ 3528 static void gen_lmw(DisasContext *ctx) 3529 { 3530 TCGv t0; 3531 TCGv_i32 t1; 3532 3533 if (ctx->le_mode) { 3534 gen_align_no_le(ctx); 3535 return; 3536 } 3537 gen_set_access_type(ctx, ACCESS_INT); 3538 t0 = tcg_temp_new(); 3539 t1 = tcg_const_i32(rD(ctx->opcode)); 3540 gen_addr_imm_index(ctx, t0, 0); 3541 gen_helper_lmw(cpu_env, t0, t1); 3542 tcg_temp_free(t0); 3543 tcg_temp_free_i32(t1); 3544 } 3545 3546 /* stmw */ 3547 static void gen_stmw(DisasContext *ctx) 3548 { 3549 TCGv t0; 3550 TCGv_i32 t1; 3551 3552 if (ctx->le_mode) { 3553 gen_align_no_le(ctx); 3554 return; 3555 } 3556 gen_set_access_type(ctx, ACCESS_INT); 3557 t0 = tcg_temp_new(); 3558 t1 = tcg_const_i32(rS(ctx->opcode)); 3559 gen_addr_imm_index(ctx, t0, 0); 3560 gen_helper_stmw(cpu_env, t0, t1); 3561 tcg_temp_free(t0); 3562 tcg_temp_free_i32(t1); 3563 } 3564 3565 /*** Integer load and store strings ***/ 3566 3567 /* lswi */ 3568 /* 3569 * The PowerPC32 specification says we must generate an exception if rA is 3570 * in the range of registers to be loaded. On the other hand, IBM says 3571 * this is valid, but rA won't be loaded.
For now, I'll follow the 3572 * spec... 3573 */ 3574 static void gen_lswi(DisasContext *ctx) 3575 { 3576 TCGv t0; 3577 TCGv_i32 t1, t2; 3578 int nb = NB(ctx->opcode); 3579 int start = rD(ctx->opcode); 3580 int ra = rA(ctx->opcode); 3581 int nr; 3582 3583 if (ctx->le_mode) { 3584 gen_align_no_le(ctx); 3585 return; 3586 } 3587 if (nb == 0) { 3588 nb = 32; 3589 } 3590 nr = DIV_ROUND_UP(nb, 4); 3591 if (unlikely(lsw_reg_in_range(start, nr, ra))) { 3592 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX); 3593 return; 3594 } 3595 gen_set_access_type(ctx, ACCESS_INT); 3596 t0 = tcg_temp_new(); 3597 gen_addr_register(ctx, t0); 3598 t1 = tcg_const_i32(nb); 3599 t2 = tcg_const_i32(start); 3600 gen_helper_lsw(cpu_env, t0, t1, t2); 3601 tcg_temp_free(t0); 3602 tcg_temp_free_i32(t1); 3603 tcg_temp_free_i32(t2); 3604 } 3605 3606 /* lswx */ 3607 static void gen_lswx(DisasContext *ctx) 3608 { 3609 TCGv t0; 3610 TCGv_i32 t1, t2, t3; 3611 3612 if (ctx->le_mode) { 3613 gen_align_no_le(ctx); 3614 return; 3615 } 3616 gen_set_access_type(ctx, ACCESS_INT); 3617 t0 = tcg_temp_new(); 3618 gen_addr_reg_index(ctx, t0); 3619 t1 = tcg_const_i32(rD(ctx->opcode)); 3620 t2 = tcg_const_i32(rA(ctx->opcode)); 3621 t3 = tcg_const_i32(rB(ctx->opcode)); 3622 gen_helper_lswx(cpu_env, t0, t1, t2, t3); 3623 tcg_temp_free(t0); 3624 tcg_temp_free_i32(t1); 3625 tcg_temp_free_i32(t2); 3626 tcg_temp_free_i32(t3); 3627 } 3628 3629 /* stswi */ 3630 static void gen_stswi(DisasContext *ctx) 3631 { 3632 TCGv t0; 3633 TCGv_i32 t1, t2; 3634 int nb = NB(ctx->opcode); 3635 3636 if (ctx->le_mode) { 3637 gen_align_no_le(ctx); 3638 return; 3639 } 3640 gen_set_access_type(ctx, ACCESS_INT); 3641 t0 = tcg_temp_new(); 3642 gen_addr_register(ctx, t0); 3643 if (nb == 0) { 3644 nb = 32; 3645 } 3646 t1 = tcg_const_i32(nb); 3647 t2 = tcg_const_i32(rS(ctx->opcode)); 3648 gen_helper_stsw(cpu_env, t0, t1, t2); 3649 tcg_temp_free(t0); 3650 tcg_temp_free_i32(t1); 3651 tcg_temp_free_i32(t2); 3652 } 3653 3654 /* stswx */ 3655 static void gen_stswx(DisasContext *ctx) 3656 { 3657 TCGv t0; 3658 TCGv_i32 t1, t2; 3659 3660 if (ctx->le_mode) { 3661 gen_align_no_le(ctx); 3662 return; 3663 } 3664 gen_set_access_type(ctx, ACCESS_INT); 3665 t0 = tcg_temp_new(); 3666 gen_addr_reg_index(ctx, t0); 3667 t1 = tcg_temp_new_i32(); 3668 tcg_gen_trunc_tl_i32(t1, cpu_xer); 3669 tcg_gen_andi_i32(t1, t1, 0x7F); 3670 t2 = tcg_const_i32(rS(ctx->opcode)); 3671 gen_helper_stsw(cpu_env, t0, t1, t2); 3672 tcg_temp_free(t0); 3673 tcg_temp_free_i32(t1); 3674 tcg_temp_free_i32(t2); 3675 } 3676 3677 /*** Memory synchronisation ***/ 3678 /* eieio */ 3679 static void gen_eieio(DisasContext *ctx) 3680 { 3681 TCGBar bar = TCG_MO_LD_ST; 3682 3683 /* 3684 * POWER9 has an eieio instruction variant using bit 6 as a hint to 3685 * tell the CPU it is a store-forwarding barrier. 3686 */ 3687 if (ctx->opcode & 0x2000000) { 3688 /* 3689 * ISA says that "Reserved fields in instructions are ignored 3690 * by the processor". So ignore bit 6 on non-POWER9 CPUs but, 3691 * as this is not an instruction software should be using, 3692 * complain to the user.
3693 */ 3694 if (!(ctx->insns_flags2 & PPC2_ISA300)) { 3695 qemu_log_mask(LOG_GUEST_ERROR, "invalid eieio using bit 6 at @" 3696 TARGET_FMT_lx "\n", ctx->cia); 3697 } else { 3698 bar = TCG_MO_ST_LD; 3699 } 3700 } 3701 3702 tcg_gen_mb(bar | TCG_BAR_SC); 3703 } 3704 3705 #if !defined(CONFIG_USER_ONLY) 3706 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) 3707 { 3708 TCGv_i32 t; 3709 TCGLabel *l; 3710 3711 if (!ctx->lazy_tlb_flush) { 3712 return; 3713 } 3714 l = gen_new_label(); 3715 t = tcg_temp_new_i32(); 3716 tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, tlb_need_flush)); 3717 tcg_gen_brcondi_i32(TCG_COND_EQ, t, 0, l); 3718 if (global) { 3719 gen_helper_check_tlb_flush_global(cpu_env); 3720 } else { 3721 gen_helper_check_tlb_flush_local(cpu_env); 3722 } 3723 gen_set_label(l); 3724 tcg_temp_free_i32(t); 3725 } 3726 #else 3727 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) { } 3728 #endif 3729 3730 /* isync */ 3731 static void gen_isync(DisasContext *ctx) 3732 { 3733 /* 3734 * We need to check for a pending TLB flush. This can only happen in 3735 * kernel mode however so check MSR_PR 3736 */ 3737 if (!ctx->pr) { 3738 gen_check_tlb_flush(ctx, false); 3739 } 3740 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC); 3741 ctx->base.is_jmp = DISAS_EXIT_UPDATE; 3742 } 3743 3744 #define MEMOP_GET_SIZE(x) (1 << ((x) & MO_SIZE)) 3745 3746 static void gen_load_locked(DisasContext *ctx, MemOp memop) 3747 { 3748 TCGv gpr = cpu_gpr[rD(ctx->opcode)]; 3749 TCGv t0 = tcg_temp_new(); 3750 3751 gen_set_access_type(ctx, ACCESS_RES); 3752 gen_addr_reg_index(ctx, t0); 3753 tcg_gen_qemu_ld_tl(gpr, t0, ctx->mem_idx, memop | MO_ALIGN); 3754 tcg_gen_mov_tl(cpu_reserve, t0); 3755 tcg_gen_mov_tl(cpu_reserve_val, gpr); 3756 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_LDAQ); 3757 tcg_temp_free(t0); 3758 } 3759 3760 #define LARX(name, memop) \ 3761 static void gen_##name(DisasContext *ctx) \ 3762 { \ 3763 gen_load_locked(ctx, memop); \ 3764 } 3765 3766 /* lwarx */ 3767 LARX(lbarx, DEF_MEMOP(MO_UB)) 3768 LARX(lharx, DEF_MEMOP(MO_UW)) 3769 LARX(lwarx, DEF_MEMOP(MO_UL)) 3770 3771 static void gen_fetch_inc_conditional(DisasContext *ctx, MemOp memop, 3772 TCGv EA, TCGCond cond, int addend) 3773 { 3774 TCGv t = tcg_temp_new(); 3775 TCGv t2 = tcg_temp_new(); 3776 TCGv u = tcg_temp_new(); 3777 3778 tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop); 3779 tcg_gen_addi_tl(t2, EA, MEMOP_GET_SIZE(memop)); 3780 tcg_gen_qemu_ld_tl(t2, t2, ctx->mem_idx, memop); 3781 tcg_gen_addi_tl(u, t, addend); 3782 3783 /* E.g. for fetch and increment bounded... */ 3784 /* mem(EA,s) = (t != t2 ? u = t + 1 : t) */ 3785 tcg_gen_movcond_tl(cond, u, t, t2, u, t); 3786 tcg_gen_qemu_st_tl(u, EA, ctx->mem_idx, memop); 3787 3788 /* RT = (t != t2 ? 
t : u = 1<<(s*8-1)) */ 3789 tcg_gen_movi_tl(u, 1 << (MEMOP_GET_SIZE(memop) * 8 - 1)); 3790 tcg_gen_movcond_tl(cond, cpu_gpr[rD(ctx->opcode)], t, t2, t, u); 3791 3792 tcg_temp_free(t); 3793 tcg_temp_free(t2); 3794 tcg_temp_free(u); 3795 } 3796 3797 static void gen_ld_atomic(DisasContext *ctx, MemOp memop) 3798 { 3799 uint32_t gpr_FC = FC(ctx->opcode); 3800 TCGv EA = tcg_temp_new(); 3801 int rt = rD(ctx->opcode); 3802 bool need_serial; 3803 TCGv src, dst; 3804 3805 gen_addr_register(ctx, EA); 3806 dst = cpu_gpr[rt]; 3807 src = cpu_gpr[(rt + 1) & 31]; 3808 3809 need_serial = false; 3810 memop |= MO_ALIGN; 3811 switch (gpr_FC) { 3812 case 0: /* Fetch and add */ 3813 tcg_gen_atomic_fetch_add_tl(dst, EA, src, ctx->mem_idx, memop); 3814 break; 3815 case 1: /* Fetch and xor */ 3816 tcg_gen_atomic_fetch_xor_tl(dst, EA, src, ctx->mem_idx, memop); 3817 break; 3818 case 2: /* Fetch and or */ 3819 tcg_gen_atomic_fetch_or_tl(dst, EA, src, ctx->mem_idx, memop); 3820 break; 3821 case 3: /* Fetch and 'and' */ 3822 tcg_gen_atomic_fetch_and_tl(dst, EA, src, ctx->mem_idx, memop); 3823 break; 3824 case 4: /* Fetch and max unsigned */ 3825 tcg_gen_atomic_fetch_umax_tl(dst, EA, src, ctx->mem_idx, memop); 3826 break; 3827 case 5: /* Fetch and max signed */ 3828 tcg_gen_atomic_fetch_smax_tl(dst, EA, src, ctx->mem_idx, memop); 3829 break; 3830 case 6: /* Fetch and min unsigned */ 3831 tcg_gen_atomic_fetch_umin_tl(dst, EA, src, ctx->mem_idx, memop); 3832 break; 3833 case 7: /* Fetch and min signed */ 3834 tcg_gen_atomic_fetch_smin_tl(dst, EA, src, ctx->mem_idx, memop); 3835 break; 3836 case 8: /* Swap */ 3837 tcg_gen_atomic_xchg_tl(dst, EA, src, ctx->mem_idx, memop); 3838 break; 3839 3840 case 16: /* Compare and swap not equal */ 3841 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3842 need_serial = true; 3843 } else { 3844 TCGv t0 = tcg_temp_new(); 3845 TCGv t1 = tcg_temp_new(); 3846 3847 tcg_gen_qemu_ld_tl(t0, EA, ctx->mem_idx, memop); 3848 if ((memop & MO_SIZE) == MO_64 || TARGET_LONG_BITS == 32) { 3849 tcg_gen_mov_tl(t1, src); 3850 } else { 3851 tcg_gen_ext32u_tl(t1, src); 3852 } 3853 tcg_gen_movcond_tl(TCG_COND_NE, t1, t0, t1, 3854 cpu_gpr[(rt + 2) & 31], t0); 3855 tcg_gen_qemu_st_tl(t1, EA, ctx->mem_idx, memop); 3856 tcg_gen_mov_tl(dst, t0); 3857 3858 tcg_temp_free(t0); 3859 tcg_temp_free(t1); 3860 } 3861 break; 3862 3863 case 24: /* Fetch and increment bounded */ 3864 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3865 need_serial = true; 3866 } else { 3867 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, 1); 3868 } 3869 break; 3870 case 25: /* Fetch and increment equal */ 3871 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3872 need_serial = true; 3873 } else { 3874 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_EQ, 1); 3875 } 3876 break; 3877 case 28: /* Fetch and decrement bounded */ 3878 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3879 need_serial = true; 3880 } else { 3881 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, -1); 3882 } 3883 break; 3884 3885 default: 3886 /* invoke data storage error handler */ 3887 gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL); 3888 } 3889 tcg_temp_free(EA); 3890 3891 if (need_serial) { 3892 /* Restart with exclusive lock. 
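gen_helper_exit_atomic() raises EXCP_ATOMIC, which makes the cpu loop re-execute this instruction with the exclusive lock held, so the load/store sequence becomes atomic.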
*/ 3893 gen_helper_exit_atomic(cpu_env); 3894 ctx->base.is_jmp = DISAS_NORETURN; 3895 } 3896 } 3897 3898 static void gen_lwat(DisasContext *ctx) 3899 { 3900 gen_ld_atomic(ctx, DEF_MEMOP(MO_UL)); 3901 } 3902 3903 #ifdef TARGET_PPC64 3904 static void gen_ldat(DisasContext *ctx) 3905 { 3906 gen_ld_atomic(ctx, DEF_MEMOP(MO_Q)); 3907 } 3908 #endif 3909 3910 static void gen_st_atomic(DisasContext *ctx, MemOp memop) 3911 { 3912 uint32_t gpr_FC = FC(ctx->opcode); 3913 TCGv EA = tcg_temp_new(); 3914 TCGv src, discard; 3915 3916 gen_addr_register(ctx, EA); 3917 src = cpu_gpr[rD(ctx->opcode)]; 3918 discard = tcg_temp_new(); 3919 3920 memop |= MO_ALIGN; 3921 switch (gpr_FC) { 3922 case 0: /* add and Store */ 3923 tcg_gen_atomic_add_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3924 break; 3925 case 1: /* xor and Store */ 3926 tcg_gen_atomic_xor_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3927 break; 3928 case 2: /* Or and Store */ 3929 tcg_gen_atomic_or_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3930 break; 3931 case 3: /* 'and' and Store */ 3932 tcg_gen_atomic_and_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3933 break; 3934 case 4: /* Store max unsigned */ 3935 tcg_gen_atomic_umax_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3936 break; 3937 case 5: /* Store max signed */ 3938 tcg_gen_atomic_smax_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3939 break; 3940 case 6: /* Store min unsigned */ 3941 tcg_gen_atomic_umin_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3942 break; 3943 case 7: /* Store min signed */ 3944 tcg_gen_atomic_smin_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3945 break; 3946 case 24: /* Store twin */ 3947 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3948 /* Restart with exclusive lock. */ 3949 gen_helper_exit_atomic(cpu_env); 3950 ctx->base.is_jmp = DISAS_NORETURN; 3951 } else { 3952 TCGv t = tcg_temp_new(); 3953 TCGv t2 = tcg_temp_new(); 3954 TCGv s = tcg_temp_new(); 3955 TCGv s2 = tcg_temp_new(); 3956 TCGv ea_plus_s = tcg_temp_new(); 3957 3958 tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop); 3959 tcg_gen_addi_tl(ea_plus_s, EA, MEMOP_GET_SIZE(memop)); 3960 tcg_gen_qemu_ld_tl(t2, ea_plus_s, ctx->mem_idx, memop); 3961 tcg_gen_movcond_tl(TCG_COND_EQ, s, t, t2, src, t); 3962 tcg_gen_movcond_tl(TCG_COND_EQ, s2, t, t2, src, t2); 3963 tcg_gen_qemu_st_tl(s, EA, ctx->mem_idx, memop); 3964 tcg_gen_qemu_st_tl(s2, ea_plus_s, ctx->mem_idx, memop); 3965 3966 tcg_temp_free(ea_plus_s); 3967 tcg_temp_free(s2); 3968 tcg_temp_free(s); 3969 tcg_temp_free(t2); 3970 tcg_temp_free(t); 3971 } 3972 break; 3973 default: 3974 /* invoke data storage error handler */ 3975 gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL); 3976 } 3977 tcg_temp_free(discard); 3978 tcg_temp_free(EA); 3979 } 3980 3981 static void gen_stwat(DisasContext *ctx) 3982 { 3983 gen_st_atomic(ctx, DEF_MEMOP(MO_UL)); 3984 } 3985 3986 #ifdef TARGET_PPC64 3987 static void gen_stdat(DisasContext *ctx) 3988 { 3989 gen_st_atomic(ctx, DEF_MEMOP(MO_Q)); 3990 } 3991 #endif 3992 3993 static void gen_conditional_store(DisasContext *ctx, MemOp memop) 3994 { 3995 TCGLabel *l1 = gen_new_label(); 3996 TCGLabel *l2 = gen_new_label(); 3997 TCGv t0 = tcg_temp_new(); 3998 int reg = rS(ctx->opcode); 3999 4000 gen_set_access_type(ctx, ACCESS_RES); 4001 gen_addr_reg_index(ctx, t0); 4002 tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1); 4003 tcg_temp_free(t0); 4004 4005 t0 = tcg_temp_new(); 4006 tcg_gen_atomic_cmpxchg_tl(t0, cpu_reserve, cpu_reserve_val, 4007 cpu_gpr[reg], ctx->mem_idx, 4008 DEF_MEMOP(memop) | 
MO_ALIGN); 4009 tcg_gen_setcond_tl(TCG_COND_EQ, t0, t0, cpu_reserve_val); 4010 tcg_gen_shli_tl(t0, t0, CRF_EQ_BIT); 4011 tcg_gen_or_tl(t0, t0, cpu_so); 4012 tcg_gen_trunc_tl_i32(cpu_crf[0], t0); 4013 tcg_temp_free(t0); 4014 tcg_gen_br(l2); 4015 4016 gen_set_label(l1); 4017 4018 /* 4019 * Address mismatch implies failure. But we still need to provide 4020 * the memory barrier semantics of the instruction. 4021 */ 4022 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_STRL); 4023 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 4024 4025 gen_set_label(l2); 4026 tcg_gen_movi_tl(cpu_reserve, -1); 4027 } 4028 4029 #define STCX(name, memop) \ 4030 static void gen_##name(DisasContext *ctx) \ 4031 { \ 4032 gen_conditional_store(ctx, memop); \ 4033 } 4034 4035 STCX(stbcx_, DEF_MEMOP(MO_UB)) 4036 STCX(sthcx_, DEF_MEMOP(MO_UW)) 4037 STCX(stwcx_, DEF_MEMOP(MO_UL)) 4038 4039 #if defined(TARGET_PPC64) 4040 /* ldarx */ 4041 LARX(ldarx, DEF_MEMOP(MO_Q)) 4042 /* stdcx. */ 4043 STCX(stdcx_, DEF_MEMOP(MO_Q)) 4044 4045 /* lqarx */ 4046 static void gen_lqarx(DisasContext *ctx) 4047 { 4048 int rd = rD(ctx->opcode); 4049 TCGv EA, hi, lo; 4050 4051 if (unlikely((rd & 1) || (rd == rA(ctx->opcode)) || 4052 (rd == rB(ctx->opcode)))) { 4053 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 4054 return; 4055 } 4056 4057 gen_set_access_type(ctx, ACCESS_RES); 4058 EA = tcg_temp_new(); 4059 gen_addr_reg_index(ctx, EA); 4060 4061 /* Note that the low part is always in RD+1, even in LE mode. */ 4062 lo = cpu_gpr[rd + 1]; 4063 hi = cpu_gpr[rd]; 4064 4065 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 4066 if (HAVE_ATOMIC128) { 4067 TCGv_i32 oi = tcg_temp_new_i32(); 4068 if (ctx->le_mode) { 4069 tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ | MO_ALIGN_16, 4070 ctx->mem_idx)); 4071 gen_helper_lq_le_parallel(lo, cpu_env, EA, oi); 4072 } else { 4073 tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ | MO_ALIGN_16, 4074 ctx->mem_idx)); 4075 gen_helper_lq_be_parallel(lo, cpu_env, EA, oi); 4076 } 4077 tcg_temp_free_i32(oi); 4078 tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh)); 4079 } else { 4080 /* Restart with exclusive lock. */ 4081 gen_helper_exit_atomic(cpu_env); 4082 ctx->base.is_jmp = DISAS_NORETURN; 4083 tcg_temp_free(EA); 4084 return; 4085 } 4086 } else if (ctx->le_mode) { 4087 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEQ | MO_ALIGN_16); 4088 tcg_gen_mov_tl(cpu_reserve, EA); 4089 gen_addr_add(ctx, EA, EA, 8); 4090 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEQ); 4091 } else { 4092 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEQ | MO_ALIGN_16); 4093 tcg_gen_mov_tl(cpu_reserve, EA); 4094 gen_addr_add(ctx, EA, EA, 8); 4095 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEQ); 4096 } 4097 tcg_temp_free(EA); 4098 4099 tcg_gen_st_tl(hi, cpu_env, offsetof(CPUPPCState, reserve_val)); 4100 tcg_gen_st_tl(lo, cpu_env, offsetof(CPUPPCState, reserve_val2)); 4101 } 4102 4103 /* stqcx. */ 4104 static void gen_stqcx_(DisasContext *ctx) 4105 { 4106 int rs = rS(ctx->opcode); 4107 TCGv EA, hi, lo; 4108 4109 if (unlikely(rs & 1)) { 4110 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 4111 return; 4112 } 4113 4114 gen_set_access_type(ctx, ACCESS_RES); 4115 EA = tcg_temp_new(); 4116 gen_addr_reg_index(ctx, EA); 4117 4118 /* Note that the low part is always in RS+1, even in LE mode. 
*/ 4119 lo = cpu_gpr[rs + 1]; 4120 hi = cpu_gpr[rs]; 4121 4122 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 4123 if (HAVE_CMPXCHG128) { 4124 TCGv_i32 oi = tcg_const_i32(DEF_MEMOP(MO_Q) | MO_ALIGN_16); 4125 if (ctx->le_mode) { 4126 gen_helper_stqcx_le_parallel(cpu_crf[0], cpu_env, 4127 EA, lo, hi, oi); 4128 } else { 4129 gen_helper_stqcx_be_parallel(cpu_crf[0], cpu_env, 4130 EA, lo, hi, oi); 4131 } 4132 tcg_temp_free_i32(oi); 4133 } else { 4134 /* Restart with exclusive lock. */ 4135 gen_helper_exit_atomic(cpu_env); 4136 ctx->base.is_jmp = DISAS_NORETURN; 4137 } 4138 tcg_temp_free(EA); 4139 } else { 4140 TCGLabel *lab_fail = gen_new_label(); 4141 TCGLabel *lab_over = gen_new_label(); 4142 TCGv_i64 t0 = tcg_temp_new_i64(); 4143 TCGv_i64 t1 = tcg_temp_new_i64(); 4144 4145 tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, lab_fail); 4146 tcg_temp_free(EA); 4147 4148 gen_qemu_ld64_i64(ctx, t0, cpu_reserve); 4149 tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode 4150 ? offsetof(CPUPPCState, reserve_val2) 4151 : offsetof(CPUPPCState, reserve_val))); 4152 tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail); 4153 4154 tcg_gen_addi_i64(t0, cpu_reserve, 8); 4155 gen_qemu_ld64_i64(ctx, t0, t0); 4156 tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode 4157 ? offsetof(CPUPPCState, reserve_val) 4158 : offsetof(CPUPPCState, reserve_val2))); 4159 tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail); 4160 4161 /* Success */ 4162 gen_qemu_st64_i64(ctx, ctx->le_mode ? lo : hi, cpu_reserve); 4163 tcg_gen_addi_i64(t0, cpu_reserve, 8); 4164 gen_qemu_st64_i64(ctx, ctx->le_mode ? hi : lo, t0); 4165 4166 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 4167 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ); 4168 tcg_gen_br(lab_over); 4169 4170 gen_set_label(lab_fail); 4171 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 4172 4173 gen_set_label(lab_over); 4174 tcg_gen_movi_tl(cpu_reserve, -1); 4175 tcg_temp_free_i64(t0); 4176 tcg_temp_free_i64(t1); 4177 } 4178 } 4179 #endif /* defined(TARGET_PPC64) */ 4180 4181 /* sync */ 4182 static void gen_sync(DisasContext *ctx) 4183 { 4184 uint32_t l = (ctx->opcode >> 21) & 3; 4185 4186 /* 4187 * We may need to check for a pending TLB flush. 4188 * 4189 * We do this on ptesync (l == 2) on ppc64 and any sync on ppc32. 4190 * 4191 * Additionally, this can only happen in kernel mode, so check 4192 * MSR_PR as well.
4193 */ 4194 if (((l == 2) || !(ctx->insns_flags & PPC_64B)) && !ctx->pr) { 4195 gen_check_tlb_flush(ctx, true); 4196 } 4197 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC); 4198 } 4199 4200 /* wait */ 4201 static void gen_wait(DisasContext *ctx) 4202 { 4203 TCGv_i32 t0 = tcg_const_i32(1); 4204 tcg_gen_st_i32(t0, cpu_env, 4205 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted)); 4206 tcg_temp_free_i32(t0); 4207 /* Stop translation, as the CPU is supposed to sleep from now */ 4208 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 4209 } 4210 4211 #if defined(TARGET_PPC64) 4212 static void gen_doze(DisasContext *ctx) 4213 { 4214 #if defined(CONFIG_USER_ONLY) 4215 GEN_PRIV; 4216 #else 4217 TCGv_i32 t; 4218 4219 CHK_HV; 4220 t = tcg_const_i32(PPC_PM_DOZE); 4221 gen_helper_pminsn(cpu_env, t); 4222 tcg_temp_free_i32(t); 4223 /* Stop translation, as the CPU is supposed to sleep from now */ 4224 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 4225 #endif /* defined(CONFIG_USER_ONLY) */ 4226 } 4227 4228 static void gen_nap(DisasContext *ctx) 4229 { 4230 #if defined(CONFIG_USER_ONLY) 4231 GEN_PRIV; 4232 #else 4233 TCGv_i32 t; 4234 4235 CHK_HV; 4236 t = tcg_const_i32(PPC_PM_NAP); 4237 gen_helper_pminsn(cpu_env, t); 4238 tcg_temp_free_i32(t); 4239 /* Stop translation, as the CPU is supposed to sleep from now */ 4240 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 4241 #endif /* defined(CONFIG_USER_ONLY) */ 4242 } 4243 4244 static void gen_stop(DisasContext *ctx) 4245 { 4246 #if defined(CONFIG_USER_ONLY) 4247 GEN_PRIV; 4248 #else 4249 TCGv_i32 t; 4250 4251 CHK_HV; 4252 t = tcg_const_i32(PPC_PM_STOP); 4253 gen_helper_pminsn(cpu_env, t); 4254 tcg_temp_free_i32(t); 4255 /* Stop translation, as the CPU is supposed to sleep from now */ 4256 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 4257 #endif /* defined(CONFIG_USER_ONLY) */ 4258 } 4259 4260 static void gen_sleep(DisasContext *ctx) 4261 { 4262 #if defined(CONFIG_USER_ONLY) 4263 GEN_PRIV; 4264 #else 4265 TCGv_i32 t; 4266 4267 CHK_HV; 4268 t = tcg_const_i32(PPC_PM_SLEEP); 4269 gen_helper_pminsn(cpu_env, t); 4270 tcg_temp_free_i32(t); 4271 /* Stop translation, as the CPU is supposed to sleep from now */ 4272 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 4273 #endif /* defined(CONFIG_USER_ONLY) */ 4274 } 4275 4276 static void gen_rvwinkle(DisasContext *ctx) 4277 { 4278 #if defined(CONFIG_USER_ONLY) 4279 GEN_PRIV; 4280 #else 4281 TCGv_i32 t; 4282 4283 CHK_HV; 4284 t = tcg_const_i32(PPC_PM_RVWINKLE); 4285 gen_helper_pminsn(cpu_env, t); 4286 tcg_temp_free_i32(t); 4287 /* Stop translation, as the CPU is supposed to sleep from now */ 4288 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 4289 #endif /* defined(CONFIG_USER_ONLY) */ 4290 } 4291 #endif /* #if defined(TARGET_PPC64) */ 4292 4293 static inline void gen_update_cfar(DisasContext *ctx, target_ulong nip) 4294 { 4295 #if defined(TARGET_PPC64) 4296 if (ctx->has_cfar) { 4297 tcg_gen_movi_tl(cpu_cfar, nip); 4298 } 4299 #endif 4300 } 4301 4302 static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest) 4303 { 4304 return translator_use_goto_tb(&ctx->base, dest); 4305 } 4306 4307 static void gen_lookup_and_goto_ptr(DisasContext *ctx) 4308 { 4309 int sse = ctx->singlestep_enabled; 4310 if (unlikely(sse)) { 4311 if (sse & GDBSTUB_SINGLE_STEP) { 4312 gen_debug_exception(ctx); 4313 } else if (sse & (CPU_SINGLE_STEP | CPU_BRANCH_STEP)) { 4314 gen_helper_raise_exception(cpu_env, tcg_constant_i32(gen_prep_dbgex(ctx))); 4315 } else { 4316 tcg_gen_exit_tb(NULL, 0); 4317 } 4318 } else { 
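/* Not single stepping: look up the next TB and jump straight to it when possible. */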
4319 tcg_gen_lookup_and_goto_ptr(); 4320 } 4321 } 4322 4323 /*** Branch ***/ 4324 static void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest) 4325 { 4326 if (NARROW_MODE(ctx)) { 4327 dest = (uint32_t) dest; 4328 } 4329 if (use_goto_tb(ctx, dest)) { 4330 tcg_gen_goto_tb(n); 4331 tcg_gen_movi_tl(cpu_nip, dest & ~3); 4332 tcg_gen_exit_tb(ctx->base.tb, n); 4333 } else { 4334 tcg_gen_movi_tl(cpu_nip, dest & ~3); 4335 gen_lookup_and_goto_ptr(ctx); 4336 } 4337 } 4338 4339 static inline void gen_setlr(DisasContext *ctx, target_ulong nip) 4340 { 4341 if (NARROW_MODE(ctx)) { 4342 nip = (uint32_t)nip; 4343 } 4344 tcg_gen_movi_tl(cpu_lr, nip); 4345 } 4346 4347 /* b ba bl bla */ 4348 static void gen_b(DisasContext *ctx) 4349 { 4350 target_ulong li, target; 4351 4352 /* sign extend LI */ 4353 li = LI(ctx->opcode); 4354 li = (li ^ 0x02000000) - 0x02000000; 4355 if (likely(AA(ctx->opcode) == 0)) { 4356 target = ctx->cia + li; 4357 } else { 4358 target = li; 4359 } 4360 if (LK(ctx->opcode)) { 4361 gen_setlr(ctx, ctx->base.pc_next); 4362 } 4363 gen_update_cfar(ctx, ctx->cia); 4364 gen_goto_tb(ctx, 0, target); 4365 ctx->base.is_jmp = DISAS_NORETURN; 4366 } 4367 4368 #define BCOND_IM 0 4369 #define BCOND_LR 1 4370 #define BCOND_CTR 2 4371 #define BCOND_TAR 3 4372 4373 static void gen_bcond(DisasContext *ctx, int type) 4374 { 4375 uint32_t bo = BO(ctx->opcode); 4376 TCGLabel *l1; 4377 TCGv target; 4378 4379 if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) { 4380 target = tcg_temp_local_new(); 4381 if (type == BCOND_CTR) { 4382 tcg_gen_mov_tl(target, cpu_ctr); 4383 } else if (type == BCOND_TAR) { 4384 gen_load_spr(target, SPR_TAR); 4385 } else { 4386 tcg_gen_mov_tl(target, cpu_lr); 4387 } 4388 } else { 4389 target = NULL; 4390 } 4391 if (LK(ctx->opcode)) { 4392 gen_setlr(ctx, ctx->base.pc_next); 4393 } 4394 l1 = gen_new_label(); 4395 if ((bo & 0x4) == 0) { 4396 /* Decrement and test CTR */ 4397 TCGv temp = tcg_temp_new(); 4398 4399 if (type == BCOND_CTR) { 4400 /* 4401 * All ISAs up to v3 describe this form of bcctr as invalid but 4402 * some processors, ie. 64-bit server processors compliant with 4403 * arch 2.x, do implement a "test and decrement" logic instead, 4404 * as described in their respective UMs. This logic involves CTR 4405 * to act as both the branch target and a counter, which makes 4406 * it basically useless and thus never used in real code. 4407 * 4408 * This form was hence chosen to trigger extra micro-architectural 4409 * side-effect on real HW needed for the Spectre v2 workaround. 4410 * It is up to guests that implement such workaround, ie. linux, to 4411 * use this form in a way it just triggers the side-effect without 4412 * doing anything else harmful. 
4413 */ 4414 if (unlikely(!is_book3s_arch2x(ctx))) { 4415 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 4416 tcg_temp_free(temp); 4417 tcg_temp_free(target); 4418 return; 4419 } 4420 4421 if (NARROW_MODE(ctx)) { 4422 tcg_gen_ext32u_tl(temp, cpu_ctr); 4423 } else { 4424 tcg_gen_mov_tl(temp, cpu_ctr); 4425 } 4426 if (bo & 0x2) { 4427 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1); 4428 } else { 4429 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1); 4430 } 4431 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1); 4432 } else { 4433 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1); 4434 if (NARROW_MODE(ctx)) { 4435 tcg_gen_ext32u_tl(temp, cpu_ctr); 4436 } else { 4437 tcg_gen_mov_tl(temp, cpu_ctr); 4438 } 4439 if (bo & 0x2) { 4440 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1); 4441 } else { 4442 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1); 4443 } 4444 } 4445 tcg_temp_free(temp); 4446 } 4447 if ((bo & 0x10) == 0) { 4448 /* Test CR */ 4449 uint32_t bi = BI(ctx->opcode); 4450 uint32_t mask = 0x08 >> (bi & 0x03); 4451 TCGv_i32 temp = tcg_temp_new_i32(); 4452 4453 if (bo & 0x8) { 4454 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask); 4455 tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1); 4456 } else { 4457 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask); 4458 tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1); 4459 } 4460 tcg_temp_free_i32(temp); 4461 } 4462 gen_update_cfar(ctx, ctx->cia); 4463 if (type == BCOND_IM) { 4464 target_ulong li = (target_long)((int16_t)(BD(ctx->opcode))); 4465 if (likely(AA(ctx->opcode) == 0)) { 4466 gen_goto_tb(ctx, 0, ctx->cia + li); 4467 } else { 4468 gen_goto_tb(ctx, 0, li); 4469 } 4470 } else { 4471 if (NARROW_MODE(ctx)) { 4472 tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3); 4473 } else { 4474 tcg_gen_andi_tl(cpu_nip, target, ~3); 4475 } 4476 gen_lookup_and_goto_ptr(ctx); 4477 tcg_temp_free(target); 4478 } 4479 if ((bo & 0x14) != 0x14) { 4480 /* fallthrough case */ 4481 gen_set_label(l1); 4482 gen_goto_tb(ctx, 1, ctx->base.pc_next); 4483 } 4484 ctx->base.is_jmp = DISAS_NORETURN; 4485 } 4486 4487 static void gen_bc(DisasContext *ctx) 4488 { 4489 gen_bcond(ctx, BCOND_IM); 4490 } 4491 4492 static void gen_bcctr(DisasContext *ctx) 4493 { 4494 gen_bcond(ctx, BCOND_CTR); 4495 } 4496 4497 static void gen_bclr(DisasContext *ctx) 4498 { 4499 gen_bcond(ctx, BCOND_LR); 4500 } 4501 4502 static void gen_bctar(DisasContext *ctx) 4503 { 4504 gen_bcond(ctx, BCOND_TAR); 4505 } 4506 4507 /*** Condition register logical ***/ 4508 #define GEN_CRLOGIC(name, tcg_op, opc) \ 4509 static void glue(gen_, name)(DisasContext *ctx) \ 4510 { \ 4511 uint8_t bitmask; \ 4512 int sh; \ 4513 TCGv_i32 t0, t1; \ 4514 sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03); \ 4515 t0 = tcg_temp_new_i32(); \ 4516 if (sh > 0) \ 4517 tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh); \ 4518 else if (sh < 0) \ 4519 tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh); \ 4520 else \ 4521 tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]); \ 4522 t1 = tcg_temp_new_i32(); \ 4523 sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03); \ 4524 if (sh > 0) \ 4525 tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh); \ 4526 else if (sh < 0) \ 4527 tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh); \ 4528 else \ 4529 tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]); \ 4530 tcg_op(t0, t0, t1); \ 4531 bitmask = 0x08 >> (crbD(ctx->opcode) & 0x03); \ 4532 tcg_gen_andi_i32(t0, t0, bitmask); \ 4533 tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask); \ 4534 tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], 
t0, t1); \ 4535 tcg_temp_free_i32(t0); \ 4536 tcg_temp_free_i32(t1); \ 4537 } 4538 4539 /* crand */ 4540 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08); 4541 /* crandc */ 4542 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04); 4543 /* creqv */ 4544 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09); 4545 /* crnand */ 4546 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07); 4547 /* crnor */ 4548 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01); 4549 /* cror */ 4550 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E); 4551 /* crorc */ 4552 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D); 4553 /* crxor */ 4554 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06); 4555 4556 /* mcrf */ 4557 static void gen_mcrf(DisasContext *ctx) 4558 { 4559 tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]); 4560 } 4561 4562 /*** System linkage ***/ 4563 4564 /* rfi (supervisor only) */ 4565 static void gen_rfi(DisasContext *ctx) 4566 { 4567 #if defined(CONFIG_USER_ONLY) 4568 GEN_PRIV; 4569 #else 4570 /* 4571 * This instruction doesn't exist anymore on 64-bit server 4572 * processors compliant with arch 2.x 4573 */ 4574 if (is_book3s_arch2x(ctx)) { 4575 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 4576 return; 4577 } 4578 /* Restore CPU state */ 4579 CHK_SV; 4580 gen_icount_io_start(ctx); 4581 gen_update_cfar(ctx, ctx->cia); 4582 gen_helper_rfi(cpu_env); 4583 ctx->base.is_jmp = DISAS_EXIT; 4584 #endif 4585 } 4586 4587 #if defined(TARGET_PPC64) 4588 static void gen_rfid(DisasContext *ctx) 4589 { 4590 #if defined(CONFIG_USER_ONLY) 4591 GEN_PRIV; 4592 #else 4593 /* Restore CPU state */ 4594 CHK_SV; 4595 gen_icount_io_start(ctx); 4596 gen_update_cfar(ctx, ctx->cia); 4597 gen_helper_rfid(cpu_env); 4598 ctx->base.is_jmp = DISAS_EXIT; 4599 #endif 4600 } 4601 4602 #if !defined(CONFIG_USER_ONLY) 4603 static void gen_rfscv(DisasContext *ctx) 4604 { 4605 #if defined(CONFIG_USER_ONLY) 4606 GEN_PRIV; 4607 #else 4608 /* Restore CPU state */ 4609 CHK_SV; 4610 gen_icount_io_start(ctx); 4611 gen_update_cfar(ctx, ctx->cia); 4612 gen_helper_rfscv(cpu_env); 4613 ctx->base.is_jmp = DISAS_EXIT; 4614 #endif 4615 } 4616 #endif 4617 4618 static void gen_hrfid(DisasContext *ctx) 4619 { 4620 #if defined(CONFIG_USER_ONLY) 4621 GEN_PRIV; 4622 #else 4623 /* Restore CPU state */ 4624 CHK_HV; 4625 gen_helper_hrfid(cpu_env); 4626 ctx->base.is_jmp = DISAS_EXIT; 4627 #endif 4628 } 4629 #endif 4630 4631 /* sc */ 4632 #if defined(CONFIG_USER_ONLY) 4633 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER 4634 #else 4635 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL 4636 #define POWERPC_SYSCALL_VECTORED POWERPC_EXCP_SYSCALL_VECTORED 4637 #endif 4638 static void gen_sc(DisasContext *ctx) 4639 { 4640 uint32_t lev; 4641 4642 lev = (ctx->opcode >> 5) & 0x7F; 4643 gen_exception_err(ctx, POWERPC_SYSCALL, lev); 4644 } 4645 4646 #if defined(TARGET_PPC64) 4647 #if !defined(CONFIG_USER_ONLY) 4648 static void gen_scv(DisasContext *ctx) 4649 { 4650 uint32_t lev = (ctx->opcode >> 5) & 0x7F; 4651 4652 /* Set the PC back to the faulting instruction. 
*/ 4653 gen_update_nip(ctx, ctx->cia); 4654 gen_helper_scv(cpu_env, tcg_constant_i32(lev)); 4655 4656 ctx->base.is_jmp = DISAS_NORETURN; 4657 } 4658 #endif 4659 #endif 4660 4661 /*** Trap ***/ 4662 4663 /* Check for unconditional traps (always or never) */ 4664 static bool check_unconditional_trap(DisasContext *ctx) 4665 { 4666 /* Trap never */ 4667 if (TO(ctx->opcode) == 0) { 4668 return true; 4669 } 4670 /* Trap always */ 4671 if (TO(ctx->opcode) == 31) { 4672 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_TRAP); 4673 return true; 4674 } 4675 return false; 4676 } 4677 4678 /* tw */ 4679 static void gen_tw(DisasContext *ctx) 4680 { 4681 TCGv_i32 t0; 4682 4683 if (check_unconditional_trap(ctx)) { 4684 return; 4685 } 4686 t0 = tcg_const_i32(TO(ctx->opcode)); 4687 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 4688 t0); 4689 tcg_temp_free_i32(t0); 4690 } 4691 4692 /* twi */ 4693 static void gen_twi(DisasContext *ctx) 4694 { 4695 TCGv t0; 4696 TCGv_i32 t1; 4697 4698 if (check_unconditional_trap(ctx)) { 4699 return; 4700 } 4701 t0 = tcg_const_tl(SIMM(ctx->opcode)); 4702 t1 = tcg_const_i32(TO(ctx->opcode)); 4703 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1); 4704 tcg_temp_free(t0); 4705 tcg_temp_free_i32(t1); 4706 } 4707 4708 #if defined(TARGET_PPC64) 4709 /* td */ 4710 static void gen_td(DisasContext *ctx) 4711 { 4712 TCGv_i32 t0; 4713 4714 if (check_unconditional_trap(ctx)) { 4715 return; 4716 } 4717 t0 = tcg_const_i32(TO(ctx->opcode)); 4718 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 4719 t0); 4720 tcg_temp_free_i32(t0); 4721 } 4722 4723 /* tdi */ 4724 static void gen_tdi(DisasContext *ctx) 4725 { 4726 TCGv t0; 4727 TCGv_i32 t1; 4728 4729 if (check_unconditional_trap(ctx)) { 4730 return; 4731 } 4732 t0 = tcg_const_tl(SIMM(ctx->opcode)); 4733 t1 = tcg_const_i32(TO(ctx->opcode)); 4734 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1); 4735 tcg_temp_free(t0); 4736 tcg_temp_free_i32(t1); 4737 } 4738 #endif 4739 4740 /*** Processor control ***/ 4741 4742 /* mcrxr */ 4743 static void gen_mcrxr(DisasContext *ctx) 4744 { 4745 TCGv_i32 t0 = tcg_temp_new_i32(); 4746 TCGv_i32 t1 = tcg_temp_new_i32(); 4747 TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)]; 4748 4749 tcg_gen_trunc_tl_i32(t0, cpu_so); 4750 tcg_gen_trunc_tl_i32(t1, cpu_ov); 4751 tcg_gen_trunc_tl_i32(dst, cpu_ca); 4752 tcg_gen_shli_i32(t0, t0, 3); 4753 tcg_gen_shli_i32(t1, t1, 2); 4754 tcg_gen_shli_i32(dst, dst, 1); 4755 tcg_gen_or_i32(dst, dst, t0); 4756 tcg_gen_or_i32(dst, dst, t1); 4757 tcg_temp_free_i32(t0); 4758 tcg_temp_free_i32(t1); 4759 4760 tcg_gen_movi_tl(cpu_so, 0); 4761 tcg_gen_movi_tl(cpu_ov, 0); 4762 tcg_gen_movi_tl(cpu_ca, 0); 4763 } 4764 4765 #ifdef TARGET_PPC64 4766 /* mcrxrx */ 4767 static void gen_mcrxrx(DisasContext *ctx) 4768 { 4769 TCGv t0 = tcg_temp_new(); 4770 TCGv t1 = tcg_temp_new(); 4771 TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)]; 4772 4773 /* copy OV and OV32 */ 4774 tcg_gen_shli_tl(t0, cpu_ov, 1); 4775 tcg_gen_or_tl(t0, t0, cpu_ov32); 4776 tcg_gen_shli_tl(t0, t0, 2); 4777 /* copy CA and CA32 */ 4778 tcg_gen_shli_tl(t1, cpu_ca, 1); 4779 tcg_gen_or_tl(t1, t1, cpu_ca32); 4780 tcg_gen_or_tl(t0, t0, t1); 4781 tcg_gen_trunc_tl_i32(dst, t0); 4782 tcg_temp_free(t0); 4783 tcg_temp_free(t1); 4784 } 4785 #endif 4786 4787 /* mfcr mfocrf */ 4788 static void gen_mfcr(DisasContext *ctx) 4789 { 4790 uint32_t crm, crn; 4791 4792 if (likely(ctx->opcode & 0x00100000)) { 4793 crm = CRM(ctx->opcode); 4794 if (likely(crm && ((crm & (crm - 1)) == 0))) { 
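/* mfocrf with exactly one CR field selected: crm is a non-zero power of two. */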
4795 crn = ctz32(crm); 4796 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]); 4797 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], 4798 cpu_gpr[rD(ctx->opcode)], crn * 4); 4799 } 4800 } else { 4801 TCGv_i32 t0 = tcg_temp_new_i32(); 4802 tcg_gen_mov_i32(t0, cpu_crf[0]); 4803 tcg_gen_shli_i32(t0, t0, 4); 4804 tcg_gen_or_i32(t0, t0, cpu_crf[1]); 4805 tcg_gen_shli_i32(t0, t0, 4); 4806 tcg_gen_or_i32(t0, t0, cpu_crf[2]); 4807 tcg_gen_shli_i32(t0, t0, 4); 4808 tcg_gen_or_i32(t0, t0, cpu_crf[3]); 4809 tcg_gen_shli_i32(t0, t0, 4); 4810 tcg_gen_or_i32(t0, t0, cpu_crf[4]); 4811 tcg_gen_shli_i32(t0, t0, 4); 4812 tcg_gen_or_i32(t0, t0, cpu_crf[5]); 4813 tcg_gen_shli_i32(t0, t0, 4); 4814 tcg_gen_or_i32(t0, t0, cpu_crf[6]); 4815 tcg_gen_shli_i32(t0, t0, 4); 4816 tcg_gen_or_i32(t0, t0, cpu_crf[7]); 4817 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0); 4818 tcg_temp_free_i32(t0); 4819 } 4820 } 4821 4822 /* mfmsr */ 4823 static void gen_mfmsr(DisasContext *ctx) 4824 { 4825 CHK_SV; 4826 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr); 4827 } 4828 4829 /* mfspr */ 4830 static inline void gen_op_mfspr(DisasContext *ctx) 4831 { 4832 void (*read_cb)(DisasContext *ctx, int gprn, int sprn); 4833 uint32_t sprn = SPR(ctx->opcode); 4834 4835 #if defined(CONFIG_USER_ONLY) 4836 read_cb = ctx->spr_cb[sprn].uea_read; 4837 #else 4838 if (ctx->pr) { 4839 read_cb = ctx->spr_cb[sprn].uea_read; 4840 } else if (ctx->hv) { 4841 read_cb = ctx->spr_cb[sprn].hea_read; 4842 } else { 4843 read_cb = ctx->spr_cb[sprn].oea_read; 4844 } 4845 #endif 4846 if (likely(read_cb != NULL)) { 4847 if (likely(read_cb != SPR_NOACCESS)) { 4848 (*read_cb)(ctx, rD(ctx->opcode), sprn); 4849 } else { 4850 /* Privilege exception */ 4851 /* 4852 * This is a hack to avoid warnings when running Linux: 4853 * this OS breaks the PowerPC virtualisation model, 4854 * allowing userland application to read the PVR 4855 */ 4856 if (sprn != SPR_PVR) { 4857 qemu_log_mask(LOG_GUEST_ERROR, "Trying to read privileged spr " 4858 "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn, 4859 ctx->cia); 4860 } 4861 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG); 4862 } 4863 } else { 4864 /* ISA 2.07 defines these as no-ops */ 4865 if ((ctx->insns_flags2 & PPC2_ISA207S) && 4866 (sprn >= 808 && sprn <= 811)) { 4867 /* This is a nop */ 4868 return; 4869 } 4870 /* Not defined */ 4871 qemu_log_mask(LOG_GUEST_ERROR, 4872 "Trying to read invalid spr %d (0x%03x) at " 4873 TARGET_FMT_lx "\n", sprn, sprn, ctx->cia); 4874 4875 /* 4876 * The behaviour depends on MSR:PR and SPR# bit 0x10, it can 4877 * generate a priv, a hv emu or a no-op 4878 */ 4879 if (sprn & 0x10) { 4880 if (ctx->pr) { 4881 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 4882 } 4883 } else { 4884 if (ctx->pr || sprn == 0 || sprn == 4 || sprn == 5 || sprn == 6) { 4885 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 4886 } 4887 } 4888 } 4889 } 4890 4891 static void gen_mfspr(DisasContext *ctx) 4892 { 4893 gen_op_mfspr(ctx); 4894 } 4895 4896 /* mftb */ 4897 static void gen_mftb(DisasContext *ctx) 4898 { 4899 gen_op_mfspr(ctx); 4900 } 4901 4902 /* mtcrf mtocrf*/ 4903 static void gen_mtcrf(DisasContext *ctx) 4904 { 4905 uint32_t crm, crn; 4906 4907 crm = CRM(ctx->opcode); 4908 if (likely((ctx->opcode & 0x00100000))) { 4909 if (crm && ((crm & (crm - 1)) == 0)) { 4910 TCGv_i32 temp = tcg_temp_new_i32(); 4911 crn = ctz32(crm); 4912 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]); 4913 tcg_gen_shri_i32(temp, temp, crn * 4); 4914 tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf); 4915 tcg_temp_free_i32(temp); 
4916 } 4917 } else { 4918 TCGv_i32 temp = tcg_temp_new_i32(); 4919 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]); 4920 for (crn = 0 ; crn < 8 ; crn++) { 4921 if (crm & (1 << crn)) { 4922 tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4); 4923 tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf); 4924 } 4925 } 4926 tcg_temp_free_i32(temp); 4927 } 4928 } 4929 4930 /* mtmsr */ 4931 #if defined(TARGET_PPC64) 4932 static void gen_mtmsrd(DisasContext *ctx) 4933 { 4934 if (unlikely(!is_book3s_arch2x(ctx))) { 4935 gen_invalid(ctx); 4936 return; 4937 } 4938 4939 CHK_SV; 4940 4941 #if !defined(CONFIG_USER_ONLY) 4942 gen_icount_io_start(ctx); 4943 if (ctx->opcode & 0x00010000) { 4944 /* L=1 form only updates EE and RI */ 4945 TCGv t0 = tcg_temp_new(); 4946 TCGv t1 = tcg_temp_new(); 4947 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], 4948 (1 << MSR_RI) | (1 << MSR_EE)); 4949 tcg_gen_andi_tl(t1, cpu_msr, 4950 ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE))); 4951 tcg_gen_or_tl(t1, t1, t0); 4952 4953 gen_helper_store_msr(cpu_env, t1); 4954 tcg_temp_free(t0); 4955 tcg_temp_free(t1); 4956 4957 } else { 4958 /* 4959 * XXX: we need to update nip before the store if we enter 4960 * power saving mode, we will exit the loop directly from 4961 * ppc_store_msr 4962 */ 4963 gen_update_nip(ctx, ctx->base.pc_next); 4964 gen_helper_store_msr(cpu_env, cpu_gpr[rS(ctx->opcode)]); 4965 } 4966 /* Must stop the translation as machine state (may have) changed */ 4967 ctx->base.is_jmp = DISAS_EXIT_UPDATE; 4968 #endif /* !defined(CONFIG_USER_ONLY) */ 4969 } 4970 #endif /* defined(TARGET_PPC64) */ 4971 4972 static void gen_mtmsr(DisasContext *ctx) 4973 { 4974 CHK_SV; 4975 4976 #if !defined(CONFIG_USER_ONLY) 4977 gen_icount_io_start(ctx); 4978 if (ctx->opcode & 0x00010000) { 4979 /* L=1 form only updates EE and RI */ 4980 TCGv t0 = tcg_temp_new(); 4981 TCGv t1 = tcg_temp_new(); 4982 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], 4983 (1 << MSR_RI) | (1 << MSR_EE)); 4984 tcg_gen_andi_tl(t1, cpu_msr, 4985 ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE))); 4986 tcg_gen_or_tl(t1, t1, t0); 4987 4988 gen_helper_store_msr(cpu_env, t1); 4989 tcg_temp_free(t0); 4990 tcg_temp_free(t1); 4991 4992 } else { 4993 TCGv msr = tcg_temp_new(); 4994 4995 /* 4996 * XXX: we need to update nip before the store if we enter 4997 * power saving mode, we will exit the loop directly from 4998 * ppc_store_msr 4999 */ 5000 gen_update_nip(ctx, ctx->base.pc_next); 5001 #if defined(TARGET_PPC64) 5002 tcg_gen_deposit_tl(msr, cpu_msr, cpu_gpr[rS(ctx->opcode)], 0, 32); 5003 #else 5004 tcg_gen_mov_tl(msr, cpu_gpr[rS(ctx->opcode)]); 5005 #endif 5006 gen_helper_store_msr(cpu_env, msr); 5007 tcg_temp_free(msr); 5008 } 5009 /* Must stop the translation as machine state (may have) changed */ 5010 ctx->base.is_jmp = DISAS_EXIT_UPDATE; 5011 #endif 5012 } 5013 5014 /* mtspr */ 5015 static void gen_mtspr(DisasContext *ctx) 5016 { 5017 void (*write_cb)(DisasContext *ctx, int sprn, int gprn); 5018 uint32_t sprn = SPR(ctx->opcode); 5019 5020 #if defined(CONFIG_USER_ONLY) 5021 write_cb = ctx->spr_cb[sprn].uea_write; 5022 #else 5023 if (ctx->pr) { 5024 write_cb = ctx->spr_cb[sprn].uea_write; 5025 } else if (ctx->hv) { 5026 write_cb = ctx->spr_cb[sprn].hea_write; 5027 } else { 5028 write_cb = ctx->spr_cb[sprn].oea_write; 5029 } 5030 #endif 5031 if (likely(write_cb != NULL)) { 5032 if (likely(write_cb != SPR_NOACCESS)) { 5033 (*write_cb)(ctx, sprn, rS(ctx->opcode)); 5034 } else { 5035 /* Privilege exception */ 5036 qemu_log_mask(LOG_GUEST_ERROR, "Trying to write 
privileged spr " 5037 "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn, 5038 ctx->cia); 5039 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG); 5040 } 5041 } else { 5042 /* ISA 2.07 defines these as no-ops */ 5043 if ((ctx->insns_flags2 & PPC2_ISA207S) && 5044 (sprn >= 808 && sprn <= 811)) { 5045 /* This is a nop */ 5046 return; 5047 } 5048 5049 /* Not defined */ 5050 qemu_log_mask(LOG_GUEST_ERROR, 5051 "Trying to write invalid spr %d (0x%03x) at " 5052 TARGET_FMT_lx "\n", sprn, sprn, ctx->cia); 5053 5054 5055 /* 5056 * The behaviour depends on MSR:PR and SPR# bit 0x10, it can 5057 * generate a priv, a hv emu or a no-op 5058 */ 5059 if (sprn & 0x10) { 5060 if (ctx->pr) { 5061 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 5062 } 5063 } else { 5064 if (ctx->pr || sprn == 0) { 5065 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 5066 } 5067 } 5068 } 5069 } 5070 5071 #if defined(TARGET_PPC64) 5072 /* setb */ 5073 static void gen_setb(DisasContext *ctx) 5074 { 5075 TCGv_i32 t0 = tcg_temp_new_i32(); 5076 TCGv_i32 t8 = tcg_temp_new_i32(); 5077 TCGv_i32 tm1 = tcg_temp_new_i32(); 5078 int crf = crfS(ctx->opcode); 5079 5080 tcg_gen_setcondi_i32(TCG_COND_GEU, t0, cpu_crf[crf], 4); 5081 tcg_gen_movi_i32(t8, 8); 5082 tcg_gen_movi_i32(tm1, -1); 5083 tcg_gen_movcond_i32(TCG_COND_GEU, t0, cpu_crf[crf], t8, tm1, t0); 5084 tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0); 5085 5086 tcg_temp_free_i32(t0); 5087 tcg_temp_free_i32(t8); 5088 tcg_temp_free_i32(tm1); 5089 } 5090 #endif 5091 5092 /*** Cache management ***/ 5093 5094 /* dcbf */ 5095 static void gen_dcbf(DisasContext *ctx) 5096 { 5097 /* XXX: specification says this is treated as a load by the MMU */ 5098 TCGv t0; 5099 gen_set_access_type(ctx, ACCESS_CACHE); 5100 t0 = tcg_temp_new(); 5101 gen_addr_reg_index(ctx, t0); 5102 gen_qemu_ld8u(ctx, t0, t0); 5103 tcg_temp_free(t0); 5104 } 5105 5106 /* dcbfep (external PID dcbf) */ 5107 static void gen_dcbfep(DisasContext *ctx) 5108 { 5109 /* XXX: specification says this is treated as a load by the MMU */ 5110 TCGv t0; 5111 CHK_SV; 5112 gen_set_access_type(ctx, ACCESS_CACHE); 5113 t0 = tcg_temp_new(); 5114 gen_addr_reg_index(ctx, t0); 5115 tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB)); 5116 tcg_temp_free(t0); 5117 } 5118 5119 /* dcbi (Supervisor only) */ 5120 static void gen_dcbi(DisasContext *ctx) 5121 { 5122 #if defined(CONFIG_USER_ONLY) 5123 GEN_PRIV; 5124 #else 5125 TCGv EA, val; 5126 5127 CHK_SV; 5128 EA = tcg_temp_new(); 5129 gen_set_access_type(ctx, ACCESS_CACHE); 5130 gen_addr_reg_index(ctx, EA); 5131 val = tcg_temp_new(); 5132 /* XXX: specification says this should be treated as a store by the MMU */ 5133 gen_qemu_ld8u(ctx, val, EA); 5134 gen_qemu_st8(ctx, val, EA); 5135 tcg_temp_free(val); 5136 tcg_temp_free(EA); 5137 #endif /* defined(CONFIG_USER_ONLY) */ 5138 } 5139 5140 /* dcbst */ 5141 static void gen_dcbst(DisasContext *ctx) 5142 { 5143 /* XXX: specification says this is treated as a load by the MMU */ 5144 TCGv t0; 5145 gen_set_access_type(ctx, ACCESS_CACHE); 5146 t0 = tcg_temp_new(); 5147 gen_addr_reg_index(ctx, t0); 5148 gen_qemu_ld8u(ctx, t0, t0); 5149 tcg_temp_free(t0); 5150 } 5151 5152 /* dcbstep (dcbstep External PID version) */ 5153 static void gen_dcbstep(DisasContext *ctx) 5154 { 5155 /* XXX: specification says this is treated as a load by the MMU */ 5156 TCGv t0; 5157 gen_set_access_type(ctx, ACCESS_CACHE); 5158 t0 = tcg_temp_new(); 5159 gen_addr_reg_index(ctx, t0); 5160 tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB)); 5161 tcg_temp_free(t0); 5162 }
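/* The data cache touch hints below (dcbt, dcbtep, dcbtst, dcbtstep) have no architecturally required effect, so they are translated as no-ops. */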
5163 5164 /* dcbt */ 5165 static void gen_dcbt(DisasContext *ctx) 5166 { 5167 /* 5168 * interpreted as no-op 5169 * XXX: specification says this is treated as a load by the MMU but 5170 * does not generate any exception 5171 */ 5172 } 5173 5174 /* dcbtep */ 5175 static void gen_dcbtep(DisasContext *ctx) 5176 { 5177 /* 5178 * interpreted as no-op 5179 * XXX: specification says this is treated as a load by the MMU but 5180 * does not generate any exception 5181 */ 5182 } 5183 5184 /* dcbtst */ 5185 static void gen_dcbtst(DisasContext *ctx) 5186 { 5187 /* 5188 * interpreted as no-op 5189 * XXX: specification says this is treated as a load by the MMU but 5190 * does not generate any exception 5191 */ 5192 } 5193 5194 /* dcbtstep */ 5195 static void gen_dcbtstep(DisasContext *ctx) 5196 { 5197 /* 5198 * interpreted as no-op 5199 * XXX: specification says this is treated as a load by the MMU but 5200 * does not generate any exception 5201 */ 5202 } 5203 5204 /* dcbtls */ 5205 static void gen_dcbtls(DisasContext *ctx) 5206 { 5207 /* Always fails locking the cache */ 5208 TCGv t0 = tcg_temp_new(); 5209 gen_load_spr(t0, SPR_Exxx_L1CSR0); 5210 tcg_gen_ori_tl(t0, t0, L1CSR0_CUL); 5211 gen_store_spr(SPR_Exxx_L1CSR0, t0); 5212 tcg_temp_free(t0); 5213 } 5214 5215 /* dcbz */ 5216 static void gen_dcbz(DisasContext *ctx) 5217 { 5218 TCGv tcgv_addr; 5219 TCGv_i32 tcgv_op; 5220 5221 gen_set_access_type(ctx, ACCESS_CACHE); 5222 tcgv_addr = tcg_temp_new(); 5223 tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000); 5224 gen_addr_reg_index(ctx, tcgv_addr); 5225 gen_helper_dcbz(cpu_env, tcgv_addr, tcgv_op); 5226 tcg_temp_free(tcgv_addr); 5227 tcg_temp_free_i32(tcgv_op); 5228 } 5229 5230 /* dcbzep */ 5231 static void gen_dcbzep(DisasContext *ctx) 5232 { 5233 TCGv tcgv_addr; 5234 TCGv_i32 tcgv_op; 5235 5236 gen_set_access_type(ctx, ACCESS_CACHE); 5237 tcgv_addr = tcg_temp_new(); 5238 tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000); 5239 gen_addr_reg_index(ctx, tcgv_addr); 5240 gen_helper_dcbzep(cpu_env, tcgv_addr, tcgv_op); 5241 tcg_temp_free(tcgv_addr); 5242 tcg_temp_free_i32(tcgv_op); 5243 } 5244 5245 /* dst / dstt */ 5246 static void gen_dst(DisasContext *ctx) 5247 { 5248 if (rA(ctx->opcode) == 0) { 5249 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5250 } else { 5251 /* interpreted as no-op */ 5252 } 5253 } 5254 5255 /* dstst / dststt */ 5256 static void gen_dstst(DisasContext *ctx) 5257 { 5258 if (rA(ctx->opcode) == 0) { 5259 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5260 } else { 5261 /* interpreted as no-op */ 5262 } 5263 5264 } 5265 5266 /* dss / dssall */ 5267 static void gen_dss(DisasContext *ctx) 5268 { 5269 /* interpreted as no-op */ 5270 } 5271 5272 /* icbi */ 5273 static void gen_icbi(DisasContext *ctx) 5274 { 5275 TCGv t0; 5276 gen_set_access_type(ctx, ACCESS_CACHE); 5277 t0 = tcg_temp_new(); 5278 gen_addr_reg_index(ctx, t0); 5279 gen_helper_icbi(cpu_env, t0); 5280 tcg_temp_free(t0); 5281 } 5282 5283 /* icbiep */ 5284 static void gen_icbiep(DisasContext *ctx) 5285 { 5286 TCGv t0; 5287 gen_set_access_type(ctx, ACCESS_CACHE); 5288 t0 = tcg_temp_new(); 5289 gen_addr_reg_index(ctx, t0); 5290 gen_helper_icbiep(cpu_env, t0); 5291 tcg_temp_free(t0); 5292 } 5293 5294 /* Optional: */ 5295 /* dcba */ 5296 static void gen_dcba(DisasContext *ctx) 5297 { 5298 /* 5299 * interpreted as no-op 5300 * XXX: specification says this is treated as a store by the MMU 5301 * but does not generate any exception 5302 */ 5303 } 5304 5305 /*** Segment register manipulation ***/ 5306 /* Supervisor only: */ 5307 5308 /* 
mfsr */ 5309 static void gen_mfsr(DisasContext *ctx) 5310 { 5311 #if defined(CONFIG_USER_ONLY) 5312 GEN_PRIV; 5313 #else 5314 TCGv t0; 5315 5316 CHK_SV; 5317 t0 = tcg_const_tl(SR(ctx->opcode)); 5318 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5319 tcg_temp_free(t0); 5320 #endif /* defined(CONFIG_USER_ONLY) */ 5321 } 5322 5323 /* mfsrin */ 5324 static void gen_mfsrin(DisasContext *ctx) 5325 { 5326 #if defined(CONFIG_USER_ONLY) 5327 GEN_PRIV; 5328 #else 5329 TCGv t0; 5330 5331 CHK_SV; 5332 t0 = tcg_temp_new(); 5333 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 5334 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5335 tcg_temp_free(t0); 5336 #endif /* defined(CONFIG_USER_ONLY) */ 5337 } 5338 5339 /* mtsr */ 5340 static void gen_mtsr(DisasContext *ctx) 5341 { 5342 #if defined(CONFIG_USER_ONLY) 5343 GEN_PRIV; 5344 #else 5345 TCGv t0; 5346 5347 CHK_SV; 5348 t0 = tcg_const_tl(SR(ctx->opcode)); 5349 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 5350 tcg_temp_free(t0); 5351 #endif /* defined(CONFIG_USER_ONLY) */ 5352 } 5353 5354 /* mtsrin */ 5355 static void gen_mtsrin(DisasContext *ctx) 5356 { 5357 #if defined(CONFIG_USER_ONLY) 5358 GEN_PRIV; 5359 #else 5360 TCGv t0; 5361 CHK_SV; 5362 5363 t0 = tcg_temp_new(); 5364 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 5365 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rD(ctx->opcode)]); 5366 tcg_temp_free(t0); 5367 #endif /* defined(CONFIG_USER_ONLY) */ 5368 } 5369 5370 #if defined(TARGET_PPC64) 5371 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */ 5372 5373 /* mfsr */ 5374 static void gen_mfsr_64b(DisasContext *ctx) 5375 { 5376 #if defined(CONFIG_USER_ONLY) 5377 GEN_PRIV; 5378 #else 5379 TCGv t0; 5380 5381 CHK_SV; 5382 t0 = tcg_const_tl(SR(ctx->opcode)); 5383 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5384 tcg_temp_free(t0); 5385 #endif /* defined(CONFIG_USER_ONLY) */ 5386 } 5387 5388 /* mfsrin */ 5389 static void gen_mfsrin_64b(DisasContext *ctx) 5390 { 5391 #if defined(CONFIG_USER_ONLY) 5392 GEN_PRIV; 5393 #else 5394 TCGv t0; 5395 5396 CHK_SV; 5397 t0 = tcg_temp_new(); 5398 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 5399 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5400 tcg_temp_free(t0); 5401 #endif /* defined(CONFIG_USER_ONLY) */ 5402 } 5403 5404 /* mtsr */ 5405 static void gen_mtsr_64b(DisasContext *ctx) 5406 { 5407 #if defined(CONFIG_USER_ONLY) 5408 GEN_PRIV; 5409 #else 5410 TCGv t0; 5411 5412 CHK_SV; 5413 t0 = tcg_const_tl(SR(ctx->opcode)); 5414 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 5415 tcg_temp_free(t0); 5416 #endif /* defined(CONFIG_USER_ONLY) */ 5417 } 5418 5419 /* mtsrin */ 5420 static void gen_mtsrin_64b(DisasContext *ctx) 5421 { 5422 #if defined(CONFIG_USER_ONLY) 5423 GEN_PRIV; 5424 #else 5425 TCGv t0; 5426 5427 CHK_SV; 5428 t0 = tcg_temp_new(); 5429 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 5430 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 5431 tcg_temp_free(t0); 5432 #endif /* defined(CONFIG_USER_ONLY) */ 5433 } 5434 5435 /* slbmte */ 5436 static void gen_slbmte(DisasContext *ctx) 5437 { 5438 #if defined(CONFIG_USER_ONLY) 5439 GEN_PRIV; 5440 #else 5441 CHK_SV; 5442 5443 gen_helper_store_slb(cpu_env, cpu_gpr[rB(ctx->opcode)], 5444 cpu_gpr[rS(ctx->opcode)]); 5445 #endif /* defined(CONFIG_USER_ONLY) */ 5446 } 5447 5448 static void gen_slbmfee(DisasContext *ctx) 5449 { 5450 #if defined(CONFIG_USER_ONLY) 5451 GEN_PRIV; 5452 #else 5453 CHK_SV; 5454 5455 
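/* slbmfee: read the ESID half of the SLB entry selected by rB into rS. */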
gen_helper_load_slb_esid(cpu_gpr[rS(ctx->opcode)], cpu_env, 5456 cpu_gpr[rB(ctx->opcode)]); 5457 #endif /* defined(CONFIG_USER_ONLY) */ 5458 } 5459 5460 static void gen_slbmfev(DisasContext *ctx) 5461 { 5462 #if defined(CONFIG_USER_ONLY) 5463 GEN_PRIV; 5464 #else 5465 CHK_SV; 5466 5467 gen_helper_load_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env, 5468 cpu_gpr[rB(ctx->opcode)]); 5469 #endif /* defined(CONFIG_USER_ONLY) */ 5470 } 5471 5472 static void gen_slbfee_(DisasContext *ctx) 5473 { 5474 #if defined(CONFIG_USER_ONLY) 5475 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); 5476 #else 5477 TCGLabel *l1, *l2; 5478 5479 if (unlikely(ctx->pr)) { 5480 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); 5481 return; 5482 } 5483 gen_helper_find_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env, 5484 cpu_gpr[rB(ctx->opcode)]); 5485 l1 = gen_new_label(); 5486 l2 = gen_new_label(); 5487 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 5488 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rS(ctx->opcode)], -1, l1); 5489 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ); 5490 tcg_gen_br(l2); 5491 gen_set_label(l1); 5492 tcg_gen_movi_tl(cpu_gpr[rS(ctx->opcode)], 0); 5493 gen_set_label(l2); 5494 #endif 5495 } 5496 #endif /* defined(TARGET_PPC64) */ 5497 5498 /*** Lookaside buffer management ***/ 5499 /* Optional & supervisor only: */ 5500 5501 /* tlbia */ 5502 static void gen_tlbia(DisasContext *ctx) 5503 { 5504 #if defined(CONFIG_USER_ONLY) 5505 GEN_PRIV; 5506 #else 5507 CHK_HV; 5508 5509 gen_helper_tlbia(cpu_env); 5510 #endif /* defined(CONFIG_USER_ONLY) */ 5511 } 5512 5513 /* tlbiel */ 5514 static void gen_tlbiel(DisasContext *ctx) 5515 { 5516 #if defined(CONFIG_USER_ONLY) 5517 GEN_PRIV; 5518 #else 5519 CHK_SV; 5520 5521 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5522 #endif /* defined(CONFIG_USER_ONLY) */ 5523 } 5524 5525 /* tlbie */ 5526 static void gen_tlbie(DisasContext *ctx) 5527 { 5528 #if defined(CONFIG_USER_ONLY) 5529 GEN_PRIV; 5530 #else 5531 TCGv_i32 t1; 5532 5533 if (ctx->gtse) { 5534 CHK_SV; /* If gtse is set then tlbie is supervisor privileged */ 5535 } else { 5536 CHK_HV; /* Else hypervisor privileged */ 5537 } 5538 5539 if (NARROW_MODE(ctx)) { 5540 TCGv t0 = tcg_temp_new(); 5541 tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]); 5542 gen_helper_tlbie(cpu_env, t0); 5543 tcg_temp_free(t0); 5544 } else { 5545 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5546 } 5547 t1 = tcg_temp_new_i32(); 5548 tcg_gen_ld_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush)); 5549 tcg_gen_ori_i32(t1, t1, TLB_NEED_GLOBAL_FLUSH); 5550 tcg_gen_st_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush)); 5551 tcg_temp_free_i32(t1); 5552 #endif /* defined(CONFIG_USER_ONLY) */ 5553 } 5554 5555 /* tlbsync */ 5556 static void gen_tlbsync(DisasContext *ctx) 5557 { 5558 #if defined(CONFIG_USER_ONLY) 5559 GEN_PRIV; 5560 #else 5561 5562 if (ctx->gtse) { 5563 CHK_SV; /* If gtse is set then tlbsync is supervisor privileged */ 5564 } else { 5565 CHK_HV; /* Else hypervisor privileged */ 5566 } 5567 5568 /* BookS does both ptesync and tlbsync make tlbsync a nop for server */ 5569 if (ctx->insns_flags & PPC_BOOKE) { 5570 gen_check_tlb_flush(ctx, true); 5571 } 5572 #endif /* defined(CONFIG_USER_ONLY) */ 5573 } 5574 5575 #if defined(TARGET_PPC64) 5576 /* slbia */ 5577 static void gen_slbia(DisasContext *ctx) 5578 { 5579 #if defined(CONFIG_USER_ONLY) 5580 GEN_PRIV; 5581 #else 5582 uint32_t ih = (ctx->opcode >> 21) & 0x7; 5583 TCGv_i32 t0 = tcg_const_i32(ih); 5584 5585 CHK_SV; 5586 5587 gen_helper_slbia(cpu_env, t0); 5588 
tcg_temp_free_i32(t0); 5589 #endif /* defined(CONFIG_USER_ONLY) */ 5590 } 5591 5592 /* slbie */ 5593 static void gen_slbie(DisasContext *ctx) 5594 { 5595 #if defined(CONFIG_USER_ONLY) 5596 GEN_PRIV; 5597 #else 5598 CHK_SV; 5599 5600 gen_helper_slbie(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5601 #endif /* defined(CONFIG_USER_ONLY) */ 5602 } 5603 5604 /* slbieg */ 5605 static void gen_slbieg(DisasContext *ctx) 5606 { 5607 #if defined(CONFIG_USER_ONLY) 5608 GEN_PRIV; 5609 #else 5610 CHK_SV; 5611 5612 gen_helper_slbieg(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5613 #endif /* defined(CONFIG_USER_ONLY) */ 5614 } 5615 5616 /* slbsync */ 5617 static void gen_slbsync(DisasContext *ctx) 5618 { 5619 #if defined(CONFIG_USER_ONLY) 5620 GEN_PRIV; 5621 #else 5622 CHK_SV; 5623 gen_check_tlb_flush(ctx, true); 5624 #endif /* defined(CONFIG_USER_ONLY) */ 5625 } 5626 5627 #endif /* defined(TARGET_PPC64) */ 5628 5629 /*** External control ***/ 5630 /* Optional: */ 5631 5632 /* eciwx */ 5633 static void gen_eciwx(DisasContext *ctx) 5634 { 5635 TCGv t0; 5636 /* Should check EAR[E] ! */ 5637 gen_set_access_type(ctx, ACCESS_EXT); 5638 t0 = tcg_temp_new(); 5639 gen_addr_reg_index(ctx, t0); 5640 tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx, 5641 DEF_MEMOP(MO_UL | MO_ALIGN)); 5642 tcg_temp_free(t0); 5643 } 5644 5645 /* ecowx */ 5646 static void gen_ecowx(DisasContext *ctx) 5647 { 5648 TCGv t0; 5649 /* Should check EAR[E] ! */ 5650 gen_set_access_type(ctx, ACCESS_EXT); 5651 t0 = tcg_temp_new(); 5652 gen_addr_reg_index(ctx, t0); 5653 tcg_gen_qemu_st_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx, 5654 DEF_MEMOP(MO_UL | MO_ALIGN)); 5655 tcg_temp_free(t0); 5656 } 5657 5658 /* PowerPC 601 specific instructions */ 5659 5660 /* abs - abs. */ 5661 static void gen_abs(DisasContext *ctx) 5662 { 5663 TCGv d = cpu_gpr[rD(ctx->opcode)]; 5664 TCGv a = cpu_gpr[rA(ctx->opcode)]; 5665 5666 tcg_gen_abs_tl(d, a); 5667 if (unlikely(Rc(ctx->opcode) != 0)) { 5668 gen_set_Rc0(ctx, d); 5669 } 5670 } 5671 5672 /* abso - abso. */ 5673 static void gen_abso(DisasContext *ctx) 5674 { 5675 TCGv d = cpu_gpr[rD(ctx->opcode)]; 5676 TCGv a = cpu_gpr[rA(ctx->opcode)]; 5677 5678 tcg_gen_setcondi_tl(TCG_COND_EQ, cpu_ov, a, 0x80000000); 5679 tcg_gen_abs_tl(d, a); 5680 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 5681 if (unlikely(Rc(ctx->opcode) != 0)) { 5682 gen_set_Rc0(ctx, d); 5683 } 5684 } 5685 5686 /* clcs */ 5687 static void gen_clcs(DisasContext *ctx) 5688 { 5689 TCGv_i32 t0 = tcg_const_i32(rA(ctx->opcode)); 5690 gen_helper_clcs(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5691 tcg_temp_free_i32(t0); 5692 /* Rc=1 sets CR0 to an undefined state */ 5693 } 5694 5695 /* div - div. */ 5696 static void gen_div(DisasContext *ctx) 5697 { 5698 gen_helper_div(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 5699 cpu_gpr[rB(ctx->opcode)]); 5700 if (unlikely(Rc(ctx->opcode) != 0)) { 5701 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5702 } 5703 } 5704 5705 /* divo - divo. */ 5706 static void gen_divo(DisasContext *ctx) 5707 { 5708 gen_helper_divo(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 5709 cpu_gpr[rB(ctx->opcode)]); 5710 if (unlikely(Rc(ctx->opcode) != 0)) { 5711 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5712 } 5713 } 5714 5715 /* divs - divs. 
*/ 5716 static void gen_divs(DisasContext *ctx) 5717 { 5718 gen_helper_divs(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 5719 cpu_gpr[rB(ctx->opcode)]); 5720 if (unlikely(Rc(ctx->opcode) != 0)) { 5721 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5722 } 5723 } 5724 5725 /* divso - divso. */ 5726 static void gen_divso(DisasContext *ctx) 5727 { 5728 gen_helper_divso(cpu_gpr[rD(ctx->opcode)], cpu_env, 5729 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 5730 if (unlikely(Rc(ctx->opcode) != 0)) { 5731 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5732 } 5733 } 5734 5735 /* doz - doz. */ 5736 static void gen_doz(DisasContext *ctx) 5737 { 5738 TCGLabel *l1 = gen_new_label(); 5739 TCGLabel *l2 = gen_new_label(); 5740 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], 5741 cpu_gpr[rA(ctx->opcode)], l1); 5742 tcg_gen_sub_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 5743 cpu_gpr[rA(ctx->opcode)]); 5744 tcg_gen_br(l2); 5745 gen_set_label(l1); 5746 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 5747 gen_set_label(l2); 5748 if (unlikely(Rc(ctx->opcode) != 0)) { 5749 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5750 } 5751 } 5752 5753 /* dozo - dozo. */ 5754 static void gen_dozo(DisasContext *ctx) 5755 { 5756 TCGLabel *l1 = gen_new_label(); 5757 TCGLabel *l2 = gen_new_label(); 5758 TCGv t0 = tcg_temp_new(); 5759 TCGv t1 = tcg_temp_new(); 5760 TCGv t2 = tcg_temp_new(); 5761 /* Start with XER OV disabled, the most likely case */ 5762 tcg_gen_movi_tl(cpu_ov, 0); 5763 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], 5764 cpu_gpr[rA(ctx->opcode)], l1); 5765 tcg_gen_sub_tl(t0, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5766 tcg_gen_xor_tl(t1, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5767 tcg_gen_xor_tl(t2, cpu_gpr[rA(ctx->opcode)], t0); 5768 tcg_gen_andc_tl(t1, t1, t2); 5769 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0); 5770 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2); 5771 tcg_gen_movi_tl(cpu_ov, 1); 5772 tcg_gen_movi_tl(cpu_so, 1); 5773 tcg_gen_br(l2); 5774 gen_set_label(l1); 5775 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 5776 gen_set_label(l2); 5777 tcg_temp_free(t0); 5778 tcg_temp_free(t1); 5779 tcg_temp_free(t2); 5780 if (unlikely(Rc(ctx->opcode) != 0)) { 5781 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5782 } 5783 } 5784 5785 /* dozi */ 5786 static void gen_dozi(DisasContext *ctx) 5787 { 5788 target_long simm = SIMM(ctx->opcode); 5789 TCGLabel *l1 = gen_new_label(); 5790 TCGLabel *l2 = gen_new_label(); 5791 tcg_gen_brcondi_tl(TCG_COND_LT, cpu_gpr[rA(ctx->opcode)], simm, l1); 5792 tcg_gen_subfi_tl(cpu_gpr[rD(ctx->opcode)], simm, cpu_gpr[rA(ctx->opcode)]); 5793 tcg_gen_br(l2); 5794 gen_set_label(l1); 5795 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 5796 gen_set_label(l2); 5797 if (unlikely(Rc(ctx->opcode) != 0)) { 5798 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5799 } 5800 } 5801 5802 /* lscbx - lscbx. */ 5803 static void gen_lscbx(DisasContext *ctx) 5804 { 5805 TCGv t0 = tcg_temp_new(); 5806 TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode)); 5807 TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode)); 5808 TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode)); 5809 5810 gen_addr_reg_index(ctx, t0); 5811 gen_helper_lscbx(t0, cpu_env, t0, t1, t2, t3); 5812 tcg_temp_free_i32(t1); 5813 tcg_temp_free_i32(t2); 5814 tcg_temp_free_i32(t3); 5815 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F); 5816 tcg_gen_or_tl(cpu_xer, cpu_xer, t0); 5817 if (unlikely(Rc(ctx->opcode) != 0)) { 5818 gen_set_Rc0(ctx, t0); 5819 } 5820 tcg_temp_free(t0); 5821 } 5822 5823 /* maskg - maskg. 
*/ 5824 static void gen_maskg(DisasContext *ctx) 5825 { 5826 TCGLabel *l1 = gen_new_label(); 5827 TCGv t0 = tcg_temp_new(); 5828 TCGv t1 = tcg_temp_new(); 5829 TCGv t2 = tcg_temp_new(); 5830 TCGv t3 = tcg_temp_new(); 5831 tcg_gen_movi_tl(t3, 0xFFFFFFFF); 5832 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5833 tcg_gen_andi_tl(t1, cpu_gpr[rS(ctx->opcode)], 0x1F); 5834 tcg_gen_addi_tl(t2, t0, 1); 5835 tcg_gen_shr_tl(t2, t3, t2); 5836 tcg_gen_shr_tl(t3, t3, t1); 5837 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], t2, t3); 5838 tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1); 5839 tcg_gen_neg_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5840 gen_set_label(l1); 5841 tcg_temp_free(t0); 5842 tcg_temp_free(t1); 5843 tcg_temp_free(t2); 5844 tcg_temp_free(t3); 5845 if (unlikely(Rc(ctx->opcode) != 0)) { 5846 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5847 } 5848 } 5849 5850 /* maskir - maskir. */ 5851 static void gen_maskir(DisasContext *ctx) 5852 { 5853 TCGv t0 = tcg_temp_new(); 5854 TCGv t1 = tcg_temp_new(); 5855 tcg_gen_and_tl(t0, cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 5856 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 5857 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5858 tcg_temp_free(t0); 5859 tcg_temp_free(t1); 5860 if (unlikely(Rc(ctx->opcode) != 0)) { 5861 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5862 } 5863 } 5864 5865 /* mul - mul. */ 5866 static void gen_mul(DisasContext *ctx) 5867 { 5868 TCGv_i64 t0 = tcg_temp_new_i64(); 5869 TCGv_i64 t1 = tcg_temp_new_i64(); 5870 TCGv t2 = tcg_temp_new(); 5871 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]); 5872 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]); 5873 tcg_gen_mul_i64(t0, t0, t1); 5874 tcg_gen_trunc_i64_tl(t2, t0); 5875 gen_store_spr(SPR_MQ, t2); 5876 tcg_gen_shri_i64(t1, t0, 32); 5877 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1); 5878 tcg_temp_free_i64(t0); 5879 tcg_temp_free_i64(t1); 5880 tcg_temp_free(t2); 5881 if (unlikely(Rc(ctx->opcode) != 0)) { 5882 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5883 } 5884 } 5885 5886 /* mulo - mulo. */ 5887 static void gen_mulo(DisasContext *ctx) 5888 { 5889 TCGLabel *l1 = gen_new_label(); 5890 TCGv_i64 t0 = tcg_temp_new_i64(); 5891 TCGv_i64 t1 = tcg_temp_new_i64(); 5892 TCGv t2 = tcg_temp_new(); 5893 /* Start with XER OV disabled, the most likely case */ 5894 tcg_gen_movi_tl(cpu_ov, 0); 5895 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]); 5896 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]); 5897 tcg_gen_mul_i64(t0, t0, t1); 5898 tcg_gen_trunc_i64_tl(t2, t0); 5899 gen_store_spr(SPR_MQ, t2); 5900 tcg_gen_shri_i64(t1, t0, 32); 5901 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1); 5902 tcg_gen_ext32s_i64(t1, t0); 5903 tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1); 5904 tcg_gen_movi_tl(cpu_ov, 1); 5905 tcg_gen_movi_tl(cpu_so, 1); 5906 gen_set_label(l1); 5907 tcg_temp_free_i64(t0); 5908 tcg_temp_free_i64(t1); 5909 tcg_temp_free(t2); 5910 if (unlikely(Rc(ctx->opcode) != 0)) { 5911 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5912 } 5913 } 5914 5915 /* nabs - nabs. */ 5916 static void gen_nabs(DisasContext *ctx) 5917 { 5918 TCGv d = cpu_gpr[rD(ctx->opcode)]; 5919 TCGv a = cpu_gpr[rA(ctx->opcode)]; 5920 5921 tcg_gen_abs_tl(d, a); 5922 tcg_gen_neg_tl(d, d); 5923 if (unlikely(Rc(ctx->opcode) != 0)) { 5924 gen_set_Rc0(ctx, d); 5925 } 5926 } 5927 5928 /* nabso - nabso. 
*/ 5929 static void gen_nabso(DisasContext *ctx) 5930 { 5931 TCGv d = cpu_gpr[rD(ctx->opcode)]; 5932 TCGv a = cpu_gpr[rA(ctx->opcode)]; 5933 5934 tcg_gen_abs_tl(d, a); 5935 tcg_gen_neg_tl(d, d); 5936 /* nabs never overflows */ 5937 tcg_gen_movi_tl(cpu_ov, 0); 5938 if (unlikely(Rc(ctx->opcode) != 0)) { 5939 gen_set_Rc0(ctx, d); 5940 } 5941 } 5942 5943 /* rlmi - rlmi. */ 5944 static void gen_rlmi(DisasContext *ctx) 5945 { 5946 uint32_t mb = MB(ctx->opcode); 5947 uint32_t me = ME(ctx->opcode); 5948 TCGv t0 = tcg_temp_new(); 5949 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5950 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 5951 tcg_gen_andi_tl(t0, t0, MASK(mb, me)); 5952 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 5953 ~MASK(mb, me)); 5954 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], t0); 5955 tcg_temp_free(t0); 5956 if (unlikely(Rc(ctx->opcode) != 0)) { 5957 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5958 } 5959 } 5960 5961 /* rrib - rrib. */ 5962 static void gen_rrib(DisasContext *ctx) 5963 { 5964 TCGv t0 = tcg_temp_new(); 5965 TCGv t1 = tcg_temp_new(); 5966 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5967 tcg_gen_movi_tl(t1, 0x80000000); 5968 tcg_gen_shr_tl(t1, t1, t0); 5969 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 5970 tcg_gen_and_tl(t0, t0, t1); 5971 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], t1); 5972 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5973 tcg_temp_free(t0); 5974 tcg_temp_free(t1); 5975 if (unlikely(Rc(ctx->opcode) != 0)) { 5976 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5977 } 5978 } 5979 5980 /* sle - sle. */ 5981 static void gen_sle(DisasContext *ctx) 5982 { 5983 TCGv t0 = tcg_temp_new(); 5984 TCGv t1 = tcg_temp_new(); 5985 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5986 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5987 tcg_gen_subfi_tl(t1, 32, t1); 5988 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 5989 tcg_gen_or_tl(t1, t0, t1); 5990 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5991 gen_store_spr(SPR_MQ, t1); 5992 tcg_temp_free(t0); 5993 tcg_temp_free(t1); 5994 if (unlikely(Rc(ctx->opcode) != 0)) { 5995 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5996 } 5997 } 5998 5999 /* sleq - sleq. */ 6000 static void gen_sleq(DisasContext *ctx) 6001 { 6002 TCGv t0 = tcg_temp_new(); 6003 TCGv t1 = tcg_temp_new(); 6004 TCGv t2 = tcg_temp_new(); 6005 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 6006 tcg_gen_movi_tl(t2, 0xFFFFFFFF); 6007 tcg_gen_shl_tl(t2, t2, t0); 6008 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 6009 gen_load_spr(t1, SPR_MQ); 6010 gen_store_spr(SPR_MQ, t0); 6011 tcg_gen_and_tl(t0, t0, t2); 6012 tcg_gen_andc_tl(t1, t1, t2); 6013 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 6014 tcg_temp_free(t0); 6015 tcg_temp_free(t1); 6016 tcg_temp_free(t2); 6017 if (unlikely(Rc(ctx->opcode) != 0)) { 6018 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6019 } 6020 } 6021 6022 /* sliq - sliq. */ 6023 static void gen_sliq(DisasContext *ctx) 6024 { 6025 int sh = SH(ctx->opcode); 6026 TCGv t0 = tcg_temp_new(); 6027 TCGv t1 = tcg_temp_new(); 6028 tcg_gen_shli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 6029 tcg_gen_shri_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 6030 tcg_gen_or_tl(t1, t0, t1); 6031 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 6032 gen_store_spr(SPR_MQ, t1); 6033 tcg_temp_free(t0); 6034 tcg_temp_free(t1); 6035 if (unlikely(Rc(ctx->opcode) != 0)) { 6036 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6037 } 6038 } 6039 6040 /* slliq - slliq. 
*/ 6041 static void gen_slliq(DisasContext *ctx) 6042 { 6043 int sh = SH(ctx->opcode); 6044 TCGv t0 = tcg_temp_new(); 6045 TCGv t1 = tcg_temp_new(); 6046 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 6047 gen_load_spr(t1, SPR_MQ); 6048 gen_store_spr(SPR_MQ, t0); 6049 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU << sh)); 6050 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU << sh)); 6051 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 6052 tcg_temp_free(t0); 6053 tcg_temp_free(t1); 6054 if (unlikely(Rc(ctx->opcode) != 0)) { 6055 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6056 } 6057 } 6058 6059 /* sllq - sllq. */ 6060 static void gen_sllq(DisasContext *ctx) 6061 { 6062 TCGLabel *l1 = gen_new_label(); 6063 TCGLabel *l2 = gen_new_label(); 6064 TCGv t0 = tcg_temp_local_new(); 6065 TCGv t1 = tcg_temp_local_new(); 6066 TCGv t2 = tcg_temp_local_new(); 6067 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 6068 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 6069 tcg_gen_shl_tl(t1, t1, t2); 6070 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 6071 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 6072 gen_load_spr(t0, SPR_MQ); 6073 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 6074 tcg_gen_br(l2); 6075 gen_set_label(l1); 6076 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 6077 gen_load_spr(t2, SPR_MQ); 6078 tcg_gen_andc_tl(t1, t2, t1); 6079 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 6080 gen_set_label(l2); 6081 tcg_temp_free(t0); 6082 tcg_temp_free(t1); 6083 tcg_temp_free(t2); 6084 if (unlikely(Rc(ctx->opcode) != 0)) { 6085 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6086 } 6087 } 6088 6089 /* slq - slq. */ 6090 static void gen_slq(DisasContext *ctx) 6091 { 6092 TCGLabel *l1 = gen_new_label(); 6093 TCGv t0 = tcg_temp_new(); 6094 TCGv t1 = tcg_temp_new(); 6095 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 6096 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 6097 tcg_gen_subfi_tl(t1, 32, t1); 6098 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 6099 tcg_gen_or_tl(t1, t0, t1); 6100 gen_store_spr(SPR_MQ, t1); 6101 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20); 6102 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 6103 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1); 6104 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 6105 gen_set_label(l1); 6106 tcg_temp_free(t0); 6107 tcg_temp_free(t1); 6108 if (unlikely(Rc(ctx->opcode) != 0)) { 6109 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6110 } 6111 } 6112 6113 /* sraiq - sraiq. */ 6114 static void gen_sraiq(DisasContext *ctx) 6115 { 6116 int sh = SH(ctx->opcode); 6117 TCGLabel *l1 = gen_new_label(); 6118 TCGv t0 = tcg_temp_new(); 6119 TCGv t1 = tcg_temp_new(); 6120 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 6121 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 6122 tcg_gen_or_tl(t0, t0, t1); 6123 gen_store_spr(SPR_MQ, t0); 6124 tcg_gen_movi_tl(cpu_ca, 0); 6125 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1); 6126 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1); 6127 tcg_gen_movi_tl(cpu_ca, 1); 6128 gen_set_label(l1); 6129 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh); 6130 tcg_temp_free(t0); 6131 tcg_temp_free(t1); 6132 if (unlikely(Rc(ctx->opcode) != 0)) { 6133 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6134 } 6135 } 6136 6137 /* sraq - sraq. 
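 * sraq stores the 32-bit rotation of rS in MQ and the arithmetic right shift
 * of rS in rA; CA is set only when the result is negative and non-zero bits
 * were shifted out. When bit 0x20 of rB is set (shift amounts 32..63) the
 * result degenerates to pure sign replication.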
*/
6138 static void gen_sraq(DisasContext *ctx)
6139 {
6140 TCGLabel *l1 = gen_new_label();
6141 TCGLabel *l2 = gen_new_label();
6142 TCGv t0 = tcg_temp_new();
6143 TCGv t1 = tcg_temp_local_new();
6144 TCGv t2 = tcg_temp_local_new();
6145 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
6146 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
6147 tcg_gen_sar_tl(t1, cpu_gpr[rS(ctx->opcode)], t2);
6148 tcg_gen_subfi_tl(t2, 32, t2);
6149 tcg_gen_shl_tl(t2, cpu_gpr[rS(ctx->opcode)], t2);
6150 tcg_gen_or_tl(t0, t0, t2);
6151 gen_store_spr(SPR_MQ, t0);
6152 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
6153 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
6154 tcg_gen_mov_tl(t2, cpu_gpr[rS(ctx->opcode)]);
6155 tcg_gen_sari_tl(t1, cpu_gpr[rS(ctx->opcode)], 31);
6156 gen_set_label(l1);
6157 tcg_temp_free(t0);
6158 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t1);
6159 tcg_gen_movi_tl(cpu_ca, 0);
6160 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
6161 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l2);
6162 tcg_gen_movi_tl(cpu_ca, 1);
6163 gen_set_label(l2);
6164 tcg_temp_free(t1);
6165 tcg_temp_free(t2);
6166 if (unlikely(Rc(ctx->opcode) != 0)) {
6167 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6168 }
6169 }
6170
6171 /* sre - sre. */
6172 static void gen_sre(DisasContext *ctx)
6173 {
6174 TCGv t0 = tcg_temp_new();
6175 TCGv t1 = tcg_temp_new();
6176 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
6177 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
6178 tcg_gen_subfi_tl(t1, 32, t1);
6179 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
6180 tcg_gen_or_tl(t1, t0, t1);
6181 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
6182 gen_store_spr(SPR_MQ, t1);
6183 tcg_temp_free(t0);
6184 tcg_temp_free(t1);
6185 if (unlikely(Rc(ctx->opcode) != 0)) {
6186 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6187 }
6188 }
6189
6190 /* srea - srea.
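 * srea stores rS rotated right by the shift amount in MQ and places the
 * arithmetic right shift of rS in rA.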
*/ 6191 static void gen_srea(DisasContext *ctx) 6192 { 6193 TCGv t0 = tcg_temp_new(); 6194 TCGv t1 = tcg_temp_new(); 6195 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 6196 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 6197 gen_store_spr(SPR_MQ, t0); 6198 tcg_gen_sar_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t1); 6199 tcg_temp_free(t0); 6200 tcg_temp_free(t1); 6201 if (unlikely(Rc(ctx->opcode) != 0)) { 6202 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6203 } 6204 } 6205 6206 /* sreq */ 6207 static void gen_sreq(DisasContext *ctx) 6208 { 6209 TCGv t0 = tcg_temp_new(); 6210 TCGv t1 = tcg_temp_new(); 6211 TCGv t2 = tcg_temp_new(); 6212 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 6213 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 6214 tcg_gen_shr_tl(t1, t1, t0); 6215 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 6216 gen_load_spr(t2, SPR_MQ); 6217 gen_store_spr(SPR_MQ, t0); 6218 tcg_gen_and_tl(t0, t0, t1); 6219 tcg_gen_andc_tl(t2, t2, t1); 6220 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t2); 6221 tcg_temp_free(t0); 6222 tcg_temp_free(t1); 6223 tcg_temp_free(t2); 6224 if (unlikely(Rc(ctx->opcode) != 0)) { 6225 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6226 } 6227 } 6228 6229 /* sriq */ 6230 static void gen_sriq(DisasContext *ctx) 6231 { 6232 int sh = SH(ctx->opcode); 6233 TCGv t0 = tcg_temp_new(); 6234 TCGv t1 = tcg_temp_new(); 6235 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 6236 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 6237 tcg_gen_or_tl(t1, t0, t1); 6238 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 6239 gen_store_spr(SPR_MQ, t1); 6240 tcg_temp_free(t0); 6241 tcg_temp_free(t1); 6242 if (unlikely(Rc(ctx->opcode) != 0)) { 6243 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6244 } 6245 } 6246 6247 /* srliq */ 6248 static void gen_srliq(DisasContext *ctx) 6249 { 6250 int sh = SH(ctx->opcode); 6251 TCGv t0 = tcg_temp_new(); 6252 TCGv t1 = tcg_temp_new(); 6253 tcg_gen_rotri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 6254 gen_load_spr(t1, SPR_MQ); 6255 gen_store_spr(SPR_MQ, t0); 6256 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU >> sh)); 6257 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU >> sh)); 6258 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 6259 tcg_temp_free(t0); 6260 tcg_temp_free(t1); 6261 if (unlikely(Rc(ctx->opcode) != 0)) { 6262 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6263 } 6264 } 6265 6266 /* srlq */ 6267 static void gen_srlq(DisasContext *ctx) 6268 { 6269 TCGLabel *l1 = gen_new_label(); 6270 TCGLabel *l2 = gen_new_label(); 6271 TCGv t0 = tcg_temp_local_new(); 6272 TCGv t1 = tcg_temp_local_new(); 6273 TCGv t2 = tcg_temp_local_new(); 6274 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 6275 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 6276 tcg_gen_shr_tl(t2, t1, t2); 6277 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 6278 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 6279 gen_load_spr(t0, SPR_MQ); 6280 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t2); 6281 tcg_gen_br(l2); 6282 gen_set_label(l1); 6283 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 6284 tcg_gen_and_tl(t0, t0, t2); 6285 gen_load_spr(t1, SPR_MQ); 6286 tcg_gen_andc_tl(t1, t1, t2); 6287 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 6288 gen_set_label(l2); 6289 tcg_temp_free(t0); 6290 tcg_temp_free(t1); 6291 tcg_temp_free(t2); 6292 if (unlikely(Rc(ctx->opcode) != 0)) { 6293 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6294 } 6295 } 6296 6297 /* srq */ 6298 static void gen_srq(DisasContext *ctx) 6299 { 6300 TCGLabel *l1 = gen_new_label(); 6301 TCGv t0 = tcg_temp_new(); 6302 TCGv t1 = 
tcg_temp_new();
6303 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
6304 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
6305 tcg_gen_subfi_tl(t1, 32, t1);
6306 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
6307 tcg_gen_or_tl(t1, t0, t1);
6308 gen_store_spr(SPR_MQ, t1);
6309 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
6310 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
6311 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
6312 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
6313 gen_set_label(l1);
6314 tcg_temp_free(t0);
6315 tcg_temp_free(t1);
6316 if (unlikely(Rc(ctx->opcode) != 0)) {
6317 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6318 }
6319 }
6320
6321 /* PowerPC 602 specific instructions */
6322
6323 /* dsa */
6324 static void gen_dsa(DisasContext *ctx)
6325 {
6326 /* XXX: TODO */
6327 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6328 }
6329
6330 /* esa */
6331 static void gen_esa(DisasContext *ctx)
6332 {
6333 /* XXX: TODO */
6334 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6335 }
6336
6337 /* mfrom */
6338 static void gen_mfrom(DisasContext *ctx)
6339 {
6340 #if defined(CONFIG_USER_ONLY)
6341 GEN_PRIV;
6342 #else
6343 CHK_SV;
6344 gen_helper_602_mfrom(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
6345 #endif /* defined(CONFIG_USER_ONLY) */
6346 }
6347
6348 /* 602 - 603 - G2 TLB management */
6349
6350 /* tlbld */
6351 static void gen_tlbld_6xx(DisasContext *ctx)
6352 {
6353 #if defined(CONFIG_USER_ONLY)
6354 GEN_PRIV;
6355 #else
6356 CHK_SV;
6357 gen_helper_6xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]);
6358 #endif /* defined(CONFIG_USER_ONLY) */
6359 }
6360
6361 /* tlbli */
6362 static void gen_tlbli_6xx(DisasContext *ctx)
6363 {
6364 #if defined(CONFIG_USER_ONLY)
6365 GEN_PRIV;
6366 #else
6367 CHK_SV;
6368 gen_helper_6xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]);
6369 #endif /* defined(CONFIG_USER_ONLY) */
6370 }
6371
6372 /* 74xx TLB management */
6373
6374 /* tlbld */
6375 static void gen_tlbld_74xx(DisasContext *ctx)
6376 {
6377 #if defined(CONFIG_USER_ONLY)
6378 GEN_PRIV;
6379 #else
6380 CHK_SV;
6381 gen_helper_74xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]);
6382 #endif /* defined(CONFIG_USER_ONLY) */
6383 }
6384
6385 /* tlbli */
6386 static void gen_tlbli_74xx(DisasContext *ctx)
6387 {
6388 #if defined(CONFIG_USER_ONLY)
6389 GEN_PRIV;
6390 #else
6391 CHK_SV;
6392 gen_helper_74xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]);
6393 #endif /* defined(CONFIG_USER_ONLY) */
6394 }
6395
6396 /* POWER instructions not in PowerPC 601 */
6397
6398 /* clf */
6399 static void gen_clf(DisasContext *ctx)
6400 {
6401 /* Cache line flush: implemented as no-op */
6402 }
6403
6404 /* cli */
6405 static void gen_cli(DisasContext *ctx)
6406 {
6407 #if defined(CONFIG_USER_ONLY)
6408 GEN_PRIV;
6409 #else
6410 /* Cache line invalidate: privileged and treated as no-op */
6411 CHK_SV;
6412 #endif /* defined(CONFIG_USER_ONLY) */
6413 }
6414
6415 /* dclst */
6416 static void gen_dclst(DisasContext *ctx)
6417 {
6418 /* Data cache line store: treated as no-op */
6419 }
6420
6421 static void gen_mfsri(DisasContext *ctx)
6422 {
6423 #if defined(CONFIG_USER_ONLY)
6424 GEN_PRIV;
6425 #else
6426 int ra = rA(ctx->opcode);
6427 int rd = rD(ctx->opcode);
6428 TCGv t0;
6429
6430 CHK_SV;
6431 t0 = tcg_temp_new();
6432 gen_addr_reg_index(ctx, t0);
6433 tcg_gen_extract_tl(t0, t0, 28, 4);
6434 gen_helper_load_sr(cpu_gpr[rd], cpu_env, t0);
6435 tcg_temp_free(t0);
6436 if (ra != 0 && ra != rd) {
6437 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rd]);
6438 }
6439 #endif /* defined(CONFIG_USER_ONLY) */
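    /*
     * mfsri: the segment register index is taken from the computed effective
     * address (the 4-bit field extracted at position 28 above), and the value
     * loaded into rD is also copied to rA when rA is neither 0 nor rD.
     */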
6440 } 6441 6442 static void gen_rac(DisasContext *ctx) 6443 { 6444 #if defined(CONFIG_USER_ONLY) 6445 GEN_PRIV; 6446 #else 6447 TCGv t0; 6448 6449 CHK_SV; 6450 t0 = tcg_temp_new(); 6451 gen_addr_reg_index(ctx, t0); 6452 gen_helper_rac(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 6453 tcg_temp_free(t0); 6454 #endif /* defined(CONFIG_USER_ONLY) */ 6455 } 6456 6457 static void gen_rfsvc(DisasContext *ctx) 6458 { 6459 #if defined(CONFIG_USER_ONLY) 6460 GEN_PRIV; 6461 #else 6462 CHK_SV; 6463 6464 gen_helper_rfsvc(cpu_env); 6465 ctx->base.is_jmp = DISAS_EXIT; 6466 #endif /* defined(CONFIG_USER_ONLY) */ 6467 } 6468 6469 /* svc is not implemented for now */ 6470 6471 /* BookE specific instructions */ 6472 6473 /* XXX: not implemented on 440 ? */ 6474 static void gen_mfapidi(DisasContext *ctx) 6475 { 6476 /* XXX: TODO */ 6477 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6478 } 6479 6480 /* XXX: not implemented on 440 ? */ 6481 static void gen_tlbiva(DisasContext *ctx) 6482 { 6483 #if defined(CONFIG_USER_ONLY) 6484 GEN_PRIV; 6485 #else 6486 TCGv t0; 6487 6488 CHK_SV; 6489 t0 = tcg_temp_new(); 6490 gen_addr_reg_index(ctx, t0); 6491 gen_helper_tlbiva(cpu_env, cpu_gpr[rB(ctx->opcode)]); 6492 tcg_temp_free(t0); 6493 #endif /* defined(CONFIG_USER_ONLY) */ 6494 } 6495 6496 /* All 405 MAC instructions are translated here */ 6497 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3, 6498 int ra, int rb, int rt, int Rc) 6499 { 6500 TCGv t0, t1; 6501 6502 t0 = tcg_temp_local_new(); 6503 t1 = tcg_temp_local_new(); 6504 6505 switch (opc3 & 0x0D) { 6506 case 0x05: 6507 /* macchw - macchw. - macchwo - macchwo. */ 6508 /* macchws - macchws. - macchwso - macchwso. */ 6509 /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */ 6510 /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */ 6511 /* mulchw - mulchw. */ 6512 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]); 6513 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16); 6514 tcg_gen_ext16s_tl(t1, t1); 6515 break; 6516 case 0x04: 6517 /* macchwu - macchwu. - macchwuo - macchwuo. */ 6518 /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */ 6519 /* mulchwu - mulchwu. */ 6520 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]); 6521 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16); 6522 tcg_gen_ext16u_tl(t1, t1); 6523 break; 6524 case 0x01: 6525 /* machhw - machhw. - machhwo - machhwo. */ 6526 /* machhws - machhws. - machhwso - machhwso. */ 6527 /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */ 6528 /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */ 6529 /* mulhhw - mulhhw. */ 6530 tcg_gen_sari_tl(t0, cpu_gpr[ra], 16); 6531 tcg_gen_ext16s_tl(t0, t0); 6532 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16); 6533 tcg_gen_ext16s_tl(t1, t1); 6534 break; 6535 case 0x00: 6536 /* machhwu - machhwu. - machhwuo - machhwuo. */ 6537 /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */ 6538 /* mulhhwu - mulhhwu. */ 6539 tcg_gen_shri_tl(t0, cpu_gpr[ra], 16); 6540 tcg_gen_ext16u_tl(t0, t0); 6541 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16); 6542 tcg_gen_ext16u_tl(t1, t1); 6543 break; 6544 case 0x0D: 6545 /* maclhw - maclhw. - maclhwo - maclhwo. */ 6546 /* maclhws - maclhws. - maclhwso - maclhwso. */ 6547 /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */ 6548 /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */ 6549 /* mullhw - mullhw. */ 6550 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]); 6551 tcg_gen_ext16s_tl(t1, cpu_gpr[rb]); 6552 break; 6553 case 0x0C: 6554 /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */ 6555 /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */ 6556 /* mullhwu - mullhwu. 
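 * All of these variants operate on the unsigned low halfwords of rA and rB,
 * so both operands are zero-extended from 16 bits below.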
*/ 6557 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]); 6558 tcg_gen_ext16u_tl(t1, cpu_gpr[rb]); 6559 break; 6560 } 6561 if (opc2 & 0x04) { 6562 /* (n)multiply-and-accumulate (0x0C / 0x0E) */ 6563 tcg_gen_mul_tl(t1, t0, t1); 6564 if (opc2 & 0x02) { 6565 /* nmultiply-and-accumulate (0x0E) */ 6566 tcg_gen_sub_tl(t0, cpu_gpr[rt], t1); 6567 } else { 6568 /* multiply-and-accumulate (0x0C) */ 6569 tcg_gen_add_tl(t0, cpu_gpr[rt], t1); 6570 } 6571 6572 if (opc3 & 0x12) { 6573 /* Check overflow and/or saturate */ 6574 TCGLabel *l1 = gen_new_label(); 6575 6576 if (opc3 & 0x10) { 6577 /* Start with XER OV disabled, the most likely case */ 6578 tcg_gen_movi_tl(cpu_ov, 0); 6579 } 6580 if (opc3 & 0x01) { 6581 /* Signed */ 6582 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1); 6583 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1); 6584 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0); 6585 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1); 6586 if (opc3 & 0x02) { 6587 /* Saturate */ 6588 tcg_gen_sari_tl(t0, cpu_gpr[rt], 31); 6589 tcg_gen_xori_tl(t0, t0, 0x7fffffff); 6590 } 6591 } else { 6592 /* Unsigned */ 6593 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1); 6594 if (opc3 & 0x02) { 6595 /* Saturate */ 6596 tcg_gen_movi_tl(t0, UINT32_MAX); 6597 } 6598 } 6599 if (opc3 & 0x10) { 6600 /* Check overflow */ 6601 tcg_gen_movi_tl(cpu_ov, 1); 6602 tcg_gen_movi_tl(cpu_so, 1); 6603 } 6604 gen_set_label(l1); 6605 tcg_gen_mov_tl(cpu_gpr[rt], t0); 6606 } 6607 } else { 6608 tcg_gen_mul_tl(cpu_gpr[rt], t0, t1); 6609 } 6610 tcg_temp_free(t0); 6611 tcg_temp_free(t1); 6612 if (unlikely(Rc) != 0) { 6613 /* Update Rc0 */ 6614 gen_set_Rc0(ctx, cpu_gpr[rt]); 6615 } 6616 } 6617 6618 #define GEN_MAC_HANDLER(name, opc2, opc3) \ 6619 static void glue(gen_, name)(DisasContext *ctx) \ 6620 { \ 6621 gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode), \ 6622 rD(ctx->opcode), Rc(ctx->opcode)); \ 6623 } 6624 6625 /* macchw - macchw. */ 6626 GEN_MAC_HANDLER(macchw, 0x0C, 0x05); 6627 /* macchwo - macchwo. */ 6628 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15); 6629 /* macchws - macchws. */ 6630 GEN_MAC_HANDLER(macchws, 0x0C, 0x07); 6631 /* macchwso - macchwso. */ 6632 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17); 6633 /* macchwsu - macchwsu. */ 6634 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06); 6635 /* macchwsuo - macchwsuo. */ 6636 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16); 6637 /* macchwu - macchwu. */ 6638 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04); 6639 /* macchwuo - macchwuo. */ 6640 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14); 6641 /* machhw - machhw. */ 6642 GEN_MAC_HANDLER(machhw, 0x0C, 0x01); 6643 /* machhwo - machhwo. */ 6644 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11); 6645 /* machhws - machhws. */ 6646 GEN_MAC_HANDLER(machhws, 0x0C, 0x03); 6647 /* machhwso - machhwso. */ 6648 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13); 6649 /* machhwsu - machhwsu. */ 6650 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02); 6651 /* machhwsuo - machhwsuo. */ 6652 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12); 6653 /* machhwu - machhwu. */ 6654 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00); 6655 /* machhwuo - machhwuo. */ 6656 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10); 6657 /* maclhw - maclhw. */ 6658 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D); 6659 /* maclhwo - maclhwo. */ 6660 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D); 6661 /* maclhws - maclhws. */ 6662 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F); 6663 /* maclhwso - maclhwso. */ 6664 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F); 6665 /* maclhwu - maclhwu. */ 6666 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C); 6667 /* maclhwuo - maclhwuo. */ 6668 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C); 6669 /* maclhwsu - maclhwsu. 
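 * The opc3 values passed to GEN_MAC_HANDLER in this list encode the variants
 * decoded by gen_405_mulladd_insn above: bit 0x01 selects signed operands,
 * 0x02 saturation and 0x10 overflow (OE) reporting.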
*/ 6670 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E); 6671 /* maclhwsuo - maclhwsuo. */ 6672 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E); 6673 /* nmacchw - nmacchw. */ 6674 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05); 6675 /* nmacchwo - nmacchwo. */ 6676 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15); 6677 /* nmacchws - nmacchws. */ 6678 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07); 6679 /* nmacchwso - nmacchwso. */ 6680 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17); 6681 /* nmachhw - nmachhw. */ 6682 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01); 6683 /* nmachhwo - nmachhwo. */ 6684 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11); 6685 /* nmachhws - nmachhws. */ 6686 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03); 6687 /* nmachhwso - nmachhwso. */ 6688 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13); 6689 /* nmaclhw - nmaclhw. */ 6690 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D); 6691 /* nmaclhwo - nmaclhwo. */ 6692 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D); 6693 /* nmaclhws - nmaclhws. */ 6694 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F); 6695 /* nmaclhwso - nmaclhwso. */ 6696 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F); 6697 6698 /* mulchw - mulchw. */ 6699 GEN_MAC_HANDLER(mulchw, 0x08, 0x05); 6700 /* mulchwu - mulchwu. */ 6701 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04); 6702 /* mulhhw - mulhhw. */ 6703 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01); 6704 /* mulhhwu - mulhhwu. */ 6705 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00); 6706 /* mullhw - mullhw. */ 6707 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D); 6708 /* mullhwu - mullhwu. */ 6709 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C); 6710 6711 /* mfdcr */ 6712 static void gen_mfdcr(DisasContext *ctx) 6713 { 6714 #if defined(CONFIG_USER_ONLY) 6715 GEN_PRIV; 6716 #else 6717 TCGv dcrn; 6718 6719 CHK_SV; 6720 dcrn = tcg_const_tl(SPR(ctx->opcode)); 6721 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, dcrn); 6722 tcg_temp_free(dcrn); 6723 #endif /* defined(CONFIG_USER_ONLY) */ 6724 } 6725 6726 /* mtdcr */ 6727 static void gen_mtdcr(DisasContext *ctx) 6728 { 6729 #if defined(CONFIG_USER_ONLY) 6730 GEN_PRIV; 6731 #else 6732 TCGv dcrn; 6733 6734 CHK_SV; 6735 dcrn = tcg_const_tl(SPR(ctx->opcode)); 6736 gen_helper_store_dcr(cpu_env, dcrn, cpu_gpr[rS(ctx->opcode)]); 6737 tcg_temp_free(dcrn); 6738 #endif /* defined(CONFIG_USER_ONLY) */ 6739 } 6740 6741 /* mfdcrx */ 6742 /* XXX: not implemented on 440 ? */ 6743 static void gen_mfdcrx(DisasContext *ctx) 6744 { 6745 #if defined(CONFIG_USER_ONLY) 6746 GEN_PRIV; 6747 #else 6748 CHK_SV; 6749 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, 6750 cpu_gpr[rA(ctx->opcode)]); 6751 /* Note: Rc update flag set leads to undefined state of Rc0 */ 6752 #endif /* defined(CONFIG_USER_ONLY) */ 6753 } 6754 6755 /* mtdcrx */ 6756 /* XXX: not implemented on 440 ? 
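 * mtdcrx is the indirect form of mtdcr: the DCR number is taken from rA at
 * run time rather than from the instruction encoding, as the helper call
 * below shows.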
*/ 6757 static void gen_mtdcrx(DisasContext *ctx) 6758 { 6759 #if defined(CONFIG_USER_ONLY) 6760 GEN_PRIV; 6761 #else 6762 CHK_SV; 6763 gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)], 6764 cpu_gpr[rS(ctx->opcode)]); 6765 /* Note: Rc update flag set leads to undefined state of Rc0 */ 6766 #endif /* defined(CONFIG_USER_ONLY) */ 6767 } 6768 6769 /* mfdcrux (PPC 460) : user-mode access to DCR */ 6770 static void gen_mfdcrux(DisasContext *ctx) 6771 { 6772 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, 6773 cpu_gpr[rA(ctx->opcode)]); 6774 /* Note: Rc update flag set leads to undefined state of Rc0 */ 6775 } 6776 6777 /* mtdcrux (PPC 460) : user-mode access to DCR */ 6778 static void gen_mtdcrux(DisasContext *ctx) 6779 { 6780 gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)], 6781 cpu_gpr[rS(ctx->opcode)]); 6782 /* Note: Rc update flag set leads to undefined state of Rc0 */ 6783 } 6784 6785 /* dccci */ 6786 static void gen_dccci(DisasContext *ctx) 6787 { 6788 CHK_SV; 6789 /* interpreted as no-op */ 6790 } 6791 6792 /* dcread */ 6793 static void gen_dcread(DisasContext *ctx) 6794 { 6795 #if defined(CONFIG_USER_ONLY) 6796 GEN_PRIV; 6797 #else 6798 TCGv EA, val; 6799 6800 CHK_SV; 6801 gen_set_access_type(ctx, ACCESS_CACHE); 6802 EA = tcg_temp_new(); 6803 gen_addr_reg_index(ctx, EA); 6804 val = tcg_temp_new(); 6805 gen_qemu_ld32u(ctx, val, EA); 6806 tcg_temp_free(val); 6807 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA); 6808 tcg_temp_free(EA); 6809 #endif /* defined(CONFIG_USER_ONLY) */ 6810 } 6811 6812 /* icbt */ 6813 static void gen_icbt_40x(DisasContext *ctx) 6814 { 6815 /* 6816 * interpreted as no-op 6817 * XXX: specification say this is treated as a load by the MMU but 6818 * does not generate any exception 6819 */ 6820 } 6821 6822 /* iccci */ 6823 static void gen_iccci(DisasContext *ctx) 6824 { 6825 CHK_SV; 6826 /* interpreted as no-op */ 6827 } 6828 6829 /* icread */ 6830 static void gen_icread(DisasContext *ctx) 6831 { 6832 CHK_SV; 6833 /* interpreted as no-op */ 6834 } 6835 6836 /* rfci (supervisor only) */ 6837 static void gen_rfci_40x(DisasContext *ctx) 6838 { 6839 #if defined(CONFIG_USER_ONLY) 6840 GEN_PRIV; 6841 #else 6842 CHK_SV; 6843 /* Restore CPU state */ 6844 gen_helper_40x_rfci(cpu_env); 6845 ctx->base.is_jmp = DISAS_EXIT; 6846 #endif /* defined(CONFIG_USER_ONLY) */ 6847 } 6848 6849 static void gen_rfci(DisasContext *ctx) 6850 { 6851 #if defined(CONFIG_USER_ONLY) 6852 GEN_PRIV; 6853 #else 6854 CHK_SV; 6855 /* Restore CPU state */ 6856 gen_helper_rfci(cpu_env); 6857 ctx->base.is_jmp = DISAS_EXIT; 6858 #endif /* defined(CONFIG_USER_ONLY) */ 6859 } 6860 6861 /* BookE specific */ 6862 6863 /* XXX: not implemented on 440 ? */ 6864 static void gen_rfdi(DisasContext *ctx) 6865 { 6866 #if defined(CONFIG_USER_ONLY) 6867 GEN_PRIV; 6868 #else 6869 CHK_SV; 6870 /* Restore CPU state */ 6871 gen_helper_rfdi(cpu_env); 6872 ctx->base.is_jmp = DISAS_EXIT; 6873 #endif /* defined(CONFIG_USER_ONLY) */ 6874 } 6875 6876 /* XXX: not implemented on 440 ? 
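 * rfmci (return from machine check interrupt) restores the saved machine
 * state through its helper and ends the translation block with DISAS_EXIT,
 * like the other rf* variants above.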
*/ 6877 static void gen_rfmci(DisasContext *ctx) 6878 { 6879 #if defined(CONFIG_USER_ONLY) 6880 GEN_PRIV; 6881 #else 6882 CHK_SV; 6883 /* Restore CPU state */ 6884 gen_helper_rfmci(cpu_env); 6885 ctx->base.is_jmp = DISAS_EXIT; 6886 #endif /* defined(CONFIG_USER_ONLY) */ 6887 } 6888 6889 /* TLB management - PowerPC 405 implementation */ 6890 6891 /* tlbre */ 6892 static void gen_tlbre_40x(DisasContext *ctx) 6893 { 6894 #if defined(CONFIG_USER_ONLY) 6895 GEN_PRIV; 6896 #else 6897 CHK_SV; 6898 switch (rB(ctx->opcode)) { 6899 case 0: 6900 gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_env, 6901 cpu_gpr[rA(ctx->opcode)]); 6902 break; 6903 case 1: 6904 gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_env, 6905 cpu_gpr[rA(ctx->opcode)]); 6906 break; 6907 default: 6908 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6909 break; 6910 } 6911 #endif /* defined(CONFIG_USER_ONLY) */ 6912 } 6913 6914 /* tlbsx - tlbsx. */ 6915 static void gen_tlbsx_40x(DisasContext *ctx) 6916 { 6917 #if defined(CONFIG_USER_ONLY) 6918 GEN_PRIV; 6919 #else 6920 TCGv t0; 6921 6922 CHK_SV; 6923 t0 = tcg_temp_new(); 6924 gen_addr_reg_index(ctx, t0); 6925 gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 6926 tcg_temp_free(t0); 6927 if (Rc(ctx->opcode)) { 6928 TCGLabel *l1 = gen_new_label(); 6929 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 6930 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1); 6931 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02); 6932 gen_set_label(l1); 6933 } 6934 #endif /* defined(CONFIG_USER_ONLY) */ 6935 } 6936 6937 /* tlbwe */ 6938 static void gen_tlbwe_40x(DisasContext *ctx) 6939 { 6940 #if defined(CONFIG_USER_ONLY) 6941 GEN_PRIV; 6942 #else 6943 CHK_SV; 6944 6945 switch (rB(ctx->opcode)) { 6946 case 0: 6947 gen_helper_4xx_tlbwe_hi(cpu_env, cpu_gpr[rA(ctx->opcode)], 6948 cpu_gpr[rS(ctx->opcode)]); 6949 break; 6950 case 1: 6951 gen_helper_4xx_tlbwe_lo(cpu_env, cpu_gpr[rA(ctx->opcode)], 6952 cpu_gpr[rS(ctx->opcode)]); 6953 break; 6954 default: 6955 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6956 break; 6957 } 6958 #endif /* defined(CONFIG_USER_ONLY) */ 6959 } 6960 6961 /* TLB management - PowerPC 440 implementation */ 6962 6963 /* tlbre */ 6964 static void gen_tlbre_440(DisasContext *ctx) 6965 { 6966 #if defined(CONFIG_USER_ONLY) 6967 GEN_PRIV; 6968 #else 6969 CHK_SV; 6970 6971 switch (rB(ctx->opcode)) { 6972 case 0: 6973 case 1: 6974 case 2: 6975 { 6976 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode)); 6977 gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], cpu_env, 6978 t0, cpu_gpr[rA(ctx->opcode)]); 6979 tcg_temp_free_i32(t0); 6980 } 6981 break; 6982 default: 6983 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6984 break; 6985 } 6986 #endif /* defined(CONFIG_USER_ONLY) */ 6987 } 6988 6989 /* tlbsx - tlbsx. 
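 * The 440 tlbsx searches the TLB for the computed effective address; when Rc
 * is set, CR0 is loaded from XER[SO] and CR0[EQ] is set if a matching entry
 * was found (rD != -1).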
*/ 6990 static void gen_tlbsx_440(DisasContext *ctx) 6991 { 6992 #if defined(CONFIG_USER_ONLY) 6993 GEN_PRIV; 6994 #else 6995 TCGv t0; 6996 6997 CHK_SV; 6998 t0 = tcg_temp_new(); 6999 gen_addr_reg_index(ctx, t0); 7000 gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 7001 tcg_temp_free(t0); 7002 if (Rc(ctx->opcode)) { 7003 TCGLabel *l1 = gen_new_label(); 7004 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 7005 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1); 7006 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02); 7007 gen_set_label(l1); 7008 } 7009 #endif /* defined(CONFIG_USER_ONLY) */ 7010 } 7011 7012 /* tlbwe */ 7013 static void gen_tlbwe_440(DisasContext *ctx) 7014 { 7015 #if defined(CONFIG_USER_ONLY) 7016 GEN_PRIV; 7017 #else 7018 CHK_SV; 7019 switch (rB(ctx->opcode)) { 7020 case 0: 7021 case 1: 7022 case 2: 7023 { 7024 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode)); 7025 gen_helper_440_tlbwe(cpu_env, t0, cpu_gpr[rA(ctx->opcode)], 7026 cpu_gpr[rS(ctx->opcode)]); 7027 tcg_temp_free_i32(t0); 7028 } 7029 break; 7030 default: 7031 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 7032 break; 7033 } 7034 #endif /* defined(CONFIG_USER_ONLY) */ 7035 } 7036 7037 /* TLB management - PowerPC BookE 2.06 implementation */ 7038 7039 /* tlbre */ 7040 static void gen_tlbre_booke206(DisasContext *ctx) 7041 { 7042 #if defined(CONFIG_USER_ONLY) 7043 GEN_PRIV; 7044 #else 7045 CHK_SV; 7046 gen_helper_booke206_tlbre(cpu_env); 7047 #endif /* defined(CONFIG_USER_ONLY) */ 7048 } 7049 7050 /* tlbsx - tlbsx. */ 7051 static void gen_tlbsx_booke206(DisasContext *ctx) 7052 { 7053 #if defined(CONFIG_USER_ONLY) 7054 GEN_PRIV; 7055 #else 7056 TCGv t0; 7057 7058 CHK_SV; 7059 if (rA(ctx->opcode)) { 7060 t0 = tcg_temp_new(); 7061 tcg_gen_mov_tl(t0, cpu_gpr[rD(ctx->opcode)]); 7062 } else { 7063 t0 = tcg_const_tl(0); 7064 } 7065 7066 tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]); 7067 gen_helper_booke206_tlbsx(cpu_env, t0); 7068 tcg_temp_free(t0); 7069 #endif /* defined(CONFIG_USER_ONLY) */ 7070 } 7071 7072 /* tlbwe */ 7073 static void gen_tlbwe_booke206(DisasContext *ctx) 7074 { 7075 #if defined(CONFIG_USER_ONLY) 7076 GEN_PRIV; 7077 #else 7078 CHK_SV; 7079 gen_helper_booke206_tlbwe(cpu_env); 7080 #endif /* defined(CONFIG_USER_ONLY) */ 7081 } 7082 7083 static void gen_tlbivax_booke206(DisasContext *ctx) 7084 { 7085 #if defined(CONFIG_USER_ONLY) 7086 GEN_PRIV; 7087 #else 7088 TCGv t0; 7089 7090 CHK_SV; 7091 t0 = tcg_temp_new(); 7092 gen_addr_reg_index(ctx, t0); 7093 gen_helper_booke206_tlbivax(cpu_env, t0); 7094 tcg_temp_free(t0); 7095 #endif /* defined(CONFIG_USER_ONLY) */ 7096 } 7097 7098 static void gen_tlbilx_booke206(DisasContext *ctx) 7099 { 7100 #if defined(CONFIG_USER_ONLY) 7101 GEN_PRIV; 7102 #else 7103 TCGv t0; 7104 7105 CHK_SV; 7106 t0 = tcg_temp_new(); 7107 gen_addr_reg_index(ctx, t0); 7108 7109 switch ((ctx->opcode >> 21) & 0x3) { 7110 case 0: 7111 gen_helper_booke206_tlbilx0(cpu_env, t0); 7112 break; 7113 case 1: 7114 gen_helper_booke206_tlbilx1(cpu_env, t0); 7115 break; 7116 case 3: 7117 gen_helper_booke206_tlbilx3(cpu_env, t0); 7118 break; 7119 default: 7120 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 7121 break; 7122 } 7123 7124 tcg_temp_free(t0); 7125 #endif /* defined(CONFIG_USER_ONLY) */ 7126 } 7127 7128 7129 /* wrtee */ 7130 static void gen_wrtee(DisasContext *ctx) 7131 { 7132 #if defined(CONFIG_USER_ONLY) 7133 GEN_PRIV; 7134 #else 7135 TCGv t0; 7136 7137 CHK_SV; 7138 t0 = tcg_temp_new(); 7139 tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE)); 7140 
tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE)); 7141 tcg_gen_or_tl(cpu_msr, cpu_msr, t0); 7142 tcg_temp_free(t0); 7143 /* 7144 * Stop translation to have a chance to raise an exception if we 7145 * just set msr_ee to 1 7146 */ 7147 ctx->base.is_jmp = DISAS_EXIT_UPDATE; 7148 #endif /* defined(CONFIG_USER_ONLY) */ 7149 } 7150 7151 /* wrteei */ 7152 static void gen_wrteei(DisasContext *ctx) 7153 { 7154 #if defined(CONFIG_USER_ONLY) 7155 GEN_PRIV; 7156 #else 7157 CHK_SV; 7158 if (ctx->opcode & 0x00008000) { 7159 tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE)); 7160 /* Stop translation to have a chance to raise an exception */ 7161 ctx->base.is_jmp = DISAS_EXIT_UPDATE; 7162 } else { 7163 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE)); 7164 } 7165 #endif /* defined(CONFIG_USER_ONLY) */ 7166 } 7167 7168 /* PowerPC 440 specific instructions */ 7169 7170 /* dlmzb */ 7171 static void gen_dlmzb(DisasContext *ctx) 7172 { 7173 TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode)); 7174 gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_env, 7175 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); 7176 tcg_temp_free_i32(t0); 7177 } 7178 7179 /* mbar replaces eieio on 440 */ 7180 static void gen_mbar(DisasContext *ctx) 7181 { 7182 /* interpreted as no-op */ 7183 } 7184 7185 /* msync replaces sync on 440 */ 7186 static void gen_msync_4xx(DisasContext *ctx) 7187 { 7188 /* Only e500 seems to treat reserved bits as invalid */ 7189 if ((ctx->insns_flags2 & PPC2_BOOKE206) && 7190 (ctx->opcode & 0x03FFF801)) { 7191 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 7192 } 7193 /* otherwise interpreted as no-op */ 7194 } 7195 7196 /* icbt */ 7197 static void gen_icbt_440(DisasContext *ctx) 7198 { 7199 /* 7200 * interpreted as no-op 7201 * XXX: specification say this is treated as a load by the MMU but 7202 * does not generate any exception 7203 */ 7204 } 7205 7206 /* Embedded.Processor Control */ 7207 7208 static void gen_msgclr(DisasContext *ctx) 7209 { 7210 #if defined(CONFIG_USER_ONLY) 7211 GEN_PRIV; 7212 #else 7213 CHK_HV; 7214 if (is_book3s_arch2x(ctx)) { 7215 gen_helper_book3s_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]); 7216 } else { 7217 gen_helper_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]); 7218 } 7219 #endif /* defined(CONFIG_USER_ONLY) */ 7220 } 7221 7222 static void gen_msgsnd(DisasContext *ctx) 7223 { 7224 #if defined(CONFIG_USER_ONLY) 7225 GEN_PRIV; 7226 #else 7227 CHK_HV; 7228 if (is_book3s_arch2x(ctx)) { 7229 gen_helper_book3s_msgsnd(cpu_gpr[rB(ctx->opcode)]); 7230 } else { 7231 gen_helper_msgsnd(cpu_gpr[rB(ctx->opcode)]); 7232 } 7233 #endif /* defined(CONFIG_USER_ONLY) */ 7234 } 7235 7236 #if defined(TARGET_PPC64) 7237 static void gen_msgclrp(DisasContext *ctx) 7238 { 7239 #if defined(CONFIG_USER_ONLY) 7240 GEN_PRIV; 7241 #else 7242 CHK_SV; 7243 gen_helper_book3s_msgclrp(cpu_env, cpu_gpr[rB(ctx->opcode)]); 7244 #endif /* defined(CONFIG_USER_ONLY) */ 7245 } 7246 7247 static void gen_msgsndp(DisasContext *ctx) 7248 { 7249 #if defined(CONFIG_USER_ONLY) 7250 GEN_PRIV; 7251 #else 7252 CHK_SV; 7253 gen_helper_book3s_msgsndp(cpu_env, cpu_gpr[rB(ctx->opcode)]); 7254 #endif /* defined(CONFIG_USER_ONLY) */ 7255 } 7256 #endif 7257 7258 static void gen_msgsync(DisasContext *ctx) 7259 { 7260 #if defined(CONFIG_USER_ONLY) 7261 GEN_PRIV; 7262 #else 7263 CHK_HV; 7264 #endif /* defined(CONFIG_USER_ONLY) */ 7265 /* interpreted as no-op */ 7266 } 7267 7268 #if defined(TARGET_PPC64) 7269 static void gen_maddld(DisasContext *ctx) 7270 { 7271 TCGv_i64 t1 = tcg_temp_new_i64(); 7272 7273 tcg_gen_mul_i64(t1, 
cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 7274 tcg_gen_add_i64(cpu_gpr[rD(ctx->opcode)], t1, cpu_gpr[rC(ctx->opcode)]); 7275 tcg_temp_free_i64(t1); 7276 } 7277 7278 /* maddhd maddhdu */ 7279 static void gen_maddhd_maddhdu(DisasContext *ctx) 7280 { 7281 TCGv_i64 lo = tcg_temp_new_i64(); 7282 TCGv_i64 hi = tcg_temp_new_i64(); 7283 TCGv_i64 t1 = tcg_temp_new_i64(); 7284 7285 if (Rc(ctx->opcode)) { 7286 tcg_gen_mulu2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)], 7287 cpu_gpr[rB(ctx->opcode)]); 7288 tcg_gen_movi_i64(t1, 0); 7289 } else { 7290 tcg_gen_muls2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)], 7291 cpu_gpr[rB(ctx->opcode)]); 7292 tcg_gen_sari_i64(t1, cpu_gpr[rC(ctx->opcode)], 63); 7293 } 7294 tcg_gen_add2_i64(t1, cpu_gpr[rD(ctx->opcode)], lo, hi, 7295 cpu_gpr[rC(ctx->opcode)], t1); 7296 tcg_temp_free_i64(lo); 7297 tcg_temp_free_i64(hi); 7298 tcg_temp_free_i64(t1); 7299 } 7300 #endif /* defined(TARGET_PPC64) */ 7301 7302 static void gen_tbegin(DisasContext *ctx) 7303 { 7304 if (unlikely(!ctx->tm_enabled)) { 7305 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); 7306 return; 7307 } 7308 gen_helper_tbegin(cpu_env); 7309 } 7310 7311 #define GEN_TM_NOOP(name) \ 7312 static inline void gen_##name(DisasContext *ctx) \ 7313 { \ 7314 if (unlikely(!ctx->tm_enabled)) { \ 7315 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \ 7316 return; \ 7317 } \ 7318 /* \ 7319 * Because tbegin always fails in QEMU, these user \ 7320 * space instructions all have a simple implementation: \ 7321 * \ 7322 * CR[0] = 0b0 || MSR[TS] || 0b0 \ 7323 * = 0b0 || 0b00 || 0b0 \ 7324 */ \ 7325 tcg_gen_movi_i32(cpu_crf[0], 0); \ 7326 } 7327 7328 GEN_TM_NOOP(tend); 7329 GEN_TM_NOOP(tabort); 7330 GEN_TM_NOOP(tabortwc); 7331 GEN_TM_NOOP(tabortwci); 7332 GEN_TM_NOOP(tabortdc); 7333 GEN_TM_NOOP(tabortdci); 7334 GEN_TM_NOOP(tsr); 7335 7336 static inline void gen_cp_abort(DisasContext *ctx) 7337 { 7338 /* Do Nothing */ 7339 } 7340 7341 #define GEN_CP_PASTE_NOOP(name) \ 7342 static inline void gen_##name(DisasContext *ctx) \ 7343 { \ 7344 /* \ 7345 * Generate invalid exception until we have an \ 7346 * implementation of the copy paste facility \ 7347 */ \ 7348 gen_invalid(ctx); \ 7349 } 7350 7351 GEN_CP_PASTE_NOOP(copy) 7352 GEN_CP_PASTE_NOOP(paste) 7353 7354 static void gen_tcheck(DisasContext *ctx) 7355 { 7356 if (unlikely(!ctx->tm_enabled)) { 7357 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); 7358 return; 7359 } 7360 /* 7361 * Because tbegin always fails, the tcheck implementation is 7362 * simple: 7363 * 7364 * CR[CRF] = TDOOMED || MSR[TS] || 0b0 7365 * = 0b1 || 0b00 || 0b0 7366 */ 7367 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0x8); 7368 } 7369 7370 #if defined(CONFIG_USER_ONLY) 7371 #define GEN_TM_PRIV_NOOP(name) \ 7372 static inline void gen_##name(DisasContext *ctx) \ 7373 { \ 7374 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); \ 7375 } 7376 7377 #else 7378 7379 #define GEN_TM_PRIV_NOOP(name) \ 7380 static inline void gen_##name(DisasContext *ctx) \ 7381 { \ 7382 CHK_SV; \ 7383 if (unlikely(!ctx->tm_enabled)) { \ 7384 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \ 7385 return; \ 7386 } \ 7387 /* \ 7388 * Because tbegin always fails, the implementation is \ 7389 * simple: \ 7390 * \ 7391 * CR[0] = 0b0 || MSR[TS] || 0b0 \ 7392 * = 0b0 || 0b00 | 0b0 \ 7393 */ \ 7394 tcg_gen_movi_i32(cpu_crf[0], 0); \ 7395 } 7396 7397 #endif 7398 7399 GEN_TM_PRIV_NOOP(treclaim); 7400 GEN_TM_PRIV_NOOP(trechkpt); 7401 7402 static inline void get_fpr(TCGv_i64 dst, int regno) 7403 { 7404 tcg_gen_ld_i64(dst, cpu_env, 
fpr_offset(regno)); 7405 } 7406 7407 static inline void set_fpr(int regno, TCGv_i64 src) 7408 { 7409 tcg_gen_st_i64(src, cpu_env, fpr_offset(regno)); 7410 } 7411 7412 static inline void get_avr64(TCGv_i64 dst, int regno, bool high) 7413 { 7414 tcg_gen_ld_i64(dst, cpu_env, avr64_offset(regno, high)); 7415 } 7416 7417 static inline void set_avr64(int regno, TCGv_i64 src, bool high) 7418 { 7419 tcg_gen_st_i64(src, cpu_env, avr64_offset(regno, high)); 7420 } 7421 7422 /* 7423 * Helpers for decodetree used by !function for decoding arguments. 7424 */ 7425 static int times_4(DisasContext *ctx, int x) 7426 { 7427 return x * 4; 7428 } 7429 7430 /* 7431 * Helpers for trans_* functions to check for specific insns flags. 7432 * Use token pasting to ensure that we use the proper flag with the 7433 * proper variable. 7434 */ 7435 #define REQUIRE_INSNS_FLAGS(CTX, NAME) \ 7436 do { \ 7437 if (((CTX)->insns_flags & PPC_##NAME) == 0) { \ 7438 return false; \ 7439 } \ 7440 } while (0) 7441 7442 #define REQUIRE_INSNS_FLAGS2(CTX, NAME) \ 7443 do { \ 7444 if (((CTX)->insns_flags2 & PPC2_##NAME) == 0) { \ 7445 return false; \ 7446 } \ 7447 } while (0) 7448 7449 /* Then special-case the check for 64-bit so that we elide code for ppc32. */ 7450 #if TARGET_LONG_BITS == 32 7451 # define REQUIRE_64BIT(CTX) return false 7452 #else 7453 # define REQUIRE_64BIT(CTX) REQUIRE_INSNS_FLAGS(CTX, 64B) 7454 #endif 7455 7456 /* 7457 * Helpers for implementing sets of trans_* functions. 7458 * Defer the implementation of NAME to FUNC, with optional extra arguments. 7459 */ 7460 #define TRANS(NAME, FUNC, ...) \ 7461 static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \ 7462 { return FUNC(ctx, a, __VA_ARGS__); } 7463 7464 #define TRANS64(NAME, FUNC, ...) \ 7465 static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \ 7466 { REQUIRE_64BIT(ctx); return FUNC(ctx, a, __VA_ARGS__); } 7467 7468 /* TODO: More TRANS* helpers for extra insn_flags checks. 
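 * As a purely illustrative, hypothetical example, a decodetree pattern FOO
 * whose implementation is shared as do_foo() could be wired up as
 *     TRANS(FOO, do_foo, true)
 * or, for an encoding that only exists on 64-bit CPUs,
 *     TRANS64(FOO, do_foo, true)
 * with the trailing arguments forwarded to do_foo().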
*/ 7469 7470 7471 #include "decode-insn32.c.inc" 7472 #include "decode-insn64.c.inc" 7473 #include "translate/fixedpoint-impl.c.inc" 7474 7475 #include "translate/fp-impl.c.inc" 7476 7477 #include "translate/vmx-impl.c.inc" 7478 7479 #include "translate/vsx-impl.c.inc" 7480 #include "translate/vector-impl.c.inc" 7481 7482 #include "translate/dfp-impl.c.inc" 7483 7484 #include "translate/spe-impl.c.inc" 7485 7486 /* Handles lfdp, lxsd, lxssp */ 7487 static void gen_dform39(DisasContext *ctx) 7488 { 7489 switch (ctx->opcode & 0x3) { 7490 case 0: /* lfdp */ 7491 if (ctx->insns_flags2 & PPC2_ISA205) { 7492 return gen_lfdp(ctx); 7493 } 7494 break; 7495 case 2: /* lxsd */ 7496 if (ctx->insns_flags2 & PPC2_ISA300) { 7497 return gen_lxsd(ctx); 7498 } 7499 break; 7500 case 3: /* lxssp */ 7501 if (ctx->insns_flags2 & PPC2_ISA300) { 7502 return gen_lxssp(ctx); 7503 } 7504 break; 7505 } 7506 return gen_invalid(ctx); 7507 } 7508 7509 /* handles stfdp, lxv, stxsd, stxssp lxvx */ 7510 static void gen_dform3D(DisasContext *ctx) 7511 { 7512 if ((ctx->opcode & 3) == 1) { /* DQ-FORM */ 7513 switch (ctx->opcode & 0x7) { 7514 case 1: /* lxv */ 7515 if (ctx->insns_flags2 & PPC2_ISA300) { 7516 return gen_lxv(ctx); 7517 } 7518 break; 7519 case 5: /* stxv */ 7520 if (ctx->insns_flags2 & PPC2_ISA300) { 7521 return gen_stxv(ctx); 7522 } 7523 break; 7524 } 7525 } else { /* DS-FORM */ 7526 switch (ctx->opcode & 0x3) { 7527 case 0: /* stfdp */ 7528 if (ctx->insns_flags2 & PPC2_ISA205) { 7529 return gen_stfdp(ctx); 7530 } 7531 break; 7532 case 2: /* stxsd */ 7533 if (ctx->insns_flags2 & PPC2_ISA300) { 7534 return gen_stxsd(ctx); 7535 } 7536 break; 7537 case 3: /* stxssp */ 7538 if (ctx->insns_flags2 & PPC2_ISA300) { 7539 return gen_stxssp(ctx); 7540 } 7541 break; 7542 } 7543 } 7544 return gen_invalid(ctx); 7545 } 7546 7547 #if defined(TARGET_PPC64) 7548 /* brd */ 7549 static void gen_brd(DisasContext *ctx) 7550 { 7551 tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 7552 } 7553 7554 /* brw */ 7555 static void gen_brw(DisasContext *ctx) 7556 { 7557 tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 7558 tcg_gen_rotli_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 32); 7559 7560 } 7561 7562 /* brh */ 7563 static void gen_brh(DisasContext *ctx) 7564 { 7565 TCGv_i64 t0 = tcg_temp_new_i64(); 7566 TCGv_i64 t1 = tcg_temp_new_i64(); 7567 TCGv_i64 t2 = tcg_temp_new_i64(); 7568 7569 tcg_gen_movi_i64(t0, 0x00ff00ff00ff00ffull); 7570 tcg_gen_shri_i64(t1, cpu_gpr[rS(ctx->opcode)], 8); 7571 tcg_gen_and_i64(t2, t1, t0); 7572 tcg_gen_and_i64(t1, cpu_gpr[rS(ctx->opcode)], t0); 7573 tcg_gen_shli_i64(t1, t1, 8); 7574 tcg_gen_or_i64(cpu_gpr[rA(ctx->opcode)], t1, t2); 7575 7576 tcg_temp_free_i64(t0); 7577 tcg_temp_free_i64(t1); 7578 tcg_temp_free_i64(t2); 7579 } 7580 #endif 7581 7582 static opcode_t opcodes[] = { 7583 #if defined(TARGET_PPC64) 7584 GEN_HANDLER_E(brd, 0x1F, 0x1B, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA310), 7585 GEN_HANDLER_E(brw, 0x1F, 0x1B, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA310), 7586 GEN_HANDLER_E(brh, 0x1F, 0x1B, 0x06, 0x0000F801, PPC_NONE, PPC2_ISA310), 7587 #endif 7588 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE), 7589 #if defined(TARGET_PPC64) 7590 GEN_HANDLER_E(cmpeqb, 0x1F, 0x00, 0x07, 0x00600000, PPC_NONE, PPC2_ISA300), 7591 #endif 7592 GEN_HANDLER_E(cmpb, 0x1F, 0x1C, 0x0F, 0x00000001, PPC_NONE, PPC2_ISA205), 7593 GEN_HANDLER_E(cmprb, 0x1F, 0x00, 0x06, 0x00400001, PPC_NONE, PPC2_ISA300), 7594 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, 
PPC_ISEL), 7595 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7596 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7597 GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER), 7598 GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER), 7599 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER), 7600 GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER), 7601 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7602 #if defined(TARGET_PPC64) 7603 GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B), 7604 #endif 7605 GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER), 7606 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER), 7607 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7608 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7609 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7610 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER), 7611 GEN_HANDLER_E(cnttzw, 0x1F, 0x1A, 0x10, 0x00000000, PPC_NONE, PPC2_ISA300), 7612 GEN_HANDLER_E(copy, 0x1F, 0x06, 0x18, 0x03C00001, PPC_NONE, PPC2_ISA300), 7613 GEN_HANDLER_E(cp_abort, 0x1F, 0x06, 0x1A, 0x03FFF801, PPC_NONE, PPC2_ISA300), 7614 GEN_HANDLER_E(paste, 0x1F, 0x06, 0x1C, 0x03C00000, PPC_NONE, PPC2_ISA300), 7615 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER), 7616 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER), 7617 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7618 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7619 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7620 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7621 GEN_HANDLER(popcntb, 0x1F, 0x1A, 0x03, 0x0000F801, PPC_POPCNTB), 7622 GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD), 7623 GEN_HANDLER_E(prtyw, 0x1F, 0x1A, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA205), 7624 #if defined(TARGET_PPC64) 7625 GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD), 7626 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B), 7627 GEN_HANDLER_E(cnttzd, 0x1F, 0x1A, 0x11, 0x00000000, PPC_NONE, PPC2_ISA300), 7628 GEN_HANDLER_E(darn, 0x1F, 0x13, 0x17, 0x001CF801, PPC_NONE, PPC2_ISA300), 7629 GEN_HANDLER_E(prtyd, 0x1F, 0x1A, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA205), 7630 GEN_HANDLER_E(bpermd, 0x1F, 0x1C, 0x07, 0x00000001, PPC_NONE, PPC2_PERM_ISA206), 7631 #endif 7632 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7633 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7634 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7635 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER), 7636 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER), 7637 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER), 7638 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER), 7639 #if defined(TARGET_PPC64) 7640 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B), 7641 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B), 7642 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B), 7643 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B), 7644 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B), 7645 GEN_HANDLER2_E(extswsli0, "extswsli", 0x1F, 0x1A, 0x1B, 0x00000000, 7646 PPC_NONE, PPC2_ISA300), 7647 GEN_HANDLER2_E(extswsli1, "extswsli", 0x1F, 0x1B, 0x1B, 0x00000000, 7648 PPC_NONE, PPC2_ISA300), 7649 #endif 7650 #if 
defined(TARGET_PPC64) 7651 GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX), 7652 GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B), 7653 #endif 7654 /* handles lfdp, lxsd, lxssp */ 7655 GEN_HANDLER_E(dform39, 0x39, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205), 7656 /* handles stfdp, lxv, stxsd, stxssp, stxv */ 7657 GEN_HANDLER_E(dform3D, 0x3D, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205), 7658 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7659 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7660 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING), 7661 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING), 7662 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING), 7663 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING), 7664 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x01FFF801, PPC_MEM_EIEIO), 7665 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM), 7666 GEN_HANDLER_E(lbarx, 0x1F, 0x14, 0x01, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 7667 GEN_HANDLER_E(lharx, 0x1F, 0x14, 0x03, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 7668 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES), 7669 GEN_HANDLER_E(lwat, 0x1F, 0x06, 0x12, 0x00000001, PPC_NONE, PPC2_ISA300), 7670 GEN_HANDLER_E(stwat, 0x1F, 0x06, 0x16, 0x00000001, PPC_NONE, PPC2_ISA300), 7671 GEN_HANDLER_E(stbcx_, 0x1F, 0x16, 0x15, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 7672 GEN_HANDLER_E(sthcx_, 0x1F, 0x16, 0x16, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 7673 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES), 7674 #if defined(TARGET_PPC64) 7675 GEN_HANDLER_E(ldat, 0x1F, 0x06, 0x13, 0x00000001, PPC_NONE, PPC2_ISA300), 7676 GEN_HANDLER_E(stdat, 0x1F, 0x06, 0x17, 0x00000001, PPC_NONE, PPC2_ISA300), 7677 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B), 7678 GEN_HANDLER_E(lqarx, 0x1F, 0x14, 0x08, 0, PPC_NONE, PPC2_LSQ_ISA207), 7679 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B), 7680 GEN_HANDLER_E(stqcx_, 0x1F, 0x16, 0x05, 0, PPC_NONE, PPC2_LSQ_ISA207), 7681 #endif 7682 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC), 7683 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT), 7684 GEN_HANDLER_E(wait, 0x1F, 0x1E, 0x00, 0x039FF801, PPC_NONE, PPC2_ISA300), 7685 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 7686 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 7687 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW), 7688 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW), 7689 GEN_HANDLER_E(bctar, 0x13, 0x10, 0x11, 0x0000E000, PPC_NONE, PPC2_BCTAR_ISA207), 7690 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER), 7691 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW), 7692 #if defined(TARGET_PPC64) 7693 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B), 7694 #if !defined(CONFIG_USER_ONLY) 7695 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */ 7696 GEN_HANDLER_E(scv, 0x11, 0x10, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300), 7697 GEN_HANDLER_E(scv, 0x11, 0x00, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300), 7698 GEN_HANDLER_E(rfscv, 0x13, 0x12, 0x02, 0x03FF8001, PPC_NONE, PPC2_ISA300), 7699 #endif 7700 GEN_HANDLER_E(stop, 0x13, 0x12, 0x0b, 0x03FFF801, PPC_NONE, PPC2_ISA300), 7701 GEN_HANDLER_E(doze, 0x13, 0x12, 0x0c, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 7702 GEN_HANDLER_E(nap, 0x13, 0x12, 0x0d, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 7703 GEN_HANDLER_E(sleep, 0x13, 0x12, 0x0e, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 7704 GEN_HANDLER_E(rvwinkle, 0x13, 0x12, 
0x0f, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 7705 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H), 7706 #endif 7707 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */ 7708 GEN_HANDLER(sc, 0x11, 0x11, 0xFF, 0x03FFF01D, PPC_FLOW), 7709 GEN_HANDLER(sc, 0x11, 0x01, 0xFF, 0x03FFF01D, PPC_FLOW), 7710 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW), 7711 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 7712 #if defined(TARGET_PPC64) 7713 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B), 7714 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B), 7715 #endif 7716 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC), 7717 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC), 7718 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC), 7719 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC), 7720 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB), 7721 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC), 7722 #if defined(TARGET_PPC64) 7723 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B), 7724 GEN_HANDLER_E(setb, 0x1F, 0x00, 0x04, 0x0003F801, PPC_NONE, PPC2_ISA300), 7725 GEN_HANDLER_E(mcrxrx, 0x1F, 0x00, 0x12, 0x007FF801, PPC_NONE, PPC2_ISA300), 7726 #endif 7727 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001EF801, PPC_MISC), 7728 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000000, PPC_MISC), 7729 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE), 7730 GEN_HANDLER_E(dcbfep, 0x1F, 0x1F, 0x03, 0x03C00001, PPC_NONE, PPC2_BOOKE206), 7731 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE), 7732 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE), 7733 GEN_HANDLER_E(dcbstep, 0x1F, 0x1F, 0x01, 0x03E00001, PPC_NONE, PPC2_BOOKE206), 7734 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x00000001, PPC_CACHE), 7735 GEN_HANDLER_E(dcbtep, 0x1F, 0x1F, 0x09, 0x00000001, PPC_NONE, PPC2_BOOKE206), 7736 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x00000001, PPC_CACHE), 7737 GEN_HANDLER_E(dcbtstep, 0x1F, 0x1F, 0x07, 0x00000001, PPC_NONE, PPC2_BOOKE206), 7738 GEN_HANDLER_E(dcbtls, 0x1F, 0x06, 0x05, 0x02000001, PPC_BOOKE, PPC2_BOOKE206), 7739 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ), 7740 GEN_HANDLER_E(dcbzep, 0x1F, 0x1F, 0x1F, 0x03C00001, PPC_NONE, PPC2_BOOKE206), 7741 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC), 7742 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x01800001, PPC_ALTIVEC), 7743 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC), 7744 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI), 7745 GEN_HANDLER_E(icbiep, 0x1F, 0x1F, 0x1E, 0x03E00001, PPC_NONE, PPC2_BOOKE206), 7746 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA), 7747 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT), 7748 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT), 7749 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT), 7750 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT), 7751 #if defined(TARGET_PPC64) 7752 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B), 7753 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001, 7754 PPC_SEGMENT_64B), 7755 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B), 7756 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001, 7757 PPC_SEGMENT_64B), 7758 GEN_HANDLER2(slbmte, "slbmte", 0x1F, 0x12, 0x0C, 0x001F0001, PPC_SEGMENT_64B), 7759 GEN_HANDLER2(slbmfee, "slbmfee", 0x1F, 0x13, 0x1C, 0x001F0001, PPC_SEGMENT_64B), 
7760 GEN_HANDLER2(slbmfev, "slbmfev", 0x1F, 0x13, 0x1A, 0x001F0001, PPC_SEGMENT_64B), 7761 GEN_HANDLER2(slbfee_, "slbfee.", 0x1F, 0x13, 0x1E, 0x001F0000, PPC_SEGMENT_64B), 7762 #endif 7763 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA), 7764 /* 7765 * XXX Those instructions will need to be handled differently for 7766 * different ISA versions 7767 */ 7768 GEN_HANDLER(tlbiel, 0x1F, 0x12, 0x08, 0x001F0001, PPC_MEM_TLBIE), 7769 GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x001F0001, PPC_MEM_TLBIE), 7770 GEN_HANDLER_E(tlbiel, 0x1F, 0x12, 0x08, 0x00100001, PPC_NONE, PPC2_ISA300), 7771 GEN_HANDLER_E(tlbie, 0x1F, 0x12, 0x09, 0x00100001, PPC_NONE, PPC2_ISA300), 7772 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC), 7773 #if defined(TARGET_PPC64) 7774 GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x031FFC01, PPC_SLBI), 7775 GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI), 7776 GEN_HANDLER_E(slbieg, 0x1F, 0x12, 0x0E, 0x001F0001, PPC_NONE, PPC2_ISA300), 7777 GEN_HANDLER_E(slbsync, 0x1F, 0x12, 0x0A, 0x03FFF801, PPC_NONE, PPC2_ISA300), 7778 #endif 7779 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN), 7780 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN), 7781 GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR), 7782 GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR), 7783 GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR), 7784 GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR), 7785 GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR), 7786 GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR), 7787 GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR), 7788 GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR), 7789 GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR), 7790 GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR), 7791 GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR), 7792 GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR), 7793 GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR), 7794 GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR), 7795 GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR), 7796 GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR), 7797 GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR), 7798 GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR), 7799 GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR), 7800 GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR), 7801 GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR), 7802 GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR), 7803 GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR), 7804 GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR), 7805 GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR), 7806 GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR), 7807 GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR), 7808 GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR), 7809 GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR), 7810 GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR), 7811 GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR), 7812 GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR), 7813 GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR), 7814 GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR), 7815 GEN_HANDLER(dsa, 
0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC), 7816 GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC), 7817 GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC), 7818 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB), 7819 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB), 7820 GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB), 7821 GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB), 7822 GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER), 7823 GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER), 7824 GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER), 7825 GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER), 7826 GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER), 7827 GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER), 7828 GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 7829 GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 7830 GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2), 7831 GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2), 7832 GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 7833 GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 7834 GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2), 7835 GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2), 7836 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI), 7837 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA), 7838 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR), 7839 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR), 7840 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX), 7841 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX), 7842 GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX), 7843 GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX), 7844 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON), 7845 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON), 7846 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT), 7847 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON), 7848 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON), 7849 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP), 7850 GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206), 7851 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI), 7852 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI), 7853 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB), 7854 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB), 7855 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB), 7856 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE), 7857 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE), 7858 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE), 7859 GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, 7860 PPC_NONE, PPC2_BOOKE206), 7861 GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, 7862 PPC_NONE, PPC2_BOOKE206), 7863 GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, 7864 PPC_NONE, PPC2_BOOKE206), 7865 GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001, 7866 PPC_NONE, PPC2_BOOKE206), 7867 
GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001, 7868 PPC_NONE, PPC2_BOOKE206), 7869 GEN_HANDLER2_E(msgsnd, "msgsnd", 0x1F, 0x0E, 0x06, 0x03ff0001, 7870 PPC_NONE, PPC2_PRCNTL), 7871 GEN_HANDLER2_E(msgclr, "msgclr", 0x1F, 0x0E, 0x07, 0x03ff0001, 7872 PPC_NONE, PPC2_PRCNTL), 7873 GEN_HANDLER2_E(msgsync, "msgsync", 0x1F, 0x16, 0x1B, 0x00000000, 7874 PPC_NONE, PPC2_PRCNTL), 7875 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE), 7876 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE), 7877 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC), 7878 GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801, 7879 PPC_BOOKE, PPC2_BOOKE206), 7880 GEN_HANDLER(msync_4xx, 0x1F, 0x16, 0x12, 0x039FF801, PPC_BOOKE), 7881 GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001, 7882 PPC_BOOKE, PPC2_BOOKE206), 7883 GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, 7884 PPC_440_SPEC), 7885 GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC), 7886 GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC), 7887 GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC), 7888 GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC), 7889 GEN_HANDLER(vmladduhm, 0x04, 0x11, 0xFF, 0x00000000, PPC_ALTIVEC), 7890 #if defined(TARGET_PPC64) 7891 GEN_HANDLER_E(maddhd_maddhdu, 0x04, 0x18, 0xFF, 0x00000000, PPC_NONE, 7892 PPC2_ISA300), 7893 GEN_HANDLER_E(maddld, 0x04, 0x19, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300), 7894 GEN_HANDLER2_E(msgsndp, "msgsndp", 0x1F, 0x0E, 0x04, 0x03ff0001, 7895 PPC_NONE, PPC2_ISA207S), 7896 GEN_HANDLER2_E(msgclrp, "msgclrp", 0x1F, 0x0E, 0x05, 0x03ff0001, 7897 PPC_NONE, PPC2_ISA207S), 7898 #endif 7899 7900 #undef GEN_INT_ARITH_ADD 7901 #undef GEN_INT_ARITH_ADD_CONST 7902 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \ 7903 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER), 7904 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \ 7905 add_ca, compute_ca, compute_ov) \ 7906 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER), 7907 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0) 7908 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1) 7909 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0) 7910 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1) 7911 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0) 7912 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1) 7913 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0) 7914 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1) 7915 GEN_HANDLER_E(addex, 0x1F, 0x0A, 0x05, 0x00000000, PPC_NONE, PPC2_ISA300), 7916 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0) 7917 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1) 7918 7919 #undef GEN_INT_ARITH_DIVW 7920 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \ 7921 GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER) 7922 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0), 7923 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1), 7924 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0), 7925 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1), 7926 GEN_HANDLER_E(divwe, 0x1F, 0x0B, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206), 7927 GEN_HANDLER_E(divweo, 0x1F, 0x0B, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206), 7928 GEN_HANDLER_E(divweu, 0x1F, 0x0B, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206), 7929 GEN_HANDLER_E(divweuo, 0x1F, 0x0B, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206), 7930 GEN_HANDLER_E(modsw, 0x1F, 0x0B, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300), 7931 GEN_HANDLER_E(moduw, 0x1F, 0x0B, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300), 7932 7933 #if defined(TARGET_PPC64) 7934 #undef GEN_INT_ARITH_DIVD 7935 #define 
GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \ 7936 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B) 7937 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0), 7938 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1), 7939 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0), 7940 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1), 7941 7942 GEN_HANDLER_E(divdeu, 0x1F, 0x09, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206), 7943 GEN_HANDLER_E(divdeuo, 0x1F, 0x09, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206), 7944 GEN_HANDLER_E(divde, 0x1F, 0x09, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206), 7945 GEN_HANDLER_E(divdeo, 0x1F, 0x09, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206), 7946 GEN_HANDLER_E(modsd, 0x1F, 0x09, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300), 7947 GEN_HANDLER_E(modud, 0x1F, 0x09, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300), 7948 7949 #undef GEN_INT_ARITH_MUL_HELPER 7950 #define GEN_INT_ARITH_MUL_HELPER(name, opc3) \ 7951 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B) 7952 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00), 7953 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02), 7954 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17), 7955 #endif 7956 7957 #undef GEN_INT_ARITH_SUBF 7958 #undef GEN_INT_ARITH_SUBF_CONST 7959 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \ 7960 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER), 7961 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \ 7962 add_ca, compute_ca, compute_ov) \ 7963 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER), 7964 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0) 7965 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1) 7966 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0) 7967 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1) 7968 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0) 7969 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1) 7970 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0) 7971 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1) 7972 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0) 7973 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1) 7974 7975 #undef GEN_LOGICAL1 7976 #undef GEN_LOGICAL2 7977 #define GEN_LOGICAL2(name, tcg_op, opc, type) \ 7978 GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type) 7979 #define GEN_LOGICAL1(name, tcg_op, opc, type) \ 7980 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type) 7981 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER), 7982 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER), 7983 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER), 7984 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER), 7985 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER), 7986 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER), 7987 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER), 7988 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER), 7989 #if defined(TARGET_PPC64) 7990 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B), 7991 #endif 7992 7993 #if defined(TARGET_PPC64) 7994 #undef GEN_PPC64_R2 7995 #undef GEN_PPC64_R4 7996 #define GEN_PPC64_R2(name, opc1, opc2) \ 7997 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\ 7998 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \ 7999 PPC_64B) 8000 #define GEN_PPC64_R4(name, opc1, opc2) \ 8001 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\ 8002 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \ 8003 PPC_64B), \ 8004 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \ 8005 PPC_64B), \ 8006 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \ 
8007 PPC_64B) 8008 GEN_PPC64_R4(rldicl, 0x1E, 0x00), 8009 GEN_PPC64_R4(rldicr, 0x1E, 0x02), 8010 GEN_PPC64_R4(rldic, 0x1E, 0x04), 8011 GEN_PPC64_R2(rldcl, 0x1E, 0x08), 8012 GEN_PPC64_R2(rldcr, 0x1E, 0x09), 8013 GEN_PPC64_R4(rldimi, 0x1E, 0x06), 8014 #endif 8015 8016 #undef GEN_LDX_E 8017 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \ 8018 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2), 8019 8020 #if defined(TARGET_PPC64) 8021 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE) 8022 8023 /* HV/P7 and later only */ 8024 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST) 8025 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x18, PPC_CILDST) 8026 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST) 8027 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST) 8028 #endif 8029 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER) 8030 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER) 8031 8032 /* External PID based load */ 8033 #undef GEN_LDEPX 8034 #define GEN_LDEPX(name, ldop, opc2, opc3) \ 8035 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3, \ 8036 0x00000001, PPC_NONE, PPC2_BOOKE206), 8037 8038 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02) 8039 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08) 8040 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00) 8041 #if defined(TARGET_PPC64) 8042 GEN_LDEPX(ld, DEF_MEMOP(MO_Q), 0x1D, 0x00) 8043 #endif 8044 8045 #undef GEN_STX_E 8046 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \ 8047 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000000, type, type2), 8048 8049 #if defined(TARGET_PPC64) 8050 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE) 8051 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST) 8052 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST) 8053 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST) 8054 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST) 8055 #endif 8056 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER) 8057 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER) 8058 8059 #undef GEN_STEPX 8060 #define GEN_STEPX(name, ldop, opc2, opc3) \ 8061 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3, \ 8062 0x00000001, PPC_NONE, PPC2_BOOKE206), 8063 8064 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06) 8065 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C) 8066 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04) 8067 #if defined(TARGET_PPC64) 8068 GEN_STEPX(std, DEF_MEMOP(MO_Q), 0x1D, 0x04) 8069 #endif 8070 8071 #undef GEN_CRLOGIC 8072 #define GEN_CRLOGIC(name, tcg_op, opc) \ 8073 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER) 8074 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08), 8075 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04), 8076 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09), 8077 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07), 8078 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01), 8079 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E), 8080 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D), 8081 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06), 8082 8083 #undef GEN_MAC_HANDLER 8084 #define GEN_MAC_HANDLER(name, opc2, opc3) \ 8085 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC) 8086 GEN_MAC_HANDLER(macchw, 0x0C, 0x05), 8087 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15), 8088 GEN_MAC_HANDLER(macchws, 0x0C, 0x07), 8089 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17), 8090 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06), 8091 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16), 8092 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04), 8093 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14), 8094 GEN_MAC_HANDLER(machhw, 0x0C, 0x01), 8095 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11), 8096 GEN_MAC_HANDLER(machhws, 0x0C, 0x03), 
8097 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13), 8098 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02), 8099 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12), 8100 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00), 8101 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10), 8102 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D), 8103 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D), 8104 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F), 8105 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F), 8106 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C), 8107 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C), 8108 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E), 8109 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E), 8110 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05), 8111 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15), 8112 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07), 8113 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17), 8114 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01), 8115 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11), 8116 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03), 8117 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13), 8118 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D), 8119 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D), 8120 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F), 8121 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F), 8122 GEN_MAC_HANDLER(mulchw, 0x08, 0x05), 8123 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04), 8124 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01), 8125 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00), 8126 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D), 8127 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C), 8128 8129 GEN_HANDLER2_E(tbegin, "tbegin", 0x1F, 0x0E, 0x14, 0x01DFF800, \ 8130 PPC_NONE, PPC2_TM), 8131 GEN_HANDLER2_E(tend, "tend", 0x1F, 0x0E, 0x15, 0x01FFF800, \ 8132 PPC_NONE, PPC2_TM), 8133 GEN_HANDLER2_E(tabort, "tabort", 0x1F, 0x0E, 0x1C, 0x03E0F800, \ 8134 PPC_NONE, PPC2_TM), 8135 GEN_HANDLER2_E(tabortwc, "tabortwc", 0x1F, 0x0E, 0x18, 0x00000000, \ 8136 PPC_NONE, PPC2_TM), 8137 GEN_HANDLER2_E(tabortwci, "tabortwci", 0x1F, 0x0E, 0x1A, 0x00000000, \ 8138 PPC_NONE, PPC2_TM), 8139 GEN_HANDLER2_E(tabortdc, "tabortdc", 0x1F, 0x0E, 0x19, 0x00000000, \ 8140 PPC_NONE, PPC2_TM), 8141 GEN_HANDLER2_E(tabortdci, "tabortdci", 0x1F, 0x0E, 0x1B, 0x00000000, \ 8142 PPC_NONE, PPC2_TM), 8143 GEN_HANDLER2_E(tsr, "tsr", 0x1F, 0x0E, 0x17, 0x03DFF800, \ 8144 PPC_NONE, PPC2_TM), 8145 GEN_HANDLER2_E(tcheck, "tcheck", 0x1F, 0x0E, 0x16, 0x007FF800, \ 8146 PPC_NONE, PPC2_TM), 8147 GEN_HANDLER2_E(treclaim, "treclaim", 0x1F, 0x0E, 0x1D, 0x03E0F800, \ 8148 PPC_NONE, PPC2_TM), 8149 GEN_HANDLER2_E(trechkpt, "trechkpt", 0x1F, 0x0E, 0x1F, 0x03FFF800, \ 8150 PPC_NONE, PPC2_TM), 8151 8152 #include "translate/fp-ops.c.inc" 8153 8154 #include "translate/vmx-ops.c.inc" 8155 8156 #include "translate/vsx-ops.c.inc" 8157 8158 #include "translate/dfp-ops.c.inc" 8159 8160 #include "translate/spe-ops.c.inc" 8161 }; 8162 8163 /*****************************************************************************/ 8164 /* Opcode types */ 8165 enum { 8166 PPC_DIRECT = 0, /* Opcode routine */ 8167 PPC_INDIRECT = 1, /* Indirect opcode table */ 8168 }; 8169 8170 #define PPC_OPCODE_MASK 0x3 8171 8172 static inline int is_indirect_opcode(void *handler) 8173 { 8174 return ((uintptr_t)handler & PPC_OPCODE_MASK) == PPC_INDIRECT; 8175 } 8176 8177 static inline opc_handler_t **ind_table(void *handler) 8178 { 8179 return (opc_handler_t **)((uintptr_t)handler & ~PPC_OPCODE_MASK); 8180 } 8181 8182 /* Instruction table creation */ 8183 /* Opcodes tables creation */ 8184 static void fill_new_table(opc_handler_t **table, int len) 8185 { 8186 int i; 8187 8188 for (i = 0; i < len; i++) { 8189 table[i] = &invalid_handler; 8190 } 8191 } 8192 8193 static int create_new_table(opc_handler_t **table, unsigned char idx) 8194 { 8195 
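    /*
     * Allocate a fresh sub-table of PPC_CPU_INDIRECT_OPCODES_LEN slots,
     * point every slot at &invalid_handler, and store it in table[idx]
     * tagged with PPC_INDIRECT in the low pointer bits so that
     * is_indirect_opcode() and ind_table() can tell it apart from a
     * direct handler.
     */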
opc_handler_t **tmp; 8196 8197 tmp = g_new(opc_handler_t *, PPC_CPU_INDIRECT_OPCODES_LEN); 8198 fill_new_table(tmp, PPC_CPU_INDIRECT_OPCODES_LEN); 8199 table[idx] = (opc_handler_t *)((uintptr_t)tmp | PPC_INDIRECT); 8200 8201 return 0; 8202 } 8203 8204 static int insert_in_table(opc_handler_t **table, unsigned char idx, 8205 opc_handler_t *handler) 8206 { 8207 if (table[idx] != &invalid_handler) { 8208 return -1; 8209 } 8210 table[idx] = handler; 8211 8212 return 0; 8213 } 8214 8215 static int register_direct_insn(opc_handler_t **ppc_opcodes, 8216 unsigned char idx, opc_handler_t *handler) 8217 { 8218 if (insert_in_table(ppc_opcodes, idx, handler) < 0) { 8219 printf("*** ERROR: opcode %02x already assigned in main " 8220 "opcode table\n", idx); 8221 return -1; 8222 } 8223 8224 return 0; 8225 } 8226 8227 static int register_ind_in_table(opc_handler_t **table, 8228 unsigned char idx1, unsigned char idx2, 8229 opc_handler_t *handler) 8230 { 8231 if (table[idx1] == &invalid_handler) { 8232 if (create_new_table(table, idx1) < 0) { 8233 printf("*** ERROR: unable to create indirect table " 8234 "idx=%02x\n", idx1); 8235 return -1; 8236 } 8237 } else { 8238 if (!is_indirect_opcode(table[idx1])) { 8239 printf("*** ERROR: idx %02x already assigned to a direct " 8240 "opcode\n", idx1); 8241 return -1; 8242 } 8243 } 8244 if (handler != NULL && 8245 insert_in_table(ind_table(table[idx1]), idx2, handler) < 0) { 8246 printf("*** ERROR: opcode %02x already assigned in " 8247 "opcode table %02x\n", idx2, idx1); 8248 return -1; 8249 } 8250 8251 return 0; 8252 } 8253 8254 static int register_ind_insn(opc_handler_t **ppc_opcodes, 8255 unsigned char idx1, unsigned char idx2, 8256 opc_handler_t *handler) 8257 { 8258 return register_ind_in_table(ppc_opcodes, idx1, idx2, handler); 8259 } 8260 8261 static int register_dblind_insn(opc_handler_t **ppc_opcodes, 8262 unsigned char idx1, unsigned char idx2, 8263 unsigned char idx3, opc_handler_t *handler) 8264 { 8265 if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) { 8266 printf("*** ERROR: unable to join indirect table idx " 8267 "[%02x-%02x]\n", idx1, idx2); 8268 return -1; 8269 } 8270 if (register_ind_in_table(ind_table(ppc_opcodes[idx1]), idx2, idx3, 8271 handler) < 0) { 8272 printf("*** ERROR: unable to insert opcode " 8273 "[%02x-%02x-%02x]\n", idx1, idx2, idx3); 8274 return -1; 8275 } 8276 8277 return 0; 8278 } 8279 8280 static int register_trplind_insn(opc_handler_t **ppc_opcodes, 8281 unsigned char idx1, unsigned char idx2, 8282 unsigned char idx3, unsigned char idx4, 8283 opc_handler_t *handler) 8284 { 8285 opc_handler_t **table; 8286 8287 if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) { 8288 printf("*** ERROR: unable to join indirect table idx " 8289 "[%02x-%02x]\n", idx1, idx2); 8290 return -1; 8291 } 8292 table = ind_table(ppc_opcodes[idx1]); 8293 if (register_ind_in_table(table, idx2, idx3, NULL) < 0) { 8294 printf("*** ERROR: unable to join 2nd-level indirect table idx " 8295 "[%02x-%02x-%02x]\n", idx1, idx2, idx3); 8296 return -1; 8297 } 8298 table = ind_table(table[idx2]); 8299 if (register_ind_in_table(table, idx3, idx4, handler) < 0) { 8300 printf("*** ERROR: unable to insert opcode " 8301 "[%02x-%02x-%02x-%02x]\n", idx1, idx2, idx3, idx4); 8302 return -1; 8303 } 8304 return 0; 8305 } 8306 static int register_insn(opc_handler_t **ppc_opcodes, opcode_t *insn) 8307 { 8308 if (insn->opc2 != 0xFF) { 8309 if (insn->opc3 != 0xFF) { 8310 if (insn->opc4 != 0xFF) { 8311 if (register_trplind_insn(ppc_opcodes, insn->opc1, insn->opc2, 
8312 insn->opc3, insn->opc4, 8313 &insn->handler) < 0) { 8314 return -1; 8315 } 8316 } else { 8317 if (register_dblind_insn(ppc_opcodes, insn->opc1, insn->opc2, 8318 insn->opc3, &insn->handler) < 0) { 8319 return -1; 8320 } 8321 } 8322 } else { 8323 if (register_ind_insn(ppc_opcodes, insn->opc1, 8324 insn->opc2, &insn->handler) < 0) { 8325 return -1; 8326 } 8327 } 8328 } else { 8329 if (register_direct_insn(ppc_opcodes, insn->opc1, &insn->handler) < 0) { 8330 return -1; 8331 } 8332 } 8333 8334 return 0; 8335 } 8336 8337 static int test_opcode_table(opc_handler_t **table, int len) 8338 { 8339 int i, count, tmp; 8340 8341 for (i = 0, count = 0; i < len; i++) { 8342 /* Consistency fixup */ 8343 if (table[i] == NULL) { 8344 table[i] = &invalid_handler; 8345 } 8346 if (table[i] != &invalid_handler) { 8347 if (is_indirect_opcode(table[i])) { 8348 tmp = test_opcode_table(ind_table(table[i]), 8349 PPC_CPU_INDIRECT_OPCODES_LEN); 8350 if (tmp == 0) { 8351 free(table[i]); 8352 table[i] = &invalid_handler; 8353 } else { 8354 count++; 8355 } 8356 } else { 8357 count++; 8358 } 8359 } 8360 } 8361 8362 return count; 8363 } 8364 8365 static void fix_opcode_tables(opc_handler_t **ppc_opcodes) 8366 { 8367 if (test_opcode_table(ppc_opcodes, PPC_CPU_OPCODES_LEN) == 0) { 8368 printf("*** WARNING: no opcode defined !\n"); 8369 } 8370 } 8371 8372 /*****************************************************************************/ 8373 void create_ppc_opcodes(PowerPCCPU *cpu, Error **errp) 8374 { 8375 PowerPCCPUClass *pcc = POWERPC_CPU_GET_CLASS(cpu); 8376 opcode_t *opc; 8377 8378 fill_new_table(cpu->opcodes, PPC_CPU_OPCODES_LEN); 8379 for (opc = opcodes; opc < &opcodes[ARRAY_SIZE(opcodes)]; opc++) { 8380 if (((opc->handler.type & pcc->insns_flags) != 0) || 8381 ((opc->handler.type2 & pcc->insns_flags2) != 0)) { 8382 if (register_insn(cpu->opcodes, opc) < 0) { 8383 error_setg(errp, "ERROR initializing PowerPC instruction " 8384 "0x%02x 0x%02x 0x%02x", opc->opc1, opc->opc2, 8385 opc->opc3); 8386 return; 8387 } 8388 } 8389 } 8390 fix_opcode_tables(cpu->opcodes); 8391 fflush(stdout); 8392 fflush(stderr); 8393 } 8394 8395 void destroy_ppc_opcodes(PowerPCCPU *cpu) 8396 { 8397 opc_handler_t **table, **table_2; 8398 int i, j, k; 8399 8400 for (i = 0; i < PPC_CPU_OPCODES_LEN; i++) { 8401 if (cpu->opcodes[i] == &invalid_handler) { 8402 continue; 8403 } 8404 if (is_indirect_opcode(cpu->opcodes[i])) { 8405 table = ind_table(cpu->opcodes[i]); 8406 for (j = 0; j < PPC_CPU_INDIRECT_OPCODES_LEN; j++) { 8407 if (table[j] == &invalid_handler) { 8408 continue; 8409 } 8410 if (is_indirect_opcode(table[j])) { 8411 table_2 = ind_table(table[j]); 8412 for (k = 0; k < PPC_CPU_INDIRECT_OPCODES_LEN; k++) { 8413 if (table_2[k] != &invalid_handler && 8414 is_indirect_opcode(table_2[k])) { 8415 g_free((opc_handler_t *)((uintptr_t)table_2[k] & 8416 ~PPC_INDIRECT)); 8417 } 8418 } 8419 g_free((opc_handler_t *)((uintptr_t)table[j] & 8420 ~PPC_INDIRECT)); 8421 } 8422 } 8423 g_free((opc_handler_t *)((uintptr_t)cpu->opcodes[i] & 8424 ~PPC_INDIRECT)); 8425 } 8426 } 8427 } 8428 8429 int ppc_fixup_cpu(PowerPCCPU *cpu) 8430 { 8431 CPUPPCState *env = &cpu->env; 8432 8433 /* 8434 * TCG doesn't (yet) emulate some groups of instructions that are 8435 * implemented on some otherwise supported CPUs (e.g. VSX and 8436 * decimal floating point instructions on POWER7). We remove 8437 * unsupported instruction groups from the cpu state's instruction 8438 * masks and hope the guest can cope. 
For at least the pseries 8439 * machine, the unavailability of these instructions can be 8440 * advertised to the guest via the device tree. 8441 */ 8442 if ((env->insns_flags & ~PPC_TCG_INSNS) 8443 || (env->insns_flags2 & ~PPC_TCG_INSNS2)) { 8444 warn_report("Disabling some instructions which are not " 8445 "emulated by TCG (0x%" PRIx64 ", 0x%" PRIx64 ")", 8446 env->insns_flags & ~PPC_TCG_INSNS, 8447 env->insns_flags2 & ~PPC_TCG_INSNS2); 8448 } 8449 env->insns_flags &= PPC_TCG_INSNS; 8450 env->insns_flags2 &= PPC_TCG_INSNS2; 8451 return 0; 8452 } 8453 8454 static bool decode_legacy(PowerPCCPU *cpu, DisasContext *ctx, uint32_t insn) 8455 { 8456 opc_handler_t **table, *handler; 8457 uint32_t inval; 8458 8459 ctx->opcode = insn; 8460 8461 LOG_DISAS("translate opcode %08x (%02x %02x %02x %02x) (%s)\n", 8462 insn, opc1(insn), opc2(insn), opc3(insn), opc4(insn), 8463 ctx->le_mode ? "little" : "big"); 8464 8465 table = cpu->opcodes; 8466 handler = table[opc1(insn)]; 8467 if (is_indirect_opcode(handler)) { 8468 table = ind_table(handler); 8469 handler = table[opc2(insn)]; 8470 if (is_indirect_opcode(handler)) { 8471 table = ind_table(handler); 8472 handler = table[opc3(insn)]; 8473 if (is_indirect_opcode(handler)) { 8474 table = ind_table(handler); 8475 handler = table[opc4(insn)]; 8476 } 8477 } 8478 } 8479 8480 /* Is opcode *REALLY* valid ? */ 8481 if (unlikely(handler->handler == &gen_invalid)) { 8482 qemu_log_mask(LOG_GUEST_ERROR, "invalid/unsupported opcode: " 8483 "%02x - %02x - %02x - %02x (%08x) " 8484 TARGET_FMT_lx "\n", 8485 opc1(insn), opc2(insn), opc3(insn), opc4(insn), 8486 insn, ctx->cia); 8487 return false; 8488 } 8489 8490 if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE) 8491 && Rc(insn))) { 8492 inval = handler->inval2; 8493 } else { 8494 inval = handler->inval1; 8495 } 8496 8497 if (unlikely((insn & inval) != 0)) { 8498 qemu_log_mask(LOG_GUEST_ERROR, "invalid bits: %08x for opcode: " 8499 "%02x - %02x - %02x - %02x (%08x) " 8500 TARGET_FMT_lx "\n", insn & inval, 8501 opc1(insn), opc2(insn), opc3(insn), opc4(insn), 8502 insn, ctx->cia); 8503 return false; 8504 } 8505 8506 handler->handler(ctx); 8507 return true; 8508 } 8509 8510 static void ppc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs) 8511 { 8512 DisasContext *ctx = container_of(dcbase, DisasContext, base); 8513 CPUPPCState *env = cs->env_ptr; 8514 uint32_t hflags = ctx->base.tb->flags; 8515 8516 ctx->spr_cb = env->spr_cb; 8517 ctx->pr = (hflags >> HFLAGS_PR) & 1; 8518 ctx->mem_idx = (hflags >> HFLAGS_DMMU_IDX) & 7; 8519 ctx->dr = (hflags >> HFLAGS_DR) & 1; 8520 ctx->hv = (hflags >> HFLAGS_HV) & 1; 8521 ctx->insns_flags = env->insns_flags; 8522 ctx->insns_flags2 = env->insns_flags2; 8523 ctx->access_type = -1; 8524 ctx->need_access_type = !mmu_is_64bit(env->mmu_model); 8525 ctx->le_mode = (hflags >> HFLAGS_LE) & 1; 8526 ctx->default_tcg_memop_mask = ctx->le_mode ? 
MO_LE : MO_BE; 8527 ctx->flags = env->flags; 8528 #if defined(TARGET_PPC64) 8529 ctx->sf_mode = (hflags >> HFLAGS_64) & 1; 8530 ctx->has_cfar = !!(env->flags & POWERPC_FLAG_CFAR); 8531 #endif 8532 ctx->lazy_tlb_flush = env->mmu_model == POWERPC_MMU_32B 8533 || env->mmu_model == POWERPC_MMU_601 8534 || env->mmu_model & POWERPC_MMU_64; 8535 8536 ctx->fpu_enabled = (hflags >> HFLAGS_FP) & 1; 8537 ctx->spe_enabled = (hflags >> HFLAGS_SPE) & 1; 8538 ctx->altivec_enabled = (hflags >> HFLAGS_VR) & 1; 8539 ctx->vsx_enabled = (hflags >> HFLAGS_VSX) & 1; 8540 ctx->tm_enabled = (hflags >> HFLAGS_TM) & 1; 8541 ctx->gtse = (hflags >> HFLAGS_GTSE) & 1; 8542 8543 ctx->singlestep_enabled = 0; 8544 if ((hflags >> HFLAGS_SE) & 1) { 8545 ctx->singlestep_enabled |= CPU_SINGLE_STEP; 8546 } 8547 if ((hflags >> HFLAGS_BE) & 1) { 8548 ctx->singlestep_enabled |= CPU_BRANCH_STEP; 8549 } 8550 if (unlikely(ctx->base.singlestep_enabled)) { 8551 ctx->singlestep_enabled |= GDBSTUB_SINGLE_STEP; 8552 } 8553 8554 if (ctx->singlestep_enabled & (CPU_SINGLE_STEP | GDBSTUB_SINGLE_STEP)) { 8555 ctx->base.max_insns = 1; 8556 } 8557 } 8558 8559 static void ppc_tr_tb_start(DisasContextBase *db, CPUState *cs) 8560 { 8561 } 8562 8563 static void ppc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs) 8564 { 8565 tcg_gen_insn_start(dcbase->pc_next); 8566 } 8567 8568 static bool ppc_tr_breakpoint_check(DisasContextBase *dcbase, CPUState *cs, 8569 const CPUBreakpoint *bp) 8570 { 8571 DisasContext *ctx = container_of(dcbase, DisasContext, base); 8572 8573 gen_update_nip(ctx, ctx->base.pc_next); 8574 gen_debug_exception(ctx); 8575 /* 8576 * The address covered by the breakpoint must be included in 8577 * [tb->pc, tb->pc + tb->size) in order for it to be properly 8578 * cleared -- thus we increment the PC here so that the logic 8579 * setting tb->size below does the right thing. 8580 */ 8581 ctx->base.pc_next += 4; 8582 return true; 8583 } 8584 8585 static bool is_prefix_insn(DisasContext *ctx, uint32_t insn) 8586 { 8587 REQUIRE_INSNS_FLAGS2(ctx, ISA310); 8588 return opc1(insn) == 1; 8589 } 8590 8591 static void ppc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs) 8592 { 8593 DisasContext *ctx = container_of(dcbase, DisasContext, base); 8594 PowerPCCPU *cpu = POWERPC_CPU(cs); 8595 CPUPPCState *env = cs->env_ptr; 8596 target_ulong pc; 8597 uint32_t insn; 8598 bool ok; 8599 8600 LOG_DISAS("----------------\n"); 8601 LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n", 8602 ctx->base.pc_next, ctx->mem_idx, (int)msr_ir); 8603 8604 ctx->cia = pc = ctx->base.pc_next; 8605 insn = translator_ldl_swap(env, pc, need_byteswap(ctx)); 8606 ctx->base.pc_next = pc += 4; 8607 8608 if (!is_prefix_insn(ctx, insn)) { 8609 ok = (decode_insn32(ctx, insn) || 8610 decode_legacy(cpu, ctx, insn)); 8611 } else if ((pc & 63) == 0) { 8612 /* 8613 * Power v3.1, section 1.9 Exceptions: 8614 * attempt to execute a prefixed instruction that crosses a 8615 * 64-byte address boundary (system alignment error). 8616 */ 8617 gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_INSN); 8618 ok = true; 8619 } else { 8620 uint32_t insn2 = translator_ldl_swap(env, pc, need_byteswap(ctx)); 8621 ctx->base.pc_next = pc += 4; 8622 ok = decode_insn64(ctx, deposit64(insn2, 32, 32, insn)); 8623 } 8624 if (!ok) { 8625 gen_invalid(ctx); 8626 } 8627 8628 /* End the TB when crossing a page boundary.
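     * The pc has already been advanced past the current instruction, so
     * it is page-aligned exactly when that instruction ended its page
     * and the next fetch would come from a new page.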
*/ 8629 if (ctx->base.is_jmp == DISAS_NEXT && !(pc & ~TARGET_PAGE_MASK)) { 8630 ctx->base.is_jmp = DISAS_TOO_MANY; 8631 } 8632 8633 translator_loop_temp_check(&ctx->base); 8634 } 8635 8636 static void ppc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs) 8637 { 8638 DisasContext *ctx = container_of(dcbase, DisasContext, base); 8639 DisasJumpType is_jmp = ctx->base.is_jmp; 8640 target_ulong nip = ctx->base.pc_next; 8641 int sse; 8642 8643 if (is_jmp == DISAS_NORETURN) { 8644 /* We have already exited the TB. */ 8645 return; 8646 } 8647 8648 /* Honor single stepping. */ 8649 sse = ctx->singlestep_enabled & (CPU_SINGLE_STEP | GDBSTUB_SINGLE_STEP); 8650 if (unlikely(sse)) { 8651 switch (is_jmp) { 8652 case DISAS_TOO_MANY: 8653 case DISAS_EXIT_UPDATE: 8654 case DISAS_CHAIN_UPDATE: 8655 gen_update_nip(ctx, nip); 8656 break; 8657 case DISAS_EXIT: 8658 case DISAS_CHAIN: 8659 break; 8660 default: 8661 g_assert_not_reached(); 8662 } 8663 8664 if (sse & GDBSTUB_SINGLE_STEP) { 8665 gen_debug_exception(ctx); 8666 return; 8667 } 8668 /* else CPU_SINGLE_STEP... */ 8669 if (nip <= 0x100 || nip > 0xf00) { 8670 gen_helper_raise_exception(cpu_env, tcg_constant_i32(gen_prep_dbgex(ctx))); 8671 return; 8672 } 8673 } 8674 8675 switch (is_jmp) { 8676 case DISAS_TOO_MANY: 8677 if (use_goto_tb(ctx, nip)) { 8678 tcg_gen_goto_tb(0); 8679 gen_update_nip(ctx, nip); 8680 tcg_gen_exit_tb(ctx->base.tb, 0); 8681 break; 8682 } 8683 /* fall through */ 8684 case DISAS_CHAIN_UPDATE: 8685 gen_update_nip(ctx, nip); 8686 /* fall through */ 8687 case DISAS_CHAIN: 8688 tcg_gen_lookup_and_goto_ptr(); 8689 break; 8690 8691 case DISAS_EXIT_UPDATE: 8692 gen_update_nip(ctx, nip); 8693 /* fall through */ 8694 case DISAS_EXIT: 8695 tcg_gen_exit_tb(NULL, 0); 8696 break; 8697 8698 default: 8699 g_assert_not_reached(); 8700 } 8701 } 8702 8703 static void ppc_tr_disas_log(const DisasContextBase *dcbase, CPUState *cs) 8704 { 8705 qemu_log("IN: %s\n", lookup_symbol(dcbase->pc_first)); 8706 log_target_disas(cs, dcbase->pc_first, dcbase->tb->size); 8707 } 8708 8709 static const TranslatorOps ppc_tr_ops = { 8710 .init_disas_context = ppc_tr_init_disas_context, 8711 .tb_start = ppc_tr_tb_start, 8712 .insn_start = ppc_tr_insn_start, 8713 .breakpoint_check = ppc_tr_breakpoint_check, 8714 .translate_insn = ppc_tr_translate_insn, 8715 .tb_stop = ppc_tr_tb_stop, 8716 .disas_log = ppc_tr_disas_log, 8717 }; 8718 8719 void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int max_insns) 8720 { 8721 DisasContext ctx; 8722 8723 translator_loop(&ppc_tr_ops, &ctx.base, cs, tb, max_insns); 8724 } 8725 8726 void restore_state_to_opc(CPUPPCState *env, TranslationBlock *tb, 8727 target_ulong *data) 8728 { 8729 env->nip = data[0]; 8730 } 8731