1 /* 2 * PowerPC emulation for qemu: main translation routines. 3 * 4 * Copyright (c) 2003-2007 Jocelyn Mayer 5 * Copyright (C) 2011 Freescale Semiconductor, Inc. 6 * 7 * This library is free software; you can redistribute it and/or 8 * modify it under the terms of the GNU Lesser General Public 9 * License as published by the Free Software Foundation; either 10 * version 2.1 of the License, or (at your option) any later version. 11 * 12 * This library is distributed in the hope that it will be useful, 13 * but WITHOUT ANY WARRANTY; without even the implied warranty of 14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 15 * Lesser General Public License for more details. 16 * 17 * You should have received a copy of the GNU Lesser General Public 18 * License along with this library; if not, see <http://www.gnu.org/licenses/>. 19 */ 20 21 #include "qemu/osdep.h" 22 #include "cpu.h" 23 #include "internal.h" 24 #include "disas/disas.h" 25 #include "exec/exec-all.h" 26 #include "tcg/tcg-op.h" 27 #include "tcg/tcg-op-gvec.h" 28 #include "qemu/host-utils.h" 29 #include "qemu/main-loop.h" 30 #include "exec/cpu_ldst.h" 31 32 #include "exec/helper-proto.h" 33 #include "exec/helper-gen.h" 34 35 #include "exec/translator.h" 36 #include "exec/log.h" 37 #include "qemu/atomic128.h" 38 #include "spr_tcg.h" 39 40 #include "qemu/qemu-print.h" 41 #include "qapi/error.h" 42 43 #define CPU_SINGLE_STEP 0x1 44 #define CPU_BRANCH_STEP 0x2 45 46 /* Include definitions for instructions classes and implementations flags */ 47 /* #define PPC_DEBUG_DISAS */ 48 49 #ifdef PPC_DEBUG_DISAS 50 # define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__) 51 #else 52 # define LOG_DISAS(...) do { } while (0) 53 #endif 54 /*****************************************************************************/ 55 /* Code translation helpers */ 56 57 /* global register indexes */ 58 static char cpu_reg_names[10 * 3 + 22 * 4 /* GPR */ 59 + 10 * 4 + 22 * 5 /* SPE GPRh */ 60 + 8 * 5 /* CRF */]; 61 static TCGv cpu_gpr[32]; 62 static TCGv cpu_gprh[32]; 63 static TCGv_i32 cpu_crf[8]; 64 static TCGv cpu_nip; 65 static TCGv cpu_msr; 66 static TCGv cpu_ctr; 67 static TCGv cpu_lr; 68 #if defined(TARGET_PPC64) 69 static TCGv cpu_cfar; 70 #endif 71 static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca, cpu_ov32, cpu_ca32; 72 static TCGv cpu_reserve; 73 static TCGv cpu_reserve_val; 74 static TCGv cpu_fpscr; 75 static TCGv_i32 cpu_access_type; 76 77 #include "exec/gen-icount.h" 78 79 void ppc_translate_init(void) 80 { 81 int i; 82 char *p; 83 size_t cpu_reg_names_size; 84 85 p = cpu_reg_names; 86 cpu_reg_names_size = sizeof(cpu_reg_names); 87 88 for (i = 0; i < 8; i++) { 89 snprintf(p, cpu_reg_names_size, "crf%d", i); 90 cpu_crf[i] = tcg_global_mem_new_i32(cpu_env, 91 offsetof(CPUPPCState, crf[i]), p); 92 p += 5; 93 cpu_reg_names_size -= 5; 94 } 95 96 for (i = 0; i < 32; i++) { 97 snprintf(p, cpu_reg_names_size, "r%d", i); 98 cpu_gpr[i] = tcg_global_mem_new(cpu_env, 99 offsetof(CPUPPCState, gpr[i]), p); 100 p += (i < 10) ? 3 : 4; 101 cpu_reg_names_size -= (i < 10) ? 3 : 4; 102 snprintf(p, cpu_reg_names_size, "r%dH", i); 103 cpu_gprh[i] = tcg_global_mem_new(cpu_env, 104 offsetof(CPUPPCState, gprh[i]), p); 105 p += (i < 10) ? 4 : 5; 106 cpu_reg_names_size -= (i < 10) ? 
4 : 5; 107 } 108 109 cpu_nip = tcg_global_mem_new(cpu_env, 110 offsetof(CPUPPCState, nip), "nip"); 111 112 cpu_msr = tcg_global_mem_new(cpu_env, 113 offsetof(CPUPPCState, msr), "msr"); 114 115 cpu_ctr = tcg_global_mem_new(cpu_env, 116 offsetof(CPUPPCState, ctr), "ctr"); 117 118 cpu_lr = tcg_global_mem_new(cpu_env, 119 offsetof(CPUPPCState, lr), "lr"); 120 121 #if defined(TARGET_PPC64) 122 cpu_cfar = tcg_global_mem_new(cpu_env, 123 offsetof(CPUPPCState, cfar), "cfar"); 124 #endif 125 126 cpu_xer = tcg_global_mem_new(cpu_env, 127 offsetof(CPUPPCState, xer), "xer"); 128 cpu_so = tcg_global_mem_new(cpu_env, 129 offsetof(CPUPPCState, so), "SO"); 130 cpu_ov = tcg_global_mem_new(cpu_env, 131 offsetof(CPUPPCState, ov), "OV"); 132 cpu_ca = tcg_global_mem_new(cpu_env, 133 offsetof(CPUPPCState, ca), "CA"); 134 cpu_ov32 = tcg_global_mem_new(cpu_env, 135 offsetof(CPUPPCState, ov32), "OV32"); 136 cpu_ca32 = tcg_global_mem_new(cpu_env, 137 offsetof(CPUPPCState, ca32), "CA32"); 138 139 cpu_reserve = tcg_global_mem_new(cpu_env, 140 offsetof(CPUPPCState, reserve_addr), 141 "reserve_addr"); 142 cpu_reserve_val = tcg_global_mem_new(cpu_env, 143 offsetof(CPUPPCState, reserve_val), 144 "reserve_val"); 145 146 cpu_fpscr = tcg_global_mem_new(cpu_env, 147 offsetof(CPUPPCState, fpscr), "fpscr"); 148 149 cpu_access_type = tcg_global_mem_new_i32(cpu_env, 150 offsetof(CPUPPCState, access_type), 151 "access_type"); 152 } 153 154 /* internal defines */ 155 struct DisasContext { 156 DisasContextBase base; 157 target_ulong cia; /* current instruction address */ 158 uint32_t opcode; 159 /* Routine used to access memory */ 160 bool pr, hv, dr, le_mode; 161 bool lazy_tlb_flush; 162 bool need_access_type; 163 int mem_idx; 164 int access_type; 165 /* Translation flags */ 166 MemOp default_tcg_memop_mask; 167 #if defined(TARGET_PPC64) 168 bool sf_mode; 169 bool has_cfar; 170 #endif 171 bool fpu_enabled; 172 bool altivec_enabled; 173 bool vsx_enabled; 174 bool spe_enabled; 175 bool tm_enabled; 176 bool gtse; 177 bool hr; 178 ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */ 179 int singlestep_enabled; 180 uint32_t flags; 181 uint64_t insns_flags; 182 uint64_t insns_flags2; 183 }; 184 185 #define DISAS_EXIT DISAS_TARGET_0 /* exit to main loop, pc updated */ 186 #define DISAS_EXIT_UPDATE DISAS_TARGET_1 /* exit to main loop, pc stale */ 187 #define DISAS_CHAIN DISAS_TARGET_2 /* lookup next tb, pc updated */ 188 #define DISAS_CHAIN_UPDATE DISAS_TARGET_3 /* lookup next tb, pc stale */ 189 190 /* Return true iff byteswap is needed in a scalar memop */ 191 static inline bool need_byteswap(const DisasContext *ctx) 192 { 193 #if defined(TARGET_WORDS_BIGENDIAN) 194 return ctx->le_mode; 195 #else 196 return !ctx->le_mode; 197 #endif 198 } 199 200 /* True when active word size < size of target_long. 
*/ 201 #ifdef TARGET_PPC64 202 # define NARROW_MODE(C) (!(C)->sf_mode) 203 #else 204 # define NARROW_MODE(C) 0 205 #endif 206 207 struct opc_handler_t { 208 /* invalid bits for instruction 1 (Rc(opcode) == 0) */ 209 uint32_t inval1; 210 /* invalid bits for instruction 2 (Rc(opcode) == 1) */ 211 uint32_t inval2; 212 /* instruction type */ 213 uint64_t type; 214 /* extended instruction type */ 215 uint64_t type2; 216 /* handler */ 217 void (*handler)(DisasContext *ctx); 218 }; 219 220 /* SPR load/store helpers */ 221 static inline void gen_load_spr(TCGv t, int reg) 222 { 223 tcg_gen_ld_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg])); 224 } 225 226 static inline void gen_store_spr(int reg, TCGv t) 227 { 228 tcg_gen_st_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg])); 229 } 230 231 static inline void gen_set_access_type(DisasContext *ctx, int access_type) 232 { 233 if (ctx->need_access_type && ctx->access_type != access_type) { 234 tcg_gen_movi_i32(cpu_access_type, access_type); 235 ctx->access_type = access_type; 236 } 237 } 238 239 static inline void gen_update_nip(DisasContext *ctx, target_ulong nip) 240 { 241 if (NARROW_MODE(ctx)) { 242 nip = (uint32_t)nip; 243 } 244 tcg_gen_movi_tl(cpu_nip, nip); 245 } 246 247 static void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error) 248 { 249 TCGv_i32 t0, t1; 250 251 /* 252 * These are all synchronous exceptions, we set the PC back to the 253 * faulting instruction 254 */ 255 gen_update_nip(ctx, ctx->cia); 256 t0 = tcg_const_i32(excp); 257 t1 = tcg_const_i32(error); 258 gen_helper_raise_exception_err(cpu_env, t0, t1); 259 tcg_temp_free_i32(t0); 260 tcg_temp_free_i32(t1); 261 ctx->base.is_jmp = DISAS_NORETURN; 262 } 263 264 static void gen_exception(DisasContext *ctx, uint32_t excp) 265 { 266 TCGv_i32 t0; 267 268 /* 269 * These are all synchronous exceptions, we set the PC back to the 270 * faulting instruction 271 */ 272 gen_update_nip(ctx, ctx->cia); 273 t0 = tcg_const_i32(excp); 274 gen_helper_raise_exception(cpu_env, t0); 275 tcg_temp_free_i32(t0); 276 ctx->base.is_jmp = DISAS_NORETURN; 277 } 278 279 static void gen_exception_nip(DisasContext *ctx, uint32_t excp, 280 target_ulong nip) 281 { 282 TCGv_i32 t0; 283 284 gen_update_nip(ctx, nip); 285 t0 = tcg_const_i32(excp); 286 gen_helper_raise_exception(cpu_env, t0); 287 tcg_temp_free_i32(t0); 288 ctx->base.is_jmp = DISAS_NORETURN; 289 } 290 291 static void gen_icount_io_start(DisasContext *ctx) 292 { 293 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) { 294 gen_io_start(); 295 /* 296 * An I/O instruction must be last in the TB. 297 * Chain to the next TB, and let the code from gen_tb_start 298 * decide if we need to return to the main loop. 299 * Doing this first also allows this value to be overridden. 300 */ 301 ctx->base.is_jmp = DISAS_TOO_MANY; 302 } 303 } 304 305 /* 306 * Tells the caller what is the appropriate exception to generate and prepares 307 * SPR registers for this exception. 308 * 309 * The exception can be either POWERPC_EXCP_TRACE (on most PowerPCs) or 310 * POWERPC_EXCP_DEBUG (on BookE). 
311 */ 312 static uint32_t gen_prep_dbgex(DisasContext *ctx) 313 { 314 if (ctx->flags & POWERPC_FLAG_DE) { 315 target_ulong dbsr = 0; 316 if (ctx->singlestep_enabled & CPU_SINGLE_STEP) { 317 dbsr = DBCR0_ICMP; 318 } else { 319 /* Must have been branch */ 320 dbsr = DBCR0_BRT; 321 } 322 TCGv t0 = tcg_temp_new(); 323 gen_load_spr(t0, SPR_BOOKE_DBSR); 324 tcg_gen_ori_tl(t0, t0, dbsr); 325 gen_store_spr(SPR_BOOKE_DBSR, t0); 326 tcg_temp_free(t0); 327 return POWERPC_EXCP_DEBUG; 328 } else { 329 return POWERPC_EXCP_TRACE; 330 } 331 } 332 333 static void gen_debug_exception(DisasContext *ctx) 334 { 335 gen_helper_raise_exception(cpu_env, tcg_constant_i32(gen_prep_dbgex(ctx))); 336 ctx->base.is_jmp = DISAS_NORETURN; 337 } 338 339 static inline void gen_inval_exception(DisasContext *ctx, uint32_t error) 340 { 341 /* Will be converted to program check if needed */ 342 gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_INVAL | error); 343 } 344 345 static inline void gen_priv_exception(DisasContext *ctx, uint32_t error) 346 { 347 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_PRIV | error); 348 } 349 350 static inline void gen_hvpriv_exception(DisasContext *ctx, uint32_t error) 351 { 352 /* Will be converted to program check if needed */ 353 gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_PRIV | error); 354 } 355 356 /*****************************************************************************/ 357 /* SPR READ/WRITE CALLBACKS */ 358 359 void spr_noaccess(DisasContext *ctx, int gprn, int sprn) 360 { 361 #if 0 362 sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5); 363 printf("ERROR: try to access SPR %d !\n", sprn); 364 #endif 365 } 366 367 /* #define PPC_DUMP_SPR_ACCESSES */ 368 369 /* 370 * Generic callbacks: 371 * do nothing but store/retrieve spr value 372 */ 373 static void spr_load_dump_spr(int sprn) 374 { 375 #ifdef PPC_DUMP_SPR_ACCESSES 376 TCGv_i32 t0 = tcg_const_i32(sprn); 377 gen_helper_load_dump_spr(cpu_env, t0); 378 tcg_temp_free_i32(t0); 379 #endif 380 } 381 382 void spr_read_generic(DisasContext *ctx, int gprn, int sprn) 383 { 384 gen_load_spr(cpu_gpr[gprn], sprn); 385 spr_load_dump_spr(sprn); 386 } 387 388 static void spr_store_dump_spr(int sprn) 389 { 390 #ifdef PPC_DUMP_SPR_ACCESSES 391 TCGv_i32 t0 = tcg_const_i32(sprn); 392 gen_helper_store_dump_spr(cpu_env, t0); 393 tcg_temp_free_i32(t0); 394 #endif 395 } 396 397 void spr_write_generic(DisasContext *ctx, int sprn, int gprn) 398 { 399 gen_store_spr(sprn, cpu_gpr[gprn]); 400 spr_store_dump_spr(sprn); 401 } 402 403 #if !defined(CONFIG_USER_ONLY) 404 void spr_write_generic32(DisasContext *ctx, int sprn, int gprn) 405 { 406 #ifdef TARGET_PPC64 407 TCGv t0 = tcg_temp_new(); 408 tcg_gen_ext32u_tl(t0, cpu_gpr[gprn]); 409 gen_store_spr(sprn, t0); 410 tcg_temp_free(t0); 411 spr_store_dump_spr(sprn); 412 #else 413 spr_write_generic(ctx, sprn, gprn); 414 #endif 415 } 416 417 void spr_write_clear(DisasContext *ctx, int sprn, int gprn) 418 { 419 TCGv t0 = tcg_temp_new(); 420 TCGv t1 = tcg_temp_new(); 421 gen_load_spr(t0, sprn); 422 tcg_gen_neg_tl(t1, cpu_gpr[gprn]); 423 tcg_gen_and_tl(t0, t0, t1); 424 gen_store_spr(sprn, t0); 425 tcg_temp_free(t0); 426 tcg_temp_free(t1); 427 } 428 429 void spr_access_nop(DisasContext *ctx, int sprn, int gprn) 430 { 431 } 432 433 #endif 434 435 /* SPR common to all PowerPC */ 436 /* XER */ 437 void spr_read_xer(DisasContext *ctx, int gprn, int sprn) 438 { 439 TCGv dst = cpu_gpr[gprn]; 440 TCGv t0 = tcg_temp_new(); 441 TCGv t1 = tcg_temp_new(); 442 TCGv t2 = tcg_temp_new(); 443 
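    /*
     * Descriptive note on the recomposition below: SO, OV, CA (and, on
     * ISA v3.0, OV32/CA32) are tracked in dedicated fields so they can be
     * updated without a read-modify-write of XER, while env->xer keeps
     * only the remaining bits.  Reading XER therefore rebuilds the
     * architected value, roughly:
     *
     *   XER = xer | (so << XER_SO) | (ov << XER_OV) | (ca << XER_CA)
     *           [ | (ov32 << XER_OV32) | (ca32 << XER_CA32)  on ISA v3.0 ]
     *
     * which is what the shifts and ORs below implement.
     */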
tcg_gen_mov_tl(dst, cpu_xer); 444 tcg_gen_shli_tl(t0, cpu_so, XER_SO); 445 tcg_gen_shli_tl(t1, cpu_ov, XER_OV); 446 tcg_gen_shli_tl(t2, cpu_ca, XER_CA); 447 tcg_gen_or_tl(t0, t0, t1); 448 tcg_gen_or_tl(dst, dst, t2); 449 tcg_gen_or_tl(dst, dst, t0); 450 if (is_isa300(ctx)) { 451 tcg_gen_shli_tl(t0, cpu_ov32, XER_OV32); 452 tcg_gen_or_tl(dst, dst, t0); 453 tcg_gen_shli_tl(t0, cpu_ca32, XER_CA32); 454 tcg_gen_or_tl(dst, dst, t0); 455 } 456 tcg_temp_free(t0); 457 tcg_temp_free(t1); 458 tcg_temp_free(t2); 459 } 460 461 void spr_write_xer(DisasContext *ctx, int sprn, int gprn) 462 { 463 TCGv src = cpu_gpr[gprn]; 464 /* Write all flags, while reading back check for isa300 */ 465 tcg_gen_andi_tl(cpu_xer, src, 466 ~((1u << XER_SO) | 467 (1u << XER_OV) | (1u << XER_OV32) | 468 (1u << XER_CA) | (1u << XER_CA32))); 469 tcg_gen_extract_tl(cpu_ov32, src, XER_OV32, 1); 470 tcg_gen_extract_tl(cpu_ca32, src, XER_CA32, 1); 471 tcg_gen_extract_tl(cpu_so, src, XER_SO, 1); 472 tcg_gen_extract_tl(cpu_ov, src, XER_OV, 1); 473 tcg_gen_extract_tl(cpu_ca, src, XER_CA, 1); 474 } 475 476 /* LR */ 477 void spr_read_lr(DisasContext *ctx, int gprn, int sprn) 478 { 479 tcg_gen_mov_tl(cpu_gpr[gprn], cpu_lr); 480 } 481 482 void spr_write_lr(DisasContext *ctx, int sprn, int gprn) 483 { 484 tcg_gen_mov_tl(cpu_lr, cpu_gpr[gprn]); 485 } 486 487 /* CFAR */ 488 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) 489 void spr_read_cfar(DisasContext *ctx, int gprn, int sprn) 490 { 491 tcg_gen_mov_tl(cpu_gpr[gprn], cpu_cfar); 492 } 493 494 void spr_write_cfar(DisasContext *ctx, int sprn, int gprn) 495 { 496 tcg_gen_mov_tl(cpu_cfar, cpu_gpr[gprn]); 497 } 498 #endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */ 499 500 /* CTR */ 501 void spr_read_ctr(DisasContext *ctx, int gprn, int sprn) 502 { 503 tcg_gen_mov_tl(cpu_gpr[gprn], cpu_ctr); 504 } 505 506 void spr_write_ctr(DisasContext *ctx, int sprn, int gprn) 507 { 508 tcg_gen_mov_tl(cpu_ctr, cpu_gpr[gprn]); 509 } 510 511 /* User read access to SPR */ 512 /* USPRx */ 513 /* UMMCRx */ 514 /* UPMCx */ 515 /* USIA */ 516 /* UDECR */ 517 void spr_read_ureg(DisasContext *ctx, int gprn, int sprn) 518 { 519 gen_load_spr(cpu_gpr[gprn], sprn + 0x10); 520 } 521 522 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) 523 void spr_write_ureg(DisasContext *ctx, int sprn, int gprn) 524 { 525 gen_store_spr(sprn + 0x10, cpu_gpr[gprn]); 526 } 527 #endif 528 529 /* SPR common to all non-embedded PowerPC */ 530 /* DECR */ 531 #if !defined(CONFIG_USER_ONLY) 532 void spr_read_decr(DisasContext *ctx, int gprn, int sprn) 533 { 534 gen_icount_io_start(ctx); 535 gen_helper_load_decr(cpu_gpr[gprn], cpu_env); 536 } 537 538 void spr_write_decr(DisasContext *ctx, int sprn, int gprn) 539 { 540 gen_icount_io_start(ctx); 541 gen_helper_store_decr(cpu_env, cpu_gpr[gprn]); 542 } 543 #endif 544 545 /* SPR common to all non-embedded PowerPC, except 601 */ 546 /* Time base */ 547 void spr_read_tbl(DisasContext *ctx, int gprn, int sprn) 548 { 549 gen_icount_io_start(ctx); 550 gen_helper_load_tbl(cpu_gpr[gprn], cpu_env); 551 } 552 553 void spr_read_tbu(DisasContext *ctx, int gprn, int sprn) 554 { 555 gen_icount_io_start(ctx); 556 gen_helper_load_tbu(cpu_gpr[gprn], cpu_env); 557 } 558 559 void spr_read_atbl(DisasContext *ctx, int gprn, int sprn) 560 { 561 gen_helper_load_atbl(cpu_gpr[gprn], cpu_env); 562 } 563 564 void spr_read_atbu(DisasContext *ctx, int gprn, int sprn) 565 { 566 gen_helper_load_atbu(cpu_gpr[gprn], cpu_env); 567 } 568 569 #if !defined(CONFIG_USER_ONLY) 570 void 
spr_write_tbl(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_tbl(cpu_env, cpu_gpr[gprn]);
}

void spr_write_tbu(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_tbu(cpu_env, cpu_gpr[gprn]);
}

void spr_write_atbl(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_atbl(cpu_env, cpu_gpr[gprn]);
}

void spr_write_atbu(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_atbu(cpu_env, cpu_gpr[gprn]);
}

#if defined(TARGET_PPC64)
void spr_read_purr(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_purr(cpu_gpr[gprn], cpu_env);
}

void spr_write_purr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_purr(cpu_env, cpu_gpr[gprn]);
}

/* HDECR */
void spr_read_hdecr(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_hdecr(cpu_gpr[gprn], cpu_env);
}

void spr_write_hdecr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_hdecr(cpu_env, cpu_gpr[gprn]);
}

void spr_read_vtb(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_vtb(cpu_gpr[gprn], cpu_env);
}

void spr_write_vtb(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_vtb(cpu_env, cpu_gpr[gprn]);
}

void spr_write_tbu40(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_tbu40(cpu_env, cpu_gpr[gprn]);
}

#endif
#endif

#if !defined(CONFIG_USER_ONLY)
/* IBAT0U...IBAT7U */
/* IBAT0L...IBAT7L */
void spr_read_ibat(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
                  offsetof(CPUPPCState,
                           IBAT[sprn & 1][(sprn - SPR_IBAT0U) / 2]));
}

void spr_read_ibat_h(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
                  offsetof(CPUPPCState,
                           IBAT[sprn & 1][((sprn - SPR_IBAT4U) / 2) + 4]));
}

void spr_write_ibatu(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2);
    gen_helper_store_ibatu(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_ibatu_h(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_IBAT4U) / 2) + 4);
    gen_helper_store_ibatu(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_ibatl(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0L) / 2);
    gen_helper_store_ibatl(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_ibatl_h(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_IBAT4L) / 2) + 4);
    gen_helper_store_ibatl(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

/* DBAT0U...DBAT7U */
/* DBAT0L...DBAT7L */
void spr_read_dbat(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
                  offsetof(CPUPPCState,
                           DBAT[sprn & 1][(sprn - SPR_DBAT0U) / 2]));
}

void spr_read_dbat_h(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
                  offsetof(CPUPPCState,
                           DBAT[sprn & 1][((sprn -
SPR_DBAT4U) / 2) + 4])); 698 } 699 700 void spr_write_dbatu(DisasContext *ctx, int sprn, int gprn) 701 { 702 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_DBAT0U) / 2); 703 gen_helper_store_dbatu(cpu_env, t0, cpu_gpr[gprn]); 704 tcg_temp_free_i32(t0); 705 } 706 707 void spr_write_dbatu_h(DisasContext *ctx, int sprn, int gprn) 708 { 709 TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_DBAT4U) / 2) + 4); 710 gen_helper_store_dbatu(cpu_env, t0, cpu_gpr[gprn]); 711 tcg_temp_free_i32(t0); 712 } 713 714 void spr_write_dbatl(DisasContext *ctx, int sprn, int gprn) 715 { 716 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_DBAT0L) / 2); 717 gen_helper_store_dbatl(cpu_env, t0, cpu_gpr[gprn]); 718 tcg_temp_free_i32(t0); 719 } 720 721 void spr_write_dbatl_h(DisasContext *ctx, int sprn, int gprn) 722 { 723 TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_DBAT4L) / 2) + 4); 724 gen_helper_store_dbatl(cpu_env, t0, cpu_gpr[gprn]); 725 tcg_temp_free_i32(t0); 726 } 727 728 /* SDR1 */ 729 void spr_write_sdr1(DisasContext *ctx, int sprn, int gprn) 730 { 731 gen_helper_store_sdr1(cpu_env, cpu_gpr[gprn]); 732 } 733 734 #if defined(TARGET_PPC64) 735 /* 64 bits PowerPC specific SPRs */ 736 /* PIDR */ 737 void spr_write_pidr(DisasContext *ctx, int sprn, int gprn) 738 { 739 gen_helper_store_pidr(cpu_env, cpu_gpr[gprn]); 740 } 741 742 void spr_write_lpidr(DisasContext *ctx, int sprn, int gprn) 743 { 744 gen_helper_store_lpidr(cpu_env, cpu_gpr[gprn]); 745 } 746 747 void spr_read_hior(DisasContext *ctx, int gprn, int sprn) 748 { 749 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env, offsetof(CPUPPCState, excp_prefix)); 750 } 751 752 void spr_write_hior(DisasContext *ctx, int sprn, int gprn) 753 { 754 TCGv t0 = tcg_temp_new(); 755 tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0x3FFFFF00000ULL); 756 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_prefix)); 757 tcg_temp_free(t0); 758 } 759 void spr_write_ptcr(DisasContext *ctx, int sprn, int gprn) 760 { 761 gen_helper_store_ptcr(cpu_env, cpu_gpr[gprn]); 762 } 763 764 void spr_write_pcr(DisasContext *ctx, int sprn, int gprn) 765 { 766 gen_helper_store_pcr(cpu_env, cpu_gpr[gprn]); 767 } 768 769 /* DPDES */ 770 void spr_read_dpdes(DisasContext *ctx, int gprn, int sprn) 771 { 772 gen_helper_load_dpdes(cpu_gpr[gprn], cpu_env); 773 } 774 775 void spr_write_dpdes(DisasContext *ctx, int sprn, int gprn) 776 { 777 gen_helper_store_dpdes(cpu_env, cpu_gpr[gprn]); 778 } 779 #endif 780 #endif 781 782 /* PowerPC 601 specific registers */ 783 /* RTC */ 784 void spr_read_601_rtcl(DisasContext *ctx, int gprn, int sprn) 785 { 786 gen_helper_load_601_rtcl(cpu_gpr[gprn], cpu_env); 787 } 788 789 void spr_read_601_rtcu(DisasContext *ctx, int gprn, int sprn) 790 { 791 gen_helper_load_601_rtcu(cpu_gpr[gprn], cpu_env); 792 } 793 794 #if !defined(CONFIG_USER_ONLY) 795 void spr_write_601_rtcu(DisasContext *ctx, int sprn, int gprn) 796 { 797 gen_helper_store_601_rtcu(cpu_env, cpu_gpr[gprn]); 798 } 799 800 void spr_write_601_rtcl(DisasContext *ctx, int sprn, int gprn) 801 { 802 gen_helper_store_601_rtcl(cpu_env, cpu_gpr[gprn]); 803 } 804 805 void spr_write_hid0_601(DisasContext *ctx, int sprn, int gprn) 806 { 807 gen_helper_store_hid0_601(cpu_env, cpu_gpr[gprn]); 808 /* Must stop the translation as endianness may have changed */ 809 ctx->base.is_jmp = DISAS_EXIT_UPDATE; 810 } 811 #endif 812 813 /* Unified bats */ 814 #if !defined(CONFIG_USER_ONLY) 815 void spr_read_601_ubat(DisasContext *ctx, int gprn, int sprn) 816 { 817 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env, 818 offsetof(CPUPPCState, 819 IBAT[sprn & 1][(sprn - SPR_IBAT0U) / 2])); 820 } 821 822 
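/*
 * Note on the index arithmetic used by these (and the I/DBAT callbacks
 * above): each BAT is architected as an upper/lower pair of SPRs at
 * consecutive SPR numbers, so (sprn - SPR_IBAT0U) / 2 selects the BAT
 * pair and (sprn & 1) selects the half within QEMU's IBAT array; the
 * "_h" variants add 4 for the second group of BATs.  For example,
 * assuming the usual consecutive numbering where each IBATnL follows
 * IBATnU, IBAT2L sits at SPR_IBAT0U + 5 and so maps to IBAT[1][2].
 * The 601's unified BATs reuse the IBAT storage, which is why the 601
 * callbacks here use SPR_IBAT0U as their base as well.
 */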
void spr_write_601_ubatu(DisasContext *ctx, int sprn, int gprn) 823 { 824 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2); 825 gen_helper_store_601_batl(cpu_env, t0, cpu_gpr[gprn]); 826 tcg_temp_free_i32(t0); 827 } 828 829 void spr_write_601_ubatl(DisasContext *ctx, int sprn, int gprn) 830 { 831 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2); 832 gen_helper_store_601_batu(cpu_env, t0, cpu_gpr[gprn]); 833 tcg_temp_free_i32(t0); 834 } 835 #endif 836 837 /* PowerPC 40x specific registers */ 838 #if !defined(CONFIG_USER_ONLY) 839 void spr_read_40x_pit(DisasContext *ctx, int gprn, int sprn) 840 { 841 gen_icount_io_start(ctx); 842 gen_helper_load_40x_pit(cpu_gpr[gprn], cpu_env); 843 } 844 845 void spr_write_40x_pit(DisasContext *ctx, int sprn, int gprn) 846 { 847 gen_icount_io_start(ctx); 848 gen_helper_store_40x_pit(cpu_env, cpu_gpr[gprn]); 849 } 850 851 void spr_write_40x_dbcr0(DisasContext *ctx, int sprn, int gprn) 852 { 853 gen_icount_io_start(ctx); 854 gen_store_spr(sprn, cpu_gpr[gprn]); 855 gen_helper_store_40x_dbcr0(cpu_env, cpu_gpr[gprn]); 856 /* We must stop translation as we may have rebooted */ 857 ctx->base.is_jmp = DISAS_EXIT_UPDATE; 858 } 859 860 void spr_write_40x_sler(DisasContext *ctx, int sprn, int gprn) 861 { 862 gen_icount_io_start(ctx); 863 gen_helper_store_40x_sler(cpu_env, cpu_gpr[gprn]); 864 } 865 866 void spr_write_booke_tcr(DisasContext *ctx, int sprn, int gprn) 867 { 868 gen_icount_io_start(ctx); 869 gen_helper_store_booke_tcr(cpu_env, cpu_gpr[gprn]); 870 } 871 872 void spr_write_booke_tsr(DisasContext *ctx, int sprn, int gprn) 873 { 874 gen_icount_io_start(ctx); 875 gen_helper_store_booke_tsr(cpu_env, cpu_gpr[gprn]); 876 } 877 #endif 878 879 /* PowerPC 403 specific registers */ 880 /* PBL1 / PBU1 / PBL2 / PBU2 */ 881 #if !defined(CONFIG_USER_ONLY) 882 void spr_read_403_pbr(DisasContext *ctx, int gprn, int sprn) 883 { 884 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env, 885 offsetof(CPUPPCState, pb[sprn - SPR_403_PBL1])); 886 } 887 888 void spr_write_403_pbr(DisasContext *ctx, int sprn, int gprn) 889 { 890 TCGv_i32 t0 = tcg_const_i32(sprn - SPR_403_PBL1); 891 gen_helper_store_403_pbr(cpu_env, t0, cpu_gpr[gprn]); 892 tcg_temp_free_i32(t0); 893 } 894 895 void spr_write_pir(DisasContext *ctx, int sprn, int gprn) 896 { 897 TCGv t0 = tcg_temp_new(); 898 tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0xF); 899 gen_store_spr(SPR_PIR, t0); 900 tcg_temp_free(t0); 901 } 902 #endif 903 904 /* SPE specific registers */ 905 void spr_read_spefscr(DisasContext *ctx, int gprn, int sprn) 906 { 907 TCGv_i32 t0 = tcg_temp_new_i32(); 908 tcg_gen_ld_i32(t0, cpu_env, offsetof(CPUPPCState, spe_fscr)); 909 tcg_gen_extu_i32_tl(cpu_gpr[gprn], t0); 910 tcg_temp_free_i32(t0); 911 } 912 913 void spr_write_spefscr(DisasContext *ctx, int sprn, int gprn) 914 { 915 TCGv_i32 t0 = tcg_temp_new_i32(); 916 tcg_gen_trunc_tl_i32(t0, cpu_gpr[gprn]); 917 tcg_gen_st_i32(t0, cpu_env, offsetof(CPUPPCState, spe_fscr)); 918 tcg_temp_free_i32(t0); 919 } 920 921 #if !defined(CONFIG_USER_ONLY) 922 /* Callback used to write the exception vector base */ 923 void spr_write_excp_prefix(DisasContext *ctx, int sprn, int gprn) 924 { 925 TCGv t0 = tcg_temp_new(); 926 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUPPCState, ivpr_mask)); 927 tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]); 928 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_prefix)); 929 gen_store_spr(sprn, t0); 930 tcg_temp_free(t0); 931 } 932 933 void spr_write_excp_vector(DisasContext *ctx, int sprn, int gprn) 934 { 935 int sprn_offs; 936 937 if (sprn >= SPR_BOOKE_IVOR0 && sprn <= 
SPR_BOOKE_IVOR15) { 938 sprn_offs = sprn - SPR_BOOKE_IVOR0; 939 } else if (sprn >= SPR_BOOKE_IVOR32 && sprn <= SPR_BOOKE_IVOR37) { 940 sprn_offs = sprn - SPR_BOOKE_IVOR32 + 32; 941 } else if (sprn >= SPR_BOOKE_IVOR38 && sprn <= SPR_BOOKE_IVOR42) { 942 sprn_offs = sprn - SPR_BOOKE_IVOR38 + 38; 943 } else { 944 printf("Trying to write an unknown exception vector %d %03x\n", 945 sprn, sprn); 946 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); 947 return; 948 } 949 950 TCGv t0 = tcg_temp_new(); 951 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUPPCState, ivor_mask)); 952 tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]); 953 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_vectors[sprn_offs])); 954 gen_store_spr(sprn, t0); 955 tcg_temp_free(t0); 956 } 957 #endif 958 959 #ifdef TARGET_PPC64 960 #ifndef CONFIG_USER_ONLY 961 void spr_write_amr(DisasContext *ctx, int sprn, int gprn) 962 { 963 TCGv t0 = tcg_temp_new(); 964 TCGv t1 = tcg_temp_new(); 965 TCGv t2 = tcg_temp_new(); 966 967 /* 968 * Note, the HV=1 PR=0 case is handled earlier by simply using 969 * spr_write_generic for HV mode in the SPR table 970 */ 971 972 /* Build insertion mask into t1 based on context */ 973 if (ctx->pr) { 974 gen_load_spr(t1, SPR_UAMOR); 975 } else { 976 gen_load_spr(t1, SPR_AMOR); 977 } 978 979 /* Mask new bits into t2 */ 980 tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]); 981 982 /* Load AMR and clear new bits in t0 */ 983 gen_load_spr(t0, SPR_AMR); 984 tcg_gen_andc_tl(t0, t0, t1); 985 986 /* Or'in new bits and write it out */ 987 tcg_gen_or_tl(t0, t0, t2); 988 gen_store_spr(SPR_AMR, t0); 989 spr_store_dump_spr(SPR_AMR); 990 991 tcg_temp_free(t0); 992 tcg_temp_free(t1); 993 tcg_temp_free(t2); 994 } 995 996 void spr_write_uamor(DisasContext *ctx, int sprn, int gprn) 997 { 998 TCGv t0 = tcg_temp_new(); 999 TCGv t1 = tcg_temp_new(); 1000 TCGv t2 = tcg_temp_new(); 1001 1002 /* 1003 * Note, the HV=1 case is handled earlier by simply using 1004 * spr_write_generic for HV mode in the SPR table 1005 */ 1006 1007 /* Build insertion mask into t1 based on context */ 1008 gen_load_spr(t1, SPR_AMOR); 1009 1010 /* Mask new bits into t2 */ 1011 tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]); 1012 1013 /* Load AMR and clear new bits in t0 */ 1014 gen_load_spr(t0, SPR_UAMOR); 1015 tcg_gen_andc_tl(t0, t0, t1); 1016 1017 /* Or'in new bits and write it out */ 1018 tcg_gen_or_tl(t0, t0, t2); 1019 gen_store_spr(SPR_UAMOR, t0); 1020 spr_store_dump_spr(SPR_UAMOR); 1021 1022 tcg_temp_free(t0); 1023 tcg_temp_free(t1); 1024 tcg_temp_free(t2); 1025 } 1026 1027 void spr_write_iamr(DisasContext *ctx, int sprn, int gprn) 1028 { 1029 TCGv t0 = tcg_temp_new(); 1030 TCGv t1 = tcg_temp_new(); 1031 TCGv t2 = tcg_temp_new(); 1032 1033 /* 1034 * Note, the HV=1 case is handled earlier by simply using 1035 * spr_write_generic for HV mode in the SPR table 1036 */ 1037 1038 /* Build insertion mask into t1 based on context */ 1039 gen_load_spr(t1, SPR_AMOR); 1040 1041 /* Mask new bits into t2 */ 1042 tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]); 1043 1044 /* Load AMR and clear new bits in t0 */ 1045 gen_load_spr(t0, SPR_IAMR); 1046 tcg_gen_andc_tl(t0, t0, t1); 1047 1048 /* Or'in new bits and write it out */ 1049 tcg_gen_or_tl(t0, t0, t2); 1050 gen_store_spr(SPR_IAMR, t0); 1051 spr_store_dump_spr(SPR_IAMR); 1052 1053 tcg_temp_free(t0); 1054 tcg_temp_free(t1); 1055 tcg_temp_free(t2); 1056 } 1057 #endif 1058 #endif 1059 1060 #ifndef CONFIG_USER_ONLY 1061 void spr_read_thrm(DisasContext *ctx, int gprn, int sprn) 1062 { 1063 gen_helper_fixup_thrm(cpu_env); 1064 gen_load_spr(cpu_gpr[gprn], sprn); 
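    /*
     * Unlike spr_read_generic(), the value loaded above is first brought
     * up to date by helper_fixup_thrm(), which (as its name suggests)
     * appears to refresh the thermal-management (THRM) SPR contents in
     * env, so the generic load picks up a current value.
     */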
1065 spr_load_dump_spr(sprn); 1066 } 1067 #endif /* !CONFIG_USER_ONLY */ 1068 1069 #if !defined(CONFIG_USER_ONLY) 1070 void spr_write_e500_l1csr0(DisasContext *ctx, int sprn, int gprn) 1071 { 1072 TCGv t0 = tcg_temp_new(); 1073 1074 tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR0_DCE | L1CSR0_CPE); 1075 gen_store_spr(sprn, t0); 1076 tcg_temp_free(t0); 1077 } 1078 1079 void spr_write_e500_l1csr1(DisasContext *ctx, int sprn, int gprn) 1080 { 1081 TCGv t0 = tcg_temp_new(); 1082 1083 tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR1_ICE | L1CSR1_CPE); 1084 gen_store_spr(sprn, t0); 1085 tcg_temp_free(t0); 1086 } 1087 1088 void spr_write_e500_l2csr0(DisasContext *ctx, int sprn, int gprn) 1089 { 1090 TCGv t0 = tcg_temp_new(); 1091 1092 tcg_gen_andi_tl(t0, cpu_gpr[gprn], 1093 ~(E500_L2CSR0_L2FI | E500_L2CSR0_L2FL | E500_L2CSR0_L2LFC)); 1094 gen_store_spr(sprn, t0); 1095 tcg_temp_free(t0); 1096 } 1097 1098 void spr_write_booke206_mmucsr0(DisasContext *ctx, int sprn, int gprn) 1099 { 1100 gen_helper_booke206_tlbflush(cpu_env, cpu_gpr[gprn]); 1101 } 1102 1103 void spr_write_booke_pid(DisasContext *ctx, int sprn, int gprn) 1104 { 1105 TCGv_i32 t0 = tcg_const_i32(sprn); 1106 gen_helper_booke_setpid(cpu_env, t0, cpu_gpr[gprn]); 1107 tcg_temp_free_i32(t0); 1108 } 1109 void spr_write_eplc(DisasContext *ctx, int sprn, int gprn) 1110 { 1111 gen_helper_booke_set_eplc(cpu_env, cpu_gpr[gprn]); 1112 } 1113 void spr_write_epsc(DisasContext *ctx, int sprn, int gprn) 1114 { 1115 gen_helper_booke_set_epsc(cpu_env, cpu_gpr[gprn]); 1116 } 1117 1118 #endif 1119 1120 #if !defined(CONFIG_USER_ONLY) 1121 void spr_write_mas73(DisasContext *ctx, int sprn, int gprn) 1122 { 1123 TCGv val = tcg_temp_new(); 1124 tcg_gen_ext32u_tl(val, cpu_gpr[gprn]); 1125 gen_store_spr(SPR_BOOKE_MAS3, val); 1126 tcg_gen_shri_tl(val, cpu_gpr[gprn], 32); 1127 gen_store_spr(SPR_BOOKE_MAS7, val); 1128 tcg_temp_free(val); 1129 } 1130 1131 void spr_read_mas73(DisasContext *ctx, int gprn, int sprn) 1132 { 1133 TCGv mas7 = tcg_temp_new(); 1134 TCGv mas3 = tcg_temp_new(); 1135 gen_load_spr(mas7, SPR_BOOKE_MAS7); 1136 tcg_gen_shli_tl(mas7, mas7, 32); 1137 gen_load_spr(mas3, SPR_BOOKE_MAS3); 1138 tcg_gen_or_tl(cpu_gpr[gprn], mas3, mas7); 1139 tcg_temp_free(mas3); 1140 tcg_temp_free(mas7); 1141 } 1142 1143 #endif 1144 1145 #ifdef TARGET_PPC64 1146 static void gen_fscr_facility_check(DisasContext *ctx, int facility_sprn, 1147 int bit, int sprn, int cause) 1148 { 1149 TCGv_i32 t1 = tcg_const_i32(bit); 1150 TCGv_i32 t2 = tcg_const_i32(sprn); 1151 TCGv_i32 t3 = tcg_const_i32(cause); 1152 1153 gen_helper_fscr_facility_check(cpu_env, t1, t2, t3); 1154 1155 tcg_temp_free_i32(t3); 1156 tcg_temp_free_i32(t2); 1157 tcg_temp_free_i32(t1); 1158 } 1159 1160 static void gen_msr_facility_check(DisasContext *ctx, int facility_sprn, 1161 int bit, int sprn, int cause) 1162 { 1163 TCGv_i32 t1 = tcg_const_i32(bit); 1164 TCGv_i32 t2 = tcg_const_i32(sprn); 1165 TCGv_i32 t3 = tcg_const_i32(cause); 1166 1167 gen_helper_msr_facility_check(cpu_env, t1, t2, t3); 1168 1169 tcg_temp_free_i32(t3); 1170 tcg_temp_free_i32(t2); 1171 tcg_temp_free_i32(t1); 1172 } 1173 1174 void spr_read_prev_upper32(DisasContext *ctx, int gprn, int sprn) 1175 { 1176 TCGv spr_up = tcg_temp_new(); 1177 TCGv spr = tcg_temp_new(); 1178 1179 gen_load_spr(spr, sprn - 1); 1180 tcg_gen_shri_tl(spr_up, spr, 32); 1181 tcg_gen_ext32u_tl(cpu_gpr[gprn], spr_up); 1182 1183 tcg_temp_free(spr); 1184 tcg_temp_free(spr_up); 1185 } 1186 1187 void spr_write_prev_upper32(DisasContext *ctx, int sprn, int gprn) 1188 { 1189 TCGv spr = 
tcg_temp_new(); 1190 1191 gen_load_spr(spr, sprn - 1); 1192 tcg_gen_deposit_tl(spr, spr, cpu_gpr[gprn], 32, 32); 1193 gen_store_spr(sprn - 1, spr); 1194 1195 tcg_temp_free(spr); 1196 } 1197 1198 #if !defined(CONFIG_USER_ONLY) 1199 void spr_write_hmer(DisasContext *ctx, int sprn, int gprn) 1200 { 1201 TCGv hmer = tcg_temp_new(); 1202 1203 gen_load_spr(hmer, sprn); 1204 tcg_gen_and_tl(hmer, cpu_gpr[gprn], hmer); 1205 gen_store_spr(sprn, hmer); 1206 spr_store_dump_spr(sprn); 1207 tcg_temp_free(hmer); 1208 } 1209 1210 void spr_write_lpcr(DisasContext *ctx, int sprn, int gprn) 1211 { 1212 gen_helper_store_lpcr(cpu_env, cpu_gpr[gprn]); 1213 } 1214 #endif /* !defined(CONFIG_USER_ONLY) */ 1215 1216 void spr_read_tar(DisasContext *ctx, int gprn, int sprn) 1217 { 1218 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR); 1219 spr_read_generic(ctx, gprn, sprn); 1220 } 1221 1222 void spr_write_tar(DisasContext *ctx, int sprn, int gprn) 1223 { 1224 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR); 1225 spr_write_generic(ctx, sprn, gprn); 1226 } 1227 1228 void spr_read_tm(DisasContext *ctx, int gprn, int sprn) 1229 { 1230 gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM); 1231 spr_read_generic(ctx, gprn, sprn); 1232 } 1233 1234 void spr_write_tm(DisasContext *ctx, int sprn, int gprn) 1235 { 1236 gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM); 1237 spr_write_generic(ctx, sprn, gprn); 1238 } 1239 1240 void spr_read_tm_upper32(DisasContext *ctx, int gprn, int sprn) 1241 { 1242 gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM); 1243 spr_read_prev_upper32(ctx, gprn, sprn); 1244 } 1245 1246 void spr_write_tm_upper32(DisasContext *ctx, int sprn, int gprn) 1247 { 1248 gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM); 1249 spr_write_prev_upper32(ctx, sprn, gprn); 1250 } 1251 1252 void spr_read_ebb(DisasContext *ctx, int gprn, int sprn) 1253 { 1254 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB); 1255 spr_read_generic(ctx, gprn, sprn); 1256 } 1257 1258 void spr_write_ebb(DisasContext *ctx, int sprn, int gprn) 1259 { 1260 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB); 1261 spr_write_generic(ctx, sprn, gprn); 1262 } 1263 1264 void spr_read_ebb_upper32(DisasContext *ctx, int gprn, int sprn) 1265 { 1266 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB); 1267 spr_read_prev_upper32(ctx, gprn, sprn); 1268 } 1269 1270 void spr_write_ebb_upper32(DisasContext *ctx, int sprn, int gprn) 1271 { 1272 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB); 1273 spr_write_prev_upper32(ctx, sprn, gprn); 1274 } 1275 #endif 1276 1277 #define GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \ 1278 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE) 1279 1280 #define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2) \ 1281 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2) 1282 1283 #define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type) \ 1284 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE) 1285 1286 #define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2) \ 1287 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2) 1288 1289 #define GEN_HANDLER_E_2(name, opc1, opc2, opc3, opc4, inval, type, type2) \ 1290 GEN_OPCODE3(name, opc1, opc2, opc3, opc4, inval, type, type2) 1291 1292 #define GEN_HANDLER2_E_2(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) \ 1293 GEN_OPCODE4(name, onam, opc1, opc2, opc3, 
opc4, inval, typ, typ2) 1294 1295 typedef struct opcode_t { 1296 unsigned char opc1, opc2, opc3, opc4; 1297 #if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */ 1298 unsigned char pad[4]; 1299 #endif 1300 opc_handler_t handler; 1301 const char *oname; 1302 } opcode_t; 1303 1304 /* Helpers for priv. check */ 1305 #define GEN_PRIV \ 1306 do { \ 1307 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); return; \ 1308 } while (0) 1309 1310 #if defined(CONFIG_USER_ONLY) 1311 #define CHK_HV GEN_PRIV 1312 #define CHK_SV GEN_PRIV 1313 #define CHK_HVRM GEN_PRIV 1314 #else 1315 #define CHK_HV \ 1316 do { \ 1317 if (unlikely(ctx->pr || !ctx->hv)) { \ 1318 GEN_PRIV; \ 1319 } \ 1320 } while (0) 1321 #define CHK_SV \ 1322 do { \ 1323 if (unlikely(ctx->pr)) { \ 1324 GEN_PRIV; \ 1325 } \ 1326 } while (0) 1327 #define CHK_HVRM \ 1328 do { \ 1329 if (unlikely(ctx->pr || !ctx->hv || ctx->dr)) { \ 1330 GEN_PRIV; \ 1331 } \ 1332 } while (0) 1333 #endif 1334 1335 #define CHK_NONE 1336 1337 /*****************************************************************************/ 1338 /* PowerPC instructions table */ 1339 1340 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \ 1341 { \ 1342 .opc1 = op1, \ 1343 .opc2 = op2, \ 1344 .opc3 = op3, \ 1345 .opc4 = 0xff, \ 1346 .handler = { \ 1347 .inval1 = invl, \ 1348 .type = _typ, \ 1349 .type2 = _typ2, \ 1350 .handler = &gen_##name, \ 1351 }, \ 1352 .oname = stringify(name), \ 1353 } 1354 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \ 1355 { \ 1356 .opc1 = op1, \ 1357 .opc2 = op2, \ 1358 .opc3 = op3, \ 1359 .opc4 = 0xff, \ 1360 .handler = { \ 1361 .inval1 = invl1, \ 1362 .inval2 = invl2, \ 1363 .type = _typ, \ 1364 .type2 = _typ2, \ 1365 .handler = &gen_##name, \ 1366 }, \ 1367 .oname = stringify(name), \ 1368 } 1369 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \ 1370 { \ 1371 .opc1 = op1, \ 1372 .opc2 = op2, \ 1373 .opc3 = op3, \ 1374 .opc4 = 0xff, \ 1375 .handler = { \ 1376 .inval1 = invl, \ 1377 .type = _typ, \ 1378 .type2 = _typ2, \ 1379 .handler = &gen_##name, \ 1380 }, \ 1381 .oname = onam, \ 1382 } 1383 #define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2) \ 1384 { \ 1385 .opc1 = op1, \ 1386 .opc2 = op2, \ 1387 .opc3 = op3, \ 1388 .opc4 = op4, \ 1389 .handler = { \ 1390 .inval1 = invl, \ 1391 .type = _typ, \ 1392 .type2 = _typ2, \ 1393 .handler = &gen_##name, \ 1394 }, \ 1395 .oname = stringify(name), \ 1396 } 1397 #define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2) \ 1398 { \ 1399 .opc1 = op1, \ 1400 .opc2 = op2, \ 1401 .opc3 = op3, \ 1402 .opc4 = op4, \ 1403 .handler = { \ 1404 .inval1 = invl, \ 1405 .type = _typ, \ 1406 .type2 = _typ2, \ 1407 .handler = &gen_##name, \ 1408 }, \ 1409 .oname = onam, \ 1410 } 1411 1412 /* Invalid instruction */ 1413 static void gen_invalid(DisasContext *ctx) 1414 { 1415 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 1416 } 1417 1418 static opc_handler_t invalid_handler = { 1419 .inval1 = 0xFFFFFFFF, 1420 .inval2 = 0xFFFFFFFF, 1421 .type = PPC_NONE, 1422 .type2 = PPC_NONE, 1423 .handler = gen_invalid, 1424 }; 1425 1426 /*** Integer comparison ***/ 1427 1428 static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf) 1429 { 1430 TCGv t0 = tcg_temp_new(); 1431 TCGv t1 = tcg_temp_new(); 1432 TCGv_i32 t = tcg_temp_new_i32(); 1433 1434 tcg_gen_movi_tl(t0, CRF_EQ); 1435 tcg_gen_movi_tl(t1, CRF_LT); 1436 tcg_gen_movcond_tl((s ? 
TCG_COND_LT : TCG_COND_LTU),
                       t0, arg0, arg1, t1, t0);
    tcg_gen_movi_tl(t1, CRF_GT);
    tcg_gen_movcond_tl((s ? TCG_COND_GT : TCG_COND_GTU),
                       t0, arg0, arg1, t1, t0);

    tcg_gen_trunc_tl_i32(t, t0);
    tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so);
    tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free_i32(t);
}

static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
{
    TCGv t0 = tcg_const_tl(arg1);
    gen_op_cmp(arg0, t0, s, crf);
    tcg_temp_free(t0);
}

static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
{
    TCGv t0, t1;
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    if (s) {
        tcg_gen_ext32s_tl(t0, arg0);
        tcg_gen_ext32s_tl(t1, arg1);
    } else {
        tcg_gen_ext32u_tl(t0, arg0);
        tcg_gen_ext32u_tl(t1, arg1);
    }
    gen_op_cmp(t0, t1, s, crf);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
}

static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
{
    TCGv t0 = tcg_const_tl(arg1);
    gen_op_cmp32(arg0, t0, s, crf);
    tcg_temp_free(t0);
}

static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
{
    if (NARROW_MODE(ctx)) {
        gen_op_cmpi32(reg, 0, 1, 0);
    } else {
        gen_op_cmpi(reg, 0, 1, 0);
    }
}

/* cmprb - range comparison: isupper, isalpha, islower */
static void gen_cmprb(DisasContext *ctx)
{
    TCGv_i32 src1 = tcg_temp_new_i32();
    TCGv_i32 src2 = tcg_temp_new_i32();
    TCGv_i32 src2lo = tcg_temp_new_i32();
    TCGv_i32 src2hi = tcg_temp_new_i32();
    TCGv_i32 crf = cpu_crf[crfD(ctx->opcode)];

    tcg_gen_trunc_tl_i32(src1, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_trunc_tl_i32(src2, cpu_gpr[rB(ctx->opcode)]);

    tcg_gen_andi_i32(src1, src1, 0xFF);
    tcg_gen_ext8u_i32(src2lo, src2);
    tcg_gen_shri_i32(src2, src2, 8);
    tcg_gen_ext8u_i32(src2hi, src2);

    tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
    tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
    tcg_gen_and_i32(crf, src2lo, src2hi);

    if (ctx->opcode & 0x00200000) {
        tcg_gen_shri_i32(src2, src2, 8);
        tcg_gen_ext8u_i32(src2lo, src2);
        tcg_gen_shri_i32(src2, src2, 8);
        tcg_gen_ext8u_i32(src2hi, src2);
        tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
        tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
        tcg_gen_and_i32(src2lo, src2lo, src2hi);
        tcg_gen_or_i32(crf, crf, src2lo);
    }
    tcg_gen_shli_i32(crf, crf, CRF_GT_BIT);
    tcg_temp_free_i32(src1);
    tcg_temp_free_i32(src2);
    tcg_temp_free_i32(src2lo);
    tcg_temp_free_i32(src2hi);
}

#if defined(TARGET_PPC64)
/* cmpeqb */
static void gen_cmpeqb(DisasContext *ctx)
{
    gen_helper_cmpeqb(cpu_crf[crfD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                      cpu_gpr[rB(ctx->opcode)]);
}
#endif

/* isel (PowerPC 2.03 specification) */
static void gen_isel(DisasContext *ctx)
{
    uint32_t bi = rC(ctx->opcode);
    uint32_t mask = 0x08 >> (bi & 0x03);
    TCGv t0 = tcg_temp_new();
    TCGv zr;

    tcg_gen_extu_i32_tl(t0, cpu_crf[bi >> 2]);
    tcg_gen_andi_tl(t0, t0, mask);

    zr = tcg_const_tl(0);
    tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rD(ctx->opcode)], t0, zr,
                       rA(ctx->opcode) ?
cpu_gpr[rA(ctx->opcode)] : zr, 1552 cpu_gpr[rB(ctx->opcode)]); 1553 tcg_temp_free(zr); 1554 tcg_temp_free(t0); 1555 } 1556 1557 /* cmpb: PowerPC 2.05 specification */ 1558 static void gen_cmpb(DisasContext *ctx) 1559 { 1560 gen_helper_cmpb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 1561 cpu_gpr[rB(ctx->opcode)]); 1562 } 1563 1564 /*** Integer arithmetic ***/ 1565 1566 static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0, 1567 TCGv arg1, TCGv arg2, int sub) 1568 { 1569 TCGv t0 = tcg_temp_new(); 1570 1571 tcg_gen_xor_tl(cpu_ov, arg0, arg2); 1572 tcg_gen_xor_tl(t0, arg1, arg2); 1573 if (sub) { 1574 tcg_gen_and_tl(cpu_ov, cpu_ov, t0); 1575 } else { 1576 tcg_gen_andc_tl(cpu_ov, cpu_ov, t0); 1577 } 1578 tcg_temp_free(t0); 1579 if (NARROW_MODE(ctx)) { 1580 tcg_gen_extract_tl(cpu_ov, cpu_ov, 31, 1); 1581 if (is_isa300(ctx)) { 1582 tcg_gen_mov_tl(cpu_ov32, cpu_ov); 1583 } 1584 } else { 1585 if (is_isa300(ctx)) { 1586 tcg_gen_extract_tl(cpu_ov32, cpu_ov, 31, 1); 1587 } 1588 tcg_gen_extract_tl(cpu_ov, cpu_ov, TARGET_LONG_BITS - 1, 1); 1589 } 1590 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 1591 } 1592 1593 static inline void gen_op_arith_compute_ca32(DisasContext *ctx, 1594 TCGv res, TCGv arg0, TCGv arg1, 1595 TCGv ca32, int sub) 1596 { 1597 TCGv t0; 1598 1599 if (!is_isa300(ctx)) { 1600 return; 1601 } 1602 1603 t0 = tcg_temp_new(); 1604 if (sub) { 1605 tcg_gen_eqv_tl(t0, arg0, arg1); 1606 } else { 1607 tcg_gen_xor_tl(t0, arg0, arg1); 1608 } 1609 tcg_gen_xor_tl(t0, t0, res); 1610 tcg_gen_extract_tl(ca32, t0, 32, 1); 1611 tcg_temp_free(t0); 1612 } 1613 1614 /* Common add function */ 1615 static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1, 1616 TCGv arg2, TCGv ca, TCGv ca32, 1617 bool add_ca, bool compute_ca, 1618 bool compute_ov, bool compute_rc0) 1619 { 1620 TCGv t0 = ret; 1621 1622 if (compute_ca || compute_ov) { 1623 t0 = tcg_temp_new(); 1624 } 1625 1626 if (compute_ca) { 1627 if (NARROW_MODE(ctx)) { 1628 /* 1629 * Caution: a non-obvious corner case of the spec is that 1630 * we must produce the *entire* 64-bit addition, but 1631 * produce the carry into bit 32. 
1632 */ 1633 TCGv t1 = tcg_temp_new(); 1634 tcg_gen_xor_tl(t1, arg1, arg2); /* add without carry */ 1635 tcg_gen_add_tl(t0, arg1, arg2); 1636 if (add_ca) { 1637 tcg_gen_add_tl(t0, t0, ca); 1638 } 1639 tcg_gen_xor_tl(ca, t0, t1); /* bits changed w/ carry */ 1640 tcg_temp_free(t1); 1641 tcg_gen_extract_tl(ca, ca, 32, 1); 1642 if (is_isa300(ctx)) { 1643 tcg_gen_mov_tl(ca32, ca); 1644 } 1645 } else { 1646 TCGv zero = tcg_const_tl(0); 1647 if (add_ca) { 1648 tcg_gen_add2_tl(t0, ca, arg1, zero, ca, zero); 1649 tcg_gen_add2_tl(t0, ca, t0, ca, arg2, zero); 1650 } else { 1651 tcg_gen_add2_tl(t0, ca, arg1, zero, arg2, zero); 1652 } 1653 gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, ca32, 0); 1654 tcg_temp_free(zero); 1655 } 1656 } else { 1657 tcg_gen_add_tl(t0, arg1, arg2); 1658 if (add_ca) { 1659 tcg_gen_add_tl(t0, t0, ca); 1660 } 1661 } 1662 1663 if (compute_ov) { 1664 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0); 1665 } 1666 if (unlikely(compute_rc0)) { 1667 gen_set_Rc0(ctx, t0); 1668 } 1669 1670 if (t0 != ret) { 1671 tcg_gen_mov_tl(ret, t0); 1672 tcg_temp_free(t0); 1673 } 1674 } 1675 /* Add functions with two operands */ 1676 #define GEN_INT_ARITH_ADD(name, opc3, ca, add_ca, compute_ca, compute_ov) \ 1677 static void glue(gen_, name)(DisasContext *ctx) \ 1678 { \ 1679 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \ 1680 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 1681 ca, glue(ca, 32), \ 1682 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 1683 } 1684 /* Add functions with one operand and one immediate */ 1685 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, ca, \ 1686 add_ca, compute_ca, compute_ov) \ 1687 static void glue(gen_, name)(DisasContext *ctx) \ 1688 { \ 1689 TCGv t0 = tcg_const_tl(const_val); \ 1690 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \ 1691 cpu_gpr[rA(ctx->opcode)], t0, \ 1692 ca, glue(ca, 32), \ 1693 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 1694 tcg_temp_free(t0); \ 1695 } 1696 1697 /* add add. addo addo. */ 1698 GEN_INT_ARITH_ADD(add, 0x08, cpu_ca, 0, 0, 0) 1699 GEN_INT_ARITH_ADD(addo, 0x18, cpu_ca, 0, 0, 1) 1700 /* addc addc. addco addco. */ 1701 GEN_INT_ARITH_ADD(addc, 0x00, cpu_ca, 0, 1, 0) 1702 GEN_INT_ARITH_ADD(addco, 0x10, cpu_ca, 0, 1, 1) 1703 /* adde adde. addeo addeo. */ 1704 GEN_INT_ARITH_ADD(adde, 0x04, cpu_ca, 1, 1, 0) 1705 GEN_INT_ARITH_ADD(addeo, 0x14, cpu_ca, 1, 1, 1) 1706 /* addme addme. addmeo addmeo. */ 1707 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, cpu_ca, 1, 1, 0) 1708 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, cpu_ca, 1, 1, 1) 1709 /* addex */ 1710 GEN_INT_ARITH_ADD(addex, 0x05, cpu_ov, 1, 1, 0); 1711 /* addze addze. 
addzeo addzeo.*/ 1712 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, cpu_ca, 1, 1, 0) 1713 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, cpu_ca, 1, 1, 1) 1714 /* addic addic.*/ 1715 static inline void gen_op_addic(DisasContext *ctx, bool compute_rc0) 1716 { 1717 TCGv c = tcg_const_tl(SIMM(ctx->opcode)); 1718 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1719 c, cpu_ca, cpu_ca32, 0, 1, 0, compute_rc0); 1720 tcg_temp_free(c); 1721 } 1722 1723 static void gen_addic(DisasContext *ctx) 1724 { 1725 gen_op_addic(ctx, 0); 1726 } 1727 1728 static void gen_addic_(DisasContext *ctx) 1729 { 1730 gen_op_addic(ctx, 1); 1731 } 1732 1733 static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1, 1734 TCGv arg2, int sign, int compute_ov) 1735 { 1736 TCGv_i32 t0 = tcg_temp_new_i32(); 1737 TCGv_i32 t1 = tcg_temp_new_i32(); 1738 TCGv_i32 t2 = tcg_temp_new_i32(); 1739 TCGv_i32 t3 = tcg_temp_new_i32(); 1740 1741 tcg_gen_trunc_tl_i32(t0, arg1); 1742 tcg_gen_trunc_tl_i32(t1, arg2); 1743 if (sign) { 1744 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN); 1745 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1); 1746 tcg_gen_and_i32(t2, t2, t3); 1747 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0); 1748 tcg_gen_or_i32(t2, t2, t3); 1749 tcg_gen_movi_i32(t3, 0); 1750 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1); 1751 tcg_gen_div_i32(t3, t0, t1); 1752 tcg_gen_extu_i32_tl(ret, t3); 1753 } else { 1754 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t1, 0); 1755 tcg_gen_movi_i32(t3, 0); 1756 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1); 1757 tcg_gen_divu_i32(t3, t0, t1); 1758 tcg_gen_extu_i32_tl(ret, t3); 1759 } 1760 if (compute_ov) { 1761 tcg_gen_extu_i32_tl(cpu_ov, t2); 1762 if (is_isa300(ctx)) { 1763 tcg_gen_extu_i32_tl(cpu_ov32, t2); 1764 } 1765 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 1766 } 1767 tcg_temp_free_i32(t0); 1768 tcg_temp_free_i32(t1); 1769 tcg_temp_free_i32(t2); 1770 tcg_temp_free_i32(t3); 1771 1772 if (unlikely(Rc(ctx->opcode) != 0)) { 1773 gen_set_Rc0(ctx, ret); 1774 } 1775 } 1776 /* Div functions */ 1777 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \ 1778 static void glue(gen_, name)(DisasContext *ctx) \ 1779 { \ 1780 gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)], \ 1781 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 1782 sign, compute_ov); \ 1783 } 1784 /* divwu divwu. divwuo divwuo. */ 1785 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0); 1786 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1); 1787 /* divw divw. divwo divwo. */ 1788 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0); 1789 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1); 1790 1791 /* div[wd]eu[o][.] 
*/ 1792 #define GEN_DIVE(name, hlpr, compute_ov) \ 1793 static void gen_##name(DisasContext *ctx) \ 1794 { \ 1795 TCGv_i32 t0 = tcg_const_i32(compute_ov); \ 1796 gen_helper_##hlpr(cpu_gpr[rD(ctx->opcode)], cpu_env, \ 1797 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); \ 1798 tcg_temp_free_i32(t0); \ 1799 if (unlikely(Rc(ctx->opcode) != 0)) { \ 1800 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); \ 1801 } \ 1802 } 1803 1804 GEN_DIVE(divweu, divweu, 0); 1805 GEN_DIVE(divweuo, divweu, 1); 1806 GEN_DIVE(divwe, divwe, 0); 1807 GEN_DIVE(divweo, divwe, 1); 1808 1809 #if defined(TARGET_PPC64) 1810 static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1, 1811 TCGv arg2, int sign, int compute_ov) 1812 { 1813 TCGv_i64 t0 = tcg_temp_new_i64(); 1814 TCGv_i64 t1 = tcg_temp_new_i64(); 1815 TCGv_i64 t2 = tcg_temp_new_i64(); 1816 TCGv_i64 t3 = tcg_temp_new_i64(); 1817 1818 tcg_gen_mov_i64(t0, arg1); 1819 tcg_gen_mov_i64(t1, arg2); 1820 if (sign) { 1821 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN); 1822 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1); 1823 tcg_gen_and_i64(t2, t2, t3); 1824 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0); 1825 tcg_gen_or_i64(t2, t2, t3); 1826 tcg_gen_movi_i64(t3, 0); 1827 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1); 1828 tcg_gen_div_i64(ret, t0, t1); 1829 } else { 1830 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t1, 0); 1831 tcg_gen_movi_i64(t3, 0); 1832 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1); 1833 tcg_gen_divu_i64(ret, t0, t1); 1834 } 1835 if (compute_ov) { 1836 tcg_gen_mov_tl(cpu_ov, t2); 1837 if (is_isa300(ctx)) { 1838 tcg_gen_mov_tl(cpu_ov32, t2); 1839 } 1840 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 1841 } 1842 tcg_temp_free_i64(t0); 1843 tcg_temp_free_i64(t1); 1844 tcg_temp_free_i64(t2); 1845 tcg_temp_free_i64(t3); 1846 1847 if (unlikely(Rc(ctx->opcode) != 0)) { 1848 gen_set_Rc0(ctx, ret); 1849 } 1850 } 1851 1852 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \ 1853 static void glue(gen_, name)(DisasContext *ctx) \ 1854 { \ 1855 gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)], \ 1856 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 1857 sign, compute_ov); \ 1858 } 1859 /* divdu divdu. divduo divduo. */ 1860 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0); 1861 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1); 1862 /* divd divd. divdo divdo. 
*/ 1863 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0); 1864 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1); 1865 1866 GEN_DIVE(divdeu, divdeu, 0); 1867 GEN_DIVE(divdeuo, divdeu, 1); 1868 GEN_DIVE(divde, divde, 0); 1869 GEN_DIVE(divdeo, divde, 1); 1870 #endif 1871 1872 static inline void gen_op_arith_modw(DisasContext *ctx, TCGv ret, TCGv arg1, 1873 TCGv arg2, int sign) 1874 { 1875 TCGv_i32 t0 = tcg_temp_new_i32(); 1876 TCGv_i32 t1 = tcg_temp_new_i32(); 1877 1878 tcg_gen_trunc_tl_i32(t0, arg1); 1879 tcg_gen_trunc_tl_i32(t1, arg2); 1880 if (sign) { 1881 TCGv_i32 t2 = tcg_temp_new_i32(); 1882 TCGv_i32 t3 = tcg_temp_new_i32(); 1883 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN); 1884 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1); 1885 tcg_gen_and_i32(t2, t2, t3); 1886 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0); 1887 tcg_gen_or_i32(t2, t2, t3); 1888 tcg_gen_movi_i32(t3, 0); 1889 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1); 1890 tcg_gen_rem_i32(t3, t0, t1); 1891 tcg_gen_ext_i32_tl(ret, t3); 1892 tcg_temp_free_i32(t2); 1893 tcg_temp_free_i32(t3); 1894 } else { 1895 TCGv_i32 t2 = tcg_const_i32(1); 1896 TCGv_i32 t3 = tcg_const_i32(0); 1897 tcg_gen_movcond_i32(TCG_COND_EQ, t1, t1, t3, t2, t1); 1898 tcg_gen_remu_i32(t3, t0, t1); 1899 tcg_gen_extu_i32_tl(ret, t3); 1900 tcg_temp_free_i32(t2); 1901 tcg_temp_free_i32(t3); 1902 } 1903 tcg_temp_free_i32(t0); 1904 tcg_temp_free_i32(t1); 1905 } 1906 1907 #define GEN_INT_ARITH_MODW(name, opc3, sign) \ 1908 static void glue(gen_, name)(DisasContext *ctx) \ 1909 { \ 1910 gen_op_arith_modw(ctx, cpu_gpr[rD(ctx->opcode)], \ 1911 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 1912 sign); \ 1913 } 1914 1915 GEN_INT_ARITH_MODW(moduw, 0x08, 0); 1916 GEN_INT_ARITH_MODW(modsw, 0x18, 1); 1917 1918 #if defined(TARGET_PPC64) 1919 static inline void gen_op_arith_modd(DisasContext *ctx, TCGv ret, TCGv arg1, 1920 TCGv arg2, int sign) 1921 { 1922 TCGv_i64 t0 = tcg_temp_new_i64(); 1923 TCGv_i64 t1 = tcg_temp_new_i64(); 1924 1925 tcg_gen_mov_i64(t0, arg1); 1926 tcg_gen_mov_i64(t1, arg2); 1927 if (sign) { 1928 TCGv_i64 t2 = tcg_temp_new_i64(); 1929 TCGv_i64 t3 = tcg_temp_new_i64(); 1930 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN); 1931 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1); 1932 tcg_gen_and_i64(t2, t2, t3); 1933 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0); 1934 tcg_gen_or_i64(t2, t2, t3); 1935 tcg_gen_movi_i64(t3, 0); 1936 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1); 1937 tcg_gen_rem_i64(ret, t0, t1); 1938 tcg_temp_free_i64(t2); 1939 tcg_temp_free_i64(t3); 1940 } else { 1941 TCGv_i64 t2 = tcg_const_i64(1); 1942 TCGv_i64 t3 = tcg_const_i64(0); 1943 tcg_gen_movcond_i64(TCG_COND_EQ, t1, t1, t3, t2, t1); 1944 tcg_gen_remu_i64(ret, t0, t1); 1945 tcg_temp_free_i64(t2); 1946 tcg_temp_free_i64(t3); 1947 } 1948 tcg_temp_free_i64(t0); 1949 tcg_temp_free_i64(t1); 1950 } 1951 1952 #define GEN_INT_ARITH_MODD(name, opc3, sign) \ 1953 static void glue(gen_, name)(DisasContext *ctx) \ 1954 { \ 1955 gen_op_arith_modd(ctx, cpu_gpr[rD(ctx->opcode)], \ 1956 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 1957 sign); \ 1958 } 1959 1960 GEN_INT_ARITH_MODD(modud, 0x08, 0); 1961 GEN_INT_ARITH_MODD(modsd, 0x18, 1); 1962 #endif 1963 1964 /* mulhw mulhw. 
*/ 1965 static void gen_mulhw(DisasContext *ctx) 1966 { 1967 TCGv_i32 t0 = tcg_temp_new_i32(); 1968 TCGv_i32 t1 = tcg_temp_new_i32(); 1969 1970 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); 1971 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); 1972 tcg_gen_muls2_i32(t0, t1, t0, t1); 1973 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1); 1974 tcg_temp_free_i32(t0); 1975 tcg_temp_free_i32(t1); 1976 if (unlikely(Rc(ctx->opcode) != 0)) { 1977 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1978 } 1979 } 1980 1981 /* mulhwu mulhwu. */ 1982 static void gen_mulhwu(DisasContext *ctx) 1983 { 1984 TCGv_i32 t0 = tcg_temp_new_i32(); 1985 TCGv_i32 t1 = tcg_temp_new_i32(); 1986 1987 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); 1988 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); 1989 tcg_gen_mulu2_i32(t0, t1, t0, t1); 1990 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1); 1991 tcg_temp_free_i32(t0); 1992 tcg_temp_free_i32(t1); 1993 if (unlikely(Rc(ctx->opcode) != 0)) { 1994 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1995 } 1996 } 1997 1998 /* mullw mullw. */ 1999 static void gen_mullw(DisasContext *ctx) 2000 { 2001 #if defined(TARGET_PPC64) 2002 TCGv_i64 t0, t1; 2003 t0 = tcg_temp_new_i64(); 2004 t1 = tcg_temp_new_i64(); 2005 tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]); 2006 tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]); 2007 tcg_gen_mul_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); 2008 tcg_temp_free(t0); 2009 tcg_temp_free(t1); 2010 #else 2011 tcg_gen_mul_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 2012 cpu_gpr[rB(ctx->opcode)]); 2013 #endif 2014 if (unlikely(Rc(ctx->opcode) != 0)) { 2015 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2016 } 2017 } 2018 2019 /* mullwo mullwo. */ 2020 static void gen_mullwo(DisasContext *ctx) 2021 { 2022 TCGv_i32 t0 = tcg_temp_new_i32(); 2023 TCGv_i32 t1 = tcg_temp_new_i32(); 2024 2025 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); 2026 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); 2027 tcg_gen_muls2_i32(t0, t1, t0, t1); 2028 #if defined(TARGET_PPC64) 2029 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); 2030 #else 2031 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], t0); 2032 #endif 2033 2034 tcg_gen_sari_i32(t0, t0, 31); 2035 tcg_gen_setcond_i32(TCG_COND_NE, t0, t0, t1); 2036 tcg_gen_extu_i32_tl(cpu_ov, t0); 2037 if (is_isa300(ctx)) { 2038 tcg_gen_mov_tl(cpu_ov32, cpu_ov); 2039 } 2040 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 2041 2042 tcg_temp_free_i32(t0); 2043 tcg_temp_free_i32(t1); 2044 if (unlikely(Rc(ctx->opcode) != 0)) { 2045 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2046 } 2047 } 2048 2049 /* mulli */ 2050 static void gen_mulli(DisasContext *ctx) 2051 { 2052 tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 2053 SIMM(ctx->opcode)); 2054 } 2055 2056 #if defined(TARGET_PPC64) 2057 /* mulhd mulhd. */ 2058 static void gen_mulhd(DisasContext *ctx) 2059 { 2060 TCGv lo = tcg_temp_new(); 2061 tcg_gen_muls2_tl(lo, cpu_gpr[rD(ctx->opcode)], 2062 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2063 tcg_temp_free(lo); 2064 if (unlikely(Rc(ctx->opcode) != 0)) { 2065 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2066 } 2067 } 2068 2069 /* mulhdu mulhdu. */ 2070 static void gen_mulhdu(DisasContext *ctx) 2071 { 2072 TCGv lo = tcg_temp_new(); 2073 tcg_gen_mulu2_tl(lo, cpu_gpr[rD(ctx->opcode)], 2074 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2075 tcg_temp_free(lo); 2076 if (unlikely(Rc(ctx->opcode) != 0)) { 2077 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2078 } 2079 } 2080 2081 /* mulld mulld. 
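 *
 * mulld keeps the low 64 bits of the product.  For the overflow form
 * (mulldo, below) the full 128-bit product is generated with
 * tcg_gen_muls2_i64(); the multiply overflowed iff the high half differs
 * from the sign-extension of the low half, which is what the sari/setcond
 * pair computes into OV.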
*/ 2082 static void gen_mulld(DisasContext *ctx) 2083 { 2084 tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 2085 cpu_gpr[rB(ctx->opcode)]); 2086 if (unlikely(Rc(ctx->opcode) != 0)) { 2087 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2088 } 2089 } 2090 2091 /* mulldo mulldo. */ 2092 static void gen_mulldo(DisasContext *ctx) 2093 { 2094 TCGv_i64 t0 = tcg_temp_new_i64(); 2095 TCGv_i64 t1 = tcg_temp_new_i64(); 2096 2097 tcg_gen_muls2_i64(t0, t1, cpu_gpr[rA(ctx->opcode)], 2098 cpu_gpr[rB(ctx->opcode)]); 2099 tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], t0); 2100 2101 tcg_gen_sari_i64(t0, t0, 63); 2102 tcg_gen_setcond_i64(TCG_COND_NE, cpu_ov, t0, t1); 2103 if (is_isa300(ctx)) { 2104 tcg_gen_mov_tl(cpu_ov32, cpu_ov); 2105 } 2106 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 2107 2108 tcg_temp_free_i64(t0); 2109 tcg_temp_free_i64(t1); 2110 2111 if (unlikely(Rc(ctx->opcode) != 0)) { 2112 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2113 } 2114 } 2115 #endif 2116 2117 /* Common subf function */ 2118 static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1, 2119 TCGv arg2, bool add_ca, bool compute_ca, 2120 bool compute_ov, bool compute_rc0) 2121 { 2122 TCGv t0 = ret; 2123 2124 if (compute_ca || compute_ov) { 2125 t0 = tcg_temp_new(); 2126 } 2127 2128 if (compute_ca) { 2129 /* dest = ~arg1 + arg2 [+ ca]. */ 2130 if (NARROW_MODE(ctx)) { 2131 /* 2132 * Caution: a non-obvious corner case of the spec is that 2133 * we must produce the *entire* 64-bit addition, but 2134 * produce the carry into bit 32. 2135 */ 2136 TCGv inv1 = tcg_temp_new(); 2137 TCGv t1 = tcg_temp_new(); 2138 tcg_gen_not_tl(inv1, arg1); 2139 if (add_ca) { 2140 tcg_gen_add_tl(t0, arg2, cpu_ca); 2141 } else { 2142 tcg_gen_addi_tl(t0, arg2, 1); 2143 } 2144 tcg_gen_xor_tl(t1, arg2, inv1); /* add without carry */ 2145 tcg_gen_add_tl(t0, t0, inv1); 2146 tcg_temp_free(inv1); 2147 tcg_gen_xor_tl(cpu_ca, t0, t1); /* bits changes w/ carry */ 2148 tcg_temp_free(t1); 2149 tcg_gen_extract_tl(cpu_ca, cpu_ca, 32, 1); 2150 if (is_isa300(ctx)) { 2151 tcg_gen_mov_tl(cpu_ca32, cpu_ca); 2152 } 2153 } else if (add_ca) { 2154 TCGv zero, inv1 = tcg_temp_new(); 2155 tcg_gen_not_tl(inv1, arg1); 2156 zero = tcg_const_tl(0); 2157 tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero); 2158 tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero); 2159 gen_op_arith_compute_ca32(ctx, t0, inv1, arg2, cpu_ca32, 0); 2160 tcg_temp_free(zero); 2161 tcg_temp_free(inv1); 2162 } else { 2163 tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1); 2164 tcg_gen_sub_tl(t0, arg2, arg1); 2165 gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, cpu_ca32, 1); 2166 } 2167 } else if (add_ca) { 2168 /* 2169 * Since we're ignoring carry-out, we can simplify the 2170 * standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1. 
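         *
         * (In two's complement ~arg1 == -arg1 - 1, so
         * ~arg1 + arg2 + ca == arg2 - arg1 + ca - 1: the same value
         * without having to materialise the complement of arg1.)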
2171 */ 2172 tcg_gen_sub_tl(t0, arg2, arg1); 2173 tcg_gen_add_tl(t0, t0, cpu_ca); 2174 tcg_gen_subi_tl(t0, t0, 1); 2175 } else { 2176 tcg_gen_sub_tl(t0, arg2, arg1); 2177 } 2178 2179 if (compute_ov) { 2180 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1); 2181 } 2182 if (unlikely(compute_rc0)) { 2183 gen_set_Rc0(ctx, t0); 2184 } 2185 2186 if (t0 != ret) { 2187 tcg_gen_mov_tl(ret, t0); 2188 tcg_temp_free(t0); 2189 } 2190 } 2191 /* Sub functions with Two operands functions */ 2192 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \ 2193 static void glue(gen_, name)(DisasContext *ctx) \ 2194 { \ 2195 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \ 2196 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 2197 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 2198 } 2199 /* Sub functions with one operand and one immediate */ 2200 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \ 2201 add_ca, compute_ca, compute_ov) \ 2202 static void glue(gen_, name)(DisasContext *ctx) \ 2203 { \ 2204 TCGv t0 = tcg_const_tl(const_val); \ 2205 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \ 2206 cpu_gpr[rA(ctx->opcode)], t0, \ 2207 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 2208 tcg_temp_free(t0); \ 2209 } 2210 /* subf subf. subfo subfo. */ 2211 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0) 2212 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1) 2213 /* subfc subfc. subfco subfco. */ 2214 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0) 2215 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1) 2216 /* subfe subfe. subfeo subfo. */ 2217 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0) 2218 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1) 2219 /* subfme subfme. subfmeo subfmeo. */ 2220 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0) 2221 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1) 2222 /* subfze subfze. subfzeo subfzeo.*/ 2223 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0) 2224 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1) 2225 2226 /* subfic */ 2227 static void gen_subfic(DisasContext *ctx) 2228 { 2229 TCGv c = tcg_const_tl(SIMM(ctx->opcode)); 2230 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 2231 c, 0, 1, 0, 0); 2232 tcg_temp_free(c); 2233 } 2234 2235 /* neg neg. nego nego. */ 2236 static inline void gen_op_arith_neg(DisasContext *ctx, bool compute_ov) 2237 { 2238 TCGv zero = tcg_const_tl(0); 2239 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 2240 zero, 0, 0, compute_ov, Rc(ctx->opcode)); 2241 tcg_temp_free(zero); 2242 } 2243 2244 static void gen_neg(DisasContext *ctx) 2245 { 2246 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 2247 if (unlikely(Rc(ctx->opcode))) { 2248 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2249 } 2250 } 2251 2252 static void gen_nego(DisasContext *ctx) 2253 { 2254 gen_op_arith_neg(ctx, 1); 2255 } 2256 2257 /*** Integer logical ***/ 2258 #define GEN_LOGICAL2(name, tcg_op, opc, type) \ 2259 static void glue(gen_, name)(DisasContext *ctx) \ 2260 { \ 2261 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], \ 2262 cpu_gpr[rB(ctx->opcode)]); \ 2263 if (unlikely(Rc(ctx->opcode) != 0)) \ 2264 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \ 2265 } 2266 2267 #define GEN_LOGICAL1(name, tcg_op, opc, type) \ 2268 static void glue(gen_, name)(DisasContext *ctx) \ 2269 { \ 2270 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); \ 2271 if (unlikely(Rc(ctx->opcode) != 0)) \ 2272 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \ 2273 } 2274 2275 /* and & and. 
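 *
 * For illustration, GEN_LOGICAL2(and, tcg_gen_and_tl, ...) expands to
 * roughly the following (the opc/type arguments are only consumed by the
 * opcode tables, not by the generated body):
 *
 *   static void gen_and(DisasContext *ctx)
 *   {
 *       tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
 *                      cpu_gpr[rB(ctx->opcode)]);
 *       if (unlikely(Rc(ctx->opcode) != 0))
 *           gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
 *   }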
*/ 2276 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER); 2277 /* andc & andc. */ 2278 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER); 2279 2280 /* andi. */ 2281 static void gen_andi_(DisasContext *ctx) 2282 { 2283 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 2284 UIMM(ctx->opcode)); 2285 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2286 } 2287 2288 /* andis. */ 2289 static void gen_andis_(DisasContext *ctx) 2290 { 2291 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 2292 UIMM(ctx->opcode) << 16); 2293 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2294 } 2295 2296 /* cntlzw */ 2297 static void gen_cntlzw(DisasContext *ctx) 2298 { 2299 TCGv_i32 t = tcg_temp_new_i32(); 2300 2301 tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]); 2302 tcg_gen_clzi_i32(t, t, 32); 2303 tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t); 2304 tcg_temp_free_i32(t); 2305 2306 if (unlikely(Rc(ctx->opcode) != 0)) { 2307 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2308 } 2309 } 2310 2311 /* cnttzw */ 2312 static void gen_cnttzw(DisasContext *ctx) 2313 { 2314 TCGv_i32 t = tcg_temp_new_i32(); 2315 2316 tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]); 2317 tcg_gen_ctzi_i32(t, t, 32); 2318 tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t); 2319 tcg_temp_free_i32(t); 2320 2321 if (unlikely(Rc(ctx->opcode) != 0)) { 2322 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2323 } 2324 } 2325 2326 /* eqv & eqv. */ 2327 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER); 2328 /* extsb & extsb. */ 2329 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER); 2330 /* extsh & extsh. */ 2331 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER); 2332 /* nand & nand. */ 2333 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER); 2334 /* nor & nor. */ 2335 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER); 2336 2337 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) 2338 static void gen_pause(DisasContext *ctx) 2339 { 2340 TCGv_i32 t0 = tcg_const_i32(0); 2341 tcg_gen_st_i32(t0, cpu_env, 2342 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted)); 2343 tcg_temp_free_i32(t0); 2344 2345 /* Stop translation, this gives other CPUs a chance to run */ 2346 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 2347 } 2348 #endif /* defined(TARGET_PPC64) */ 2349 2350 /* or & or. */ 2351 static void gen_or(DisasContext *ctx) 2352 { 2353 int rs, ra, rb; 2354 2355 rs = rS(ctx->opcode); 2356 ra = rA(ctx->opcode); 2357 rb = rB(ctx->opcode); 2358 /* Optimisation for mr. 
ri case */
    if (rs != ra || rs != rb) {
        if (rs != rb) {
            tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
        } else {
            tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);
        }
        if (unlikely(Rc(ctx->opcode) != 0)) {
            gen_set_Rc0(ctx, cpu_gpr[ra]);
        }
    } else if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rs]);
#if defined(TARGET_PPC64)
    } else if (rs != 0) { /* 0 is nop */
        int prio = 0;

        switch (rs) {
        case 1:
            /* Set process priority to low */
            prio = 2;
            break;
        case 6:
            /* Set process priority to medium-low */
            prio = 3;
            break;
        case 2:
            /* Set process priority to normal */
            prio = 4;
            break;
#if !defined(CONFIG_USER_ONLY)
        case 31:
            if (!ctx->pr) {
                /* Set process priority to very low */
                prio = 1;
            }
            break;
        case 5:
            if (!ctx->pr) {
                /* Set process priority to medium-high */
                prio = 5;
            }
            break;
        case 3:
            if (!ctx->pr) {
                /* Set process priority to high */
                prio = 6;
            }
            break;
        case 7:
            if (ctx->hv && !ctx->pr) {
                /* Set process priority to very high */
                prio = 7;
            }
            break;
#endif
        default:
            break;
        }
        if (prio) {
            TCGv t0 = tcg_temp_new();
            gen_load_spr(t0, SPR_PPR);
            tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
            tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
            gen_store_spr(SPR_PPR, t0);
            tcg_temp_free(t0);
        }
#if !defined(CONFIG_USER_ONLY)
        /*
         * Pause out of TCG otherwise spin loops with smt_low eat too
         * much CPU and the kernel hangs. This applies to all
         * encodings other than no-op, e.g., miso(rs=26), yield(27),
         * mdoio(29), mdoom(30), and all currently undefined.
         */
        gen_pause(ctx);
#endif
#endif
    }
}
/* orc & orc. */
GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);

/* xor & xor.
*/ 2440 static void gen_xor(DisasContext *ctx) 2441 { 2442 /* Optimisation for "set to zero" case */ 2443 if (rS(ctx->opcode) != rB(ctx->opcode)) { 2444 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 2445 cpu_gpr[rB(ctx->opcode)]); 2446 } else { 2447 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 2448 } 2449 if (unlikely(Rc(ctx->opcode) != 0)) { 2450 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2451 } 2452 } 2453 2454 /* ori */ 2455 static void gen_ori(DisasContext *ctx) 2456 { 2457 target_ulong uimm = UIMM(ctx->opcode); 2458 2459 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 2460 return; 2461 } 2462 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm); 2463 } 2464 2465 /* oris */ 2466 static void gen_oris(DisasContext *ctx) 2467 { 2468 target_ulong uimm = UIMM(ctx->opcode); 2469 2470 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 2471 /* NOP */ 2472 return; 2473 } 2474 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 2475 uimm << 16); 2476 } 2477 2478 /* xori */ 2479 static void gen_xori(DisasContext *ctx) 2480 { 2481 target_ulong uimm = UIMM(ctx->opcode); 2482 2483 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 2484 /* NOP */ 2485 return; 2486 } 2487 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm); 2488 } 2489 2490 /* xoris */ 2491 static void gen_xoris(DisasContext *ctx) 2492 { 2493 target_ulong uimm = UIMM(ctx->opcode); 2494 2495 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 2496 /* NOP */ 2497 return; 2498 } 2499 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 2500 uimm << 16); 2501 } 2502 2503 /* popcntb : PowerPC 2.03 specification */ 2504 static void gen_popcntb(DisasContext *ctx) 2505 { 2506 gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 2507 } 2508 2509 static void gen_popcntw(DisasContext *ctx) 2510 { 2511 #if defined(TARGET_PPC64) 2512 gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 2513 #else 2514 tcg_gen_ctpop_i32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 2515 #endif 2516 } 2517 2518 #if defined(TARGET_PPC64) 2519 /* popcntd: PowerPC 2.06 specification */ 2520 static void gen_popcntd(DisasContext *ctx) 2521 { 2522 tcg_gen_ctpop_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 2523 } 2524 #endif 2525 2526 /* prtyw: PowerPC 2.05 specification */ 2527 static void gen_prtyw(DisasContext *ctx) 2528 { 2529 TCGv ra = cpu_gpr[rA(ctx->opcode)]; 2530 TCGv rs = cpu_gpr[rS(ctx->opcode)]; 2531 TCGv t0 = tcg_temp_new(); 2532 tcg_gen_shri_tl(t0, rs, 16); 2533 tcg_gen_xor_tl(ra, rs, t0); 2534 tcg_gen_shri_tl(t0, ra, 8); 2535 tcg_gen_xor_tl(ra, ra, t0); 2536 tcg_gen_andi_tl(ra, ra, (target_ulong)0x100000001ULL); 2537 tcg_temp_free(t0); 2538 } 2539 2540 #if defined(TARGET_PPC64) 2541 /* prtyd: PowerPC 2.05 specification */ 2542 static void gen_prtyd(DisasContext *ctx) 2543 { 2544 TCGv ra = cpu_gpr[rA(ctx->opcode)]; 2545 TCGv rs = cpu_gpr[rS(ctx->opcode)]; 2546 TCGv t0 = tcg_temp_new(); 2547 tcg_gen_shri_tl(t0, rs, 32); 2548 tcg_gen_xor_tl(ra, rs, t0); 2549 tcg_gen_shri_tl(t0, ra, 16); 2550 tcg_gen_xor_tl(ra, ra, t0); 2551 tcg_gen_shri_tl(t0, ra, 8); 2552 tcg_gen_xor_tl(ra, ra, t0); 2553 tcg_gen_andi_tl(ra, ra, 1); 2554 tcg_temp_free(t0); 2555 } 2556 #endif 2557 2558 #if defined(TARGET_PPC64) 2559 /* bpermd */ 2560 static void gen_bpermd(DisasContext *ctx) 2561 { 2562 gen_helper_bpermd(cpu_gpr[rA(ctx->opcode)], 2563 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2564 } 2565 #endif 2566 
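/*
 * Implementation note on the instructions above: prtyw/prtyd compute the
 * parity of the least-significant bit of each byte by XOR-folding the
 * register onto itself (>>16 then >>8 for words, >>32/>>16/>>8 for
 * doublewords) and masking all but bit 0 of each result word; e.g. prtyw
 * of 0x01000100 gives 0 (two low byte-lsbs set) while 0x01000000 gives 1.
 * bpermd is implemented in a helper; in essence each byte of rS is an
 * index selecting one bit of rB (indices >= 64 select 0) and the eight
 * selected bits form the low byte of rA.
 */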
2567 #if defined(TARGET_PPC64) 2568 /* extsw & extsw. */ 2569 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B); 2570 2571 /* cntlzd */ 2572 static void gen_cntlzd(DisasContext *ctx) 2573 { 2574 tcg_gen_clzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64); 2575 if (unlikely(Rc(ctx->opcode) != 0)) { 2576 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2577 } 2578 } 2579 2580 /* cnttzd */ 2581 static void gen_cnttzd(DisasContext *ctx) 2582 { 2583 tcg_gen_ctzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64); 2584 if (unlikely(Rc(ctx->opcode) != 0)) { 2585 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2586 } 2587 } 2588 2589 /* darn */ 2590 static void gen_darn(DisasContext *ctx) 2591 { 2592 int l = L(ctx->opcode); 2593 2594 if (l > 2) { 2595 tcg_gen_movi_i64(cpu_gpr[rD(ctx->opcode)], -1); 2596 } else { 2597 gen_icount_io_start(ctx); 2598 if (l == 0) { 2599 gen_helper_darn32(cpu_gpr[rD(ctx->opcode)]); 2600 } else { 2601 /* Return 64-bit random for both CRN and RRN */ 2602 gen_helper_darn64(cpu_gpr[rD(ctx->opcode)]); 2603 } 2604 } 2605 } 2606 #endif 2607 2608 /*** Integer rotate ***/ 2609 2610 /* rlwimi & rlwimi. */ 2611 static void gen_rlwimi(DisasContext *ctx) 2612 { 2613 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2614 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2615 uint32_t sh = SH(ctx->opcode); 2616 uint32_t mb = MB(ctx->opcode); 2617 uint32_t me = ME(ctx->opcode); 2618 2619 if (sh == (31 - me) && mb <= me) { 2620 tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1); 2621 } else { 2622 target_ulong mask; 2623 bool mask_in_32b = true; 2624 TCGv t1; 2625 2626 #if defined(TARGET_PPC64) 2627 mb += 32; 2628 me += 32; 2629 #endif 2630 mask = MASK(mb, me); 2631 2632 #if defined(TARGET_PPC64) 2633 if (mask > 0xffffffffu) { 2634 mask_in_32b = false; 2635 } 2636 #endif 2637 t1 = tcg_temp_new(); 2638 if (mask_in_32b) { 2639 TCGv_i32 t0 = tcg_temp_new_i32(); 2640 tcg_gen_trunc_tl_i32(t0, t_rs); 2641 tcg_gen_rotli_i32(t0, t0, sh); 2642 tcg_gen_extu_i32_tl(t1, t0); 2643 tcg_temp_free_i32(t0); 2644 } else { 2645 #if defined(TARGET_PPC64) 2646 tcg_gen_deposit_i64(t1, t_rs, t_rs, 32, 32); 2647 tcg_gen_rotli_i64(t1, t1, sh); 2648 #else 2649 g_assert_not_reached(); 2650 #endif 2651 } 2652 2653 tcg_gen_andi_tl(t1, t1, mask); 2654 tcg_gen_andi_tl(t_ra, t_ra, ~mask); 2655 tcg_gen_or_tl(t_ra, t_ra, t1); 2656 tcg_temp_free(t1); 2657 } 2658 if (unlikely(Rc(ctx->opcode) != 0)) { 2659 gen_set_Rc0(ctx, t_ra); 2660 } 2661 } 2662 2663 /* rlwinm & rlwinm. 
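 *
 * rlwinm rotates the low word of rS left by SH and ANDs it with
 * MASK(MB, ME).  The common forms collapse to single TCG ops below:
 * e.g. "slwi rA,rS,8" (rlwinm rA,rS,8,0,23) takes the deposit_z path,
 * while "clrlwi rA,rS,16" (rlwinm rA,rS,0,16,31) reduces to extracting
 * the low 16 bits, i.e. an AND with 0xffff.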
*/ 2664 static void gen_rlwinm(DisasContext *ctx) 2665 { 2666 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2667 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2668 int sh = SH(ctx->opcode); 2669 int mb = MB(ctx->opcode); 2670 int me = ME(ctx->opcode); 2671 int len = me - mb + 1; 2672 int rsh = (32 - sh) & 31; 2673 2674 if (sh != 0 && len > 0 && me == (31 - sh)) { 2675 tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len); 2676 } else if (me == 31 && rsh + len <= 32) { 2677 tcg_gen_extract_tl(t_ra, t_rs, rsh, len); 2678 } else { 2679 target_ulong mask; 2680 bool mask_in_32b = true; 2681 #if defined(TARGET_PPC64) 2682 mb += 32; 2683 me += 32; 2684 #endif 2685 mask = MASK(mb, me); 2686 #if defined(TARGET_PPC64) 2687 if (mask > 0xffffffffu) { 2688 mask_in_32b = false; 2689 } 2690 #endif 2691 if (mask_in_32b) { 2692 if (sh == 0) { 2693 tcg_gen_andi_tl(t_ra, t_rs, mask); 2694 } else { 2695 TCGv_i32 t0 = tcg_temp_new_i32(); 2696 tcg_gen_trunc_tl_i32(t0, t_rs); 2697 tcg_gen_rotli_i32(t0, t0, sh); 2698 tcg_gen_andi_i32(t0, t0, mask); 2699 tcg_gen_extu_i32_tl(t_ra, t0); 2700 tcg_temp_free_i32(t0); 2701 } 2702 } else { 2703 #if defined(TARGET_PPC64) 2704 tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32); 2705 tcg_gen_rotli_i64(t_ra, t_ra, sh); 2706 tcg_gen_andi_i64(t_ra, t_ra, mask); 2707 #else 2708 g_assert_not_reached(); 2709 #endif 2710 } 2711 } 2712 if (unlikely(Rc(ctx->opcode) != 0)) { 2713 gen_set_Rc0(ctx, t_ra); 2714 } 2715 } 2716 2717 /* rlwnm & rlwnm. */ 2718 static void gen_rlwnm(DisasContext *ctx) 2719 { 2720 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2721 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2722 TCGv t_rb = cpu_gpr[rB(ctx->opcode)]; 2723 uint32_t mb = MB(ctx->opcode); 2724 uint32_t me = ME(ctx->opcode); 2725 target_ulong mask; 2726 bool mask_in_32b = true; 2727 2728 #if defined(TARGET_PPC64) 2729 mb += 32; 2730 me += 32; 2731 #endif 2732 mask = MASK(mb, me); 2733 2734 #if defined(TARGET_PPC64) 2735 if (mask > 0xffffffffu) { 2736 mask_in_32b = false; 2737 } 2738 #endif 2739 if (mask_in_32b) { 2740 TCGv_i32 t0 = tcg_temp_new_i32(); 2741 TCGv_i32 t1 = tcg_temp_new_i32(); 2742 tcg_gen_trunc_tl_i32(t0, t_rb); 2743 tcg_gen_trunc_tl_i32(t1, t_rs); 2744 tcg_gen_andi_i32(t0, t0, 0x1f); 2745 tcg_gen_rotl_i32(t1, t1, t0); 2746 tcg_gen_extu_i32_tl(t_ra, t1); 2747 tcg_temp_free_i32(t0); 2748 tcg_temp_free_i32(t1); 2749 } else { 2750 #if defined(TARGET_PPC64) 2751 TCGv_i64 t0 = tcg_temp_new_i64(); 2752 tcg_gen_andi_i64(t0, t_rb, 0x1f); 2753 tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32); 2754 tcg_gen_rotl_i64(t_ra, t_ra, t0); 2755 tcg_temp_free_i64(t0); 2756 #else 2757 g_assert_not_reached(); 2758 #endif 2759 } 2760 2761 tcg_gen_andi_tl(t_ra, t_ra, mask); 2762 2763 if (unlikely(Rc(ctx->opcode) != 0)) { 2764 gen_set_Rc0(ctx, t_ra); 2765 } 2766 } 2767 2768 #if defined(TARGET_PPC64) 2769 #define GEN_PPC64_R2(name, opc1, opc2) \ 2770 static void glue(gen_, name##0)(DisasContext *ctx) \ 2771 { \ 2772 gen_##name(ctx, 0); \ 2773 } \ 2774 \ 2775 static void glue(gen_, name##1)(DisasContext *ctx) \ 2776 { \ 2777 gen_##name(ctx, 1); \ 2778 } 2779 #define GEN_PPC64_R4(name, opc1, opc2) \ 2780 static void glue(gen_, name##0)(DisasContext *ctx) \ 2781 { \ 2782 gen_##name(ctx, 0, 0); \ 2783 } \ 2784 \ 2785 static void glue(gen_, name##1)(DisasContext *ctx) \ 2786 { \ 2787 gen_##name(ctx, 0, 1); \ 2788 } \ 2789 \ 2790 static void glue(gen_, name##2)(DisasContext *ctx) \ 2791 { \ 2792 gen_##name(ctx, 1, 0); \ 2793 } \ 2794 \ 2795 static void glue(gen_, name##3)(DisasContext *ctx) \ 2796 { \ 2797 gen_##name(ctx, 1, 1); \ 2798 } 2799 2800 static void 
gen_rldinm(DisasContext *ctx, int mb, int me, int sh) 2801 { 2802 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2803 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2804 int len = me - mb + 1; 2805 int rsh = (64 - sh) & 63; 2806 2807 if (sh != 0 && len > 0 && me == (63 - sh)) { 2808 tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len); 2809 } else if (me == 63 && rsh + len <= 64) { 2810 tcg_gen_extract_tl(t_ra, t_rs, rsh, len); 2811 } else { 2812 tcg_gen_rotli_tl(t_ra, t_rs, sh); 2813 tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me)); 2814 } 2815 if (unlikely(Rc(ctx->opcode) != 0)) { 2816 gen_set_Rc0(ctx, t_ra); 2817 } 2818 } 2819 2820 /* rldicl - rldicl. */ 2821 static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn) 2822 { 2823 uint32_t sh, mb; 2824 2825 sh = SH(ctx->opcode) | (shn << 5); 2826 mb = MB(ctx->opcode) | (mbn << 5); 2827 gen_rldinm(ctx, mb, 63, sh); 2828 } 2829 GEN_PPC64_R4(rldicl, 0x1E, 0x00); 2830 2831 /* rldicr - rldicr. */ 2832 static inline void gen_rldicr(DisasContext *ctx, int men, int shn) 2833 { 2834 uint32_t sh, me; 2835 2836 sh = SH(ctx->opcode) | (shn << 5); 2837 me = MB(ctx->opcode) | (men << 5); 2838 gen_rldinm(ctx, 0, me, sh); 2839 } 2840 GEN_PPC64_R4(rldicr, 0x1E, 0x02); 2841 2842 /* rldic - rldic. */ 2843 static inline void gen_rldic(DisasContext *ctx, int mbn, int shn) 2844 { 2845 uint32_t sh, mb; 2846 2847 sh = SH(ctx->opcode) | (shn << 5); 2848 mb = MB(ctx->opcode) | (mbn << 5); 2849 gen_rldinm(ctx, mb, 63 - sh, sh); 2850 } 2851 GEN_PPC64_R4(rldic, 0x1E, 0x04); 2852 2853 static void gen_rldnm(DisasContext *ctx, int mb, int me) 2854 { 2855 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2856 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2857 TCGv t_rb = cpu_gpr[rB(ctx->opcode)]; 2858 TCGv t0; 2859 2860 t0 = tcg_temp_new(); 2861 tcg_gen_andi_tl(t0, t_rb, 0x3f); 2862 tcg_gen_rotl_tl(t_ra, t_rs, t0); 2863 tcg_temp_free(t0); 2864 2865 tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me)); 2866 if (unlikely(Rc(ctx->opcode) != 0)) { 2867 gen_set_Rc0(ctx, t_ra); 2868 } 2869 } 2870 2871 /* rldcl - rldcl. */ 2872 static inline void gen_rldcl(DisasContext *ctx, int mbn) 2873 { 2874 uint32_t mb; 2875 2876 mb = MB(ctx->opcode) | (mbn << 5); 2877 gen_rldnm(ctx, mb, 63); 2878 } 2879 GEN_PPC64_R2(rldcl, 0x1E, 0x08); 2880 2881 /* rldcr - rldcr. */ 2882 static inline void gen_rldcr(DisasContext *ctx, int men) 2883 { 2884 uint32_t me; 2885 2886 me = MB(ctx->opcode) | (men << 5); 2887 gen_rldnm(ctx, 0, me); 2888 } 2889 GEN_PPC64_R2(rldcr, 0x1E, 0x09); 2890 2891 /* rldimi - rldimi. */ 2892 static void gen_rldimi(DisasContext *ctx, int mbn, int shn) 2893 { 2894 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2895 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2896 uint32_t sh = SH(ctx->opcode) | (shn << 5); 2897 uint32_t mb = MB(ctx->opcode) | (mbn << 5); 2898 uint32_t me = 63 - sh; 2899 2900 if (mb <= me) { 2901 tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1); 2902 } else { 2903 target_ulong mask = MASK(mb, me); 2904 TCGv t1 = tcg_temp_new(); 2905 2906 tcg_gen_rotli_tl(t1, t_rs, sh); 2907 tcg_gen_andi_tl(t1, t1, mask); 2908 tcg_gen_andi_tl(t_ra, t_ra, ~mask); 2909 tcg_gen_or_tl(t_ra, t_ra, t1); 2910 tcg_temp_free(t1); 2911 } 2912 if (unlikely(Rc(ctx->opcode) != 0)) { 2913 gen_set_Rc0(ctx, t_ra); 2914 } 2915 } 2916 GEN_PPC64_R4(rldimi, 0x1E, 0x06); 2917 #endif 2918 2919 /*** Integer shift ***/ 2920 2921 /* slw & slw. 
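 *
 * slw uses the low 6 bits of rB as the shift count and must return 0 for
 * counts 32..63.  The shli/sari pair below moves bit 5 of rB into the
 * sign position and smears it across the register, yielding an all-ones
 * mask exactly when that bit is set; andc then clears rS in that case,
 * and the remaining low 5 bits provide the actual shift.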
*/ 2922 static void gen_slw(DisasContext *ctx) 2923 { 2924 TCGv t0, t1; 2925 2926 t0 = tcg_temp_new(); 2927 /* AND rS with a mask that is 0 when rB >= 0x20 */ 2928 #if defined(TARGET_PPC64) 2929 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a); 2930 tcg_gen_sari_tl(t0, t0, 0x3f); 2931 #else 2932 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a); 2933 tcg_gen_sari_tl(t0, t0, 0x1f); 2934 #endif 2935 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 2936 t1 = tcg_temp_new(); 2937 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f); 2938 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 2939 tcg_temp_free(t1); 2940 tcg_temp_free(t0); 2941 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 2942 if (unlikely(Rc(ctx->opcode) != 0)) { 2943 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2944 } 2945 } 2946 2947 /* sraw & sraw. */ 2948 static void gen_sraw(DisasContext *ctx) 2949 { 2950 gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], cpu_env, 2951 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2952 if (unlikely(Rc(ctx->opcode) != 0)) { 2953 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2954 } 2955 } 2956 2957 /* srawi & srawi. */ 2958 static void gen_srawi(DisasContext *ctx) 2959 { 2960 int sh = SH(ctx->opcode); 2961 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 2962 TCGv src = cpu_gpr[rS(ctx->opcode)]; 2963 if (sh == 0) { 2964 tcg_gen_ext32s_tl(dst, src); 2965 tcg_gen_movi_tl(cpu_ca, 0); 2966 if (is_isa300(ctx)) { 2967 tcg_gen_movi_tl(cpu_ca32, 0); 2968 } 2969 } else { 2970 TCGv t0; 2971 tcg_gen_ext32s_tl(dst, src); 2972 tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1); 2973 t0 = tcg_temp_new(); 2974 tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1); 2975 tcg_gen_and_tl(cpu_ca, cpu_ca, t0); 2976 tcg_temp_free(t0); 2977 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0); 2978 if (is_isa300(ctx)) { 2979 tcg_gen_mov_tl(cpu_ca32, cpu_ca); 2980 } 2981 tcg_gen_sari_tl(dst, dst, sh); 2982 } 2983 if (unlikely(Rc(ctx->opcode) != 0)) { 2984 gen_set_Rc0(ctx, dst); 2985 } 2986 } 2987 2988 /* srw & srw. */ 2989 static void gen_srw(DisasContext *ctx) 2990 { 2991 TCGv t0, t1; 2992 2993 t0 = tcg_temp_new(); 2994 /* AND rS with a mask that is 0 when rB >= 0x20 */ 2995 #if defined(TARGET_PPC64) 2996 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a); 2997 tcg_gen_sari_tl(t0, t0, 0x3f); 2998 #else 2999 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a); 3000 tcg_gen_sari_tl(t0, t0, 0x1f); 3001 #endif 3002 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 3003 tcg_gen_ext32u_tl(t0, t0); 3004 t1 = tcg_temp_new(); 3005 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f); 3006 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 3007 tcg_temp_free(t1); 3008 tcg_temp_free(t0); 3009 if (unlikely(Rc(ctx->opcode) != 0)) { 3010 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 3011 } 3012 } 3013 3014 #if defined(TARGET_PPC64) 3015 /* sld & sld. */ 3016 static void gen_sld(DisasContext *ctx) 3017 { 3018 TCGv t0, t1; 3019 3020 t0 = tcg_temp_new(); 3021 /* AND rS with a mask that is 0 when rB >= 0x40 */ 3022 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39); 3023 tcg_gen_sari_tl(t0, t0, 0x3f); 3024 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 3025 t1 = tcg_temp_new(); 3026 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f); 3027 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 3028 tcg_temp_free(t1); 3029 tcg_temp_free(t0); 3030 if (unlikely(Rc(ctx->opcode) != 0)) { 3031 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 3032 } 3033 } 3034 3035 /* srad & srad. 
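 *
 * For the arithmetic right shifts CA (and CA32 on ISA v3.0) is set iff
 * the source is negative and at least one 1 bit is shifted out.
 * srawi/sradi compute this inline: the andi keeps the bits that fall off,
 * the sari smears the sign, and the final setcond collapses their AND to
 * 0 or 1.  sraw/srad take the shift amount from a register and are
 * handled by helpers instead.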
*/ 3036 static void gen_srad(DisasContext *ctx) 3037 { 3038 gen_helper_srad(cpu_gpr[rA(ctx->opcode)], cpu_env, 3039 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 3040 if (unlikely(Rc(ctx->opcode) != 0)) { 3041 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 3042 } 3043 } 3044 /* sradi & sradi. */ 3045 static inline void gen_sradi(DisasContext *ctx, int n) 3046 { 3047 int sh = SH(ctx->opcode) + (n << 5); 3048 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 3049 TCGv src = cpu_gpr[rS(ctx->opcode)]; 3050 if (sh == 0) { 3051 tcg_gen_mov_tl(dst, src); 3052 tcg_gen_movi_tl(cpu_ca, 0); 3053 if (is_isa300(ctx)) { 3054 tcg_gen_movi_tl(cpu_ca32, 0); 3055 } 3056 } else { 3057 TCGv t0; 3058 tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1); 3059 t0 = tcg_temp_new(); 3060 tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1); 3061 tcg_gen_and_tl(cpu_ca, cpu_ca, t0); 3062 tcg_temp_free(t0); 3063 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0); 3064 if (is_isa300(ctx)) { 3065 tcg_gen_mov_tl(cpu_ca32, cpu_ca); 3066 } 3067 tcg_gen_sari_tl(dst, src, sh); 3068 } 3069 if (unlikely(Rc(ctx->opcode) != 0)) { 3070 gen_set_Rc0(ctx, dst); 3071 } 3072 } 3073 3074 static void gen_sradi0(DisasContext *ctx) 3075 { 3076 gen_sradi(ctx, 0); 3077 } 3078 3079 static void gen_sradi1(DisasContext *ctx) 3080 { 3081 gen_sradi(ctx, 1); 3082 } 3083 3084 /* extswsli & extswsli. */ 3085 static inline void gen_extswsli(DisasContext *ctx, int n) 3086 { 3087 int sh = SH(ctx->opcode) + (n << 5); 3088 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 3089 TCGv src = cpu_gpr[rS(ctx->opcode)]; 3090 3091 tcg_gen_ext32s_tl(dst, src); 3092 tcg_gen_shli_tl(dst, dst, sh); 3093 if (unlikely(Rc(ctx->opcode) != 0)) { 3094 gen_set_Rc0(ctx, dst); 3095 } 3096 } 3097 3098 static void gen_extswsli0(DisasContext *ctx) 3099 { 3100 gen_extswsli(ctx, 0); 3101 } 3102 3103 static void gen_extswsli1(DisasContext *ctx) 3104 { 3105 gen_extswsli(ctx, 1); 3106 } 3107 3108 /* srd & srd. 
*/ 3109 static void gen_srd(DisasContext *ctx) 3110 { 3111 TCGv t0, t1; 3112 3113 t0 = tcg_temp_new(); 3114 /* AND rS with a mask that is 0 when rB >= 0x40 */ 3115 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39); 3116 tcg_gen_sari_tl(t0, t0, 0x3f); 3117 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 3118 t1 = tcg_temp_new(); 3119 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f); 3120 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 3121 tcg_temp_free(t1); 3122 tcg_temp_free(t0); 3123 if (unlikely(Rc(ctx->opcode) != 0)) { 3124 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 3125 } 3126 } 3127 #endif 3128 3129 /*** Addressing modes ***/ 3130 /* Register indirect with immediate index : EA = (rA|0) + SIMM */ 3131 static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA, 3132 target_long maskl) 3133 { 3134 target_long simm = SIMM(ctx->opcode); 3135 3136 simm &= ~maskl; 3137 if (rA(ctx->opcode) == 0) { 3138 if (NARROW_MODE(ctx)) { 3139 simm = (uint32_t)simm; 3140 } 3141 tcg_gen_movi_tl(EA, simm); 3142 } else if (likely(simm != 0)) { 3143 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm); 3144 if (NARROW_MODE(ctx)) { 3145 tcg_gen_ext32u_tl(EA, EA); 3146 } 3147 } else { 3148 if (NARROW_MODE(ctx)) { 3149 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]); 3150 } else { 3151 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]); 3152 } 3153 } 3154 } 3155 3156 static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA) 3157 { 3158 if (rA(ctx->opcode) == 0) { 3159 if (NARROW_MODE(ctx)) { 3160 tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]); 3161 } else { 3162 tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]); 3163 } 3164 } else { 3165 tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 3166 if (NARROW_MODE(ctx)) { 3167 tcg_gen_ext32u_tl(EA, EA); 3168 } 3169 } 3170 } 3171 3172 static inline void gen_addr_register(DisasContext *ctx, TCGv EA) 3173 { 3174 if (rA(ctx->opcode) == 0) { 3175 tcg_gen_movi_tl(EA, 0); 3176 } else if (NARROW_MODE(ctx)) { 3177 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]); 3178 } else { 3179 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]); 3180 } 3181 } 3182 3183 static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1, 3184 target_long val) 3185 { 3186 tcg_gen_addi_tl(ret, arg1, val); 3187 if (NARROW_MODE(ctx)) { 3188 tcg_gen_ext32u_tl(ret, ret); 3189 } 3190 } 3191 3192 static inline void gen_align_no_le(DisasContext *ctx) 3193 { 3194 gen_exception_err(ctx, POWERPC_EXCP_ALIGN, 3195 (ctx->opcode & 0x03FF0000) | POWERPC_EXCP_ALIGN_LE); 3196 } 3197 3198 /*** Integer load ***/ 3199 #define DEF_MEMOP(op) ((op) | ctx->default_tcg_memop_mask) 3200 #define BSWAP_MEMOP(op) ((op) | (ctx->default_tcg_memop_mask ^ MO_BSWAP)) 3201 3202 #define GEN_QEMU_LOAD_TL(ldop, op) \ 3203 static void glue(gen_qemu_, ldop)(DisasContext *ctx, \ 3204 TCGv val, \ 3205 TCGv addr) \ 3206 { \ 3207 tcg_gen_qemu_ld_tl(val, addr, ctx->mem_idx, op); \ 3208 } 3209 3210 GEN_QEMU_LOAD_TL(ld8u, DEF_MEMOP(MO_UB)) 3211 GEN_QEMU_LOAD_TL(ld16u, DEF_MEMOP(MO_UW)) 3212 GEN_QEMU_LOAD_TL(ld16s, DEF_MEMOP(MO_SW)) 3213 GEN_QEMU_LOAD_TL(ld32u, DEF_MEMOP(MO_UL)) 3214 GEN_QEMU_LOAD_TL(ld32s, DEF_MEMOP(MO_SL)) 3215 3216 GEN_QEMU_LOAD_TL(ld16ur, BSWAP_MEMOP(MO_UW)) 3217 GEN_QEMU_LOAD_TL(ld32ur, BSWAP_MEMOP(MO_UL)) 3218 3219 #define GEN_QEMU_LOAD_64(ldop, op) \ 3220 static void glue(gen_qemu_, glue(ldop, _i64))(DisasContext *ctx, \ 3221 TCGv_i64 val, \ 3222 TCGv addr) \ 3223 { \ 3224 tcg_gen_qemu_ld_i64(val, addr, ctx->mem_idx, op); \ 3225 } 3226 3227 GEN_QEMU_LOAD_64(ld8u, DEF_MEMOP(MO_UB)) 
3228 GEN_QEMU_LOAD_64(ld16u, DEF_MEMOP(MO_UW)) 3229 GEN_QEMU_LOAD_64(ld32u, DEF_MEMOP(MO_UL)) 3230 GEN_QEMU_LOAD_64(ld32s, DEF_MEMOP(MO_SL)) 3231 GEN_QEMU_LOAD_64(ld64, DEF_MEMOP(MO_Q)) 3232 3233 #if defined(TARGET_PPC64) 3234 GEN_QEMU_LOAD_64(ld64ur, BSWAP_MEMOP(MO_Q)) 3235 #endif 3236 3237 #define GEN_QEMU_STORE_TL(stop, op) \ 3238 static void glue(gen_qemu_, stop)(DisasContext *ctx, \ 3239 TCGv val, \ 3240 TCGv addr) \ 3241 { \ 3242 tcg_gen_qemu_st_tl(val, addr, ctx->mem_idx, op); \ 3243 } 3244 3245 #if defined(TARGET_PPC64) || !defined(CONFIG_USER_ONLY) 3246 GEN_QEMU_STORE_TL(st8, DEF_MEMOP(MO_UB)) 3247 #endif 3248 GEN_QEMU_STORE_TL(st16, DEF_MEMOP(MO_UW)) 3249 GEN_QEMU_STORE_TL(st32, DEF_MEMOP(MO_UL)) 3250 3251 GEN_QEMU_STORE_TL(st16r, BSWAP_MEMOP(MO_UW)) 3252 GEN_QEMU_STORE_TL(st32r, BSWAP_MEMOP(MO_UL)) 3253 3254 #define GEN_QEMU_STORE_64(stop, op) \ 3255 static void glue(gen_qemu_, glue(stop, _i64))(DisasContext *ctx, \ 3256 TCGv_i64 val, \ 3257 TCGv addr) \ 3258 { \ 3259 tcg_gen_qemu_st_i64(val, addr, ctx->mem_idx, op); \ 3260 } 3261 3262 GEN_QEMU_STORE_64(st8, DEF_MEMOP(MO_UB)) 3263 GEN_QEMU_STORE_64(st16, DEF_MEMOP(MO_UW)) 3264 GEN_QEMU_STORE_64(st32, DEF_MEMOP(MO_UL)) 3265 GEN_QEMU_STORE_64(st64, DEF_MEMOP(MO_Q)) 3266 3267 #if defined(TARGET_PPC64) 3268 GEN_QEMU_STORE_64(st64r, BSWAP_MEMOP(MO_Q)) 3269 #endif 3270 3271 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \ 3272 static void glue(gen_, name##x)(DisasContext *ctx) \ 3273 { \ 3274 TCGv EA; \ 3275 chk; \ 3276 gen_set_access_type(ctx, ACCESS_INT); \ 3277 EA = tcg_temp_new(); \ 3278 gen_addr_reg_index(ctx, EA); \ 3279 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 3280 tcg_temp_free(EA); \ 3281 } 3282 3283 #define GEN_LDX(name, ldop, opc2, opc3, type) \ 3284 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_NONE) 3285 3286 #define GEN_LDX_HVRM(name, ldop, opc2, opc3, type) \ 3287 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_HVRM) 3288 3289 #define GEN_LDEPX(name, ldop, opc2, opc3) \ 3290 static void glue(gen_, name##epx)(DisasContext *ctx) \ 3291 { \ 3292 TCGv EA; \ 3293 CHK_SV; \ 3294 gen_set_access_type(ctx, ACCESS_INT); \ 3295 EA = tcg_temp_new(); \ 3296 gen_addr_reg_index(ctx, EA); \ 3297 tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_LOAD, ldop);\ 3298 tcg_temp_free(EA); \ 3299 } 3300 3301 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02) 3302 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08) 3303 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00) 3304 #if defined(TARGET_PPC64) 3305 GEN_LDEPX(ld, DEF_MEMOP(MO_Q), 0x1D, 0x00) 3306 #endif 3307 3308 #if defined(TARGET_PPC64) 3309 /* CI load/store variants */ 3310 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST) 3311 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x15, PPC_CILDST) 3312 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST) 3313 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST) 3314 3315 /* lq */ 3316 static void gen_lq(DisasContext *ctx) 3317 { 3318 int ra, rd; 3319 TCGv EA, hi, lo; 3320 3321 /* lq is a legal user mode instruction starting in ISA 2.07 */ 3322 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 3323 bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 3324 3325 if (!legal_in_user_mode && ctx->pr) { 3326 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); 3327 return; 3328 } 3329 3330 if (!le_is_supported && ctx->le_mode) { 3331 gen_align_no_le(ctx); 3332 return; 3333 } 3334 ra = rA(ctx->opcode); 3335 rd = rD(ctx->opcode); 3336 if (unlikely((rd & 1) || rd == ra)) { 3337 gen_inval_exception(ctx, 
POWERPC_EXCP_INVAL_INVAL); 3338 return; 3339 } 3340 3341 gen_set_access_type(ctx, ACCESS_INT); 3342 EA = tcg_temp_new(); 3343 gen_addr_imm_index(ctx, EA, 0x0F); 3344 3345 /* Note that the low part is always in RD+1, even in LE mode. */ 3346 lo = cpu_gpr[rd + 1]; 3347 hi = cpu_gpr[rd]; 3348 3349 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3350 if (HAVE_ATOMIC128) { 3351 TCGv_i32 oi = tcg_temp_new_i32(); 3352 if (ctx->le_mode) { 3353 tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ, ctx->mem_idx)); 3354 gen_helper_lq_le_parallel(lo, cpu_env, EA, oi); 3355 } else { 3356 tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ, ctx->mem_idx)); 3357 gen_helper_lq_be_parallel(lo, cpu_env, EA, oi); 3358 } 3359 tcg_temp_free_i32(oi); 3360 tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh)); 3361 } else { 3362 /* Restart with exclusive lock. */ 3363 gen_helper_exit_atomic(cpu_env); 3364 ctx->base.is_jmp = DISAS_NORETURN; 3365 } 3366 } else if (ctx->le_mode) { 3367 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEQ); 3368 gen_addr_add(ctx, EA, EA, 8); 3369 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEQ); 3370 } else { 3371 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEQ); 3372 gen_addr_add(ctx, EA, EA, 8); 3373 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEQ); 3374 } 3375 tcg_temp_free(EA); 3376 } 3377 #endif 3378 3379 /*** Integer store ***/ 3380 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \ 3381 static void glue(gen_, name##x)(DisasContext *ctx) \ 3382 { \ 3383 TCGv EA; \ 3384 chk; \ 3385 gen_set_access_type(ctx, ACCESS_INT); \ 3386 EA = tcg_temp_new(); \ 3387 gen_addr_reg_index(ctx, EA); \ 3388 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 3389 tcg_temp_free(EA); \ 3390 } 3391 #define GEN_STX(name, stop, opc2, opc3, type) \ 3392 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_NONE) 3393 3394 #define GEN_STX_HVRM(name, stop, opc2, opc3, type) \ 3395 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_HVRM) 3396 3397 #define GEN_STEPX(name, stop, opc2, opc3) \ 3398 static void glue(gen_, name##epx)(DisasContext *ctx) \ 3399 { \ 3400 TCGv EA; \ 3401 CHK_SV; \ 3402 gen_set_access_type(ctx, ACCESS_INT); \ 3403 EA = tcg_temp_new(); \ 3404 gen_addr_reg_index(ctx, EA); \ 3405 tcg_gen_qemu_st_tl( \ 3406 cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_STORE, stop); \ 3407 tcg_temp_free(EA); \ 3408 } 3409 3410 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06) 3411 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C) 3412 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04) 3413 #if defined(TARGET_PPC64) 3414 GEN_STEPX(std, DEF_MEMOP(MO_Q), 0x1d, 0x04) 3415 #endif 3416 3417 #if defined(TARGET_PPC64) 3418 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST) 3419 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST) 3420 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST) 3421 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST) 3422 3423 static void gen_std(DisasContext *ctx) 3424 { 3425 int rs; 3426 TCGv EA; 3427 3428 rs = rS(ctx->opcode); 3429 if ((ctx->opcode & 0x3) == 0x2) { /* stq */ 3430 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 3431 bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 3432 TCGv hi, lo; 3433 3434 if (!(ctx->insns_flags & PPC_64BX)) { 3435 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3436 } 3437 3438 if (!legal_in_user_mode && ctx->pr) { 3439 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); 3440 return; 3441 } 3442 3443 if (!le_is_supported && ctx->le_mode) { 3444 gen_align_no_le(ctx); 3445 return; 3446 } 3447 3448 if (unlikely(rs & 1)) { 3449 
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3450 return; 3451 } 3452 gen_set_access_type(ctx, ACCESS_INT); 3453 EA = tcg_temp_new(); 3454 gen_addr_imm_index(ctx, EA, 0x03); 3455 3456 /* Note that the low part is always in RS+1, even in LE mode. */ 3457 lo = cpu_gpr[rs + 1]; 3458 hi = cpu_gpr[rs]; 3459 3460 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3461 if (HAVE_ATOMIC128) { 3462 TCGv_i32 oi = tcg_temp_new_i32(); 3463 if (ctx->le_mode) { 3464 tcg_gen_movi_i32(oi, make_memop_idx(MO_LE | MO_128, 3465 ctx->mem_idx)); 3466 gen_helper_stq_le_parallel(cpu_env, EA, lo, hi, oi); 3467 } else { 3468 tcg_gen_movi_i32(oi, make_memop_idx(MO_BE | MO_128, 3469 ctx->mem_idx)); 3470 gen_helper_stq_be_parallel(cpu_env, EA, lo, hi, oi); 3471 } 3472 tcg_temp_free_i32(oi); 3473 } else { 3474 /* Restart with exclusive lock. */ 3475 gen_helper_exit_atomic(cpu_env); 3476 ctx->base.is_jmp = DISAS_NORETURN; 3477 } 3478 } else if (ctx->le_mode) { 3479 tcg_gen_qemu_st_i64(lo, EA, ctx->mem_idx, MO_LEQ); 3480 gen_addr_add(ctx, EA, EA, 8); 3481 tcg_gen_qemu_st_i64(hi, EA, ctx->mem_idx, MO_LEQ); 3482 } else { 3483 tcg_gen_qemu_st_i64(hi, EA, ctx->mem_idx, MO_BEQ); 3484 gen_addr_add(ctx, EA, EA, 8); 3485 tcg_gen_qemu_st_i64(lo, EA, ctx->mem_idx, MO_BEQ); 3486 } 3487 tcg_temp_free(EA); 3488 } else { 3489 /* std / stdu */ 3490 if (Rc(ctx->opcode)) { 3491 if (unlikely(rA(ctx->opcode) == 0)) { 3492 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3493 return; 3494 } 3495 } 3496 gen_set_access_type(ctx, ACCESS_INT); 3497 EA = tcg_temp_new(); 3498 gen_addr_imm_index(ctx, EA, 0x03); 3499 gen_qemu_st64_i64(ctx, cpu_gpr[rs], EA); 3500 if (Rc(ctx->opcode)) { 3501 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); 3502 } 3503 tcg_temp_free(EA); 3504 } 3505 } 3506 #endif 3507 /*** Integer load and store with byte reverse ***/ 3508 3509 /* lhbrx */ 3510 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER); 3511 3512 /* lwbrx */ 3513 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER); 3514 3515 #if defined(TARGET_PPC64) 3516 /* ldbrx */ 3517 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE); 3518 /* stdbrx */ 3519 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE); 3520 #endif /* TARGET_PPC64 */ 3521 3522 /* sthbrx */ 3523 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER); 3524 /* stwbrx */ 3525 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER); 3526 3527 /*** Integer load and store multiple ***/ 3528 3529 /* lmw */ 3530 static void gen_lmw(DisasContext *ctx) 3531 { 3532 TCGv t0; 3533 TCGv_i32 t1; 3534 3535 if (ctx->le_mode) { 3536 gen_align_no_le(ctx); 3537 return; 3538 } 3539 gen_set_access_type(ctx, ACCESS_INT); 3540 t0 = tcg_temp_new(); 3541 t1 = tcg_const_i32(rD(ctx->opcode)); 3542 gen_addr_imm_index(ctx, t0, 0); 3543 gen_helper_lmw(cpu_env, t0, t1); 3544 tcg_temp_free(t0); 3545 tcg_temp_free_i32(t1); 3546 } 3547 3548 /* stmw */ 3549 static void gen_stmw(DisasContext *ctx) 3550 { 3551 TCGv t0; 3552 TCGv_i32 t1; 3553 3554 if (ctx->le_mode) { 3555 gen_align_no_le(ctx); 3556 return; 3557 } 3558 gen_set_access_type(ctx, ACCESS_INT); 3559 t0 = tcg_temp_new(); 3560 t1 = tcg_const_i32(rS(ctx->opcode)); 3561 gen_addr_imm_index(ctx, t0, 0); 3562 gen_helper_stmw(cpu_env, t0, t1); 3563 tcg_temp_free(t0); 3564 tcg_temp_free_i32(t1); 3565 } 3566 3567 /*** Integer load and store strings ***/ 3568 3569 /* lswi */ 3570 /* 3571 * PowerPC32 specification says we must generate an exception if rA is 3572 * in the range of registers to be loaded. In an other hand, IBM says 3573 * this is valid, but rA won't be loaded. 
For now, I'll follow the 3574 * spec... 3575 */ 3576 static void gen_lswi(DisasContext *ctx) 3577 { 3578 TCGv t0; 3579 TCGv_i32 t1, t2; 3580 int nb = NB(ctx->opcode); 3581 int start = rD(ctx->opcode); 3582 int ra = rA(ctx->opcode); 3583 int nr; 3584 3585 if (ctx->le_mode) { 3586 gen_align_no_le(ctx); 3587 return; 3588 } 3589 if (nb == 0) { 3590 nb = 32; 3591 } 3592 nr = DIV_ROUND_UP(nb, 4); 3593 if (unlikely(lsw_reg_in_range(start, nr, ra))) { 3594 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX); 3595 return; 3596 } 3597 gen_set_access_type(ctx, ACCESS_INT); 3598 t0 = tcg_temp_new(); 3599 gen_addr_register(ctx, t0); 3600 t1 = tcg_const_i32(nb); 3601 t2 = tcg_const_i32(start); 3602 gen_helper_lsw(cpu_env, t0, t1, t2); 3603 tcg_temp_free(t0); 3604 tcg_temp_free_i32(t1); 3605 tcg_temp_free_i32(t2); 3606 } 3607 3608 /* lswx */ 3609 static void gen_lswx(DisasContext *ctx) 3610 { 3611 TCGv t0; 3612 TCGv_i32 t1, t2, t3; 3613 3614 if (ctx->le_mode) { 3615 gen_align_no_le(ctx); 3616 return; 3617 } 3618 gen_set_access_type(ctx, ACCESS_INT); 3619 t0 = tcg_temp_new(); 3620 gen_addr_reg_index(ctx, t0); 3621 t1 = tcg_const_i32(rD(ctx->opcode)); 3622 t2 = tcg_const_i32(rA(ctx->opcode)); 3623 t3 = tcg_const_i32(rB(ctx->opcode)); 3624 gen_helper_lswx(cpu_env, t0, t1, t2, t3); 3625 tcg_temp_free(t0); 3626 tcg_temp_free_i32(t1); 3627 tcg_temp_free_i32(t2); 3628 tcg_temp_free_i32(t3); 3629 } 3630 3631 /* stswi */ 3632 static void gen_stswi(DisasContext *ctx) 3633 { 3634 TCGv t0; 3635 TCGv_i32 t1, t2; 3636 int nb = NB(ctx->opcode); 3637 3638 if (ctx->le_mode) { 3639 gen_align_no_le(ctx); 3640 return; 3641 } 3642 gen_set_access_type(ctx, ACCESS_INT); 3643 t0 = tcg_temp_new(); 3644 gen_addr_register(ctx, t0); 3645 if (nb == 0) { 3646 nb = 32; 3647 } 3648 t1 = tcg_const_i32(nb); 3649 t2 = tcg_const_i32(rS(ctx->opcode)); 3650 gen_helper_stsw(cpu_env, t0, t1, t2); 3651 tcg_temp_free(t0); 3652 tcg_temp_free_i32(t1); 3653 tcg_temp_free_i32(t2); 3654 } 3655 3656 /* stswx */ 3657 static void gen_stswx(DisasContext *ctx) 3658 { 3659 TCGv t0; 3660 TCGv_i32 t1, t2; 3661 3662 if (ctx->le_mode) { 3663 gen_align_no_le(ctx); 3664 return; 3665 } 3666 gen_set_access_type(ctx, ACCESS_INT); 3667 t0 = tcg_temp_new(); 3668 gen_addr_reg_index(ctx, t0); 3669 t1 = tcg_temp_new_i32(); 3670 tcg_gen_trunc_tl_i32(t1, cpu_xer); 3671 tcg_gen_andi_i32(t1, t1, 0x7F); 3672 t2 = tcg_const_i32(rS(ctx->opcode)); 3673 gen_helper_stsw(cpu_env, t0, t1, t2); 3674 tcg_temp_free(t0); 3675 tcg_temp_free_i32(t1); 3676 tcg_temp_free_i32(t2); 3677 } 3678 3679 /*** Memory synchronisation ***/ 3680 /* eieio */ 3681 static void gen_eieio(DisasContext *ctx) 3682 { 3683 TCGBar bar = TCG_MO_LD_ST; 3684 3685 /* 3686 * POWER9 has a eieio instruction variant using bit 6 as a hint to 3687 * tell the CPU it is a store-forwarding barrier. 3688 */ 3689 if (ctx->opcode & 0x2000000) { 3690 /* 3691 * ISA says that "Reserved fields in instructions are ignored 3692 * by the processor". So ignore the bit 6 on non-POWER9 CPU but 3693 * as this is not an instruction software should be using, 3694 * complain to the user. 
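         *
         * On an ISA v3.0 implementation the hint is honoured instead:
         * the barrier emitted below becomes TCG_MO_ST_LD (prior stores
         * ordered against later loads) rather than the default
         * TCG_MO_LD_ST used for a plain eieio.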
3695 */ 3696 if (!(ctx->insns_flags2 & PPC2_ISA300)) { 3697 qemu_log_mask(LOG_GUEST_ERROR, "invalid eieio using bit 6 at @" 3698 TARGET_FMT_lx "\n", ctx->cia); 3699 } else { 3700 bar = TCG_MO_ST_LD; 3701 } 3702 } 3703 3704 tcg_gen_mb(bar | TCG_BAR_SC); 3705 } 3706 3707 #if !defined(CONFIG_USER_ONLY) 3708 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) 3709 { 3710 TCGv_i32 t; 3711 TCGLabel *l; 3712 3713 if (!ctx->lazy_tlb_flush) { 3714 return; 3715 } 3716 l = gen_new_label(); 3717 t = tcg_temp_new_i32(); 3718 tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, tlb_need_flush)); 3719 tcg_gen_brcondi_i32(TCG_COND_EQ, t, 0, l); 3720 if (global) { 3721 gen_helper_check_tlb_flush_global(cpu_env); 3722 } else { 3723 gen_helper_check_tlb_flush_local(cpu_env); 3724 } 3725 gen_set_label(l); 3726 tcg_temp_free_i32(t); 3727 } 3728 #else 3729 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) { } 3730 #endif 3731 3732 /* isync */ 3733 static void gen_isync(DisasContext *ctx) 3734 { 3735 /* 3736 * We need to check for a pending TLB flush. This can only happen in 3737 * kernel mode however so check MSR_PR 3738 */ 3739 if (!ctx->pr) { 3740 gen_check_tlb_flush(ctx, false); 3741 } 3742 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC); 3743 ctx->base.is_jmp = DISAS_EXIT_UPDATE; 3744 } 3745 3746 #define MEMOP_GET_SIZE(x) (1 << ((x) & MO_SIZE)) 3747 3748 static void gen_load_locked(DisasContext *ctx, MemOp memop) 3749 { 3750 TCGv gpr = cpu_gpr[rD(ctx->opcode)]; 3751 TCGv t0 = tcg_temp_new(); 3752 3753 gen_set_access_type(ctx, ACCESS_RES); 3754 gen_addr_reg_index(ctx, t0); 3755 tcg_gen_qemu_ld_tl(gpr, t0, ctx->mem_idx, memop | MO_ALIGN); 3756 tcg_gen_mov_tl(cpu_reserve, t0); 3757 tcg_gen_mov_tl(cpu_reserve_val, gpr); 3758 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_LDAQ); 3759 tcg_temp_free(t0); 3760 } 3761 3762 #define LARX(name, memop) \ 3763 static void gen_##name(DisasContext *ctx) \ 3764 { \ 3765 gen_load_locked(ctx, memop); \ 3766 } 3767 3768 /* lwarx */ 3769 LARX(lbarx, DEF_MEMOP(MO_UB)) 3770 LARX(lharx, DEF_MEMOP(MO_UW)) 3771 LARX(lwarx, DEF_MEMOP(MO_UL)) 3772 3773 static void gen_fetch_inc_conditional(DisasContext *ctx, MemOp memop, 3774 TCGv EA, TCGCond cond, int addend) 3775 { 3776 TCGv t = tcg_temp_new(); 3777 TCGv t2 = tcg_temp_new(); 3778 TCGv u = tcg_temp_new(); 3779 3780 tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop); 3781 tcg_gen_addi_tl(t2, EA, MEMOP_GET_SIZE(memop)); 3782 tcg_gen_qemu_ld_tl(t2, t2, ctx->mem_idx, memop); 3783 tcg_gen_addi_tl(u, t, addend); 3784 3785 /* E.g. for fetch and increment bounded... */ 3786 /* mem(EA,s) = (t != t2 ? u = t + 1 : t) */ 3787 tcg_gen_movcond_tl(cond, u, t, t2, u, t); 3788 tcg_gen_qemu_st_tl(u, EA, ctx->mem_idx, memop); 3789 3790 /* RT = (t != t2 ? 
t : u = 1<<(s*8-1)) */ 3791 tcg_gen_movi_tl(u, 1 << (MEMOP_GET_SIZE(memop) * 8 - 1)); 3792 tcg_gen_movcond_tl(cond, cpu_gpr[rD(ctx->opcode)], t, t2, t, u); 3793 3794 tcg_temp_free(t); 3795 tcg_temp_free(t2); 3796 tcg_temp_free(u); 3797 } 3798 3799 static void gen_ld_atomic(DisasContext *ctx, MemOp memop) 3800 { 3801 uint32_t gpr_FC = FC(ctx->opcode); 3802 TCGv EA = tcg_temp_new(); 3803 int rt = rD(ctx->opcode); 3804 bool need_serial; 3805 TCGv src, dst; 3806 3807 gen_addr_register(ctx, EA); 3808 dst = cpu_gpr[rt]; 3809 src = cpu_gpr[(rt + 1) & 31]; 3810 3811 need_serial = false; 3812 memop |= MO_ALIGN; 3813 switch (gpr_FC) { 3814 case 0: /* Fetch and add */ 3815 tcg_gen_atomic_fetch_add_tl(dst, EA, src, ctx->mem_idx, memop); 3816 break; 3817 case 1: /* Fetch and xor */ 3818 tcg_gen_atomic_fetch_xor_tl(dst, EA, src, ctx->mem_idx, memop); 3819 break; 3820 case 2: /* Fetch and or */ 3821 tcg_gen_atomic_fetch_or_tl(dst, EA, src, ctx->mem_idx, memop); 3822 break; 3823 case 3: /* Fetch and 'and' */ 3824 tcg_gen_atomic_fetch_and_tl(dst, EA, src, ctx->mem_idx, memop); 3825 break; 3826 case 4: /* Fetch and max unsigned */ 3827 tcg_gen_atomic_fetch_umax_tl(dst, EA, src, ctx->mem_idx, memop); 3828 break; 3829 case 5: /* Fetch and max signed */ 3830 tcg_gen_atomic_fetch_smax_tl(dst, EA, src, ctx->mem_idx, memop); 3831 break; 3832 case 6: /* Fetch and min unsigned */ 3833 tcg_gen_atomic_fetch_umin_tl(dst, EA, src, ctx->mem_idx, memop); 3834 break; 3835 case 7: /* Fetch and min signed */ 3836 tcg_gen_atomic_fetch_smin_tl(dst, EA, src, ctx->mem_idx, memop); 3837 break; 3838 case 8: /* Swap */ 3839 tcg_gen_atomic_xchg_tl(dst, EA, src, ctx->mem_idx, memop); 3840 break; 3841 3842 case 16: /* Compare and swap not equal */ 3843 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3844 need_serial = true; 3845 } else { 3846 TCGv t0 = tcg_temp_new(); 3847 TCGv t1 = tcg_temp_new(); 3848 3849 tcg_gen_qemu_ld_tl(t0, EA, ctx->mem_idx, memop); 3850 if ((memop & MO_SIZE) == MO_64 || TARGET_LONG_BITS == 32) { 3851 tcg_gen_mov_tl(t1, src); 3852 } else { 3853 tcg_gen_ext32u_tl(t1, src); 3854 } 3855 tcg_gen_movcond_tl(TCG_COND_NE, t1, t0, t1, 3856 cpu_gpr[(rt + 2) & 31], t0); 3857 tcg_gen_qemu_st_tl(t1, EA, ctx->mem_idx, memop); 3858 tcg_gen_mov_tl(dst, t0); 3859 3860 tcg_temp_free(t0); 3861 tcg_temp_free(t1); 3862 } 3863 break; 3864 3865 case 24: /* Fetch and increment bounded */ 3866 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3867 need_serial = true; 3868 } else { 3869 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, 1); 3870 } 3871 break; 3872 case 25: /* Fetch and increment equal */ 3873 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3874 need_serial = true; 3875 } else { 3876 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_EQ, 1); 3877 } 3878 break; 3879 case 28: /* Fetch and decrement bounded */ 3880 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3881 need_serial = true; 3882 } else { 3883 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, -1); 3884 } 3885 break; 3886 3887 default: 3888 /* invoke data storage error handler */ 3889 gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL); 3890 } 3891 tcg_temp_free(EA); 3892 3893 if (need_serial) { 3894 /* Restart with exclusive lock. 
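         * helper_exit_atomic() raises EXCP_ATOMIC; the main loop then
         * replays this one instruction with all other vCPUs stopped and
         * CF_PARALLEL cleared, so the plain load/store sequence in the
         * non-parallel branch above performs the actual operation.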
*/ 3895 gen_helper_exit_atomic(cpu_env); 3896 ctx->base.is_jmp = DISAS_NORETURN; 3897 } 3898 } 3899 3900 static void gen_lwat(DisasContext *ctx) 3901 { 3902 gen_ld_atomic(ctx, DEF_MEMOP(MO_UL)); 3903 } 3904 3905 #ifdef TARGET_PPC64 3906 static void gen_ldat(DisasContext *ctx) 3907 { 3908 gen_ld_atomic(ctx, DEF_MEMOP(MO_Q)); 3909 } 3910 #endif 3911 3912 static void gen_st_atomic(DisasContext *ctx, MemOp memop) 3913 { 3914 uint32_t gpr_FC = FC(ctx->opcode); 3915 TCGv EA = tcg_temp_new(); 3916 TCGv src, discard; 3917 3918 gen_addr_register(ctx, EA); 3919 src = cpu_gpr[rD(ctx->opcode)]; 3920 discard = tcg_temp_new(); 3921 3922 memop |= MO_ALIGN; 3923 switch (gpr_FC) { 3924 case 0: /* add and Store */ 3925 tcg_gen_atomic_add_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3926 break; 3927 case 1: /* xor and Store */ 3928 tcg_gen_atomic_xor_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3929 break; 3930 case 2: /* Or and Store */ 3931 tcg_gen_atomic_or_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3932 break; 3933 case 3: /* 'and' and Store */ 3934 tcg_gen_atomic_and_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3935 break; 3936 case 4: /* Store max unsigned */ 3937 tcg_gen_atomic_umax_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3938 break; 3939 case 5: /* Store max signed */ 3940 tcg_gen_atomic_smax_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3941 break; 3942 case 6: /* Store min unsigned */ 3943 tcg_gen_atomic_umin_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3944 break; 3945 case 7: /* Store min signed */ 3946 tcg_gen_atomic_smin_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3947 break; 3948 case 24: /* Store twin */ 3949 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3950 /* Restart with exclusive lock. */ 3951 gen_helper_exit_atomic(cpu_env); 3952 ctx->base.is_jmp = DISAS_NORETURN; 3953 } else { 3954 TCGv t = tcg_temp_new(); 3955 TCGv t2 = tcg_temp_new(); 3956 TCGv s = tcg_temp_new(); 3957 TCGv s2 = tcg_temp_new(); 3958 TCGv ea_plus_s = tcg_temp_new(); 3959 3960 tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop); 3961 tcg_gen_addi_tl(ea_plus_s, EA, MEMOP_GET_SIZE(memop)); 3962 tcg_gen_qemu_ld_tl(t2, ea_plus_s, ctx->mem_idx, memop); 3963 tcg_gen_movcond_tl(TCG_COND_EQ, s, t, t2, src, t); 3964 tcg_gen_movcond_tl(TCG_COND_EQ, s2, t, t2, src, t2); 3965 tcg_gen_qemu_st_tl(s, EA, ctx->mem_idx, memop); 3966 tcg_gen_qemu_st_tl(s2, ea_plus_s, ctx->mem_idx, memop); 3967 3968 tcg_temp_free(ea_plus_s); 3969 tcg_temp_free(s2); 3970 tcg_temp_free(s); 3971 tcg_temp_free(t2); 3972 tcg_temp_free(t); 3973 } 3974 break; 3975 default: 3976 /* invoke data storage error handler */ 3977 gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL); 3978 } 3979 tcg_temp_free(discard); 3980 tcg_temp_free(EA); 3981 } 3982 3983 static void gen_stwat(DisasContext *ctx) 3984 { 3985 gen_st_atomic(ctx, DEF_MEMOP(MO_UL)); 3986 } 3987 3988 #ifdef TARGET_PPC64 3989 static void gen_stdat(DisasContext *ctx) 3990 { 3991 gen_st_atomic(ctx, DEF_MEMOP(MO_Q)); 3992 } 3993 #endif 3994 3995 static void gen_conditional_store(DisasContext *ctx, MemOp memop) 3996 { 3997 TCGLabel *l1 = gen_new_label(); 3998 TCGLabel *l2 = gen_new_label(); 3999 TCGv t0 = tcg_temp_new(); 4000 int reg = rS(ctx->opcode); 4001 4002 gen_set_access_type(ctx, ACCESS_RES); 4003 gen_addr_reg_index(ctx, t0); 4004 tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1); 4005 tcg_temp_free(t0); 4006 4007 t0 = tcg_temp_new(); 4008 tcg_gen_atomic_cmpxchg_tl(t0, cpu_reserve, cpu_reserve_val, 4009 cpu_gpr[reg], ctx->mem_idx, 4010 DEF_MEMOP(memop) | 
MO_ALIGN); 4011 tcg_gen_setcond_tl(TCG_COND_EQ, t0, t0, cpu_reserve_val); 4012 tcg_gen_shli_tl(t0, t0, CRF_EQ_BIT); 4013 tcg_gen_or_tl(t0, t0, cpu_so); 4014 tcg_gen_trunc_tl_i32(cpu_crf[0], t0); 4015 tcg_temp_free(t0); 4016 tcg_gen_br(l2); 4017 4018 gen_set_label(l1); 4019 4020 /* 4021 * Address mismatch implies failure. But we still need to provide 4022 * the memory barrier semantics of the instruction. 4023 */ 4024 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_STRL); 4025 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 4026 4027 gen_set_label(l2); 4028 tcg_gen_movi_tl(cpu_reserve, -1); 4029 } 4030 4031 #define STCX(name, memop) \ 4032 static void gen_##name(DisasContext *ctx) \ 4033 { \ 4034 gen_conditional_store(ctx, memop); \ 4035 } 4036 4037 STCX(stbcx_, DEF_MEMOP(MO_UB)) 4038 STCX(sthcx_, DEF_MEMOP(MO_UW)) 4039 STCX(stwcx_, DEF_MEMOP(MO_UL)) 4040 4041 #if defined(TARGET_PPC64) 4042 /* ldarx */ 4043 LARX(ldarx, DEF_MEMOP(MO_Q)) 4044 /* stdcx. */ 4045 STCX(stdcx_, DEF_MEMOP(MO_Q)) 4046 4047 /* lqarx */ 4048 static void gen_lqarx(DisasContext *ctx) 4049 { 4050 int rd = rD(ctx->opcode); 4051 TCGv EA, hi, lo; 4052 4053 if (unlikely((rd & 1) || (rd == rA(ctx->opcode)) || 4054 (rd == rB(ctx->opcode)))) { 4055 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 4056 return; 4057 } 4058 4059 gen_set_access_type(ctx, ACCESS_RES); 4060 EA = tcg_temp_new(); 4061 gen_addr_reg_index(ctx, EA); 4062 4063 /* Note that the low part is always in RD+1, even in LE mode. */ 4064 lo = cpu_gpr[rd + 1]; 4065 hi = cpu_gpr[rd]; 4066 4067 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 4068 if (HAVE_ATOMIC128) { 4069 TCGv_i32 oi = tcg_temp_new_i32(); 4070 if (ctx->le_mode) { 4071 tcg_gen_movi_i32(oi, make_memop_idx(MO_LE | MO_128 | MO_ALIGN, 4072 ctx->mem_idx)); 4073 gen_helper_lq_le_parallel(lo, cpu_env, EA, oi); 4074 } else { 4075 tcg_gen_movi_i32(oi, make_memop_idx(MO_BE | MO_128 | MO_ALIGN, 4076 ctx->mem_idx)); 4077 gen_helper_lq_be_parallel(lo, cpu_env, EA, oi); 4078 } 4079 tcg_temp_free_i32(oi); 4080 tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh)); 4081 } else { 4082 /* Restart with exclusive lock. */ 4083 gen_helper_exit_atomic(cpu_env); 4084 ctx->base.is_jmp = DISAS_NORETURN; 4085 tcg_temp_free(EA); 4086 return; 4087 } 4088 } else if (ctx->le_mode) { 4089 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEQ | MO_ALIGN_16); 4090 tcg_gen_mov_tl(cpu_reserve, EA); 4091 gen_addr_add(ctx, EA, EA, 8); 4092 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEQ); 4093 } else { 4094 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEQ | MO_ALIGN_16); 4095 tcg_gen_mov_tl(cpu_reserve, EA); 4096 gen_addr_add(ctx, EA, EA, 8); 4097 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEQ); 4098 } 4099 tcg_temp_free(EA); 4100 4101 tcg_gen_st_tl(hi, cpu_env, offsetof(CPUPPCState, reserve_val)); 4102 tcg_gen_st_tl(lo, cpu_env, offsetof(CPUPPCState, reserve_val2)); 4103 } 4104 4105 /* stqcx. */ 4106 static void gen_stqcx_(DisasContext *ctx) 4107 { 4108 int rs = rS(ctx->opcode); 4109 TCGv EA, hi, lo; 4110 4111 if (unlikely(rs & 1)) { 4112 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 4113 return; 4114 } 4115 4116 gen_set_access_type(ctx, ACCESS_RES); 4117 EA = tcg_temp_new(); 4118 gen_addr_reg_index(ctx, EA); 4119 4120 /* Note that the low part is always in RS+1, even in LE mode. 
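The even register of the pair holds the most-significant doubleword of the quadword.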
*/ 4121 lo = cpu_gpr[rs + 1]; 4122 hi = cpu_gpr[rs]; 4123 4124 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 4125 if (HAVE_CMPXCHG128) { 4126 TCGv_i32 oi = tcg_const_i32(DEF_MEMOP(MO_128) | MO_ALIGN); 4127 if (ctx->le_mode) { 4128 gen_helper_stqcx_le_parallel(cpu_crf[0], cpu_env, 4129 EA, lo, hi, oi); 4130 } else { 4131 gen_helper_stqcx_be_parallel(cpu_crf[0], cpu_env, 4132 EA, lo, hi, oi); 4133 } 4134 tcg_temp_free_i32(oi); 4135 } else { 4136 /* Restart with exclusive lock. */ 4137 gen_helper_exit_atomic(cpu_env); 4138 ctx->base.is_jmp = DISAS_NORETURN; 4139 } 4140 tcg_temp_free(EA); 4141 } else { 4142 TCGLabel *lab_fail = gen_new_label(); 4143 TCGLabel *lab_over = gen_new_label(); 4144 TCGv_i64 t0 = tcg_temp_new_i64(); 4145 TCGv_i64 t1 = tcg_temp_new_i64(); 4146 4147 tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, lab_fail); 4148 tcg_temp_free(EA); 4149 4150 gen_qemu_ld64_i64(ctx, t0, cpu_reserve); 4151 tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode 4152 ? offsetof(CPUPPCState, reserve_val2) 4153 : offsetof(CPUPPCState, reserve_val))); 4154 tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail); 4155 4156 tcg_gen_addi_i64(t0, cpu_reserve, 8); 4157 gen_qemu_ld64_i64(ctx, t0, t0); 4158 tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode 4159 ? offsetof(CPUPPCState, reserve_val) 4160 : offsetof(CPUPPCState, reserve_val2))); 4161 tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail); 4162 4163 /* Success */ 4164 gen_qemu_st64_i64(ctx, ctx->le_mode ? lo : hi, cpu_reserve); 4165 tcg_gen_addi_i64(t0, cpu_reserve, 8); 4166 gen_qemu_st64_i64(ctx, ctx->le_mode ? hi : lo, t0); 4167 4168 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 4169 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ); 4170 tcg_gen_br(lab_over); 4171 4172 gen_set_label(lab_fail); 4173 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 4174 4175 gen_set_label(lab_over); 4176 tcg_gen_movi_tl(cpu_reserve, -1); 4177 tcg_temp_free_i64(t0); 4178 tcg_temp_free_i64(t1); 4179 } 4180 } 4181 #endif /* defined(TARGET_PPC64) */ 4182 4183 /* sync */ 4184 static void gen_sync(DisasContext *ctx) 4185 { 4186 uint32_t l = (ctx->opcode >> 21) & 3; 4187 4188 /* 4189 * We may need to check for a pending TLB flush. 4190 * 4191 * We do this on ptesync (l == 2) on ppc64 and any sync on ppc32. 4192 * 4193 * Additionally, this can only happen in kernel mode, so 4194 * check MSR_PR as well.
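* (For instance, on 32-bit targets the sync that follows a tlbie in the guest kernel is where a flush queued in tlb_need_flush actually gets performed.)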
4195 */ 4196 if (((l == 2) || !(ctx->insns_flags & PPC_64B)) && !ctx->pr) { 4197 gen_check_tlb_flush(ctx, true); 4198 } 4199 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC); 4200 } 4201 4202 /* wait */ 4203 static void gen_wait(DisasContext *ctx) 4204 { 4205 TCGv_i32 t0 = tcg_const_i32(1); 4206 tcg_gen_st_i32(t0, cpu_env, 4207 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted)); 4208 tcg_temp_free_i32(t0); 4209 /* Stop translation, as the CPU is supposed to sleep from now */ 4210 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 4211 } 4212 4213 #if defined(TARGET_PPC64) 4214 static void gen_doze(DisasContext *ctx) 4215 { 4216 #if defined(CONFIG_USER_ONLY) 4217 GEN_PRIV; 4218 #else 4219 TCGv_i32 t; 4220 4221 CHK_HV; 4222 t = tcg_const_i32(PPC_PM_DOZE); 4223 gen_helper_pminsn(cpu_env, t); 4224 tcg_temp_free_i32(t); 4225 /* Stop translation, as the CPU is supposed to sleep from now */ 4226 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 4227 #endif /* defined(CONFIG_USER_ONLY) */ 4228 } 4229 4230 static void gen_nap(DisasContext *ctx) 4231 { 4232 #if defined(CONFIG_USER_ONLY) 4233 GEN_PRIV; 4234 #else 4235 TCGv_i32 t; 4236 4237 CHK_HV; 4238 t = tcg_const_i32(PPC_PM_NAP); 4239 gen_helper_pminsn(cpu_env, t); 4240 tcg_temp_free_i32(t); 4241 /* Stop translation, as the CPU is supposed to sleep from now */ 4242 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 4243 #endif /* defined(CONFIG_USER_ONLY) */ 4244 } 4245 4246 static void gen_stop(DisasContext *ctx) 4247 { 4248 #if defined(CONFIG_USER_ONLY) 4249 GEN_PRIV; 4250 #else 4251 TCGv_i32 t; 4252 4253 CHK_HV; 4254 t = tcg_const_i32(PPC_PM_STOP); 4255 gen_helper_pminsn(cpu_env, t); 4256 tcg_temp_free_i32(t); 4257 /* Stop translation, as the CPU is supposed to sleep from now */ 4258 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 4259 #endif /* defined(CONFIG_USER_ONLY) */ 4260 } 4261 4262 static void gen_sleep(DisasContext *ctx) 4263 { 4264 #if defined(CONFIG_USER_ONLY) 4265 GEN_PRIV; 4266 #else 4267 TCGv_i32 t; 4268 4269 CHK_HV; 4270 t = tcg_const_i32(PPC_PM_SLEEP); 4271 gen_helper_pminsn(cpu_env, t); 4272 tcg_temp_free_i32(t); 4273 /* Stop translation, as the CPU is supposed to sleep from now */ 4274 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 4275 #endif /* defined(CONFIG_USER_ONLY) */ 4276 } 4277 4278 static void gen_rvwinkle(DisasContext *ctx) 4279 { 4280 #if defined(CONFIG_USER_ONLY) 4281 GEN_PRIV; 4282 #else 4283 TCGv_i32 t; 4284 4285 CHK_HV; 4286 t = tcg_const_i32(PPC_PM_RVWINKLE); 4287 gen_helper_pminsn(cpu_env, t); 4288 tcg_temp_free_i32(t); 4289 /* Stop translation, as the CPU is supposed to sleep from now */ 4290 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 4291 #endif /* defined(CONFIG_USER_ONLY) */ 4292 } 4293 #endif /* #if defined(TARGET_PPC64) */ 4294 4295 static inline void gen_update_cfar(DisasContext *ctx, target_ulong nip) 4296 { 4297 #if defined(TARGET_PPC64) 4298 if (ctx->has_cfar) { 4299 tcg_gen_movi_tl(cpu_cfar, nip); 4300 } 4301 #endif 4302 } 4303 4304 static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest) 4305 { 4306 return translator_use_goto_tb(&ctx->base, dest); 4307 } 4308 4309 static void gen_lookup_and_goto_ptr(DisasContext *ctx) 4310 { 4311 if (unlikely(ctx->singlestep_enabled)) { 4312 gen_debug_exception(ctx); 4313 } else { 4314 tcg_gen_lookup_and_goto_ptr(); 4315 } 4316 } 4317 4318 /*** Branch ***/ 4319 static void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest) 4320 { 4321 if (NARROW_MODE(ctx)) { 4322 dest = (uint32_t) dest; 4323 } 4324 if (use_goto_tb(ctx, 
dest)) { 4325 tcg_gen_goto_tb(n); 4326 tcg_gen_movi_tl(cpu_nip, dest & ~3); 4327 tcg_gen_exit_tb(ctx->base.tb, n); 4328 } else { 4329 tcg_gen_movi_tl(cpu_nip, dest & ~3); 4330 gen_lookup_and_goto_ptr(ctx); 4331 } 4332 } 4333 4334 static inline void gen_setlr(DisasContext *ctx, target_ulong nip) 4335 { 4336 if (NARROW_MODE(ctx)) { 4337 nip = (uint32_t)nip; 4338 } 4339 tcg_gen_movi_tl(cpu_lr, nip); 4340 } 4341 4342 /* b ba bl bla */ 4343 static void gen_b(DisasContext *ctx) 4344 { 4345 target_ulong li, target; 4346 4347 /* sign extend LI */ 4348 li = LI(ctx->opcode); 4349 li = (li ^ 0x02000000) - 0x02000000; 4350 if (likely(AA(ctx->opcode) == 0)) { 4351 target = ctx->cia + li; 4352 } else { 4353 target = li; 4354 } 4355 if (LK(ctx->opcode)) { 4356 gen_setlr(ctx, ctx->base.pc_next); 4357 } 4358 gen_update_cfar(ctx, ctx->cia); 4359 gen_goto_tb(ctx, 0, target); 4360 ctx->base.is_jmp = DISAS_NORETURN; 4361 } 4362 4363 #define BCOND_IM 0 4364 #define BCOND_LR 1 4365 #define BCOND_CTR 2 4366 #define BCOND_TAR 3 4367 4368 static void gen_bcond(DisasContext *ctx, int type) 4369 { 4370 uint32_t bo = BO(ctx->opcode); 4371 TCGLabel *l1; 4372 TCGv target; 4373 4374 if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) { 4375 target = tcg_temp_local_new(); 4376 if (type == BCOND_CTR) { 4377 tcg_gen_mov_tl(target, cpu_ctr); 4378 } else if (type == BCOND_TAR) { 4379 gen_load_spr(target, SPR_TAR); 4380 } else { 4381 tcg_gen_mov_tl(target, cpu_lr); 4382 } 4383 } else { 4384 target = NULL; 4385 } 4386 if (LK(ctx->opcode)) { 4387 gen_setlr(ctx, ctx->base.pc_next); 4388 } 4389 l1 = gen_new_label(); 4390 if ((bo & 0x4) == 0) { 4391 /* Decrement and test CTR */ 4392 TCGv temp = tcg_temp_new(); 4393 4394 if (type == BCOND_CTR) { 4395 /* 4396 * All ISAs up to v3 describe this form of bcctr as invalid but 4397 * some processors, ie. 64-bit server processors compliant with 4398 * arch 2.x, do implement a "test and decrement" logic instead, 4399 * as described in their respective UMs. This logic involves CTR 4400 * to act as both the branch target and a counter, which makes 4401 * it basically useless and thus never used in real code. 4402 * 4403 * This form was hence chosen to trigger extra micro-architectural 4404 * side-effect on real HW needed for the Spectre v2 workaround. 4405 * It is up to guests that implement such workaround, ie. linux, to 4406 * use this form in a way it just triggers the side-effect without 4407 * doing anything else harmful. 
4408 */ 4409 if (unlikely(!is_book3s_arch2x(ctx))) { 4410 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 4411 tcg_temp_free(temp); 4412 tcg_temp_free(target); 4413 return; 4414 } 4415 4416 if (NARROW_MODE(ctx)) { 4417 tcg_gen_ext32u_tl(temp, cpu_ctr); 4418 } else { 4419 tcg_gen_mov_tl(temp, cpu_ctr); 4420 } 4421 if (bo & 0x2) { 4422 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1); 4423 } else { 4424 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1); 4425 } 4426 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1); 4427 } else { 4428 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1); 4429 if (NARROW_MODE(ctx)) { 4430 tcg_gen_ext32u_tl(temp, cpu_ctr); 4431 } else { 4432 tcg_gen_mov_tl(temp, cpu_ctr); 4433 } 4434 if (bo & 0x2) { 4435 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1); 4436 } else { 4437 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1); 4438 } 4439 } 4440 tcg_temp_free(temp); 4441 } 4442 if ((bo & 0x10) == 0) { 4443 /* Test CR */ 4444 uint32_t bi = BI(ctx->opcode); 4445 uint32_t mask = 0x08 >> (bi & 0x03); 4446 TCGv_i32 temp = tcg_temp_new_i32(); 4447 4448 if (bo & 0x8) { 4449 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask); 4450 tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1); 4451 } else { 4452 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask); 4453 tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1); 4454 } 4455 tcg_temp_free_i32(temp); 4456 } 4457 gen_update_cfar(ctx, ctx->cia); 4458 if (type == BCOND_IM) { 4459 target_ulong li = (target_long)((int16_t)(BD(ctx->opcode))); 4460 if (likely(AA(ctx->opcode) == 0)) { 4461 gen_goto_tb(ctx, 0, ctx->cia + li); 4462 } else { 4463 gen_goto_tb(ctx, 0, li); 4464 } 4465 } else { 4466 if (NARROW_MODE(ctx)) { 4467 tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3); 4468 } else { 4469 tcg_gen_andi_tl(cpu_nip, target, ~3); 4470 } 4471 gen_lookup_and_goto_ptr(ctx); 4472 tcg_temp_free(target); 4473 } 4474 if ((bo & 0x14) != 0x14) { 4475 /* fallthrough case */ 4476 gen_set_label(l1); 4477 gen_goto_tb(ctx, 1, ctx->base.pc_next); 4478 } 4479 ctx->base.is_jmp = DISAS_NORETURN; 4480 } 4481 4482 static void gen_bc(DisasContext *ctx) 4483 { 4484 gen_bcond(ctx, BCOND_IM); 4485 } 4486 4487 static void gen_bcctr(DisasContext *ctx) 4488 { 4489 gen_bcond(ctx, BCOND_CTR); 4490 } 4491 4492 static void gen_bclr(DisasContext *ctx) 4493 { 4494 gen_bcond(ctx, BCOND_LR); 4495 } 4496 4497 static void gen_bctar(DisasContext *ctx) 4498 { 4499 gen_bcond(ctx, BCOND_TAR); 4500 } 4501 4502 /*** Condition register logical ***/ 4503 #define GEN_CRLOGIC(name, tcg_op, opc) \ 4504 static void glue(gen_, name)(DisasContext *ctx) \ 4505 { \ 4506 uint8_t bitmask; \ 4507 int sh; \ 4508 TCGv_i32 t0, t1; \ 4509 sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03); \ 4510 t0 = tcg_temp_new_i32(); \ 4511 if (sh > 0) \ 4512 tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh); \ 4513 else if (sh < 0) \ 4514 tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh); \ 4515 else \ 4516 tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]); \ 4517 t1 = tcg_temp_new_i32(); \ 4518 sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03); \ 4519 if (sh > 0) \ 4520 tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh); \ 4521 else if (sh < 0) \ 4522 tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh); \ 4523 else \ 4524 tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]); \ 4525 tcg_op(t0, t0, t1); \ 4526 bitmask = 0x08 >> (crbD(ctx->opcode) & 0x03); \ 4527 tcg_gen_andi_i32(t0, t0, bitmask); \ 4528 tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask); \ 4529 tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], 
t0, t1); \ 4530 tcg_temp_free_i32(t0); \ 4531 tcg_temp_free_i32(t1); \ 4532 } 4533 4534 /* crand */ 4535 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08); 4536 /* crandc */ 4537 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04); 4538 /* creqv */ 4539 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09); 4540 /* crnand */ 4541 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07); 4542 /* crnor */ 4543 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01); 4544 /* cror */ 4545 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E); 4546 /* crorc */ 4547 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D); 4548 /* crxor */ 4549 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06); 4550 4551 /* mcrf */ 4552 static void gen_mcrf(DisasContext *ctx) 4553 { 4554 tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]); 4555 } 4556 4557 /*** System linkage ***/ 4558 4559 /* rfi (supervisor only) */ 4560 static void gen_rfi(DisasContext *ctx) 4561 { 4562 #if defined(CONFIG_USER_ONLY) 4563 GEN_PRIV; 4564 #else 4565 /* 4566 * This instruction doesn't exist anymore on 64-bit server 4567 * processors compliant with arch 2.x 4568 */ 4569 if (is_book3s_arch2x(ctx)) { 4570 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 4571 return; 4572 } 4573 /* Restore CPU state */ 4574 CHK_SV; 4575 gen_icount_io_start(ctx); 4576 gen_update_cfar(ctx, ctx->cia); 4577 gen_helper_rfi(cpu_env); 4578 ctx->base.is_jmp = DISAS_EXIT; 4579 #endif 4580 } 4581 4582 #if defined(TARGET_PPC64) 4583 static void gen_rfid(DisasContext *ctx) 4584 { 4585 #if defined(CONFIG_USER_ONLY) 4586 GEN_PRIV; 4587 #else 4588 /* Restore CPU state */ 4589 CHK_SV; 4590 gen_icount_io_start(ctx); 4591 gen_update_cfar(ctx, ctx->cia); 4592 gen_helper_rfid(cpu_env); 4593 ctx->base.is_jmp = DISAS_EXIT; 4594 #endif 4595 } 4596 4597 #if !defined(CONFIG_USER_ONLY) 4598 static void gen_rfscv(DisasContext *ctx) 4599 { 4600 #if defined(CONFIG_USER_ONLY) 4601 GEN_PRIV; 4602 #else 4603 /* Restore CPU state */ 4604 CHK_SV; 4605 gen_icount_io_start(ctx); 4606 gen_update_cfar(ctx, ctx->cia); 4607 gen_helper_rfscv(cpu_env); 4608 ctx->base.is_jmp = DISAS_EXIT; 4609 #endif 4610 } 4611 #endif 4612 4613 static void gen_hrfid(DisasContext *ctx) 4614 { 4615 #if defined(CONFIG_USER_ONLY) 4616 GEN_PRIV; 4617 #else 4618 /* Restore CPU state */ 4619 CHK_HV; 4620 gen_helper_hrfid(cpu_env); 4621 ctx->base.is_jmp = DISAS_EXIT; 4622 #endif 4623 } 4624 #endif 4625 4626 /* sc */ 4627 #if defined(CONFIG_USER_ONLY) 4628 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER 4629 #else 4630 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL 4631 #define POWERPC_SYSCALL_VECTORED POWERPC_EXCP_SYSCALL_VECTORED 4632 #endif 4633 static void gen_sc(DisasContext *ctx) 4634 { 4635 uint32_t lev; 4636 4637 lev = (ctx->opcode >> 5) & 0x7F; 4638 gen_exception_err(ctx, POWERPC_SYSCALL, lev); 4639 } 4640 4641 #if defined(TARGET_PPC64) 4642 #if !defined(CONFIG_USER_ONLY) 4643 static void gen_scv(DisasContext *ctx) 4644 { 4645 uint32_t lev = (ctx->opcode >> 5) & 0x7F; 4646 4647 /* Set the PC back to the faulting instruction. 
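helper_scv may raise a facility unavailable interrupt (for example when FSCR disables scv), and that interrupt must report the address of the scv instruction itself.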
*/ 4648 gen_update_nip(ctx, ctx->cia); 4649 gen_helper_scv(cpu_env, tcg_constant_i32(lev)); 4650 4651 ctx->base.is_jmp = DISAS_NORETURN; 4652 } 4653 #endif 4654 #endif 4655 4656 /*** Trap ***/ 4657 4658 /* Check for unconditional traps (always or never) */ 4659 static bool check_unconditional_trap(DisasContext *ctx) 4660 { 4661 /* Trap never */ 4662 if (TO(ctx->opcode) == 0) { 4663 return true; 4664 } 4665 /* Trap always */ 4666 if (TO(ctx->opcode) == 31) { 4667 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_TRAP); 4668 return true; 4669 } 4670 return false; 4671 } 4672 4673 /* tw */ 4674 static void gen_tw(DisasContext *ctx) 4675 { 4676 TCGv_i32 t0; 4677 4678 if (check_unconditional_trap(ctx)) { 4679 return; 4680 } 4681 t0 = tcg_const_i32(TO(ctx->opcode)); 4682 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 4683 t0); 4684 tcg_temp_free_i32(t0); 4685 } 4686 4687 /* twi */ 4688 static void gen_twi(DisasContext *ctx) 4689 { 4690 TCGv t0; 4691 TCGv_i32 t1; 4692 4693 if (check_unconditional_trap(ctx)) { 4694 return; 4695 } 4696 t0 = tcg_const_tl(SIMM(ctx->opcode)); 4697 t1 = tcg_const_i32(TO(ctx->opcode)); 4698 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1); 4699 tcg_temp_free(t0); 4700 tcg_temp_free_i32(t1); 4701 } 4702 4703 #if defined(TARGET_PPC64) 4704 /* td */ 4705 static void gen_td(DisasContext *ctx) 4706 { 4707 TCGv_i32 t0; 4708 4709 if (check_unconditional_trap(ctx)) { 4710 return; 4711 } 4712 t0 = tcg_const_i32(TO(ctx->opcode)); 4713 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 4714 t0); 4715 tcg_temp_free_i32(t0); 4716 } 4717 4718 /* tdi */ 4719 static void gen_tdi(DisasContext *ctx) 4720 { 4721 TCGv t0; 4722 TCGv_i32 t1; 4723 4724 if (check_unconditional_trap(ctx)) { 4725 return; 4726 } 4727 t0 = tcg_const_tl(SIMM(ctx->opcode)); 4728 t1 = tcg_const_i32(TO(ctx->opcode)); 4729 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1); 4730 tcg_temp_free(t0); 4731 tcg_temp_free_i32(t1); 4732 } 4733 #endif 4734 4735 /*** Processor control ***/ 4736 4737 /* mcrxr */ 4738 static void gen_mcrxr(DisasContext *ctx) 4739 { 4740 TCGv_i32 t0 = tcg_temp_new_i32(); 4741 TCGv_i32 t1 = tcg_temp_new_i32(); 4742 TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)]; 4743 4744 tcg_gen_trunc_tl_i32(t0, cpu_so); 4745 tcg_gen_trunc_tl_i32(t1, cpu_ov); 4746 tcg_gen_trunc_tl_i32(dst, cpu_ca); 4747 tcg_gen_shli_i32(t0, t0, 3); 4748 tcg_gen_shli_i32(t1, t1, 2); 4749 tcg_gen_shli_i32(dst, dst, 1); 4750 tcg_gen_or_i32(dst, dst, t0); 4751 tcg_gen_or_i32(dst, dst, t1); 4752 tcg_temp_free_i32(t0); 4753 tcg_temp_free_i32(t1); 4754 4755 tcg_gen_movi_tl(cpu_so, 0); 4756 tcg_gen_movi_tl(cpu_ov, 0); 4757 tcg_gen_movi_tl(cpu_ca, 0); 4758 } 4759 4760 #ifdef TARGET_PPC64 4761 /* mcrxrx */ 4762 static void gen_mcrxrx(DisasContext *ctx) 4763 { 4764 TCGv t0 = tcg_temp_new(); 4765 TCGv t1 = tcg_temp_new(); 4766 TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)]; 4767 4768 /* copy OV and OV32 */ 4769 tcg_gen_shli_tl(t0, cpu_ov, 1); 4770 tcg_gen_or_tl(t0, t0, cpu_ov32); 4771 tcg_gen_shli_tl(t0, t0, 2); 4772 /* copy CA and CA32 */ 4773 tcg_gen_shli_tl(t1, cpu_ca, 1); 4774 tcg_gen_or_tl(t1, t1, cpu_ca32); 4775 tcg_gen_or_tl(t0, t0, t1); 4776 tcg_gen_trunc_tl_i32(dst, t0); 4777 tcg_temp_free(t0); 4778 tcg_temp_free(t1); 4779 } 4780 #endif 4781 4782 /* mfcr mfocrf */ 4783 static void gen_mfcr(DisasContext *ctx) 4784 { 4785 uint32_t crm, crn; 4786 4787 if (likely(ctx->opcode & 0x00100000)) { 4788 crm = CRM(ctx->opcode); 4789 if (likely(crm && ((crm & (crm - 1)) == 0))) { 
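/* A single CR field is selected: copy it into rD at its architected position (e.g. CRM=0x80 selects CR0, which lands in rD bits 31:28); the other bits of rD are set to zero here. */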
4790 crn = ctz32(crm); 4791 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]); 4792 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], 4793 cpu_gpr[rD(ctx->opcode)], crn * 4); 4794 } 4795 } else { 4796 TCGv_i32 t0 = tcg_temp_new_i32(); 4797 tcg_gen_mov_i32(t0, cpu_crf[0]); 4798 tcg_gen_shli_i32(t0, t0, 4); 4799 tcg_gen_or_i32(t0, t0, cpu_crf[1]); 4800 tcg_gen_shli_i32(t0, t0, 4); 4801 tcg_gen_or_i32(t0, t0, cpu_crf[2]); 4802 tcg_gen_shli_i32(t0, t0, 4); 4803 tcg_gen_or_i32(t0, t0, cpu_crf[3]); 4804 tcg_gen_shli_i32(t0, t0, 4); 4805 tcg_gen_or_i32(t0, t0, cpu_crf[4]); 4806 tcg_gen_shli_i32(t0, t0, 4); 4807 tcg_gen_or_i32(t0, t0, cpu_crf[5]); 4808 tcg_gen_shli_i32(t0, t0, 4); 4809 tcg_gen_or_i32(t0, t0, cpu_crf[6]); 4810 tcg_gen_shli_i32(t0, t0, 4); 4811 tcg_gen_or_i32(t0, t0, cpu_crf[7]); 4812 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0); 4813 tcg_temp_free_i32(t0); 4814 } 4815 } 4816 4817 /* mfmsr */ 4818 static void gen_mfmsr(DisasContext *ctx) 4819 { 4820 CHK_SV; 4821 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr); 4822 } 4823 4824 /* mfspr */ 4825 static inline void gen_op_mfspr(DisasContext *ctx) 4826 { 4827 void (*read_cb)(DisasContext *ctx, int gprn, int sprn); 4828 uint32_t sprn = SPR(ctx->opcode); 4829 4830 #if defined(CONFIG_USER_ONLY) 4831 read_cb = ctx->spr_cb[sprn].uea_read; 4832 #else 4833 if (ctx->pr) { 4834 read_cb = ctx->spr_cb[sprn].uea_read; 4835 } else if (ctx->hv) { 4836 read_cb = ctx->spr_cb[sprn].hea_read; 4837 } else { 4838 read_cb = ctx->spr_cb[sprn].oea_read; 4839 } 4840 #endif 4841 if (likely(read_cb != NULL)) { 4842 if (likely(read_cb != SPR_NOACCESS)) { 4843 (*read_cb)(ctx, rD(ctx->opcode), sprn); 4844 } else { 4845 /* Privilege exception */ 4846 /* 4847 * This is a hack to avoid warnings when running Linux: 4848 * this OS breaks the PowerPC virtualisation model, 4849 * allowing userland application to read the PVR 4850 */ 4851 if (sprn != SPR_PVR) { 4852 qemu_log_mask(LOG_GUEST_ERROR, "Trying to read privileged spr " 4853 "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn, 4854 ctx->cia); 4855 } 4856 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG); 4857 } 4858 } else { 4859 /* ISA 2.07 defines these as no-ops */ 4860 if ((ctx->insns_flags2 & PPC2_ISA207S) && 4861 (sprn >= 808 && sprn <= 811)) { 4862 /* This is a nop */ 4863 return; 4864 } 4865 /* Not defined */ 4866 qemu_log_mask(LOG_GUEST_ERROR, 4867 "Trying to read invalid spr %d (0x%03x) at " 4868 TARGET_FMT_lx "\n", sprn, sprn, ctx->cia); 4869 4870 /* 4871 * The behaviour depends on MSR:PR and SPR# bit 0x10, it can 4872 * generate a priv, a hv emu or a no-op 4873 */ 4874 if (sprn & 0x10) { 4875 if (ctx->pr) { 4876 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 4877 } 4878 } else { 4879 if (ctx->pr || sprn == 0 || sprn == 4 || sprn == 5 || sprn == 6) { 4880 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 4881 } 4882 } 4883 } 4884 } 4885 4886 static void gen_mfspr(DisasContext *ctx) 4887 { 4888 gen_op_mfspr(ctx); 4889 } 4890 4891 /* mftb */ 4892 static void gen_mftb(DisasContext *ctx) 4893 { 4894 gen_op_mfspr(ctx); 4895 } 4896 4897 /* mtcrf mtocrf*/ 4898 static void gen_mtcrf(DisasContext *ctx) 4899 { 4900 uint32_t crm, crn; 4901 4902 crm = CRM(ctx->opcode); 4903 if (likely((ctx->opcode & 0x00100000))) { 4904 if (crm && ((crm & (crm - 1)) == 0)) { 4905 TCGv_i32 temp = tcg_temp_new_i32(); 4906 crn = ctz32(crm); 4907 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]); 4908 tcg_gen_shri_i32(temp, temp, crn * 4); 4909 tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf); 4910 tcg_temp_free_i32(temp); 
4911 } 4912 } else { 4913 TCGv_i32 temp = tcg_temp_new_i32(); 4914 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]); 4915 for (crn = 0 ; crn < 8 ; crn++) { 4916 if (crm & (1 << crn)) { 4917 tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4); 4918 tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf); 4919 } 4920 } 4921 tcg_temp_free_i32(temp); 4922 } 4923 } 4924 4925 /* mtmsr */ 4926 #if defined(TARGET_PPC64) 4927 static void gen_mtmsrd(DisasContext *ctx) 4928 { 4929 if (unlikely(!is_book3s_arch2x(ctx))) { 4930 gen_invalid(ctx); 4931 return; 4932 } 4933 4934 CHK_SV; 4935 4936 #if !defined(CONFIG_USER_ONLY) 4937 gen_icount_io_start(ctx); 4938 if (ctx->opcode & 0x00010000) { 4939 /* L=1 form only updates EE and RI */ 4940 TCGv t0 = tcg_temp_new(); 4941 TCGv t1 = tcg_temp_new(); 4942 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], 4943 (1 << MSR_RI) | (1 << MSR_EE)); 4944 tcg_gen_andi_tl(t1, cpu_msr, 4945 ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE))); 4946 tcg_gen_or_tl(t1, t1, t0); 4947 4948 gen_helper_store_msr(cpu_env, t1); 4949 tcg_temp_free(t0); 4950 tcg_temp_free(t1); 4951 4952 } else { 4953 /* 4954 * XXX: we need to update nip before the store if we enter 4955 * power saving mode, we will exit the loop directly from 4956 * ppc_store_msr 4957 */ 4958 gen_update_nip(ctx, ctx->base.pc_next); 4959 gen_helper_store_msr(cpu_env, cpu_gpr[rS(ctx->opcode)]); 4960 } 4961 /* Must stop the translation as machine state (may have) changed */ 4962 ctx->base.is_jmp = DISAS_EXIT_UPDATE; 4963 #endif /* !defined(CONFIG_USER_ONLY) */ 4964 } 4965 #endif /* defined(TARGET_PPC64) */ 4966 4967 static void gen_mtmsr(DisasContext *ctx) 4968 { 4969 CHK_SV; 4970 4971 #if !defined(CONFIG_USER_ONLY) 4972 gen_icount_io_start(ctx); 4973 if (ctx->opcode & 0x00010000) { 4974 /* L=1 form only updates EE and RI */ 4975 TCGv t0 = tcg_temp_new(); 4976 TCGv t1 = tcg_temp_new(); 4977 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], 4978 (1 << MSR_RI) | (1 << MSR_EE)); 4979 tcg_gen_andi_tl(t1, cpu_msr, 4980 ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE))); 4981 tcg_gen_or_tl(t1, t1, t0); 4982 4983 gen_helper_store_msr(cpu_env, t1); 4984 tcg_temp_free(t0); 4985 tcg_temp_free(t1); 4986 4987 } else { 4988 TCGv msr = tcg_temp_new(); 4989 4990 /* 4991 * XXX: we need to update nip before the store if we enter 4992 * power saving mode, we will exit the loop directly from 4993 * ppc_store_msr 4994 */ 4995 gen_update_nip(ctx, ctx->base.pc_next); 4996 #if defined(TARGET_PPC64) 4997 tcg_gen_deposit_tl(msr, cpu_msr, cpu_gpr[rS(ctx->opcode)], 0, 32); 4998 #else 4999 tcg_gen_mov_tl(msr, cpu_gpr[rS(ctx->opcode)]); 5000 #endif 5001 gen_helper_store_msr(cpu_env, msr); 5002 tcg_temp_free(msr); 5003 } 5004 /* Must stop the translation as machine state (may have) changed */ 5005 ctx->base.is_jmp = DISAS_EXIT_UPDATE; 5006 #endif 5007 } 5008 5009 /* mtspr */ 5010 static void gen_mtspr(DisasContext *ctx) 5011 { 5012 void (*write_cb)(DisasContext *ctx, int sprn, int gprn); 5013 uint32_t sprn = SPR(ctx->opcode); 5014 5015 #if defined(CONFIG_USER_ONLY) 5016 write_cb = ctx->spr_cb[sprn].uea_write; 5017 #else 5018 if (ctx->pr) { 5019 write_cb = ctx->spr_cb[sprn].uea_write; 5020 } else if (ctx->hv) { 5021 write_cb = ctx->spr_cb[sprn].hea_write; 5022 } else { 5023 write_cb = ctx->spr_cb[sprn].oea_write; 5024 } 5025 #endif 5026 if (likely(write_cb != NULL)) { 5027 if (likely(write_cb != SPR_NOACCESS)) { 5028 (*write_cb)(ctx, sprn, rS(ctx->opcode)); 5029 } else { 5030 /* Privilege exception */ 5031 qemu_log_mask(LOG_GUEST_ERROR, "Trying to write 
privileged spr " 5032 "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn, 5033 ctx->cia); 5034 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG); 5035 } 5036 } else { 5037 /* ISA 2.07 defines these as no-ops */ 5038 if ((ctx->insns_flags2 & PPC2_ISA207S) && 5039 (sprn >= 808 && sprn <= 811)) { 5040 /* This is a nop */ 5041 return; 5042 } 5043 5044 /* Not defined */ 5045 qemu_log_mask(LOG_GUEST_ERROR, 5046 "Trying to write invalid spr %d (0x%03x) at " 5047 TARGET_FMT_lx "\n", sprn, sprn, ctx->cia); 5048 5049 5050 /* 5051 * The behaviour depends on MSR:PR and SPR# bit 0x10, it can 5052 * generate a priv, a hv emu or a no-op 5053 */ 5054 if (sprn & 0x10) { 5055 if (ctx->pr) { 5056 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 5057 } 5058 } else { 5059 if (ctx->pr || sprn == 0) { 5060 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 5061 } 5062 } 5063 } 5064 } 5065 5066 #if defined(TARGET_PPC64) 5067 /* setb */ 5068 static void gen_setb(DisasContext *ctx) 5069 { 5070 TCGv_i32 t0 = tcg_temp_new_i32(); 5071 TCGv_i32 t8 = tcg_temp_new_i32(); 5072 TCGv_i32 tm1 = tcg_temp_new_i32(); 5073 int crf = crfS(ctx->opcode); 5074 5075 tcg_gen_setcondi_i32(TCG_COND_GEU, t0, cpu_crf[crf], 4); 5076 tcg_gen_movi_i32(t8, 8); 5077 tcg_gen_movi_i32(tm1, -1); 5078 tcg_gen_movcond_i32(TCG_COND_GEU, t0, cpu_crf[crf], t8, tm1, t0); 5079 tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0); 5080 5081 tcg_temp_free_i32(t0); 5082 tcg_temp_free_i32(t8); 5083 tcg_temp_free_i32(tm1); 5084 } 5085 #endif 5086 5087 /*** Cache management ***/ 5088 5089 /* dcbf */ 5090 static void gen_dcbf(DisasContext *ctx) 5091 { 5092 /* XXX: specification says this is treated as a load by the MMU */ 5093 TCGv t0; 5094 gen_set_access_type(ctx, ACCESS_CACHE); 5095 t0 = tcg_temp_new(); 5096 gen_addr_reg_index(ctx, t0); 5097 gen_qemu_ld8u(ctx, t0, t0); 5098 tcg_temp_free(t0); 5099 } 5100 5101 /* dcbfep (external PID dcbf) */ 5102 static void gen_dcbfep(DisasContext *ctx) 5103 { 5104 /* XXX: specification says this is treated as a load by the MMU */ 5105 TCGv t0; 5106 CHK_SV; 5107 gen_set_access_type(ctx, ACCESS_CACHE); 5108 t0 = tcg_temp_new(); 5109 gen_addr_reg_index(ctx, t0); 5110 tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB)); 5111 tcg_temp_free(t0); 5112 } 5113 5114 /* dcbi (Supervisor only) */ 5115 static void gen_dcbi(DisasContext *ctx) 5116 { 5117 #if defined(CONFIG_USER_ONLY) 5118 GEN_PRIV; 5119 #else 5120 TCGv EA, val; 5121 5122 CHK_SV; 5123 EA = tcg_temp_new(); 5124 gen_set_access_type(ctx, ACCESS_CACHE); 5125 gen_addr_reg_index(ctx, EA); 5126 val = tcg_temp_new(); 5127 /* XXX: specification says this should be treated as a store by the MMU */ 5128 gen_qemu_ld8u(ctx, val, EA); 5129 gen_qemu_st8(ctx, val, EA); 5130 tcg_temp_free(val); 5131 tcg_temp_free(EA); 5132 #endif /* defined(CONFIG_USER_ONLY) */ 5133 } 5134 5135 /* dcdst */ 5136 static void gen_dcbst(DisasContext *ctx) 5137 { 5138 /* XXX: specification say this is treated as a load by the MMU */ 5139 TCGv t0; 5140 gen_set_access_type(ctx, ACCESS_CACHE); 5141 t0 = tcg_temp_new(); 5142 gen_addr_reg_index(ctx, t0); 5143 gen_qemu_ld8u(ctx, t0, t0); 5144 tcg_temp_free(t0); 5145 } 5146 5147 /* dcbstep (dcbstep External PID version) */ 5148 static void gen_dcbstep(DisasContext *ctx) 5149 { 5150 /* XXX: specification say this is treated as a load by the MMU */ 5151 TCGv t0; 5152 gen_set_access_type(ctx, ACCESS_CACHE); 5153 t0 = tcg_temp_new(); 5154 gen_addr_reg_index(ctx, t0); 5155 tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB)); 5156 tcg_temp_free(t0); 5157 } 
5158 5159 /* dcbt */ 5160 static void gen_dcbt(DisasContext *ctx) 5161 { 5162 /* 5163 * interpreted as no-op 5164 * XXX: specification say this is treated as a load by the MMU but 5165 * does not generate any exception 5166 */ 5167 } 5168 5169 /* dcbtep */ 5170 static void gen_dcbtep(DisasContext *ctx) 5171 { 5172 /* 5173 * interpreted as no-op 5174 * XXX: specification say this is treated as a load by the MMU but 5175 * does not generate any exception 5176 */ 5177 } 5178 5179 /* dcbtst */ 5180 static void gen_dcbtst(DisasContext *ctx) 5181 { 5182 /* 5183 * interpreted as no-op 5184 * XXX: specification say this is treated as a load by the MMU but 5185 * does not generate any exception 5186 */ 5187 } 5188 5189 /* dcbtstep */ 5190 static void gen_dcbtstep(DisasContext *ctx) 5191 { 5192 /* 5193 * interpreted as no-op 5194 * XXX: specification say this is treated as a load by the MMU but 5195 * does not generate any exception 5196 */ 5197 } 5198 5199 /* dcbtls */ 5200 static void gen_dcbtls(DisasContext *ctx) 5201 { 5202 /* Always fails locking the cache */ 5203 TCGv t0 = tcg_temp_new(); 5204 gen_load_spr(t0, SPR_Exxx_L1CSR0); 5205 tcg_gen_ori_tl(t0, t0, L1CSR0_CUL); 5206 gen_store_spr(SPR_Exxx_L1CSR0, t0); 5207 tcg_temp_free(t0); 5208 } 5209 5210 /* dcbz */ 5211 static void gen_dcbz(DisasContext *ctx) 5212 { 5213 TCGv tcgv_addr; 5214 TCGv_i32 tcgv_op; 5215 5216 gen_set_access_type(ctx, ACCESS_CACHE); 5217 tcgv_addr = tcg_temp_new(); 5218 tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000); 5219 gen_addr_reg_index(ctx, tcgv_addr); 5220 gen_helper_dcbz(cpu_env, tcgv_addr, tcgv_op); 5221 tcg_temp_free(tcgv_addr); 5222 tcg_temp_free_i32(tcgv_op); 5223 } 5224 5225 /* dcbzep */ 5226 static void gen_dcbzep(DisasContext *ctx) 5227 { 5228 TCGv tcgv_addr; 5229 TCGv_i32 tcgv_op; 5230 5231 gen_set_access_type(ctx, ACCESS_CACHE); 5232 tcgv_addr = tcg_temp_new(); 5233 tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000); 5234 gen_addr_reg_index(ctx, tcgv_addr); 5235 gen_helper_dcbzep(cpu_env, tcgv_addr, tcgv_op); 5236 tcg_temp_free(tcgv_addr); 5237 tcg_temp_free_i32(tcgv_op); 5238 } 5239 5240 /* dst / dstt */ 5241 static void gen_dst(DisasContext *ctx) 5242 { 5243 if (rA(ctx->opcode) == 0) { 5244 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5245 } else { 5246 /* interpreted as no-op */ 5247 } 5248 } 5249 5250 /* dstst /dststt */ 5251 static void gen_dstst(DisasContext *ctx) 5252 { 5253 if (rA(ctx->opcode) == 0) { 5254 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5255 } else { 5256 /* interpreted as no-op */ 5257 } 5258 5259 } 5260 5261 /* dss / dssall */ 5262 static void gen_dss(DisasContext *ctx) 5263 { 5264 /* interpreted as no-op */ 5265 } 5266 5267 /* icbi */ 5268 static void gen_icbi(DisasContext *ctx) 5269 { 5270 TCGv t0; 5271 gen_set_access_type(ctx, ACCESS_CACHE); 5272 t0 = tcg_temp_new(); 5273 gen_addr_reg_index(ctx, t0); 5274 gen_helper_icbi(cpu_env, t0); 5275 tcg_temp_free(t0); 5276 } 5277 5278 /* icbiep */ 5279 static void gen_icbiep(DisasContext *ctx) 5280 { 5281 TCGv t0; 5282 gen_set_access_type(ctx, ACCESS_CACHE); 5283 t0 = tcg_temp_new(); 5284 gen_addr_reg_index(ctx, t0); 5285 gen_helper_icbiep(cpu_env, t0); 5286 tcg_temp_free(t0); 5287 } 5288 5289 /* Optional: */ 5290 /* dcba */ 5291 static void gen_dcba(DisasContext *ctx) 5292 { 5293 /* 5294 * interpreted as no-op 5295 * XXX: specification say this is treated as a store by the MMU 5296 * but does not generate any exception 5297 */ 5298 } 5299 5300 /*** Segment register manipulation ***/ 5301 /* Supervisor only: */ 5302 5303 /* 
mfsr */ 5304 static void gen_mfsr(DisasContext *ctx) 5305 { 5306 #if defined(CONFIG_USER_ONLY) 5307 GEN_PRIV; 5308 #else 5309 TCGv t0; 5310 5311 CHK_SV; 5312 t0 = tcg_const_tl(SR(ctx->opcode)); 5313 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5314 tcg_temp_free(t0); 5315 #endif /* defined(CONFIG_USER_ONLY) */ 5316 } 5317 5318 /* mfsrin */ 5319 static void gen_mfsrin(DisasContext *ctx) 5320 { 5321 #if defined(CONFIG_USER_ONLY) 5322 GEN_PRIV; 5323 #else 5324 TCGv t0; 5325 5326 CHK_SV; 5327 t0 = tcg_temp_new(); 5328 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 5329 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5330 tcg_temp_free(t0); 5331 #endif /* defined(CONFIG_USER_ONLY) */ 5332 } 5333 5334 /* mtsr */ 5335 static void gen_mtsr(DisasContext *ctx) 5336 { 5337 #if defined(CONFIG_USER_ONLY) 5338 GEN_PRIV; 5339 #else 5340 TCGv t0; 5341 5342 CHK_SV; 5343 t0 = tcg_const_tl(SR(ctx->opcode)); 5344 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 5345 tcg_temp_free(t0); 5346 #endif /* defined(CONFIG_USER_ONLY) */ 5347 } 5348 5349 /* mtsrin */ 5350 static void gen_mtsrin(DisasContext *ctx) 5351 { 5352 #if defined(CONFIG_USER_ONLY) 5353 GEN_PRIV; 5354 #else 5355 TCGv t0; 5356 CHK_SV; 5357 5358 t0 = tcg_temp_new(); 5359 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 5360 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rD(ctx->opcode)]); 5361 tcg_temp_free(t0); 5362 #endif /* defined(CONFIG_USER_ONLY) */ 5363 } 5364 5365 #if defined(TARGET_PPC64) 5366 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */ 5367 5368 /* mfsr */ 5369 static void gen_mfsr_64b(DisasContext *ctx) 5370 { 5371 #if defined(CONFIG_USER_ONLY) 5372 GEN_PRIV; 5373 #else 5374 TCGv t0; 5375 5376 CHK_SV; 5377 t0 = tcg_const_tl(SR(ctx->opcode)); 5378 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5379 tcg_temp_free(t0); 5380 #endif /* defined(CONFIG_USER_ONLY) */ 5381 } 5382 5383 /* mfsrin */ 5384 static void gen_mfsrin_64b(DisasContext *ctx) 5385 { 5386 #if defined(CONFIG_USER_ONLY) 5387 GEN_PRIV; 5388 #else 5389 TCGv t0; 5390 5391 CHK_SV; 5392 t0 = tcg_temp_new(); 5393 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 5394 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5395 tcg_temp_free(t0); 5396 #endif /* defined(CONFIG_USER_ONLY) */ 5397 } 5398 5399 /* mtsr */ 5400 static void gen_mtsr_64b(DisasContext *ctx) 5401 { 5402 #if defined(CONFIG_USER_ONLY) 5403 GEN_PRIV; 5404 #else 5405 TCGv t0; 5406 5407 CHK_SV; 5408 t0 = tcg_const_tl(SR(ctx->opcode)); 5409 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 5410 tcg_temp_free(t0); 5411 #endif /* defined(CONFIG_USER_ONLY) */ 5412 } 5413 5414 /* mtsrin */ 5415 static void gen_mtsrin_64b(DisasContext *ctx) 5416 { 5417 #if defined(CONFIG_USER_ONLY) 5418 GEN_PRIV; 5419 #else 5420 TCGv t0; 5421 5422 CHK_SV; 5423 t0 = tcg_temp_new(); 5424 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 5425 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 5426 tcg_temp_free(t0); 5427 #endif /* defined(CONFIG_USER_ONLY) */ 5428 } 5429 5430 /* slbmte */ 5431 static void gen_slbmte(DisasContext *ctx) 5432 { 5433 #if defined(CONFIG_USER_ONLY) 5434 GEN_PRIV; 5435 #else 5436 CHK_SV; 5437 5438 gen_helper_store_slb(cpu_env, cpu_gpr[rB(ctx->opcode)], 5439 cpu_gpr[rS(ctx->opcode)]); 5440 #endif /* defined(CONFIG_USER_ONLY) */ 5441 } 5442 5443 static void gen_slbmfee(DisasContext *ctx) 5444 { 5445 #if defined(CONFIG_USER_ONLY) 5446 GEN_PRIV; 5447 #else 5448 CHK_SV; 5449 5450 
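/* cpu_gpr[rS] receives the ESID half of the SLB entry selected by the index in rB */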
gen_helper_load_slb_esid(cpu_gpr[rS(ctx->opcode)], cpu_env, 5451 cpu_gpr[rB(ctx->opcode)]); 5452 #endif /* defined(CONFIG_USER_ONLY) */ 5453 } 5454 5455 static void gen_slbmfev(DisasContext *ctx) 5456 { 5457 #if defined(CONFIG_USER_ONLY) 5458 GEN_PRIV; 5459 #else 5460 CHK_SV; 5461 5462 gen_helper_load_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env, 5463 cpu_gpr[rB(ctx->opcode)]); 5464 #endif /* defined(CONFIG_USER_ONLY) */ 5465 } 5466 5467 static void gen_slbfee_(DisasContext *ctx) 5468 { 5469 #if defined(CONFIG_USER_ONLY) 5470 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); 5471 #else 5472 TCGLabel *l1, *l2; 5473 5474 if (unlikely(ctx->pr)) { 5475 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); 5476 return; 5477 } 5478 gen_helper_find_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env, 5479 cpu_gpr[rB(ctx->opcode)]); 5480 l1 = gen_new_label(); 5481 l2 = gen_new_label(); 5482 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 5483 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rS(ctx->opcode)], -1, l1); 5484 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ); 5485 tcg_gen_br(l2); 5486 gen_set_label(l1); 5487 tcg_gen_movi_tl(cpu_gpr[rS(ctx->opcode)], 0); 5488 gen_set_label(l2); 5489 #endif 5490 } 5491 #endif /* defined(TARGET_PPC64) */ 5492 5493 /*** Lookaside buffer management ***/ 5494 /* Optional & supervisor only: */ 5495 5496 /* tlbia */ 5497 static void gen_tlbia(DisasContext *ctx) 5498 { 5499 #if defined(CONFIG_USER_ONLY) 5500 GEN_PRIV; 5501 #else 5502 CHK_HV; 5503 5504 gen_helper_tlbia(cpu_env); 5505 #endif /* defined(CONFIG_USER_ONLY) */ 5506 } 5507 5508 /* tlbiel */ 5509 static void gen_tlbiel(DisasContext *ctx) 5510 { 5511 #if defined(CONFIG_USER_ONLY) 5512 GEN_PRIV; 5513 #else 5514 bool psr = (ctx->opcode >> 17) & 0x1; 5515 5516 if (ctx->pr || (!ctx->hv && !psr && ctx->hr)) { 5517 /* 5518 * tlbiel is privileged except when PSR=0 and HR=1, making it 5519 * hypervisor privileged. 5520 */ 5521 GEN_PRIV; 5522 } 5523 5524 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5525 #endif /* defined(CONFIG_USER_ONLY) */ 5526 } 5527 5528 /* tlbie */ 5529 static void gen_tlbie(DisasContext *ctx) 5530 { 5531 #if defined(CONFIG_USER_ONLY) 5532 GEN_PRIV; 5533 #else 5534 bool psr = (ctx->opcode >> 17) & 0x1; 5535 TCGv_i32 t1; 5536 5537 if (ctx->pr) { 5538 /* tlbie is privileged... */ 5539 GEN_PRIV; 5540 } else if (!ctx->hv) { 5541 if (!ctx->gtse || (!psr && ctx->hr)) { 5542 /* 5543 * ... except when GTSE=0 or when PSR=0 and HR=1, making it 5544 * hypervisor privileged. 
5545 */ 5546 GEN_PRIV; 5547 } 5548 } 5549 5550 if (NARROW_MODE(ctx)) { 5551 TCGv t0 = tcg_temp_new(); 5552 tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]); 5553 gen_helper_tlbie(cpu_env, t0); 5554 tcg_temp_free(t0); 5555 } else { 5556 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5557 } 5558 t1 = tcg_temp_new_i32(); 5559 tcg_gen_ld_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush)); 5560 tcg_gen_ori_i32(t1, t1, TLB_NEED_GLOBAL_FLUSH); 5561 tcg_gen_st_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush)); 5562 tcg_temp_free_i32(t1); 5563 #endif /* defined(CONFIG_USER_ONLY) */ 5564 } 5565 5566 /* tlbsync */ 5567 static void gen_tlbsync(DisasContext *ctx) 5568 { 5569 #if defined(CONFIG_USER_ONLY) 5570 GEN_PRIV; 5571 #else 5572 5573 if (ctx->gtse) { 5574 CHK_SV; /* If gtse is set then tlbsync is supervisor privileged */ 5575 } else { 5576 CHK_HV; /* Else hypervisor privileged */ 5577 } 5578 5579 /* BookS does both ptesync and tlbsync make tlbsync a nop for server */ 5580 if (ctx->insns_flags & PPC_BOOKE) { 5581 gen_check_tlb_flush(ctx, true); 5582 } 5583 #endif /* defined(CONFIG_USER_ONLY) */ 5584 } 5585 5586 #if defined(TARGET_PPC64) 5587 /* slbia */ 5588 static void gen_slbia(DisasContext *ctx) 5589 { 5590 #if defined(CONFIG_USER_ONLY) 5591 GEN_PRIV; 5592 #else 5593 uint32_t ih = (ctx->opcode >> 21) & 0x7; 5594 TCGv_i32 t0 = tcg_const_i32(ih); 5595 5596 CHK_SV; 5597 5598 gen_helper_slbia(cpu_env, t0); 5599 tcg_temp_free_i32(t0); 5600 #endif /* defined(CONFIG_USER_ONLY) */ 5601 } 5602 5603 /* slbie */ 5604 static void gen_slbie(DisasContext *ctx) 5605 { 5606 #if defined(CONFIG_USER_ONLY) 5607 GEN_PRIV; 5608 #else 5609 CHK_SV; 5610 5611 gen_helper_slbie(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5612 #endif /* defined(CONFIG_USER_ONLY) */ 5613 } 5614 5615 /* slbieg */ 5616 static void gen_slbieg(DisasContext *ctx) 5617 { 5618 #if defined(CONFIG_USER_ONLY) 5619 GEN_PRIV; 5620 #else 5621 CHK_SV; 5622 5623 gen_helper_slbieg(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5624 #endif /* defined(CONFIG_USER_ONLY) */ 5625 } 5626 5627 /* slbsync */ 5628 static void gen_slbsync(DisasContext *ctx) 5629 { 5630 #if defined(CONFIG_USER_ONLY) 5631 GEN_PRIV; 5632 #else 5633 CHK_SV; 5634 gen_check_tlb_flush(ctx, true); 5635 #endif /* defined(CONFIG_USER_ONLY) */ 5636 } 5637 5638 #endif /* defined(TARGET_PPC64) */ 5639 5640 /*** External control ***/ 5641 /* Optional: */ 5642 5643 /* eciwx */ 5644 static void gen_eciwx(DisasContext *ctx) 5645 { 5646 TCGv t0; 5647 /* Should check EAR[E] ! */ 5648 gen_set_access_type(ctx, ACCESS_EXT); 5649 t0 = tcg_temp_new(); 5650 gen_addr_reg_index(ctx, t0); 5651 tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx, 5652 DEF_MEMOP(MO_UL | MO_ALIGN)); 5653 tcg_temp_free(t0); 5654 } 5655 5656 /* ecowx */ 5657 static void gen_ecowx(DisasContext *ctx) 5658 { 5659 TCGv t0; 5660 /* Should check EAR[E] ! */ 5661 gen_set_access_type(ctx, ACCESS_EXT); 5662 t0 = tcg_temp_new(); 5663 gen_addr_reg_index(ctx, t0); 5664 tcg_gen_qemu_st_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx, 5665 DEF_MEMOP(MO_UL | MO_ALIGN)); 5666 tcg_temp_free(t0); 5667 } 5668 5669 /* PowerPC 601 specific instructions */ 5670 5671 /* abs - abs. */ 5672 static void gen_abs(DisasContext *ctx) 5673 { 5674 TCGv d = cpu_gpr[rD(ctx->opcode)]; 5675 TCGv a = cpu_gpr[rA(ctx->opcode)]; 5676 5677 tcg_gen_abs_tl(d, a); 5678 if (unlikely(Rc(ctx->opcode) != 0)) { 5679 gen_set_Rc0(ctx, d); 5680 } 5681 } 5682 5683 /* abso - abso. 
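Like abs, but OV and SO are also set when rA is 0x80000000, the one value whose absolute value cannot be represented.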
*/ 5684 static void gen_abso(DisasContext *ctx) 5685 { 5686 TCGv d = cpu_gpr[rD(ctx->opcode)]; 5687 TCGv a = cpu_gpr[rA(ctx->opcode)]; 5688 5689 tcg_gen_setcondi_tl(TCG_COND_EQ, cpu_ov, a, 0x80000000); 5690 tcg_gen_abs_tl(d, a); 5691 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 5692 if (unlikely(Rc(ctx->opcode) != 0)) { 5693 gen_set_Rc0(ctx, d); 5694 } 5695 } 5696 5697 /* clcs */ 5698 static void gen_clcs(DisasContext *ctx) 5699 { 5700 TCGv_i32 t0 = tcg_const_i32(rA(ctx->opcode)); 5701 gen_helper_clcs(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5702 tcg_temp_free_i32(t0); 5703 /* Rc=1 sets CR0 to an undefined state */ 5704 } 5705 5706 /* div - div. */ 5707 static void gen_div(DisasContext *ctx) 5708 { 5709 gen_helper_div(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 5710 cpu_gpr[rB(ctx->opcode)]); 5711 if (unlikely(Rc(ctx->opcode) != 0)) { 5712 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5713 } 5714 } 5715 5716 /* divo - divo. */ 5717 static void gen_divo(DisasContext *ctx) 5718 { 5719 gen_helper_divo(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 5720 cpu_gpr[rB(ctx->opcode)]); 5721 if (unlikely(Rc(ctx->opcode) != 0)) { 5722 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5723 } 5724 } 5725 5726 /* divs - divs. */ 5727 static void gen_divs(DisasContext *ctx) 5728 { 5729 gen_helper_divs(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 5730 cpu_gpr[rB(ctx->opcode)]); 5731 if (unlikely(Rc(ctx->opcode) != 0)) { 5732 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5733 } 5734 } 5735 5736 /* divso - divso. */ 5737 static void gen_divso(DisasContext *ctx) 5738 { 5739 gen_helper_divso(cpu_gpr[rD(ctx->opcode)], cpu_env, 5740 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 5741 if (unlikely(Rc(ctx->opcode) != 0)) { 5742 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5743 } 5744 } 5745 5746 /* doz - doz. */ 5747 static void gen_doz(DisasContext *ctx) 5748 { 5749 TCGLabel *l1 = gen_new_label(); 5750 TCGLabel *l2 = gen_new_label(); 5751 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], 5752 cpu_gpr[rA(ctx->opcode)], l1); 5753 tcg_gen_sub_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 5754 cpu_gpr[rA(ctx->opcode)]); 5755 tcg_gen_br(l2); 5756 gen_set_label(l1); 5757 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 5758 gen_set_label(l2); 5759 if (unlikely(Rc(ctx->opcode) != 0)) { 5760 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5761 } 5762 } 5763 5764 /* dozo - dozo. 
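Same difference-or-zero operation as doz above, with OV and SO set when the rB - rA subtraction overflows.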
*/ 5765 static void gen_dozo(DisasContext *ctx) 5766 { 5767 TCGLabel *l1 = gen_new_label(); 5768 TCGLabel *l2 = gen_new_label(); 5769 TCGv t0 = tcg_temp_new(); 5770 TCGv t1 = tcg_temp_new(); 5771 TCGv t2 = tcg_temp_new(); 5772 /* Start with XER OV disabled, the most likely case */ 5773 tcg_gen_movi_tl(cpu_ov, 0); 5774 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], 5775 cpu_gpr[rA(ctx->opcode)], l1); 5776 tcg_gen_sub_tl(t0, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5777 tcg_gen_xor_tl(t1, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5778 tcg_gen_xor_tl(t2, cpu_gpr[rA(ctx->opcode)], t0); 5779 tcg_gen_andc_tl(t1, t1, t2); 5780 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0); 5781 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2); 5782 tcg_gen_movi_tl(cpu_ov, 1); 5783 tcg_gen_movi_tl(cpu_so, 1); 5784 tcg_gen_br(l2); 5785 gen_set_label(l1); 5786 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 5787 gen_set_label(l2); 5788 tcg_temp_free(t0); 5789 tcg_temp_free(t1); 5790 tcg_temp_free(t2); 5791 if (unlikely(Rc(ctx->opcode) != 0)) { 5792 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5793 } 5794 } 5795 5796 /* dozi */ 5797 static void gen_dozi(DisasContext *ctx) 5798 { 5799 target_long simm = SIMM(ctx->opcode); 5800 TCGLabel *l1 = gen_new_label(); 5801 TCGLabel *l2 = gen_new_label(); 5802 tcg_gen_brcondi_tl(TCG_COND_LT, cpu_gpr[rA(ctx->opcode)], simm, l1); 5803 tcg_gen_subfi_tl(cpu_gpr[rD(ctx->opcode)], simm, cpu_gpr[rA(ctx->opcode)]); 5804 tcg_gen_br(l2); 5805 gen_set_label(l1); 5806 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 5807 gen_set_label(l2); 5808 if (unlikely(Rc(ctx->opcode) != 0)) { 5809 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5810 } 5811 } 5812 5813 /* lscbx - lscbx. */ 5814 static void gen_lscbx(DisasContext *ctx) 5815 { 5816 TCGv t0 = tcg_temp_new(); 5817 TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode)); 5818 TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode)); 5819 TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode)); 5820 5821 gen_addr_reg_index(ctx, t0); 5822 gen_helper_lscbx(t0, cpu_env, t0, t1, t2, t3); 5823 tcg_temp_free_i32(t1); 5824 tcg_temp_free_i32(t2); 5825 tcg_temp_free_i32(t3); 5826 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F); 5827 tcg_gen_or_tl(cpu_xer, cpu_xer, t0); 5828 if (unlikely(Rc(ctx->opcode) != 0)) { 5829 gen_set_Rc0(ctx, t0); 5830 } 5831 tcg_temp_free(t0); 5832 } 5833 5834 /* maskg - maskg. */ 5835 static void gen_maskg(DisasContext *ctx) 5836 { 5837 TCGLabel *l1 = gen_new_label(); 5838 TCGv t0 = tcg_temp_new(); 5839 TCGv t1 = tcg_temp_new(); 5840 TCGv t2 = tcg_temp_new(); 5841 TCGv t3 = tcg_temp_new(); 5842 tcg_gen_movi_tl(t3, 0xFFFFFFFF); 5843 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5844 tcg_gen_andi_tl(t1, cpu_gpr[rS(ctx->opcode)], 0x1F); 5845 tcg_gen_addi_tl(t2, t0, 1); 5846 tcg_gen_shr_tl(t2, t3, t2); 5847 tcg_gen_shr_tl(t3, t3, t1); 5848 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], t2, t3); 5849 tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1); 5850 tcg_gen_neg_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5851 gen_set_label(l1); 5852 tcg_temp_free(t0); 5853 tcg_temp_free(t1); 5854 tcg_temp_free(t2); 5855 tcg_temp_free(t3); 5856 if (unlikely(Rc(ctx->opcode) != 0)) { 5857 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5858 } 5859 } 5860 5861 /* maskir - maskir. 
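rA = (rS & rB) | (rA & ~rB): the bits of rS selected by the mask in rB are inserted into rA.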
*/ 5862 static void gen_maskir(DisasContext *ctx) 5863 { 5864 TCGv t0 = tcg_temp_new(); 5865 TCGv t1 = tcg_temp_new(); 5866 tcg_gen_and_tl(t0, cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 5867 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 5868 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5869 tcg_temp_free(t0); 5870 tcg_temp_free(t1); 5871 if (unlikely(Rc(ctx->opcode) != 0)) { 5872 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5873 } 5874 } 5875 5876 /* mul - mul. */ 5877 static void gen_mul(DisasContext *ctx) 5878 { 5879 TCGv_i64 t0 = tcg_temp_new_i64(); 5880 TCGv_i64 t1 = tcg_temp_new_i64(); 5881 TCGv t2 = tcg_temp_new(); 5882 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]); 5883 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]); 5884 tcg_gen_mul_i64(t0, t0, t1); 5885 tcg_gen_trunc_i64_tl(t2, t0); 5886 gen_store_spr(SPR_MQ, t2); 5887 tcg_gen_shri_i64(t1, t0, 32); 5888 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1); 5889 tcg_temp_free_i64(t0); 5890 tcg_temp_free_i64(t1); 5891 tcg_temp_free(t2); 5892 if (unlikely(Rc(ctx->opcode) != 0)) { 5893 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5894 } 5895 } 5896 5897 /* mulo - mulo. */ 5898 static void gen_mulo(DisasContext *ctx) 5899 { 5900 TCGLabel *l1 = gen_new_label(); 5901 TCGv_i64 t0 = tcg_temp_new_i64(); 5902 TCGv_i64 t1 = tcg_temp_new_i64(); 5903 TCGv t2 = tcg_temp_new(); 5904 /* Start with XER OV disabled, the most likely case */ 5905 tcg_gen_movi_tl(cpu_ov, 0); 5906 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]); 5907 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]); 5908 tcg_gen_mul_i64(t0, t0, t1); 5909 tcg_gen_trunc_i64_tl(t2, t0); 5910 gen_store_spr(SPR_MQ, t2); 5911 tcg_gen_shri_i64(t1, t0, 32); 5912 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1); 5913 tcg_gen_ext32s_i64(t1, t0); 5914 tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1); 5915 tcg_gen_movi_tl(cpu_ov, 1); 5916 tcg_gen_movi_tl(cpu_so, 1); 5917 gen_set_label(l1); 5918 tcg_temp_free_i64(t0); 5919 tcg_temp_free_i64(t1); 5920 tcg_temp_free(t2); 5921 if (unlikely(Rc(ctx->opcode) != 0)) { 5922 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5923 } 5924 } 5925 5926 /* nabs - nabs. */ 5927 static void gen_nabs(DisasContext *ctx) 5928 { 5929 TCGv d = cpu_gpr[rD(ctx->opcode)]; 5930 TCGv a = cpu_gpr[rA(ctx->opcode)]; 5931 5932 tcg_gen_abs_tl(d, a); 5933 tcg_gen_neg_tl(d, d); 5934 if (unlikely(Rc(ctx->opcode) != 0)) { 5935 gen_set_Rc0(ctx, d); 5936 } 5937 } 5938 5939 /* nabso - nabso. */ 5940 static void gen_nabso(DisasContext *ctx) 5941 { 5942 TCGv d = cpu_gpr[rD(ctx->opcode)]; 5943 TCGv a = cpu_gpr[rA(ctx->opcode)]; 5944 5945 tcg_gen_abs_tl(d, a); 5946 tcg_gen_neg_tl(d, d); 5947 /* nabs never overflows */ 5948 tcg_gen_movi_tl(cpu_ov, 0); 5949 if (unlikely(Rc(ctx->opcode) != 0)) { 5950 gen_set_Rc0(ctx, d); 5951 } 5952 } 5953 5954 /* rlmi - rlmi. */ 5955 static void gen_rlmi(DisasContext *ctx) 5956 { 5957 uint32_t mb = MB(ctx->opcode); 5958 uint32_t me = ME(ctx->opcode); 5959 TCGv t0 = tcg_temp_new(); 5960 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5961 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 5962 tcg_gen_andi_tl(t0, t0, MASK(mb, me)); 5963 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 5964 ~MASK(mb, me)); 5965 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], t0); 5966 tcg_temp_free(t0); 5967 if (unlikely(Rc(ctx->opcode) != 0)) { 5968 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5969 } 5970 } 5971 5972 /* rrib - rrib. 
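The most-significant bit of rS is inserted into rA at bit position 31 - (rB & 0x1F); the other bits of rA are unchanged.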
*/ 5973 static void gen_rrib(DisasContext *ctx) 5974 { 5975 TCGv t0 = tcg_temp_new(); 5976 TCGv t1 = tcg_temp_new(); 5977 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5978 tcg_gen_movi_tl(t1, 0x80000000); 5979 tcg_gen_shr_tl(t1, t1, t0); 5980 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 5981 tcg_gen_and_tl(t0, t0, t1); 5982 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], t1); 5983 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5984 tcg_temp_free(t0); 5985 tcg_temp_free(t1); 5986 if (unlikely(Rc(ctx->opcode) != 0)) { 5987 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5988 } 5989 } 5990 5991 /* sle - sle. */ 5992 static void gen_sle(DisasContext *ctx) 5993 { 5994 TCGv t0 = tcg_temp_new(); 5995 TCGv t1 = tcg_temp_new(); 5996 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5997 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5998 tcg_gen_subfi_tl(t1, 32, t1); 5999 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 6000 tcg_gen_or_tl(t1, t0, t1); 6001 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 6002 gen_store_spr(SPR_MQ, t1); 6003 tcg_temp_free(t0); 6004 tcg_temp_free(t1); 6005 if (unlikely(Rc(ctx->opcode) != 0)) { 6006 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6007 } 6008 } 6009 6010 /* sleq - sleq. */ 6011 static void gen_sleq(DisasContext *ctx) 6012 { 6013 TCGv t0 = tcg_temp_new(); 6014 TCGv t1 = tcg_temp_new(); 6015 TCGv t2 = tcg_temp_new(); 6016 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 6017 tcg_gen_movi_tl(t2, 0xFFFFFFFF); 6018 tcg_gen_shl_tl(t2, t2, t0); 6019 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 6020 gen_load_spr(t1, SPR_MQ); 6021 gen_store_spr(SPR_MQ, t0); 6022 tcg_gen_and_tl(t0, t0, t2); 6023 tcg_gen_andc_tl(t1, t1, t2); 6024 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 6025 tcg_temp_free(t0); 6026 tcg_temp_free(t1); 6027 tcg_temp_free(t2); 6028 if (unlikely(Rc(ctx->opcode) != 0)) { 6029 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6030 } 6031 } 6032 6033 /* sliq - sliq. */ 6034 static void gen_sliq(DisasContext *ctx) 6035 { 6036 int sh = SH(ctx->opcode); 6037 TCGv t0 = tcg_temp_new(); 6038 TCGv t1 = tcg_temp_new(); 6039 tcg_gen_shli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 6040 tcg_gen_shri_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 6041 tcg_gen_or_tl(t1, t0, t1); 6042 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 6043 gen_store_spr(SPR_MQ, t1); 6044 tcg_temp_free(t0); 6045 tcg_temp_free(t1); 6046 if (unlikely(Rc(ctx->opcode) != 0)) { 6047 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6048 } 6049 } 6050 6051 /* slliq - slliq. */ 6052 static void gen_slliq(DisasContext *ctx) 6053 { 6054 int sh = SH(ctx->opcode); 6055 TCGv t0 = tcg_temp_new(); 6056 TCGv t1 = tcg_temp_new(); 6057 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 6058 gen_load_spr(t1, SPR_MQ); 6059 gen_store_spr(SPR_MQ, t0); 6060 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU << sh)); 6061 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU << sh)); 6062 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 6063 tcg_temp_free(t0); 6064 tcg_temp_free(t1); 6065 if (unlikely(Rc(ctx->opcode) != 0)) { 6066 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6067 } 6068 } 6069 6070 /* sllq - sllq. 
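With sh = rB & 0x1F and mask = 0xFFFFFFFF << sh: rA = (rS << sh) | (MQ & ~mask) when rB & 0x20 is clear, otherwise rA = MQ & mask.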
*/ 6071 static void gen_sllq(DisasContext *ctx) 6072 { 6073 TCGLabel *l1 = gen_new_label(); 6074 TCGLabel *l2 = gen_new_label(); 6075 TCGv t0 = tcg_temp_local_new(); 6076 TCGv t1 = tcg_temp_local_new(); 6077 TCGv t2 = tcg_temp_local_new(); 6078 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 6079 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 6080 tcg_gen_shl_tl(t1, t1, t2); 6081 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 6082 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 6083 gen_load_spr(t0, SPR_MQ); 6084 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 6085 tcg_gen_br(l2); 6086 gen_set_label(l1); 6087 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 6088 gen_load_spr(t2, SPR_MQ); 6089 tcg_gen_andc_tl(t1, t2, t1); 6090 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 6091 gen_set_label(l2); 6092 tcg_temp_free(t0); 6093 tcg_temp_free(t1); 6094 tcg_temp_free(t2); 6095 if (unlikely(Rc(ctx->opcode) != 0)) { 6096 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6097 } 6098 } 6099 6100 /* slq - slq. */ 6101 static void gen_slq(DisasContext *ctx) 6102 { 6103 TCGLabel *l1 = gen_new_label(); 6104 TCGv t0 = tcg_temp_new(); 6105 TCGv t1 = tcg_temp_new(); 6106 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 6107 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 6108 tcg_gen_subfi_tl(t1, 32, t1); 6109 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 6110 tcg_gen_or_tl(t1, t0, t1); 6111 gen_store_spr(SPR_MQ, t1); 6112 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20); 6113 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 6114 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1); 6115 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 6116 gen_set_label(l1); 6117 tcg_temp_free(t0); 6118 tcg_temp_free(t1); 6119 if (unlikely(Rc(ctx->opcode) != 0)) { 6120 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6121 } 6122 } 6123 6124 /* sraiq - sraiq. */ 6125 static void gen_sraiq(DisasContext *ctx) 6126 { 6127 int sh = SH(ctx->opcode); 6128 TCGLabel *l1 = gen_new_label(); 6129 TCGv t0 = tcg_temp_new(); 6130 TCGv t1 = tcg_temp_new(); 6131 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 6132 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 6133 tcg_gen_or_tl(t0, t0, t1); 6134 gen_store_spr(SPR_MQ, t0); 6135 tcg_gen_movi_tl(cpu_ca, 0); 6136 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1); 6137 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1); 6138 tcg_gen_movi_tl(cpu_ca, 1); 6139 gen_set_label(l1); 6140 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh); 6141 tcg_temp_free(t0); 6142 tcg_temp_free(t1); 6143 if (unlikely(Rc(ctx->opcode) != 0)) { 6144 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6145 } 6146 } 6147 6148 /* sraq - sraq. 
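 * A rough, non-normative summary of the generator below: rA receives rS
 * shifted right algebraically by rB's shift amount (pure sign replication
 * once the 0x20 bit of rB is set), MQ receives rS rotated right by the low
 * 5 bits of that amount, and CA is set only when rS is negative and at least
 * one 1-bit was shifted out, which is the usual carry rule for POWER
 * algebraic shifts.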
*/ 6149 static void gen_sraq(DisasContext *ctx) 6150 { 6151 TCGLabel *l1 = gen_new_label(); 6152 TCGLabel *l2 = gen_new_label(); 6153 TCGv t0 = tcg_temp_new(); 6154 TCGv t1 = tcg_temp_local_new(); 6155 TCGv t2 = tcg_temp_local_new(); 6156 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 6157 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 6158 tcg_gen_sar_tl(t1, cpu_gpr[rS(ctx->opcode)], t2); 6159 tcg_gen_subfi_tl(t2, 32, t2); 6160 tcg_gen_shl_tl(t2, cpu_gpr[rS(ctx->opcode)], t2); 6161 tcg_gen_or_tl(t0, t0, t2); 6162 gen_store_spr(SPR_MQ, t0); 6163 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 6164 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 6165 tcg_gen_mov_tl(t2, cpu_gpr[rS(ctx->opcode)]); 6166 tcg_gen_sari_tl(t1, cpu_gpr[rS(ctx->opcode)], 31); 6167 gen_set_label(l1); 6168 tcg_temp_free(t0); 6169 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t1); 6170 tcg_gen_movi_tl(cpu_ca, 0); 6171 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2); 6172 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l2); 6173 tcg_gen_movi_tl(cpu_ca, 1); 6174 gen_set_label(l2); 6175 tcg_temp_free(t1); 6176 tcg_temp_free(t2); 6177 if (unlikely(Rc(ctx->opcode) != 0)) { 6178 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6179 } 6180 } 6181 6182 /* sre - sre. */ 6183 static void gen_sre(DisasContext *ctx) 6184 { 6185 TCGv t0 = tcg_temp_new(); 6186 TCGv t1 = tcg_temp_new(); 6187 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 6188 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 6189 tcg_gen_subfi_tl(t1, 32, t1); 6190 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 6191 tcg_gen_or_tl(t1, t0, t1); 6192 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 6193 gen_store_spr(SPR_MQ, t1); 6194 tcg_temp_free(t0); 6195 tcg_temp_free(t1); 6196 if (unlikely(Rc(ctx->opcode) != 0)) { 6197 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6198 } 6199 } 6200 6201 /* srea - srea.
*/ 6202 static void gen_srea(DisasContext *ctx) 6203 { 6204 TCGv t0 = tcg_temp_new(); 6205 TCGv t1 = tcg_temp_new(); 6206 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 6207 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 6208 gen_store_spr(SPR_MQ, t0); 6209 tcg_gen_sar_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t1); 6210 tcg_temp_free(t0); 6211 tcg_temp_free(t1); 6212 if (unlikely(Rc(ctx->opcode) != 0)) { 6213 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6214 } 6215 } 6216 6217 /* sreq */ 6218 static void gen_sreq(DisasContext *ctx) 6219 { 6220 TCGv t0 = tcg_temp_new(); 6221 TCGv t1 = tcg_temp_new(); 6222 TCGv t2 = tcg_temp_new(); 6223 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 6224 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 6225 tcg_gen_shr_tl(t1, t1, t0); 6226 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 6227 gen_load_spr(t2, SPR_MQ); 6228 gen_store_spr(SPR_MQ, t0); 6229 tcg_gen_and_tl(t0, t0, t1); 6230 tcg_gen_andc_tl(t2, t2, t1); 6231 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t2); 6232 tcg_temp_free(t0); 6233 tcg_temp_free(t1); 6234 tcg_temp_free(t2); 6235 if (unlikely(Rc(ctx->opcode) != 0)) { 6236 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6237 } 6238 } 6239 6240 /* sriq */ 6241 static void gen_sriq(DisasContext *ctx) 6242 { 6243 int sh = SH(ctx->opcode); 6244 TCGv t0 = tcg_temp_new(); 6245 TCGv t1 = tcg_temp_new(); 6246 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 6247 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 6248 tcg_gen_or_tl(t1, t0, t1); 6249 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 6250 gen_store_spr(SPR_MQ, t1); 6251 tcg_temp_free(t0); 6252 tcg_temp_free(t1); 6253 if (unlikely(Rc(ctx->opcode) != 0)) { 6254 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6255 } 6256 } 6257 6258 /* srliq */ 6259 static void gen_srliq(DisasContext *ctx) 6260 { 6261 int sh = SH(ctx->opcode); 6262 TCGv t0 = tcg_temp_new(); 6263 TCGv t1 = tcg_temp_new(); 6264 tcg_gen_rotri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 6265 gen_load_spr(t1, SPR_MQ); 6266 gen_store_spr(SPR_MQ, t0); 6267 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU >> sh)); 6268 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU >> sh)); 6269 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 6270 tcg_temp_free(t0); 6271 tcg_temp_free(t1); 6272 if (unlikely(Rc(ctx->opcode) != 0)) { 6273 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6274 } 6275 } 6276 6277 /* srlq */ 6278 static void gen_srlq(DisasContext *ctx) 6279 { 6280 TCGLabel *l1 = gen_new_label(); 6281 TCGLabel *l2 = gen_new_label(); 6282 TCGv t0 = tcg_temp_local_new(); 6283 TCGv t1 = tcg_temp_local_new(); 6284 TCGv t2 = tcg_temp_local_new(); 6285 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 6286 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 6287 tcg_gen_shr_tl(t2, t1, t2); 6288 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 6289 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 6290 gen_load_spr(t0, SPR_MQ); 6291 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t2); 6292 tcg_gen_br(l2); 6293 gen_set_label(l1); 6294 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 6295 tcg_gen_and_tl(t0, t0, t2); 6296 gen_load_spr(t1, SPR_MQ); 6297 tcg_gen_andc_tl(t1, t1, t2); 6298 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 6299 gen_set_label(l2); 6300 tcg_temp_free(t0); 6301 tcg_temp_free(t1); 6302 tcg_temp_free(t2); 6303 if (unlikely(Rc(ctx->opcode) != 0)) { 6304 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6305 } 6306 } 6307 6308 /* srq */ 6309 static void gen_srq(DisasContext *ctx) 6310 { 6311 TCGLabel *l1 = gen_new_label(); 6312 TCGv t0 = tcg_temp_new(); 6313 TCGv t1 = 
tcg_temp_new(); 6314 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 6315 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 6316 tcg_gen_subfi_tl(t1, 32, t1); 6317 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 6318 tcg_gen_or_tl(t1, t0, t1); 6319 gen_store_spr(SPR_MQ, t1); 6320 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20); 6321 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 6322 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1); 6323 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 6324 gen_set_label(l1); 6325 tcg_temp_free(t0); 6326 tcg_temp_free(t1); 6327 if (unlikely(Rc(ctx->opcode) != 0)) { 6328 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6329 } 6330 } 6331 6332 /* PowerPC 602 specific instructions */ 6333 6334 /* dsa */ 6335 static void gen_dsa(DisasContext *ctx) 6336 { 6337 /* XXX: TODO */ 6338 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6339 } 6340 6341 /* esa */ 6342 static void gen_esa(DisasContext *ctx) 6343 { 6344 /* XXX: TODO */ 6345 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6346 } 6347 6348 /* mfrom */ 6349 static void gen_mfrom(DisasContext *ctx) 6350 { 6351 #if defined(CONFIG_USER_ONLY) 6352 GEN_PRIV; 6353 #else 6354 CHK_SV; 6355 gen_helper_602_mfrom(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 6356 #endif /* defined(CONFIG_USER_ONLY) */ 6357 } 6358 6359 /* 602 - 603 - G2 TLB management */ 6360 6361 /* tlbld */ 6362 static void gen_tlbld_6xx(DisasContext *ctx) 6363 { 6364 #if defined(CONFIG_USER_ONLY) 6365 GEN_PRIV; 6366 #else 6367 CHK_SV; 6368 gen_helper_6xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]); 6369 #endif /* defined(CONFIG_USER_ONLY) */ 6370 } 6371 6372 /* tlbli */ 6373 static void gen_tlbli_6xx(DisasContext *ctx) 6374 { 6375 #if defined(CONFIG_USER_ONLY) 6376 GEN_PRIV; 6377 #else 6378 CHK_SV; 6379 gen_helper_6xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]); 6380 #endif /* defined(CONFIG_USER_ONLY) */ 6381 } 6382 6383 /* 74xx TLB management */ 6384 6385 /* tlbld */ 6386 static void gen_tlbld_74xx(DisasContext *ctx) 6387 { 6388 #if defined(CONFIG_USER_ONLY) 6389 GEN_PRIV; 6390 #else 6391 CHK_SV; 6392 gen_helper_74xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]); 6393 #endif /* defined(CONFIG_USER_ONLY) */ 6394 } 6395 6396 /* tlbli */ 6397 static void gen_tlbli_74xx(DisasContext *ctx) 6398 { 6399 #if defined(CONFIG_USER_ONLY) 6400 GEN_PRIV; 6401 #else 6402 CHK_SV; 6403 gen_helper_74xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]); 6404 #endif /* defined(CONFIG_USER_ONLY) */ 6405 } 6406 6407 /* POWER instructions not in PowerPC 601 */ 6408 6409 /* clf */ 6410 static void gen_clf(DisasContext *ctx) 6411 { 6412 /* Cache line flush: implemented as no-op */ 6413 } 6414 6415 /* cli */ 6416 static void gen_cli(DisasContext *ctx) 6417 { 6418 #if defined(CONFIG_USER_ONLY) 6419 GEN_PRIV; 6420 #else 6421 /* Cache line invalidate: privileged and treated as no-op */ 6422 CHK_SV; 6423 #endif /* defined(CONFIG_USER_ONLY) */ 6424 } 6425 6426 /* dclst */ 6427 static void gen_dclst(DisasContext *ctx) 6428 { 6429 /* Data cache line store: treated as no-op */ 6430 } 6431 6432 static void gen_mfsri(DisasContext *ctx) 6433 { 6434 #if defined(CONFIG_USER_ONLY) 6435 GEN_PRIV; 6436 #else 6437 int ra = rA(ctx->opcode); 6438 int rd = rD(ctx->opcode); 6439 TCGv t0; 6440 6441 CHK_SV; 6442 t0 = tcg_temp_new(); 6443 gen_addr_reg_index(ctx, t0); 6444 tcg_gen_extract_tl(t0, t0, 28, 4); 6445 gen_helper_load_sr(cpu_gpr[rd], cpu_env, t0); 6446 tcg_temp_free(t0); 6447 if (ra != 0 && ra != rd) { 6448 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rd]); 6449 } 6450 #endif /* defined(CONFIG_USER_ONLY) */
6451 } 6452 6453 static void gen_rac(DisasContext *ctx) 6454 { 6455 #if defined(CONFIG_USER_ONLY) 6456 GEN_PRIV; 6457 #else 6458 TCGv t0; 6459 6460 CHK_SV; 6461 t0 = tcg_temp_new(); 6462 gen_addr_reg_index(ctx, t0); 6463 gen_helper_rac(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 6464 tcg_temp_free(t0); 6465 #endif /* defined(CONFIG_USER_ONLY) */ 6466 } 6467 6468 static void gen_rfsvc(DisasContext *ctx) 6469 { 6470 #if defined(CONFIG_USER_ONLY) 6471 GEN_PRIV; 6472 #else 6473 CHK_SV; 6474 6475 gen_helper_rfsvc(cpu_env); 6476 ctx->base.is_jmp = DISAS_EXIT; 6477 #endif /* defined(CONFIG_USER_ONLY) */ 6478 } 6479 6480 /* svc is not implemented for now */ 6481 6482 /* BookE specific instructions */ 6483 6484 /* XXX: not implemented on 440 ? */ 6485 static void gen_mfapidi(DisasContext *ctx) 6486 { 6487 /* XXX: TODO */ 6488 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6489 } 6490 6491 /* XXX: not implemented on 440 ? */ 6492 static void gen_tlbiva(DisasContext *ctx) 6493 { 6494 #if defined(CONFIG_USER_ONLY) 6495 GEN_PRIV; 6496 #else 6497 TCGv t0; 6498 6499 CHK_SV; 6500 t0 = tcg_temp_new(); 6501 gen_addr_reg_index(ctx, t0); 6502 gen_helper_tlbiva(cpu_env, cpu_gpr[rB(ctx->opcode)]); 6503 tcg_temp_free(t0); 6504 #endif /* defined(CONFIG_USER_ONLY) */ 6505 } 6506 6507 /* All 405 MAC instructions are translated here */ 6508 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3, 6509 int ra, int rb, int rt, int Rc) 6510 { 6511 TCGv t0, t1; 6512 6513 t0 = tcg_temp_local_new(); 6514 t1 = tcg_temp_local_new(); 6515 6516 switch (opc3 & 0x0D) { 6517 case 0x05: 6518 /* macchw - macchw. - macchwo - macchwo. */ 6519 /* macchws - macchws. - macchwso - macchwso. */ 6520 /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */ 6521 /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */ 6522 /* mulchw - mulchw. */ 6523 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]); 6524 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16); 6525 tcg_gen_ext16s_tl(t1, t1); 6526 break; 6527 case 0x04: 6528 /* macchwu - macchwu. - macchwuo - macchwuo. */ 6529 /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */ 6530 /* mulchwu - mulchwu. */ 6531 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]); 6532 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16); 6533 tcg_gen_ext16u_tl(t1, t1); 6534 break; 6535 case 0x01: 6536 /* machhw - machhw. - machhwo - machhwo. */ 6537 /* machhws - machhws. - machhwso - machhwso. */ 6538 /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */ 6539 /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */ 6540 /* mulhhw - mulhhw. */ 6541 tcg_gen_sari_tl(t0, cpu_gpr[ra], 16); 6542 tcg_gen_ext16s_tl(t0, t0); 6543 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16); 6544 tcg_gen_ext16s_tl(t1, t1); 6545 break; 6546 case 0x00: 6547 /* machhwu - machhwu. - machhwuo - machhwuo. */ 6548 /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */ 6549 /* mulhhwu - mulhhwu. */ 6550 tcg_gen_shri_tl(t0, cpu_gpr[ra], 16); 6551 tcg_gen_ext16u_tl(t0, t0); 6552 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16); 6553 tcg_gen_ext16u_tl(t1, t1); 6554 break; 6555 case 0x0D: 6556 /* maclhw - maclhw. - maclhwo - maclhwo. */ 6557 /* maclhws - maclhws. - maclhwso - maclhwso. */ 6558 /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */ 6559 /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */ 6560 /* mullhw - mullhw. */ 6561 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]); 6562 tcg_gen_ext16s_tl(t1, cpu_gpr[rb]); 6563 break; 6564 case 0x0C: 6565 /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */ 6566 /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */ 6567 /* mullhwu - mullhwu. 
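 * (Clarifying note, roughly summarising the decode: bit 0x01 of opc3 selects
 * signed vs. unsigned operands and bits 0x0C select which halfwords of
 * rA/rB are used, so this arm handles the unsigned low-halfword forms and
 * the code just below zero-extends the low 16 bits of both operands. The
 * multiply / accumulate / saturate choice is made later from opc2 and the
 * remaining opc3 bits.)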
*/ 6568 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]); 6569 tcg_gen_ext16u_tl(t1, cpu_gpr[rb]); 6570 break; 6571 } 6572 if (opc2 & 0x04) { 6573 /* (n)multiply-and-accumulate (0x0C / 0x0E) */ 6574 tcg_gen_mul_tl(t1, t0, t1); 6575 if (opc2 & 0x02) { 6576 /* nmultiply-and-accumulate (0x0E) */ 6577 tcg_gen_sub_tl(t0, cpu_gpr[rt], t1); 6578 } else { 6579 /* multiply-and-accumulate (0x0C) */ 6580 tcg_gen_add_tl(t0, cpu_gpr[rt], t1); 6581 } 6582 6583 if (opc3 & 0x12) { 6584 /* Check overflow and/or saturate */ 6585 TCGLabel *l1 = gen_new_label(); 6586 6587 if (opc3 & 0x10) { 6588 /* Start with XER OV disabled, the most likely case */ 6589 tcg_gen_movi_tl(cpu_ov, 0); 6590 } 6591 if (opc3 & 0x01) { 6592 /* Signed */ 6593 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1); 6594 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1); 6595 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0); 6596 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1); 6597 if (opc3 & 0x02) { 6598 /* Saturate */ 6599 tcg_gen_sari_tl(t0, cpu_gpr[rt], 31); 6600 tcg_gen_xori_tl(t0, t0, 0x7fffffff); 6601 } 6602 } else { 6603 /* Unsigned */ 6604 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1); 6605 if (opc3 & 0x02) { 6606 /* Saturate */ 6607 tcg_gen_movi_tl(t0, UINT32_MAX); 6608 } 6609 } 6610 if (opc3 & 0x10) { 6611 /* Check overflow */ 6612 tcg_gen_movi_tl(cpu_ov, 1); 6613 tcg_gen_movi_tl(cpu_so, 1); 6614 } 6615 gen_set_label(l1); 6616 tcg_gen_mov_tl(cpu_gpr[rt], t0); 6617 } 6618 } else { 6619 tcg_gen_mul_tl(cpu_gpr[rt], t0, t1); 6620 } 6621 tcg_temp_free(t0); 6622 tcg_temp_free(t1); 6623 if (unlikely(Rc) != 0) { 6624 /* Update Rc0 */ 6625 gen_set_Rc0(ctx, cpu_gpr[rt]); 6626 } 6627 } 6628 6629 #define GEN_MAC_HANDLER(name, opc2, opc3) \ 6630 static void glue(gen_, name)(DisasContext *ctx) \ 6631 { \ 6632 gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode), \ 6633 rD(ctx->opcode), Rc(ctx->opcode)); \ 6634 } 6635 6636 /* macchw - macchw. */ 6637 GEN_MAC_HANDLER(macchw, 0x0C, 0x05); 6638 /* macchwo - macchwo. */ 6639 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15); 6640 /* macchws - macchws. */ 6641 GEN_MAC_HANDLER(macchws, 0x0C, 0x07); 6642 /* macchwso - macchwso. */ 6643 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17); 6644 /* macchwsu - macchwsu. */ 6645 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06); 6646 /* macchwsuo - macchwsuo. */ 6647 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16); 6648 /* macchwu - macchwu. */ 6649 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04); 6650 /* macchwuo - macchwuo. */ 6651 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14); 6652 /* machhw - machhw. */ 6653 GEN_MAC_HANDLER(machhw, 0x0C, 0x01); 6654 /* machhwo - machhwo. */ 6655 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11); 6656 /* machhws - machhws. */ 6657 GEN_MAC_HANDLER(machhws, 0x0C, 0x03); 6658 /* machhwso - machhwso. */ 6659 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13); 6660 /* machhwsu - machhwsu. */ 6661 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02); 6662 /* machhwsuo - machhwsuo. */ 6663 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12); 6664 /* machhwu - machhwu. */ 6665 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00); 6666 /* machhwuo - machhwuo. */ 6667 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10); 6668 /* maclhw - maclhw. */ 6669 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D); 6670 /* maclhwo - maclhwo. */ 6671 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D); 6672 /* maclhws - maclhws. */ 6673 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F); 6674 /* maclhwso - maclhwso. */ 6675 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F); 6676 /* maclhwu - maclhwu. */ 6677 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C); 6678 /* maclhwuo - maclhwuo. */ 6679 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C); 6680 /* maclhwsu - maclhwsu. 
*/ 6681 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E); 6682 /* maclhwsuo - maclhwsuo. */ 6683 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E); 6684 /* nmacchw - nmacchw. */ 6685 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05); 6686 /* nmacchwo - nmacchwo. */ 6687 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15); 6688 /* nmacchws - nmacchws. */ 6689 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07); 6690 /* nmacchwso - nmacchwso. */ 6691 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17); 6692 /* nmachhw - nmachhw. */ 6693 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01); 6694 /* nmachhwo - nmachhwo. */ 6695 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11); 6696 /* nmachhws - nmachhws. */ 6697 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03); 6698 /* nmachhwso - nmachhwso. */ 6699 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13); 6700 /* nmaclhw - nmaclhw. */ 6701 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D); 6702 /* nmaclhwo - nmaclhwo. */ 6703 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D); 6704 /* nmaclhws - nmaclhws. */ 6705 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F); 6706 /* nmaclhwso - nmaclhwso. */ 6707 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F); 6708 6709 /* mulchw - mulchw. */ 6710 GEN_MAC_HANDLER(mulchw, 0x08, 0x05); 6711 /* mulchwu - mulchwu. */ 6712 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04); 6713 /* mulhhw - mulhhw. */ 6714 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01); 6715 /* mulhhwu - mulhhwu. */ 6716 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00); 6717 /* mullhw - mullhw. */ 6718 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D); 6719 /* mullhwu - mullhwu. */ 6720 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C); 6721 6722 /* mfdcr */ 6723 static void gen_mfdcr(DisasContext *ctx) 6724 { 6725 #if defined(CONFIG_USER_ONLY) 6726 GEN_PRIV; 6727 #else 6728 TCGv dcrn; 6729 6730 CHK_SV; 6731 dcrn = tcg_const_tl(SPR(ctx->opcode)); 6732 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, dcrn); 6733 tcg_temp_free(dcrn); 6734 #endif /* defined(CONFIG_USER_ONLY) */ 6735 } 6736 6737 /* mtdcr */ 6738 static void gen_mtdcr(DisasContext *ctx) 6739 { 6740 #if defined(CONFIG_USER_ONLY) 6741 GEN_PRIV; 6742 #else 6743 TCGv dcrn; 6744 6745 CHK_SV; 6746 dcrn = tcg_const_tl(SPR(ctx->opcode)); 6747 gen_helper_store_dcr(cpu_env, dcrn, cpu_gpr[rS(ctx->opcode)]); 6748 tcg_temp_free(dcrn); 6749 #endif /* defined(CONFIG_USER_ONLY) */ 6750 } 6751 6752 /* mfdcrx */ 6753 /* XXX: not implemented on 440 ? */ 6754 static void gen_mfdcrx(DisasContext *ctx) 6755 { 6756 #if defined(CONFIG_USER_ONLY) 6757 GEN_PRIV; 6758 #else 6759 CHK_SV; 6760 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, 6761 cpu_gpr[rA(ctx->opcode)]); 6762 /* Note: Rc update flag set leads to undefined state of Rc0 */ 6763 #endif /* defined(CONFIG_USER_ONLY) */ 6764 } 6765 6766 /* mtdcrx */ 6767 /* XXX: not implemented on 440 ? 
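 * A note added for clarity: the only difference from mtdcr above is where
 * the DCR number comes from. Roughly:
 *     mtdcr  DCRN, rS  ->  store_dcr helper with a constant DCRN from the
 *                          instruction's SPR field
 *     mtdcrx rA,   rS  ->  store_dcr helper with the DCR number taken from
 *                          rA at run time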
*/ 6768 static void gen_mtdcrx(DisasContext *ctx) 6769 { 6770 #if defined(CONFIG_USER_ONLY) 6771 GEN_PRIV; 6772 #else 6773 CHK_SV; 6774 gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)], 6775 cpu_gpr[rS(ctx->opcode)]); 6776 /* Note: Rc update flag set leads to undefined state of Rc0 */ 6777 #endif /* defined(CONFIG_USER_ONLY) */ 6778 } 6779 6780 /* mfdcrux (PPC 460) : user-mode access to DCR */ 6781 static void gen_mfdcrux(DisasContext *ctx) 6782 { 6783 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, 6784 cpu_gpr[rA(ctx->opcode)]); 6785 /* Note: Rc update flag set leads to undefined state of Rc0 */ 6786 } 6787 6788 /* mtdcrux (PPC 460) : user-mode access to DCR */ 6789 static void gen_mtdcrux(DisasContext *ctx) 6790 { 6791 gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)], 6792 cpu_gpr[rS(ctx->opcode)]); 6793 /* Note: Rc update flag set leads to undefined state of Rc0 */ 6794 } 6795 6796 /* dccci */ 6797 static void gen_dccci(DisasContext *ctx) 6798 { 6799 CHK_SV; 6800 /* interpreted as no-op */ 6801 } 6802 6803 /* dcread */ 6804 static void gen_dcread(DisasContext *ctx) 6805 { 6806 #if defined(CONFIG_USER_ONLY) 6807 GEN_PRIV; 6808 #else 6809 TCGv EA, val; 6810 6811 CHK_SV; 6812 gen_set_access_type(ctx, ACCESS_CACHE); 6813 EA = tcg_temp_new(); 6814 gen_addr_reg_index(ctx, EA); 6815 val = tcg_temp_new(); 6816 gen_qemu_ld32u(ctx, val, EA); 6817 tcg_temp_free(val); 6818 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA); 6819 tcg_temp_free(EA); 6820 #endif /* defined(CONFIG_USER_ONLY) */ 6821 } 6822 6823 /* icbt */ 6824 static void gen_icbt_40x(DisasContext *ctx) 6825 { 6826 /* 6827 * interpreted as no-op 6828 * XXX: specification say this is treated as a load by the MMU but 6829 * does not generate any exception 6830 */ 6831 } 6832 6833 /* iccci */ 6834 static void gen_iccci(DisasContext *ctx) 6835 { 6836 CHK_SV; 6837 /* interpreted as no-op */ 6838 } 6839 6840 /* icread */ 6841 static void gen_icread(DisasContext *ctx) 6842 { 6843 CHK_SV; 6844 /* interpreted as no-op */ 6845 } 6846 6847 /* rfci (supervisor only) */ 6848 static void gen_rfci_40x(DisasContext *ctx) 6849 { 6850 #if defined(CONFIG_USER_ONLY) 6851 GEN_PRIV; 6852 #else 6853 CHK_SV; 6854 /* Restore CPU state */ 6855 gen_helper_40x_rfci(cpu_env); 6856 ctx->base.is_jmp = DISAS_EXIT; 6857 #endif /* defined(CONFIG_USER_ONLY) */ 6858 } 6859 6860 static void gen_rfci(DisasContext *ctx) 6861 { 6862 #if defined(CONFIG_USER_ONLY) 6863 GEN_PRIV; 6864 #else 6865 CHK_SV; 6866 /* Restore CPU state */ 6867 gen_helper_rfci(cpu_env); 6868 ctx->base.is_jmp = DISAS_EXIT; 6869 #endif /* defined(CONFIG_USER_ONLY) */ 6870 } 6871 6872 /* BookE specific */ 6873 6874 /* XXX: not implemented on 440 ? */ 6875 static void gen_rfdi(DisasContext *ctx) 6876 { 6877 #if defined(CONFIG_USER_ONLY) 6878 GEN_PRIV; 6879 #else 6880 CHK_SV; 6881 /* Restore CPU state */ 6882 gen_helper_rfdi(cpu_env); 6883 ctx->base.is_jmp = DISAS_EXIT; 6884 #endif /* defined(CONFIG_USER_ONLY) */ 6885 } 6886 6887 /* XXX: not implemented on 440 ? 
*/ 6888 static void gen_rfmci(DisasContext *ctx) 6889 { 6890 #if defined(CONFIG_USER_ONLY) 6891 GEN_PRIV; 6892 #else 6893 CHK_SV; 6894 /* Restore CPU state */ 6895 gen_helper_rfmci(cpu_env); 6896 ctx->base.is_jmp = DISAS_EXIT; 6897 #endif /* defined(CONFIG_USER_ONLY) */ 6898 } 6899 6900 /* TLB management - PowerPC 405 implementation */ 6901 6902 /* tlbre */ 6903 static void gen_tlbre_40x(DisasContext *ctx) 6904 { 6905 #if defined(CONFIG_USER_ONLY) 6906 GEN_PRIV; 6907 #else 6908 CHK_SV; 6909 switch (rB(ctx->opcode)) { 6910 case 0: 6911 gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_env, 6912 cpu_gpr[rA(ctx->opcode)]); 6913 break; 6914 case 1: 6915 gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_env, 6916 cpu_gpr[rA(ctx->opcode)]); 6917 break; 6918 default: 6919 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6920 break; 6921 } 6922 #endif /* defined(CONFIG_USER_ONLY) */ 6923 } 6924 6925 /* tlbsx - tlbsx. */ 6926 static void gen_tlbsx_40x(DisasContext *ctx) 6927 { 6928 #if defined(CONFIG_USER_ONLY) 6929 GEN_PRIV; 6930 #else 6931 TCGv t0; 6932 6933 CHK_SV; 6934 t0 = tcg_temp_new(); 6935 gen_addr_reg_index(ctx, t0); 6936 gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 6937 tcg_temp_free(t0); 6938 if (Rc(ctx->opcode)) { 6939 TCGLabel *l1 = gen_new_label(); 6940 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 6941 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1); 6942 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02); 6943 gen_set_label(l1); 6944 } 6945 #endif /* defined(CONFIG_USER_ONLY) */ 6946 } 6947 6948 /* tlbwe */ 6949 static void gen_tlbwe_40x(DisasContext *ctx) 6950 { 6951 #if defined(CONFIG_USER_ONLY) 6952 GEN_PRIV; 6953 #else 6954 CHK_SV; 6955 6956 switch (rB(ctx->opcode)) { 6957 case 0: 6958 gen_helper_4xx_tlbwe_hi(cpu_env, cpu_gpr[rA(ctx->opcode)], 6959 cpu_gpr[rS(ctx->opcode)]); 6960 break; 6961 case 1: 6962 gen_helper_4xx_tlbwe_lo(cpu_env, cpu_gpr[rA(ctx->opcode)], 6963 cpu_gpr[rS(ctx->opcode)]); 6964 break; 6965 default: 6966 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6967 break; 6968 } 6969 #endif /* defined(CONFIG_USER_ONLY) */ 6970 } 6971 6972 /* TLB management - PowerPC 440 implementation */ 6973 6974 /* tlbre */ 6975 static void gen_tlbre_440(DisasContext *ctx) 6976 { 6977 #if defined(CONFIG_USER_ONLY) 6978 GEN_PRIV; 6979 #else 6980 CHK_SV; 6981 6982 switch (rB(ctx->opcode)) { 6983 case 0: 6984 case 1: 6985 case 2: 6986 { 6987 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode)); 6988 gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], cpu_env, 6989 t0, cpu_gpr[rA(ctx->opcode)]); 6990 tcg_temp_free_i32(t0); 6991 } 6992 break; 6993 default: 6994 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6995 break; 6996 } 6997 #endif /* defined(CONFIG_USER_ONLY) */ 6998 } 6999 7000 /* tlbsx - tlbsx. 
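 * For the record form (tlbsx.) the generator below follows the same pattern
 * as the 40x version above; informally:
 *     CR0 = SO;  if (rD != -1) CR0 |= 0x2;
 * where 0x2 is the EQ bit in this file's 4-bit CR field encoding, so EQ
 * reports whether the search helper found a matching TLB entry.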
*/ 7001 static void gen_tlbsx_440(DisasContext *ctx) 7002 { 7003 #if defined(CONFIG_USER_ONLY) 7004 GEN_PRIV; 7005 #else 7006 TCGv t0; 7007 7008 CHK_SV; 7009 t0 = tcg_temp_new(); 7010 gen_addr_reg_index(ctx, t0); 7011 gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 7012 tcg_temp_free(t0); 7013 if (Rc(ctx->opcode)) { 7014 TCGLabel *l1 = gen_new_label(); 7015 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 7016 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1); 7017 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02); 7018 gen_set_label(l1); 7019 } 7020 #endif /* defined(CONFIG_USER_ONLY) */ 7021 } 7022 7023 /* tlbwe */ 7024 static void gen_tlbwe_440(DisasContext *ctx) 7025 { 7026 #if defined(CONFIG_USER_ONLY) 7027 GEN_PRIV; 7028 #else 7029 CHK_SV; 7030 switch (rB(ctx->opcode)) { 7031 case 0: 7032 case 1: 7033 case 2: 7034 { 7035 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode)); 7036 gen_helper_440_tlbwe(cpu_env, t0, cpu_gpr[rA(ctx->opcode)], 7037 cpu_gpr[rS(ctx->opcode)]); 7038 tcg_temp_free_i32(t0); 7039 } 7040 break; 7041 default: 7042 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 7043 break; 7044 } 7045 #endif /* defined(CONFIG_USER_ONLY) */ 7046 } 7047 7048 /* TLB management - PowerPC BookE 2.06 implementation */ 7049 7050 /* tlbre */ 7051 static void gen_tlbre_booke206(DisasContext *ctx) 7052 { 7053 #if defined(CONFIG_USER_ONLY) 7054 GEN_PRIV; 7055 #else 7056 CHK_SV; 7057 gen_helper_booke206_tlbre(cpu_env); 7058 #endif /* defined(CONFIG_USER_ONLY) */ 7059 } 7060 7061 /* tlbsx - tlbsx. */ 7062 static void gen_tlbsx_booke206(DisasContext *ctx) 7063 { 7064 #if defined(CONFIG_USER_ONLY) 7065 GEN_PRIV; 7066 #else 7067 TCGv t0; 7068 7069 CHK_SV; 7070 if (rA(ctx->opcode)) { 7071 t0 = tcg_temp_new(); 7072 tcg_gen_mov_tl(t0, cpu_gpr[rD(ctx->opcode)]); 7073 } else { 7074 t0 = tcg_const_tl(0); 7075 } 7076 7077 tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]); 7078 gen_helper_booke206_tlbsx(cpu_env, t0); 7079 tcg_temp_free(t0); 7080 #endif /* defined(CONFIG_USER_ONLY) */ 7081 } 7082 7083 /* tlbwe */ 7084 static void gen_tlbwe_booke206(DisasContext *ctx) 7085 { 7086 #if defined(CONFIG_USER_ONLY) 7087 GEN_PRIV; 7088 #else 7089 CHK_SV; 7090 gen_helper_booke206_tlbwe(cpu_env); 7091 #endif /* defined(CONFIG_USER_ONLY) */ 7092 } 7093 7094 static void gen_tlbivax_booke206(DisasContext *ctx) 7095 { 7096 #if defined(CONFIG_USER_ONLY) 7097 GEN_PRIV; 7098 #else 7099 TCGv t0; 7100 7101 CHK_SV; 7102 t0 = tcg_temp_new(); 7103 gen_addr_reg_index(ctx, t0); 7104 gen_helper_booke206_tlbivax(cpu_env, t0); 7105 tcg_temp_free(t0); 7106 #endif /* defined(CONFIG_USER_ONLY) */ 7107 } 7108 7109 static void gen_tlbilx_booke206(DisasContext *ctx) 7110 { 7111 #if defined(CONFIG_USER_ONLY) 7112 GEN_PRIV; 7113 #else 7114 TCGv t0; 7115 7116 CHK_SV; 7117 t0 = tcg_temp_new(); 7118 gen_addr_reg_index(ctx, t0); 7119 7120 switch ((ctx->opcode >> 21) & 0x3) { 7121 case 0: 7122 gen_helper_booke206_tlbilx0(cpu_env, t0); 7123 break; 7124 case 1: 7125 gen_helper_booke206_tlbilx1(cpu_env, t0); 7126 break; 7127 case 3: 7128 gen_helper_booke206_tlbilx3(cpu_env, t0); 7129 break; 7130 default: 7131 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 7132 break; 7133 } 7134 7135 tcg_temp_free(t0); 7136 #endif /* defined(CONFIG_USER_ONLY) */ 7137 } 7138 7139 7140 /* wrtee */ 7141 static void gen_wrtee(DisasContext *ctx) 7142 { 7143 #if defined(CONFIG_USER_ONLY) 7144 GEN_PRIV; 7145 #else 7146 TCGv t0; 7147 7148 CHK_SV; 7149 t0 = tcg_temp_new(); 7150 tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE)); 7151 
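    /*
     * Clarifying note: t0 holds only the EE bit copied out of rD, so the
     * clear-then-OR sequence below inserts rD[EE] into MSR while leaving
     * every other MSR bit untouched.
     */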
tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE)); 7152 tcg_gen_or_tl(cpu_msr, cpu_msr, t0); 7153 tcg_temp_free(t0); 7154 /* 7155 * Stop translation to have a chance to raise an exception if we 7156 * just set msr_ee to 1 7157 */ 7158 ctx->base.is_jmp = DISAS_EXIT_UPDATE; 7159 #endif /* defined(CONFIG_USER_ONLY) */ 7160 } 7161 7162 /* wrteei */ 7163 static void gen_wrteei(DisasContext *ctx) 7164 { 7165 #if defined(CONFIG_USER_ONLY) 7166 GEN_PRIV; 7167 #else 7168 CHK_SV; 7169 if (ctx->opcode & 0x00008000) { 7170 tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE)); 7171 /* Stop translation to have a chance to raise an exception */ 7172 ctx->base.is_jmp = DISAS_EXIT_UPDATE; 7173 } else { 7174 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE)); 7175 } 7176 #endif /* defined(CONFIG_USER_ONLY) */ 7177 } 7178 7179 /* PowerPC 440 specific instructions */ 7180 7181 /* dlmzb */ 7182 static void gen_dlmzb(DisasContext *ctx) 7183 { 7184 TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode)); 7185 gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_env, 7186 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); 7187 tcg_temp_free_i32(t0); 7188 } 7189 7190 /* mbar replaces eieio on 440 */ 7191 static void gen_mbar(DisasContext *ctx) 7192 { 7193 /* interpreted as no-op */ 7194 } 7195 7196 /* msync replaces sync on 440 */ 7197 static void gen_msync_4xx(DisasContext *ctx) 7198 { 7199 /* Only e500 seems to treat reserved bits as invalid */ 7200 if ((ctx->insns_flags2 & PPC2_BOOKE206) && 7201 (ctx->opcode & 0x03FFF801)) { 7202 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 7203 } 7204 /* otherwise interpreted as no-op */ 7205 } 7206 7207 /* icbt */ 7208 static void gen_icbt_440(DisasContext *ctx) 7209 { 7210 /* 7211 * interpreted as no-op 7212 * XXX: specification say this is treated as a load by the MMU but 7213 * does not generate any exception 7214 */ 7215 } 7216 7217 /* Embedded.Processor Control */ 7218 7219 static void gen_msgclr(DisasContext *ctx) 7220 { 7221 #if defined(CONFIG_USER_ONLY) 7222 GEN_PRIV; 7223 #else 7224 CHK_HV; 7225 if (is_book3s_arch2x(ctx)) { 7226 gen_helper_book3s_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]); 7227 } else { 7228 gen_helper_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]); 7229 } 7230 #endif /* defined(CONFIG_USER_ONLY) */ 7231 } 7232 7233 static void gen_msgsnd(DisasContext *ctx) 7234 { 7235 #if defined(CONFIG_USER_ONLY) 7236 GEN_PRIV; 7237 #else 7238 CHK_HV; 7239 if (is_book3s_arch2x(ctx)) { 7240 gen_helper_book3s_msgsnd(cpu_gpr[rB(ctx->opcode)]); 7241 } else { 7242 gen_helper_msgsnd(cpu_gpr[rB(ctx->opcode)]); 7243 } 7244 #endif /* defined(CONFIG_USER_ONLY) */ 7245 } 7246 7247 #if defined(TARGET_PPC64) 7248 static void gen_msgclrp(DisasContext *ctx) 7249 { 7250 #if defined(CONFIG_USER_ONLY) 7251 GEN_PRIV; 7252 #else 7253 CHK_SV; 7254 gen_helper_book3s_msgclrp(cpu_env, cpu_gpr[rB(ctx->opcode)]); 7255 #endif /* defined(CONFIG_USER_ONLY) */ 7256 } 7257 7258 static void gen_msgsndp(DisasContext *ctx) 7259 { 7260 #if defined(CONFIG_USER_ONLY) 7261 GEN_PRIV; 7262 #else 7263 CHK_SV; 7264 gen_helper_book3s_msgsndp(cpu_env, cpu_gpr[rB(ctx->opcode)]); 7265 #endif /* defined(CONFIG_USER_ONLY) */ 7266 } 7267 #endif 7268 7269 static void gen_msgsync(DisasContext *ctx) 7270 { 7271 #if defined(CONFIG_USER_ONLY) 7272 GEN_PRIV; 7273 #else 7274 CHK_HV; 7275 #endif /* defined(CONFIG_USER_ONLY) */ 7276 /* interpreted as no-op */ 7277 } 7278 7279 #if defined(TARGET_PPC64) 7280 static void gen_maddld(DisasContext *ctx) 7281 { 7282 TCGv_i64 t1 = tcg_temp_new_i64(); 7283 7284 tcg_gen_mul_i64(t1, 
cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 7285 tcg_gen_add_i64(cpu_gpr[rD(ctx->opcode)], t1, cpu_gpr[rC(ctx->opcode)]); 7286 tcg_temp_free_i64(t1); 7287 } 7288 7289 /* maddhd maddhdu */ 7290 static void gen_maddhd_maddhdu(DisasContext *ctx) 7291 { 7292 TCGv_i64 lo = tcg_temp_new_i64(); 7293 TCGv_i64 hi = tcg_temp_new_i64(); 7294 TCGv_i64 t1 = tcg_temp_new_i64(); 7295 7296 if (Rc(ctx->opcode)) { 7297 tcg_gen_mulu2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)], 7298 cpu_gpr[rB(ctx->opcode)]); 7299 tcg_gen_movi_i64(t1, 0); 7300 } else { 7301 tcg_gen_muls2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)], 7302 cpu_gpr[rB(ctx->opcode)]); 7303 tcg_gen_sari_i64(t1, cpu_gpr[rC(ctx->opcode)], 63); 7304 } 7305 tcg_gen_add2_i64(t1, cpu_gpr[rD(ctx->opcode)], lo, hi, 7306 cpu_gpr[rC(ctx->opcode)], t1); 7307 tcg_temp_free_i64(lo); 7308 tcg_temp_free_i64(hi); 7309 tcg_temp_free_i64(t1); 7310 } 7311 #endif /* defined(TARGET_PPC64) */ 7312 7313 static void gen_tbegin(DisasContext *ctx) 7314 { 7315 if (unlikely(!ctx->tm_enabled)) { 7316 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); 7317 return; 7318 } 7319 gen_helper_tbegin(cpu_env); 7320 } 7321 7322 #define GEN_TM_NOOP(name) \ 7323 static inline void gen_##name(DisasContext *ctx) \ 7324 { \ 7325 if (unlikely(!ctx->tm_enabled)) { \ 7326 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \ 7327 return; \ 7328 } \ 7329 /* \ 7330 * Because tbegin always fails in QEMU, these user \ 7331 * space instructions all have a simple implementation: \ 7332 * \ 7333 * CR[0] = 0b0 || MSR[TS] || 0b0 \ 7334 * = 0b0 || 0b00 || 0b0 \ 7335 */ \ 7336 tcg_gen_movi_i32(cpu_crf[0], 0); \ 7337 } 7338 7339 GEN_TM_NOOP(tend); 7340 GEN_TM_NOOP(tabort); 7341 GEN_TM_NOOP(tabortwc); 7342 GEN_TM_NOOP(tabortwci); 7343 GEN_TM_NOOP(tabortdc); 7344 GEN_TM_NOOP(tabortdci); 7345 GEN_TM_NOOP(tsr); 7346 7347 static inline void gen_cp_abort(DisasContext *ctx) 7348 { 7349 /* Do Nothing */ 7350 } 7351 7352 #define GEN_CP_PASTE_NOOP(name) \ 7353 static inline void gen_##name(DisasContext *ctx) \ 7354 { \ 7355 /* \ 7356 * Generate invalid exception until we have an \ 7357 * implementation of the copy paste facility \ 7358 */ \ 7359 gen_invalid(ctx); \ 7360 } 7361 7362 GEN_CP_PASTE_NOOP(copy) 7363 GEN_CP_PASTE_NOOP(paste) 7364 7365 static void gen_tcheck(DisasContext *ctx) 7366 { 7367 if (unlikely(!ctx->tm_enabled)) { 7368 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); 7369 return; 7370 } 7371 /* 7372 * Because tbegin always fails, the tcheck implementation is 7373 * simple: 7374 * 7375 * CR[CRF] = TDOOMED || MSR[TS] || 0b0 7376 * = 0b1 || 0b00 || 0b0 7377 */ 7378 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0x8); 7379 } 7380 7381 #if defined(CONFIG_USER_ONLY) 7382 #define GEN_TM_PRIV_NOOP(name) \ 7383 static inline void gen_##name(DisasContext *ctx) \ 7384 { \ 7385 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); \ 7386 } 7387 7388 #else 7389 7390 #define GEN_TM_PRIV_NOOP(name) \ 7391 static inline void gen_##name(DisasContext *ctx) \ 7392 { \ 7393 CHK_SV; \ 7394 if (unlikely(!ctx->tm_enabled)) { \ 7395 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \ 7396 return; \ 7397 } \ 7398 /* \ 7399 * Because tbegin always fails, the implementation is \ 7400 * simple: \ 7401 * \ 7402 * CR[0] = 0b0 || MSR[TS] || 0b0 \ 7403 * = 0b0 || 0b00 | 0b0 \ 7404 */ \ 7405 tcg_gen_movi_i32(cpu_crf[0], 0); \ 7406 } 7407 7408 #endif 7409 7410 GEN_TM_PRIV_NOOP(treclaim); 7411 GEN_TM_PRIV_NOOP(trechkpt); 7412 7413 static inline void get_fpr(TCGv_i64 dst, int regno) 7414 { 7415 tcg_gen_ld_i64(dst, cpu_env, 
fpr_offset(regno)); 7416 } 7417 7418 static inline void set_fpr(int regno, TCGv_i64 src) 7419 { 7420 tcg_gen_st_i64(src, cpu_env, fpr_offset(regno)); 7421 } 7422 7423 static inline void get_avr64(TCGv_i64 dst, int regno, bool high) 7424 { 7425 tcg_gen_ld_i64(dst, cpu_env, avr64_offset(regno, high)); 7426 } 7427 7428 static inline void set_avr64(int regno, TCGv_i64 src, bool high) 7429 { 7430 tcg_gen_st_i64(src, cpu_env, avr64_offset(regno, high)); 7431 } 7432 7433 /* 7434 * Helpers for decodetree used by !function for decoding arguments. 7435 */ 7436 static int times_4(DisasContext *ctx, int x) 7437 { 7438 return x * 4; 7439 } 7440 7441 /* 7442 * Helpers for trans_* functions to check for specific insns flags. 7443 * Use token pasting to ensure that we use the proper flag with the 7444 * proper variable. 7445 */ 7446 #define REQUIRE_INSNS_FLAGS(CTX, NAME) \ 7447 do { \ 7448 if (((CTX)->insns_flags & PPC_##NAME) == 0) { \ 7449 return false; \ 7450 } \ 7451 } while (0) 7452 7453 #define REQUIRE_INSNS_FLAGS2(CTX, NAME) \ 7454 do { \ 7455 if (((CTX)->insns_flags2 & PPC2_##NAME) == 0) { \ 7456 return false; \ 7457 } \ 7458 } while (0) 7459 7460 /* Then special-case the check for 64-bit so that we elide code for ppc32. */ 7461 #if TARGET_LONG_BITS == 32 7462 # define REQUIRE_64BIT(CTX) return false 7463 #else 7464 # define REQUIRE_64BIT(CTX) REQUIRE_INSNS_FLAGS(CTX, 64B) 7465 #endif 7466 7467 /* 7468 * Helpers for implementing sets of trans_* functions. 7469 * Defer the implementation of NAME to FUNC, with optional extra arguments. 7470 */ 7471 #define TRANS(NAME, FUNC, ...) \ 7472 static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \ 7473 { return FUNC(ctx, a, __VA_ARGS__); } 7474 7475 #define TRANS64(NAME, FUNC, ...) \ 7476 static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \ 7477 { REQUIRE_64BIT(ctx); return FUNC(ctx, a, __VA_ARGS__); } 7478 7479 /* TODO: More TRANS* helpers for extra insn_flags checks. 
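 * As an illustration only (the names below are made up, not taken from the
 * real decode files), a line such as
 *     TRANS64(SOME_INSN, do_some_insn, true)
 * expands to
 *     static bool trans_SOME_INSN(DisasContext *ctx, arg_SOME_INSN *a)
 *     { REQUIRE_64BIT(ctx); return do_some_insn(ctx, a, true); }
 * so the per-insn trans_* entry point required by decodetree is generated
 * automatically and simply forwards to the shared do_* implementation.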
*/ 7480 7481 7482 #include "decode-insn32.c.inc" 7483 #include "decode-insn64.c.inc" 7484 #include "translate/fixedpoint-impl.c.inc" 7485 7486 #include "translate/fp-impl.c.inc" 7487 7488 #include "translate/vmx-impl.c.inc" 7489 7490 #include "translate/vsx-impl.c.inc" 7491 #include "translate/vector-impl.c.inc" 7492 7493 #include "translate/dfp-impl.c.inc" 7494 7495 #include "translate/spe-impl.c.inc" 7496 7497 /* Handles lfdp, lxsd, lxssp */ 7498 static void gen_dform39(DisasContext *ctx) 7499 { 7500 switch (ctx->opcode & 0x3) { 7501 case 0: /* lfdp */ 7502 if (ctx->insns_flags2 & PPC2_ISA205) { 7503 return gen_lfdp(ctx); 7504 } 7505 break; 7506 case 2: /* lxsd */ 7507 if (ctx->insns_flags2 & PPC2_ISA300) { 7508 return gen_lxsd(ctx); 7509 } 7510 break; 7511 case 3: /* lxssp */ 7512 if (ctx->insns_flags2 & PPC2_ISA300) { 7513 return gen_lxssp(ctx); 7514 } 7515 break; 7516 } 7517 return gen_invalid(ctx); 7518 } 7519 7520 /* handles stfdp, lxv, stxsd, stxssp lxvx */ 7521 static void gen_dform3D(DisasContext *ctx) 7522 { 7523 if ((ctx->opcode & 3) == 1) { /* DQ-FORM */ 7524 switch (ctx->opcode & 0x7) { 7525 case 1: /* lxv */ 7526 if (ctx->insns_flags2 & PPC2_ISA300) { 7527 return gen_lxv(ctx); 7528 } 7529 break; 7530 case 5: /* stxv */ 7531 if (ctx->insns_flags2 & PPC2_ISA300) { 7532 return gen_stxv(ctx); 7533 } 7534 break; 7535 } 7536 } else { /* DS-FORM */ 7537 switch (ctx->opcode & 0x3) { 7538 case 0: /* stfdp */ 7539 if (ctx->insns_flags2 & PPC2_ISA205) { 7540 return gen_stfdp(ctx); 7541 } 7542 break; 7543 case 2: /* stxsd */ 7544 if (ctx->insns_flags2 & PPC2_ISA300) { 7545 return gen_stxsd(ctx); 7546 } 7547 break; 7548 case 3: /* stxssp */ 7549 if (ctx->insns_flags2 & PPC2_ISA300) { 7550 return gen_stxssp(ctx); 7551 } 7552 break; 7553 } 7554 } 7555 return gen_invalid(ctx); 7556 } 7557 7558 #if defined(TARGET_PPC64) 7559 /* brd */ 7560 static void gen_brd(DisasContext *ctx) 7561 { 7562 tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 7563 } 7564 7565 /* brw */ 7566 static void gen_brw(DisasContext *ctx) 7567 { 7568 tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 7569 tcg_gen_rotli_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 32); 7570 7571 } 7572 7573 /* brh */ 7574 static void gen_brh(DisasContext *ctx) 7575 { 7576 TCGv_i64 t0 = tcg_temp_new_i64(); 7577 TCGv_i64 t1 = tcg_temp_new_i64(); 7578 TCGv_i64 t2 = tcg_temp_new_i64(); 7579 7580 tcg_gen_movi_i64(t0, 0x00ff00ff00ff00ffull); 7581 tcg_gen_shri_i64(t1, cpu_gpr[rS(ctx->opcode)], 8); 7582 tcg_gen_and_i64(t2, t1, t0); 7583 tcg_gen_and_i64(t1, cpu_gpr[rS(ctx->opcode)], t0); 7584 tcg_gen_shli_i64(t1, t1, 8); 7585 tcg_gen_or_i64(cpu_gpr[rA(ctx->opcode)], t1, t2); 7586 7587 tcg_temp_free_i64(t0); 7588 tcg_temp_free_i64(t1); 7589 tcg_temp_free_i64(t2); 7590 } 7591 #endif 7592 7593 static opcode_t opcodes[] = { 7594 #if defined(TARGET_PPC64) 7595 GEN_HANDLER_E(brd, 0x1F, 0x1B, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA310), 7596 GEN_HANDLER_E(brw, 0x1F, 0x1B, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA310), 7597 GEN_HANDLER_E(brh, 0x1F, 0x1B, 0x06, 0x0000F801, PPC_NONE, PPC2_ISA310), 7598 #endif 7599 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE), 7600 #if defined(TARGET_PPC64) 7601 GEN_HANDLER_E(cmpeqb, 0x1F, 0x00, 0x07, 0x00600000, PPC_NONE, PPC2_ISA300), 7602 #endif 7603 GEN_HANDLER_E(cmpb, 0x1F, 0x1C, 0x0F, 0x00000001, PPC_NONE, PPC2_ISA205), 7604 GEN_HANDLER_E(cmprb, 0x1F, 0x00, 0x06, 0x00400001, PPC_NONE, PPC2_ISA300), 7605 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, 
PPC_ISEL), 7606 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7607 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7608 GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER), 7609 GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER), 7610 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER), 7611 GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER), 7612 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7613 #if defined(TARGET_PPC64) 7614 GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B), 7615 #endif 7616 GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER), 7617 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER), 7618 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7619 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7620 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7621 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER), 7622 GEN_HANDLER_E(cnttzw, 0x1F, 0x1A, 0x10, 0x00000000, PPC_NONE, PPC2_ISA300), 7623 GEN_HANDLER_E(copy, 0x1F, 0x06, 0x18, 0x03C00001, PPC_NONE, PPC2_ISA300), 7624 GEN_HANDLER_E(cp_abort, 0x1F, 0x06, 0x1A, 0x03FFF801, PPC_NONE, PPC2_ISA300), 7625 GEN_HANDLER_E(paste, 0x1F, 0x06, 0x1C, 0x03C00000, PPC_NONE, PPC2_ISA300), 7626 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER), 7627 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER), 7628 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7629 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7630 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7631 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7632 GEN_HANDLER(popcntb, 0x1F, 0x1A, 0x03, 0x0000F801, PPC_POPCNTB), 7633 GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD), 7634 GEN_HANDLER_E(prtyw, 0x1F, 0x1A, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA205), 7635 #if defined(TARGET_PPC64) 7636 GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD), 7637 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B), 7638 GEN_HANDLER_E(cnttzd, 0x1F, 0x1A, 0x11, 0x00000000, PPC_NONE, PPC2_ISA300), 7639 GEN_HANDLER_E(darn, 0x1F, 0x13, 0x17, 0x001CF801, PPC_NONE, PPC2_ISA300), 7640 GEN_HANDLER_E(prtyd, 0x1F, 0x1A, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA205), 7641 GEN_HANDLER_E(bpermd, 0x1F, 0x1C, 0x07, 0x00000001, PPC_NONE, PPC2_PERM_ISA206), 7642 #endif 7643 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7644 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7645 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7646 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER), 7647 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER), 7648 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER), 7649 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER), 7650 #if defined(TARGET_PPC64) 7651 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B), 7652 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B), 7653 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B), 7654 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B), 7655 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B), 7656 GEN_HANDLER2_E(extswsli0, "extswsli", 0x1F, 0x1A, 0x1B, 0x00000000, 7657 PPC_NONE, PPC2_ISA300), 7658 GEN_HANDLER2_E(extswsli1, "extswsli", 0x1F, 0x1B, 0x1B, 0x00000000, 7659 PPC_NONE, PPC2_ISA300), 7660 #endif 7661 #if 
defined(TARGET_PPC64) 7662 GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX), 7663 GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B), 7664 #endif 7665 /* handles lfdp, lxsd, lxssp */ 7666 GEN_HANDLER_E(dform39, 0x39, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205), 7667 /* handles stfdp, lxv, stxsd, stxssp, stxv */ 7668 GEN_HANDLER_E(dform3D, 0x3D, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205), 7669 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7670 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7671 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING), 7672 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING), 7673 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING), 7674 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING), 7675 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x01FFF801, PPC_MEM_EIEIO), 7676 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM), 7677 GEN_HANDLER_E(lbarx, 0x1F, 0x14, 0x01, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 7678 GEN_HANDLER_E(lharx, 0x1F, 0x14, 0x03, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 7679 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES), 7680 GEN_HANDLER_E(lwat, 0x1F, 0x06, 0x12, 0x00000001, PPC_NONE, PPC2_ISA300), 7681 GEN_HANDLER_E(stwat, 0x1F, 0x06, 0x16, 0x00000001, PPC_NONE, PPC2_ISA300), 7682 GEN_HANDLER_E(stbcx_, 0x1F, 0x16, 0x15, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 7683 GEN_HANDLER_E(sthcx_, 0x1F, 0x16, 0x16, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 7684 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES), 7685 #if defined(TARGET_PPC64) 7686 GEN_HANDLER_E(ldat, 0x1F, 0x06, 0x13, 0x00000001, PPC_NONE, PPC2_ISA300), 7687 GEN_HANDLER_E(stdat, 0x1F, 0x06, 0x17, 0x00000001, PPC_NONE, PPC2_ISA300), 7688 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B), 7689 GEN_HANDLER_E(lqarx, 0x1F, 0x14, 0x08, 0, PPC_NONE, PPC2_LSQ_ISA207), 7690 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B), 7691 GEN_HANDLER_E(stqcx_, 0x1F, 0x16, 0x05, 0, PPC_NONE, PPC2_LSQ_ISA207), 7692 #endif 7693 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC), 7694 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT), 7695 GEN_HANDLER_E(wait, 0x1F, 0x1E, 0x00, 0x039FF801, PPC_NONE, PPC2_ISA300), 7696 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 7697 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 7698 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW), 7699 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW), 7700 GEN_HANDLER_E(bctar, 0x13, 0x10, 0x11, 0x0000E000, PPC_NONE, PPC2_BCTAR_ISA207), 7701 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER), 7702 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW), 7703 #if defined(TARGET_PPC64) 7704 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B), 7705 #if !defined(CONFIG_USER_ONLY) 7706 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */ 7707 GEN_HANDLER_E(scv, 0x11, 0x10, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300), 7708 GEN_HANDLER_E(scv, 0x11, 0x00, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300), 7709 GEN_HANDLER_E(rfscv, 0x13, 0x12, 0x02, 0x03FF8001, PPC_NONE, PPC2_ISA300), 7710 #endif 7711 GEN_HANDLER_E(stop, 0x13, 0x12, 0x0b, 0x03FFF801, PPC_NONE, PPC2_ISA300), 7712 GEN_HANDLER_E(doze, 0x13, 0x12, 0x0c, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 7713 GEN_HANDLER_E(nap, 0x13, 0x12, 0x0d, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 7714 GEN_HANDLER_E(sleep, 0x13, 0x12, 0x0e, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 7715 GEN_HANDLER_E(rvwinkle, 0x13, 0x12, 
0x0f, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 7716 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H), 7717 #endif 7718 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */ 7719 GEN_HANDLER(sc, 0x11, 0x11, 0xFF, 0x03FFF01D, PPC_FLOW), 7720 GEN_HANDLER(sc, 0x11, 0x01, 0xFF, 0x03FFF01D, PPC_FLOW), 7721 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW), 7722 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 7723 #if defined(TARGET_PPC64) 7724 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B), 7725 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B), 7726 #endif 7727 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC), 7728 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC), 7729 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC), 7730 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC), 7731 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB), 7732 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC), 7733 #if defined(TARGET_PPC64) 7734 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B), 7735 GEN_HANDLER_E(setb, 0x1F, 0x00, 0x04, 0x0003F801, PPC_NONE, PPC2_ISA300), 7736 GEN_HANDLER_E(mcrxrx, 0x1F, 0x00, 0x12, 0x007FF801, PPC_NONE, PPC2_ISA300), 7737 #endif 7738 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001EF801, PPC_MISC), 7739 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000000, PPC_MISC), 7740 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE), 7741 GEN_HANDLER_E(dcbfep, 0x1F, 0x1F, 0x03, 0x03C00001, PPC_NONE, PPC2_BOOKE206), 7742 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE), 7743 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE), 7744 GEN_HANDLER_E(dcbstep, 0x1F, 0x1F, 0x01, 0x03E00001, PPC_NONE, PPC2_BOOKE206), 7745 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x00000001, PPC_CACHE), 7746 GEN_HANDLER_E(dcbtep, 0x1F, 0x1F, 0x09, 0x00000001, PPC_NONE, PPC2_BOOKE206), 7747 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x00000001, PPC_CACHE), 7748 GEN_HANDLER_E(dcbtstep, 0x1F, 0x1F, 0x07, 0x00000001, PPC_NONE, PPC2_BOOKE206), 7749 GEN_HANDLER_E(dcbtls, 0x1F, 0x06, 0x05, 0x02000001, PPC_BOOKE, PPC2_BOOKE206), 7750 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ), 7751 GEN_HANDLER_E(dcbzep, 0x1F, 0x1F, 0x1F, 0x03C00001, PPC_NONE, PPC2_BOOKE206), 7752 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC), 7753 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x01800001, PPC_ALTIVEC), 7754 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC), 7755 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI), 7756 GEN_HANDLER_E(icbiep, 0x1F, 0x1F, 0x1E, 0x03E00001, PPC_NONE, PPC2_BOOKE206), 7757 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA), 7758 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT), 7759 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT), 7760 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT), 7761 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT), 7762 #if defined(TARGET_PPC64) 7763 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B), 7764 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001, 7765 PPC_SEGMENT_64B), 7766 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B), 7767 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001, 7768 PPC_SEGMENT_64B), 7769 GEN_HANDLER2(slbmte, "slbmte", 0x1F, 0x12, 0x0C, 0x001F0001, PPC_SEGMENT_64B), 7770 GEN_HANDLER2(slbmfee, "slbmfee", 0x1F, 0x13, 0x1C, 0x001F0001, PPC_SEGMENT_64B), 
7771 GEN_HANDLER2(slbmfev, "slbmfev", 0x1F, 0x13, 0x1A, 0x001F0001, PPC_SEGMENT_64B), 7772 GEN_HANDLER2(slbfee_, "slbfee.", 0x1F, 0x13, 0x1E, 0x001F0000, PPC_SEGMENT_64B), 7773 #endif 7774 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA), 7775 /* 7776 * XXX Those instructions will need to be handled differently for 7777 * different ISA versions 7778 */ 7779 GEN_HANDLER(tlbiel, 0x1F, 0x12, 0x08, 0x001F0001, PPC_MEM_TLBIE), 7780 GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x001F0001, PPC_MEM_TLBIE), 7781 GEN_HANDLER_E(tlbiel, 0x1F, 0x12, 0x08, 0x00100001, PPC_NONE, PPC2_ISA300), 7782 GEN_HANDLER_E(tlbie, 0x1F, 0x12, 0x09, 0x00100001, PPC_NONE, PPC2_ISA300), 7783 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC), 7784 #if defined(TARGET_PPC64) 7785 GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x031FFC01, PPC_SLBI), 7786 GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI), 7787 GEN_HANDLER_E(slbieg, 0x1F, 0x12, 0x0E, 0x001F0001, PPC_NONE, PPC2_ISA300), 7788 GEN_HANDLER_E(slbsync, 0x1F, 0x12, 0x0A, 0x03FFF801, PPC_NONE, PPC2_ISA300), 7789 #endif 7790 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN), 7791 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN), 7792 GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR), 7793 GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR), 7794 GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR), 7795 GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR), 7796 GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR), 7797 GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR), 7798 GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR), 7799 GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR), 7800 GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR), 7801 GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR), 7802 GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR), 7803 GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR), 7804 GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR), 7805 GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR), 7806 GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR), 7807 GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR), 7808 GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR), 7809 GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR), 7810 GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR), 7811 GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR), 7812 GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR), 7813 GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR), 7814 GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR), 7815 GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR), 7816 GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR), 7817 GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR), 7818 GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR), 7819 GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR), 7820 GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR), 7821 GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR), 7822 GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR), 7823 GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR), 7824 GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR), 7825 GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR), 7826 GEN_HANDLER(dsa, 
0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC), 7827 GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC), 7828 GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC), 7829 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB), 7830 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB), 7831 GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB), 7832 GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB), 7833 GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER), 7834 GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER), 7835 GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER), 7836 GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER), 7837 GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER), 7838 GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER), 7839 GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 7840 GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 7841 GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2), 7842 GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2), 7843 GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 7844 GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 7845 GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2), 7846 GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2), 7847 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI), 7848 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA), 7849 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR), 7850 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR), 7851 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX), 7852 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX), 7853 GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX), 7854 GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX), 7855 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON), 7856 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON), 7857 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT), 7858 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON), 7859 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON), 7860 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP), 7861 GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206), 7862 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI), 7863 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI), 7864 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB), 7865 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB), 7866 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB), 7867 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE), 7868 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE), 7869 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE), 7870 GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, 7871 PPC_NONE, PPC2_BOOKE206), 7872 GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, 7873 PPC_NONE, PPC2_BOOKE206), 7874 GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, 7875 PPC_NONE, PPC2_BOOKE206), 7876 GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001, 7877 PPC_NONE, PPC2_BOOKE206), 7878 
GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001, 7879 PPC_NONE, PPC2_BOOKE206), 7880 GEN_HANDLER2_E(msgsnd, "msgsnd", 0x1F, 0x0E, 0x06, 0x03ff0001, 7881 PPC_NONE, PPC2_PRCNTL), 7882 GEN_HANDLER2_E(msgclr, "msgclr", 0x1F, 0x0E, 0x07, 0x03ff0001, 7883 PPC_NONE, PPC2_PRCNTL), 7884 GEN_HANDLER2_E(msgsync, "msgsync", 0x1F, 0x16, 0x1B, 0x00000000, 7885 PPC_NONE, PPC2_PRCNTL), 7886 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE), 7887 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE), 7888 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC), 7889 GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801, 7890 PPC_BOOKE, PPC2_BOOKE206), 7891 GEN_HANDLER(msync_4xx, 0x1F, 0x16, 0x12, 0x039FF801, PPC_BOOKE), 7892 GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001, 7893 PPC_BOOKE, PPC2_BOOKE206), 7894 GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, 7895 PPC_440_SPEC), 7896 GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC), 7897 GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC), 7898 GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC), 7899 GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC), 7900 GEN_HANDLER(vmladduhm, 0x04, 0x11, 0xFF, 0x00000000, PPC_ALTIVEC), 7901 #if defined(TARGET_PPC64) 7902 GEN_HANDLER_E(maddhd_maddhdu, 0x04, 0x18, 0xFF, 0x00000000, PPC_NONE, 7903 PPC2_ISA300), 7904 GEN_HANDLER_E(maddld, 0x04, 0x19, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300), 7905 GEN_HANDLER2_E(msgsndp, "msgsndp", 0x1F, 0x0E, 0x04, 0x03ff0001, 7906 PPC_NONE, PPC2_ISA207S), 7907 GEN_HANDLER2_E(msgclrp, "msgclrp", 0x1F, 0x0E, 0x05, 0x03ff0001, 7908 PPC_NONE, PPC2_ISA207S), 7909 #endif 7910 7911 #undef GEN_INT_ARITH_ADD 7912 #undef GEN_INT_ARITH_ADD_CONST 7913 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \ 7914 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER), 7915 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \ 7916 add_ca, compute_ca, compute_ov) \ 7917 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER), 7918 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0) 7919 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1) 7920 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0) 7921 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1) 7922 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0) 7923 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1) 7924 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0) 7925 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1) 7926 GEN_HANDLER_E(addex, 0x1F, 0x0A, 0x05, 0x00000000, PPC_NONE, PPC2_ISA300), 7927 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0) 7928 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1) 7929 7930 #undef GEN_INT_ARITH_DIVW 7931 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \ 7932 GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER) 7933 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0), 7934 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1), 7935 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0), 7936 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1), 7937 GEN_HANDLER_E(divwe, 0x1F, 0x0B, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206), 7938 GEN_HANDLER_E(divweo, 0x1F, 0x0B, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206), 7939 GEN_HANDLER_E(divweu, 0x1F, 0x0B, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206), 7940 GEN_HANDLER_E(divweuo, 0x1F, 0x0B, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206), 7941 GEN_HANDLER_E(modsw, 0x1F, 0x0B, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300), 7942 GEN_HANDLER_E(moduw, 0x1F, 0x0B, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300), 7943 7944 #if defined(TARGET_PPC64) 7945 #undef GEN_INT_ARITH_DIVD 7946 #define 
GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \ 7947 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B) 7948 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0), 7949 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1), 7950 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0), 7951 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1), 7952 7953 GEN_HANDLER_E(divdeu, 0x1F, 0x09, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206), 7954 GEN_HANDLER_E(divdeuo, 0x1F, 0x09, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206), 7955 GEN_HANDLER_E(divde, 0x1F, 0x09, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206), 7956 GEN_HANDLER_E(divdeo, 0x1F, 0x09, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206), 7957 GEN_HANDLER_E(modsd, 0x1F, 0x09, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300), 7958 GEN_HANDLER_E(modud, 0x1F, 0x09, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300), 7959 7960 #undef GEN_INT_ARITH_MUL_HELPER 7961 #define GEN_INT_ARITH_MUL_HELPER(name, opc3) \ 7962 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B) 7963 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00), 7964 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02), 7965 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17), 7966 #endif 7967 7968 #undef GEN_INT_ARITH_SUBF 7969 #undef GEN_INT_ARITH_SUBF_CONST 7970 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \ 7971 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER), 7972 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \ 7973 add_ca, compute_ca, compute_ov) \ 7974 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER), 7975 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0) 7976 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1) 7977 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0) 7978 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1) 7979 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0) 7980 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1) 7981 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0) 7982 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1) 7983 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0) 7984 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1) 7985 7986 #undef GEN_LOGICAL1 7987 #undef GEN_LOGICAL2 7988 #define GEN_LOGICAL2(name, tcg_op, opc, type) \ 7989 GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type) 7990 #define GEN_LOGICAL1(name, tcg_op, opc, type) \ 7991 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type) 7992 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER), 7993 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER), 7994 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER), 7995 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER), 7996 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER), 7997 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER), 7998 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER), 7999 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER), 8000 #if defined(TARGET_PPC64) 8001 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B), 8002 #endif 8003 8004 #if defined(TARGET_PPC64) 8005 #undef GEN_PPC64_R2 8006 #undef GEN_PPC64_R4 8007 #define GEN_PPC64_R2(name, opc1, opc2) \ 8008 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\ 8009 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \ 8010 PPC_64B) 8011 #define GEN_PPC64_R4(name, opc1, opc2) \ 8012 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\ 8013 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \ 8014 PPC_64B), \ 8015 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \ 8016 PPC_64B), \ 8017 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \ 
8018 PPC_64B) 8019 GEN_PPC64_R4(rldicl, 0x1E, 0x00), 8020 GEN_PPC64_R4(rldicr, 0x1E, 0x02), 8021 GEN_PPC64_R4(rldic, 0x1E, 0x04), 8022 GEN_PPC64_R2(rldcl, 0x1E, 0x08), 8023 GEN_PPC64_R2(rldcr, 0x1E, 0x09), 8024 GEN_PPC64_R4(rldimi, 0x1E, 0x06), 8025 #endif 8026 8027 #undef GEN_LDX_E 8028 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \ 8029 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2), 8030 8031 #if defined(TARGET_PPC64) 8032 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE) 8033 8034 /* HV/P7 and later only */ 8035 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST) 8036 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x18, PPC_CILDST) 8037 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST) 8038 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST) 8039 #endif 8040 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER) 8041 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER) 8042 8043 /* External PID based load */ 8044 #undef GEN_LDEPX 8045 #define GEN_LDEPX(name, ldop, opc2, opc3) \ 8046 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3, \ 8047 0x00000001, PPC_NONE, PPC2_BOOKE206), 8048 8049 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02) 8050 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08) 8051 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00) 8052 #if defined(TARGET_PPC64) 8053 GEN_LDEPX(ld, DEF_MEMOP(MO_Q), 0x1D, 0x00) 8054 #endif 8055 8056 #undef GEN_STX_E 8057 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \ 8058 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000000, type, type2), 8059 8060 #if defined(TARGET_PPC64) 8061 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE) 8062 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST) 8063 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST) 8064 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST) 8065 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST) 8066 #endif 8067 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER) 8068 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER) 8069 8070 #undef GEN_STEPX 8071 #define GEN_STEPX(name, ldop, opc2, opc3) \ 8072 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3, \ 8073 0x00000001, PPC_NONE, PPC2_BOOKE206), 8074 8075 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06) 8076 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C) 8077 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04) 8078 #if defined(TARGET_PPC64) 8079 GEN_STEPX(std, DEF_MEMOP(MO_Q), 0x1D, 0x04) 8080 #endif 8081 8082 #undef GEN_CRLOGIC 8083 #define GEN_CRLOGIC(name, tcg_op, opc) \ 8084 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER) 8085 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08), 8086 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04), 8087 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09), 8088 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07), 8089 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01), 8090 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E), 8091 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D), 8092 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06), 8093 8094 #undef GEN_MAC_HANDLER 8095 #define GEN_MAC_HANDLER(name, opc2, opc3) \ 8096 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC) 8097 GEN_MAC_HANDLER(macchw, 0x0C, 0x05), 8098 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15), 8099 GEN_MAC_HANDLER(macchws, 0x0C, 0x07), 8100 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17), 8101 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06), 8102 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16), 8103 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04), 8104 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14), 8105 GEN_MAC_HANDLER(machhw, 0x0C, 0x01), 8106 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11), 8107 GEN_MAC_HANDLER(machhws, 0x0C, 0x03), 
8108 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13), 8109 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02), 8110 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12), 8111 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00), 8112 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10), 8113 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D), 8114 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D), 8115 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F), 8116 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F), 8117 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C), 8118 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C), 8119 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E), 8120 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E), 8121 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05), 8122 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15), 8123 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07), 8124 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17), 8125 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01), 8126 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11), 8127 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03), 8128 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13), 8129 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D), 8130 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D), 8131 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F), 8132 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F), 8133 GEN_MAC_HANDLER(mulchw, 0x08, 0x05), 8134 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04), 8135 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01), 8136 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00), 8137 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D), 8138 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C), 8139 8140 GEN_HANDLER2_E(tbegin, "tbegin", 0x1F, 0x0E, 0x14, 0x01DFF800, \ 8141 PPC_NONE, PPC2_TM), 8142 GEN_HANDLER2_E(tend, "tend", 0x1F, 0x0E, 0x15, 0x01FFF800, \ 8143 PPC_NONE, PPC2_TM), 8144 GEN_HANDLER2_E(tabort, "tabort", 0x1F, 0x0E, 0x1C, 0x03E0F800, \ 8145 PPC_NONE, PPC2_TM), 8146 GEN_HANDLER2_E(tabortwc, "tabortwc", 0x1F, 0x0E, 0x18, 0x00000000, \ 8147 PPC_NONE, PPC2_TM), 8148 GEN_HANDLER2_E(tabortwci, "tabortwci", 0x1F, 0x0E, 0x1A, 0x00000000, \ 8149 PPC_NONE, PPC2_TM), 8150 GEN_HANDLER2_E(tabortdc, "tabortdc", 0x1F, 0x0E, 0x19, 0x00000000, \ 8151 PPC_NONE, PPC2_TM), 8152 GEN_HANDLER2_E(tabortdci, "tabortdci", 0x1F, 0x0E, 0x1B, 0x00000000, \ 8153 PPC_NONE, PPC2_TM), 8154 GEN_HANDLER2_E(tsr, "tsr", 0x1F, 0x0E, 0x17, 0x03DFF800, \ 8155 PPC_NONE, PPC2_TM), 8156 GEN_HANDLER2_E(tcheck, "tcheck", 0x1F, 0x0E, 0x16, 0x007FF800, \ 8157 PPC_NONE, PPC2_TM), 8158 GEN_HANDLER2_E(treclaim, "treclaim", 0x1F, 0x0E, 0x1D, 0x03E0F800, \ 8159 PPC_NONE, PPC2_TM), 8160 GEN_HANDLER2_E(trechkpt, "trechkpt", 0x1F, 0x0E, 0x1F, 0x03FFF800, \ 8161 PPC_NONE, PPC2_TM), 8162 8163 #include "translate/fp-ops.c.inc" 8164 8165 #include "translate/vmx-ops.c.inc" 8166 8167 #include "translate/vsx-ops.c.inc" 8168 8169 #include "translate/dfp-ops.c.inc" 8170 8171 #include "translate/spe-ops.c.inc" 8172 }; 8173 8174 /*****************************************************************************/ 8175 /* Opcode types */ 8176 enum { 8177 PPC_DIRECT = 0, /* Opcode routine */ 8178 PPC_INDIRECT = 1, /* Indirect opcode table */ 8179 }; 8180 8181 #define PPC_OPCODE_MASK 0x3 8182 8183 static inline int is_indirect_opcode(void *handler) 8184 { 8185 return ((uintptr_t)handler & PPC_OPCODE_MASK) == PPC_INDIRECT; 8186 } 8187 8188 static inline opc_handler_t **ind_table(void *handler) 8189 { 8190 return (opc_handler_t **)((uintptr_t)handler & ~PPC_OPCODE_MASK); 8191 } 8192 8193 /* Instruction table creation */ 8194 /* Opcodes tables creation */ 8195 static void fill_new_table(opc_handler_t **table, int len) 8196 { 8197 int i; 8198 8199 for (i = 0; i < len; i++) { 8200 table[i] = &invalid_handler; 8201 } 8202 } 8203 8204 static int create_new_table(opc_handler_t **table, unsigned char idx) 8205 { 8206 
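    /*
     * Sub-tables are distinguished from direct handlers by tagging the
     * low bits of the stored pointer with PPC_INDIRECT; both the g_new()
     * allocation below and the statically defined handlers are aligned
     * well beyond PPC_OPCODE_MASK, so those bits are otherwise zero.
     * ind_table() masks the tag off again before the table is indexed.
     */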
opc_handler_t **tmp; 8207 8208 tmp = g_new(opc_handler_t *, PPC_CPU_INDIRECT_OPCODES_LEN); 8209 fill_new_table(tmp, PPC_CPU_INDIRECT_OPCODES_LEN); 8210 table[idx] = (opc_handler_t *)((uintptr_t)tmp | PPC_INDIRECT); 8211 8212 return 0; 8213 } 8214 8215 static int insert_in_table(opc_handler_t **table, unsigned char idx, 8216 opc_handler_t *handler) 8217 { 8218 if (table[idx] != &invalid_handler) { 8219 return -1; 8220 } 8221 table[idx] = handler; 8222 8223 return 0; 8224 } 8225 8226 static int register_direct_insn(opc_handler_t **ppc_opcodes, 8227 unsigned char idx, opc_handler_t *handler) 8228 { 8229 if (insert_in_table(ppc_opcodes, idx, handler) < 0) { 8230 printf("*** ERROR: opcode %02x already assigned in main " 8231 "opcode table\n", idx); 8232 return -1; 8233 } 8234 8235 return 0; 8236 } 8237 8238 static int register_ind_in_table(opc_handler_t **table, 8239 unsigned char idx1, unsigned char idx2, 8240 opc_handler_t *handler) 8241 { 8242 if (table[idx1] == &invalid_handler) { 8243 if (create_new_table(table, idx1) < 0) { 8244 printf("*** ERROR: unable to create indirect table " 8245 "idx=%02x\n", idx1); 8246 return -1; 8247 } 8248 } else { 8249 if (!is_indirect_opcode(table[idx1])) { 8250 printf("*** ERROR: idx %02x already assigned to a direct " 8251 "opcode\n", idx1); 8252 return -1; 8253 } 8254 } 8255 if (handler != NULL && 8256 insert_in_table(ind_table(table[idx1]), idx2, handler) < 0) { 8257 printf("*** ERROR: opcode %02x already assigned in " 8258 "opcode table %02x\n", idx2, idx1); 8259 return -1; 8260 } 8261 8262 return 0; 8263 } 8264 8265 static int register_ind_insn(opc_handler_t **ppc_opcodes, 8266 unsigned char idx1, unsigned char idx2, 8267 opc_handler_t *handler) 8268 { 8269 return register_ind_in_table(ppc_opcodes, idx1, idx2, handler); 8270 } 8271 8272 static int register_dblind_insn(opc_handler_t **ppc_opcodes, 8273 unsigned char idx1, unsigned char idx2, 8274 unsigned char idx3, opc_handler_t *handler) 8275 { 8276 if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) { 8277 printf("*** ERROR: unable to join indirect table idx " 8278 "[%02x-%02x]\n", idx1, idx2); 8279 return -1; 8280 } 8281 if (register_ind_in_table(ind_table(ppc_opcodes[idx1]), idx2, idx3, 8282 handler) < 0) { 8283 printf("*** ERROR: unable to insert opcode " 8284 "[%02x-%02x-%02x]\n", idx1, idx2, idx3); 8285 return -1; 8286 } 8287 8288 return 0; 8289 } 8290 8291 static int register_trplind_insn(opc_handler_t **ppc_opcodes, 8292 unsigned char idx1, unsigned char idx2, 8293 unsigned char idx3, unsigned char idx4, 8294 opc_handler_t *handler) 8295 { 8296 opc_handler_t **table; 8297 8298 if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) { 8299 printf("*** ERROR: unable to join indirect table idx " 8300 "[%02x-%02x]\n", idx1, idx2); 8301 return -1; 8302 } 8303 table = ind_table(ppc_opcodes[idx1]); 8304 if (register_ind_in_table(table, idx2, idx3, NULL) < 0) { 8305 printf("*** ERROR: unable to join 2nd-level indirect table idx " 8306 "[%02x-%02x-%02x]\n", idx1, idx2, idx3); 8307 return -1; 8308 } 8309 table = ind_table(table[idx2]); 8310 if (register_ind_in_table(table, idx3, idx4, handler) < 0) { 8311 printf("*** ERROR: unable to insert opcode " 8312 "[%02x-%02x-%02x-%02x]\n", idx1, idx2, idx3, idx4); 8313 return -1; 8314 } 8315 return 0; 8316 } 8317 static int register_insn(opc_handler_t **ppc_opcodes, opcode_t *insn) 8318 { 8319 if (insn->opc2 != 0xFF) { 8320 if (insn->opc3 != 0xFF) { 8321 if (insn->opc4 != 0xFF) { 8322 if (register_trplind_insn(ppc_opcodes, insn->opc1, insn->opc2, 
8323 insn->opc3, insn->opc4, 8324 &insn->handler) < 0) { 8325 return -1; 8326 } 8327 } else { 8328 if (register_dblind_insn(ppc_opcodes, insn->opc1, insn->opc2, 8329 insn->opc3, &insn->handler) < 0) { 8330 return -1; 8331 } 8332 } 8333 } else { 8334 if (register_ind_insn(ppc_opcodes, insn->opc1, 8335 insn->opc2, &insn->handler) < 0) { 8336 return -1; 8337 } 8338 } 8339 } else { 8340 if (register_direct_insn(ppc_opcodes, insn->opc1, &insn->handler) < 0) { 8341 return -1; 8342 } 8343 } 8344 8345 return 0; 8346 } 8347 8348 static int test_opcode_table(opc_handler_t **table, int len) 8349 { 8350 int i, count, tmp; 8351 8352 for (i = 0, count = 0; i < len; i++) { 8353 /* Consistency fixup */ 8354 if (table[i] == NULL) { 8355 table[i] = &invalid_handler; 8356 } 8357 if (table[i] != &invalid_handler) { 8358 if (is_indirect_opcode(table[i])) { 8359 tmp = test_opcode_table(ind_table(table[i]), 8360 PPC_CPU_INDIRECT_OPCODES_LEN); 8361 if (tmp == 0) { 8362 free(table[i]); 8363 table[i] = &invalid_handler; 8364 } else { 8365 count++; 8366 } 8367 } else { 8368 count++; 8369 } 8370 } 8371 } 8372 8373 return count; 8374 } 8375 8376 static void fix_opcode_tables(opc_handler_t **ppc_opcodes) 8377 { 8378 if (test_opcode_table(ppc_opcodes, PPC_CPU_OPCODES_LEN) == 0) { 8379 printf("*** WARNING: no opcode defined !\n"); 8380 } 8381 } 8382 8383 /*****************************************************************************/ 8384 void create_ppc_opcodes(PowerPCCPU *cpu, Error **errp) 8385 { 8386 PowerPCCPUClass *pcc = POWERPC_CPU_GET_CLASS(cpu); 8387 opcode_t *opc; 8388 8389 fill_new_table(cpu->opcodes, PPC_CPU_OPCODES_LEN); 8390 for (opc = opcodes; opc < &opcodes[ARRAY_SIZE(opcodes)]; opc++) { 8391 if (((opc->handler.type & pcc->insns_flags) != 0) || 8392 ((opc->handler.type2 & pcc->insns_flags2) != 0)) { 8393 if (register_insn(cpu->opcodes, opc) < 0) { 8394 error_setg(errp, "ERROR initializing PowerPC instruction " 8395 "0x%02x 0x%02x 0x%02x", opc->opc1, opc->opc2, 8396 opc->opc3); 8397 return; 8398 } 8399 } 8400 } 8401 fix_opcode_tables(cpu->opcodes); 8402 fflush(stdout); 8403 fflush(stderr); 8404 } 8405 8406 void destroy_ppc_opcodes(PowerPCCPU *cpu) 8407 { 8408 opc_handler_t **table, **table_2; 8409 int i, j, k; 8410 8411 for (i = 0; i < PPC_CPU_OPCODES_LEN; i++) { 8412 if (cpu->opcodes[i] == &invalid_handler) { 8413 continue; 8414 } 8415 if (is_indirect_opcode(cpu->opcodes[i])) { 8416 table = ind_table(cpu->opcodes[i]); 8417 for (j = 0; j < PPC_CPU_INDIRECT_OPCODES_LEN; j++) { 8418 if (table[j] == &invalid_handler) { 8419 continue; 8420 } 8421 if (is_indirect_opcode(table[j])) { 8422 table_2 = ind_table(table[j]); 8423 for (k = 0; k < PPC_CPU_INDIRECT_OPCODES_LEN; k++) { 8424 if (table_2[k] != &invalid_handler && 8425 is_indirect_opcode(table_2[k])) { 8426 g_free((opc_handler_t *)((uintptr_t)table_2[k] & 8427 ~PPC_INDIRECT)); 8428 } 8429 } 8430 g_free((opc_handler_t *)((uintptr_t)table[j] & 8431 ~PPC_INDIRECT)); 8432 } 8433 } 8434 g_free((opc_handler_t *)((uintptr_t)cpu->opcodes[i] & 8435 ~PPC_INDIRECT)); 8436 } 8437 } 8438 } 8439 8440 int ppc_fixup_cpu(PowerPCCPU *cpu) 8441 { 8442 CPUPPCState *env = &cpu->env; 8443 8444 /* 8445 * TCG doesn't (yet) emulate some groups of instructions that are 8446 * implemented on some otherwise supported CPUs (e.g. VSX and 8447 * decimal floating point instructions on POWER7). We remove 8448 * unsupported instruction groups from the cpu state's instruction 8449 * masks and hope the guest can cope. 
For at least the pseries 8450 * machine, the unavailability of these instructions can be 8451 * advertised to the guest via the device tree. 8452 */ 8453 if ((env->insns_flags & ~PPC_TCG_INSNS) 8454 || (env->insns_flags2 & ~PPC_TCG_INSNS2)) { 8455 warn_report("Disabling some instructions which are not " 8456 "emulated by TCG (0x%" PRIx64 ", 0x%" PRIx64 ")", 8457 env->insns_flags & ~PPC_TCG_INSNS, 8458 env->insns_flags2 & ~PPC_TCG_INSNS2); 8459 } 8460 env->insns_flags &= PPC_TCG_INSNS; 8461 env->insns_flags2 &= PPC_TCG_INSNS2; 8462 return 0; 8463 } 8464 8465 static bool decode_legacy(PowerPCCPU *cpu, DisasContext *ctx, uint32_t insn) 8466 { 8467 opc_handler_t **table, *handler; 8468 uint32_t inval; 8469 8470 ctx->opcode = insn; 8471 8472 LOG_DISAS("translate opcode %08x (%02x %02x %02x %02x) (%s)\n", 8473 insn, opc1(insn), opc2(insn), opc3(insn), opc4(insn), 8474 ctx->le_mode ? "little" : "big"); 8475 8476 table = cpu->opcodes; 8477 handler = table[opc1(insn)]; 8478 if (is_indirect_opcode(handler)) { 8479 table = ind_table(handler); 8480 handler = table[opc2(insn)]; 8481 if (is_indirect_opcode(handler)) { 8482 table = ind_table(handler); 8483 handler = table[opc3(insn)]; 8484 if (is_indirect_opcode(handler)) { 8485 table = ind_table(handler); 8486 handler = table[opc4(insn)]; 8487 } 8488 } 8489 } 8490 8491 /* Is opcode *REALLY* valid ? */ 8492 if (unlikely(handler->handler == &gen_invalid)) { 8493 qemu_log_mask(LOG_GUEST_ERROR, "invalid/unsupported opcode: " 8494 "%02x - %02x - %02x - %02x (%08x) " 8495 TARGET_FMT_lx "\n", 8496 opc1(insn), opc2(insn), opc3(insn), opc4(insn), 8497 insn, ctx->cia); 8498 return false; 8499 } 8500 8501 if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE) 8502 && Rc(insn))) { 8503 inval = handler->inval2; 8504 } else { 8505 inval = handler->inval1; 8506 } 8507 8508 if (unlikely((insn & inval) != 0)) { 8509 qemu_log_mask(LOG_GUEST_ERROR, "invalid bits: %08x for opcode: " 8510 "%02x - %02x - %02x - %02x (%08x) " 8511 TARGET_FMT_lx "\n", insn & inval, 8512 opc1(insn), opc2(insn), opc3(insn), opc4(insn), 8513 insn, ctx->cia); 8514 return false; 8515 } 8516 8517 handler->handler(ctx); 8518 return true; 8519 } 8520 8521 static void ppc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs) 8522 { 8523 DisasContext *ctx = container_of(dcbase, DisasContext, base); 8524 CPUPPCState *env = cs->env_ptr; 8525 uint32_t hflags = ctx->base.tb->flags; 8526 8527 ctx->spr_cb = env->spr_cb; 8528 ctx->pr = (hflags >> HFLAGS_PR) & 1; 8529 ctx->mem_idx = (hflags >> HFLAGS_DMMU_IDX) & 7; 8530 ctx->dr = (hflags >> HFLAGS_DR) & 1; 8531 ctx->hv = (hflags >> HFLAGS_HV) & 1; 8532 ctx->insns_flags = env->insns_flags; 8533 ctx->insns_flags2 = env->insns_flags2; 8534 ctx->access_type = -1; 8535 ctx->need_access_type = !mmu_is_64bit(env->mmu_model); 8536 ctx->le_mode = (hflags >> HFLAGS_LE) & 1; 8537 ctx->default_tcg_memop_mask = ctx->le_mode ? 
MO_LE : MO_BE; 8538 ctx->flags = env->flags; 8539 #if defined(TARGET_PPC64) 8540 ctx->sf_mode = (hflags >> HFLAGS_64) & 1; 8541 ctx->has_cfar = !!(env->flags & POWERPC_FLAG_CFAR); 8542 #endif 8543 ctx->lazy_tlb_flush = env->mmu_model == POWERPC_MMU_32B 8544 || env->mmu_model == POWERPC_MMU_601 8545 || env->mmu_model & POWERPC_MMU_64; 8546 8547 ctx->fpu_enabled = (hflags >> HFLAGS_FP) & 1; 8548 ctx->spe_enabled = (hflags >> HFLAGS_SPE) & 1; 8549 ctx->altivec_enabled = (hflags >> HFLAGS_VR) & 1; 8550 ctx->vsx_enabled = (hflags >> HFLAGS_VSX) & 1; 8551 ctx->tm_enabled = (hflags >> HFLAGS_TM) & 1; 8552 ctx->gtse = (hflags >> HFLAGS_GTSE) & 1; 8553 ctx->hr = (hflags >> HFLAGS_HR) & 1; 8554 8555 ctx->singlestep_enabled = 0; 8556 if ((hflags >> HFLAGS_SE) & 1) { 8557 ctx->singlestep_enabled |= CPU_SINGLE_STEP; 8558 ctx->base.max_insns = 1; 8559 } 8560 if ((hflags >> HFLAGS_BE) & 1) { 8561 ctx->singlestep_enabled |= CPU_BRANCH_STEP; 8562 } 8563 } 8564 8565 static void ppc_tr_tb_start(DisasContextBase *db, CPUState *cs) 8566 { 8567 } 8568 8569 static void ppc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs) 8570 { 8571 tcg_gen_insn_start(dcbase->pc_next); 8572 } 8573 8574 static bool is_prefix_insn(DisasContext *ctx, uint32_t insn) 8575 { 8576 REQUIRE_INSNS_FLAGS2(ctx, ISA310); 8577 return opc1(insn) == 1; 8578 } 8579 8580 static void ppc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs) 8581 { 8582 DisasContext *ctx = container_of(dcbase, DisasContext, base); 8583 PowerPCCPU *cpu = POWERPC_CPU(cs); 8584 CPUPPCState *env = cs->env_ptr; 8585 target_ulong pc; 8586 uint32_t insn; 8587 bool ok; 8588 8589 LOG_DISAS("----------------\n"); 8590 LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n", 8591 ctx->base.pc_next, ctx->mem_idx, (int)msr_ir); 8592 8593 ctx->cia = pc = ctx->base.pc_next; 8594 insn = translator_ldl_swap(env, dcbase, pc, need_byteswap(ctx)); 8595 ctx->base.pc_next = pc += 4; 8596 8597 if (!is_prefix_insn(ctx, insn)) { 8598 ok = (decode_insn32(ctx, insn) || 8599 decode_legacy(cpu, ctx, insn)); 8600 } else if ((pc & 63) == 0) { 8601 /* 8602 * Power v3.1, section 1.9 Exceptions: 8603 * attempt to execute a prefixed instruction that crosses a 8604 * 64-byte address boundary (system alignment error). 8605 */ 8606 gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_INSN); 8607 ok = true; 8608 } else { 8609 uint32_t insn2 = translator_ldl_swap(env, dcbase, pc, 8610 need_byteswap(ctx)); 8611 ctx->base.pc_next = pc += 4; 8612 ok = decode_insn64(ctx, deposit64(insn2, 32, 32, insn)); 8613 } 8614 if (!ok) { 8615 gen_invalid(ctx); 8616 } 8617 8618 /* End the TB when crossing a page boundary. */ 8619 if (ctx->base.is_jmp == DISAS_NEXT && !(pc & ~TARGET_PAGE_MASK)) { 8620 ctx->base.is_jmp = DISAS_TOO_MANY; 8621 } 8622 8623 translator_loop_temp_check(&ctx->base); 8624 } 8625 8626 static void ppc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs) 8627 { 8628 DisasContext *ctx = container_of(dcbase, DisasContext, base); 8629 DisasJumpType is_jmp = ctx->base.is_jmp; 8630 target_ulong nip = ctx->base.pc_next; 8631 8632 if (is_jmp == DISAS_NORETURN) { 8633 /* We have already exited the TB. */ 8634 return; 8635 } 8636 8637 /* Honor single stepping. 
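     * With CPU_SINGLE_STEP set, a debug exception is raised after every
     * translated instruction, except while nip lies inside the exception
     * vector range (0x100 < nip <= 0xf00), as checked below.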
*/ 8638 if (unlikely(ctx->singlestep_enabled & CPU_SINGLE_STEP) 8639 && (nip <= 0x100 || nip > 0xf00)) { 8640 switch (is_jmp) { 8641 case DISAS_TOO_MANY: 8642 case DISAS_EXIT_UPDATE: 8643 case DISAS_CHAIN_UPDATE: 8644 gen_update_nip(ctx, nip); 8645 break; 8646 case DISAS_EXIT: 8647 case DISAS_CHAIN: 8648 break; 8649 default: 8650 g_assert_not_reached(); 8651 } 8652 8653 gen_debug_exception(ctx); 8654 return; 8655 } 8656 8657 switch (is_jmp) { 8658 case DISAS_TOO_MANY: 8659 if (use_goto_tb(ctx, nip)) { 8660 tcg_gen_goto_tb(0); 8661 gen_update_nip(ctx, nip); 8662 tcg_gen_exit_tb(ctx->base.tb, 0); 8663 break; 8664 } 8665 /* fall through */ 8666 case DISAS_CHAIN_UPDATE: 8667 gen_update_nip(ctx, nip); 8668 /* fall through */ 8669 case DISAS_CHAIN: 8670 tcg_gen_lookup_and_goto_ptr(); 8671 break; 8672 8673 case DISAS_EXIT_UPDATE: 8674 gen_update_nip(ctx, nip); 8675 /* fall through */ 8676 case DISAS_EXIT: 8677 tcg_gen_exit_tb(NULL, 0); 8678 break; 8679 8680 default: 8681 g_assert_not_reached(); 8682 } 8683 } 8684 8685 static void ppc_tr_disas_log(const DisasContextBase *dcbase, CPUState *cs) 8686 { 8687 qemu_log("IN: %s\n", lookup_symbol(dcbase->pc_first)); 8688 log_target_disas(cs, dcbase->pc_first, dcbase->tb->size); 8689 } 8690 8691 static const TranslatorOps ppc_tr_ops = { 8692 .init_disas_context = ppc_tr_init_disas_context, 8693 .tb_start = ppc_tr_tb_start, 8694 .insn_start = ppc_tr_insn_start, 8695 .translate_insn = ppc_tr_translate_insn, 8696 .tb_stop = ppc_tr_tb_stop, 8697 .disas_log = ppc_tr_disas_log, 8698 }; 8699 8700 void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int max_insns) 8701 { 8702 DisasContext ctx; 8703 8704 translator_loop(&ppc_tr_ops, &ctx.base, cs, tb, max_insns); 8705 } 8706 8707 void restore_state_to_opc(CPUPPCState *env, TranslationBlock *tb, 8708 target_ulong *data) 8709 { 8710 env->nip = data[0]; 8711 } 8712
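/*
 * Note: ppc_tr_insn_start() records only the instruction address via
 * tcg_gen_insn_start(), so restore_state_to_opc() above has a single
 * per-insn value to recover: data[0] becomes the restarted nip.
 */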