/*
 * PowerPC emulation for qemu: main translation routines.
 *
 * Copyright (c) 2003-2007 Jocelyn Mayer
 * Copyright (C) 2011 Freescale Semiconductor, Inc.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */

#include "qemu/osdep.h"
#include "cpu.h"
#include "internal.h"
#include "disas/disas.h"
#include "exec/exec-all.h"
#include "tcg-op.h"
#include "qemu/host-utils.h"
#include "exec/cpu_ldst.h"

#include "exec/helper-proto.h"
#include "exec/helper-gen.h"

#include "trace-tcg.h"
#include "exec/translator.h"
#include "exec/log.h"
#include "qemu/atomic128.h"


#define CPU_SINGLE_STEP 0x1
#define CPU_BRANCH_STEP 0x2
#define GDBSTUB_SINGLE_STEP 0x4

/* Include definitions for instruction classes and implementation flags */
//#define PPC_DEBUG_DISAS
//#define DO_PPC_STATISTICS

#ifdef PPC_DEBUG_DISAS
# define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
#else
# define LOG_DISAS(...) do { } while (0)
#endif
/*****************************************************************************/
/* Code translation helpers */

/* global register indexes */
static char cpu_reg_names[10*3 + 22*4 /* GPR */
                          + 10*4 + 22*5 /* SPE GPRh */
                          + 8*5 /* CRF */];
static TCGv cpu_gpr[32];
static TCGv cpu_gprh[32];
static TCGv_i32 cpu_crf[8];
static TCGv cpu_nip;
static TCGv cpu_msr;
static TCGv cpu_ctr;
static TCGv cpu_lr;
#if defined(TARGET_PPC64)
static TCGv cpu_cfar;
#endif
static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca, cpu_ov32, cpu_ca32;
static TCGv cpu_reserve;
static TCGv cpu_reserve_val;
static TCGv cpu_fpscr;
static TCGv_i32 cpu_access_type;

#include "exec/gen-icount.h"

void ppc_translate_init(void)
{
    int i;
    char *p;
    size_t cpu_reg_names_size;

    p = cpu_reg_names;
    cpu_reg_names_size = sizeof(cpu_reg_names);

    for (i = 0; i < 8; i++) {
        snprintf(p, cpu_reg_names_size, "crf%d", i);
        cpu_crf[i] = tcg_global_mem_new_i32(cpu_env,
                                            offsetof(CPUPPCState, crf[i]), p);
        p += 5;
        cpu_reg_names_size -= 5;
    }

    for (i = 0; i < 32; i++) {
        snprintf(p, cpu_reg_names_size, "r%d", i);
        cpu_gpr[i] = tcg_global_mem_new(cpu_env,
                                        offsetof(CPUPPCState, gpr[i]), p);
        p += (i < 10) ? 3 : 4;
        cpu_reg_names_size -= (i < 10) ? 3 : 4;
        snprintf(p, cpu_reg_names_size, "r%dH", i);
        cpu_gprh[i] = tcg_global_mem_new(cpu_env,
                                         offsetof(CPUPPCState, gprh[i]), p);
        p += (i < 10) ? 4 : 5;
        cpu_reg_names_size -= (i < 10) ? 4 : 5;
    }

    cpu_nip = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, nip), "nip");

    cpu_msr = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, msr), "msr");

    cpu_ctr = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, ctr), "ctr");

    cpu_lr = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, lr), "lr");

#if defined(TARGET_PPC64)
    cpu_cfar = tcg_global_mem_new(cpu_env,
                                  offsetof(CPUPPCState, cfar), "cfar");
#endif

    cpu_xer = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, xer), "xer");
    cpu_so = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, so), "SO");
    cpu_ov = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, ov), "OV");
    cpu_ca = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, ca), "CA");
    cpu_ov32 = tcg_global_mem_new(cpu_env,
                                  offsetof(CPUPPCState, ov32), "OV32");
    cpu_ca32 = tcg_global_mem_new(cpu_env,
                                  offsetof(CPUPPCState, ca32), "CA32");

    cpu_reserve = tcg_global_mem_new(cpu_env,
                                     offsetof(CPUPPCState, reserve_addr),
                                     "reserve_addr");
    cpu_reserve_val = tcg_global_mem_new(cpu_env,
                                         offsetof(CPUPPCState, reserve_val),
                                         "reserve_val");

    cpu_fpscr = tcg_global_mem_new(cpu_env,
                                   offsetof(CPUPPCState, fpscr), "fpscr");

    cpu_access_type = tcg_global_mem_new_i32(cpu_env,
                                             offsetof(CPUPPCState, access_type),
                                             "access_type");
}

/* internal defines */
struct DisasContext {
    DisasContextBase base;
    uint32_t opcode;
    uint32_t exception;
    /* Routine used to access memory */
    bool pr, hv, dr, le_mode;
    bool lazy_tlb_flush;
    bool need_access_type;
    int mem_idx;
    int access_type;
    /* Translation flags */
    TCGMemOp default_tcg_memop_mask;
#if defined(TARGET_PPC64)
    bool sf_mode;
    bool has_cfar;
#endif
    bool fpu_enabled;
    bool altivec_enabled;
    bool vsx_enabled;
    bool spe_enabled;
    bool tm_enabled;
    bool gtse;
    ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
    int singlestep_enabled;
    uint32_t flags;
    uint64_t insns_flags;
    uint64_t insns_flags2;
};

/* Return true iff byteswap is needed in a scalar memop */
static inline bool need_byteswap(const DisasContext *ctx)
{
#if defined(TARGET_WORDS_BIGENDIAN)
    return ctx->le_mode;
#else
    return !ctx->le_mode;
#endif
}

/* True when active word size < size of target_long. */
#ifdef TARGET_PPC64
# define NARROW_MODE(C)  (!(C)->sf_mode)
#else
# define NARROW_MODE(C)  0
#endif
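
/*
 * NARROW_MODE is true for a 64-bit CPU running with MSR.SF clear (32-bit
 * mode): registers and fixed-point results are still 64 bits wide, but
 * NIP updates, Rc=1 comparisons and the carry/overflow bits are taken
 * from the low 32 bits.  For example, gen_set_Rc0() below uses the
 * 32-bit compare variant in that case.
 */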

struct opc_handler_t {
    /* invalid bits for instruction 1 (Rc(opcode) == 0) */
    uint32_t inval1;
    /* invalid bits for instruction 2 (Rc(opcode) == 1) */
    uint32_t inval2;
    /* instruction type */
    uint64_t type;
    /* extended instruction type */
    uint64_t type2;
    /* handler */
    void (*handler)(DisasContext *ctx);
#if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU)
    const char *oname;
#endif
#if defined(DO_PPC_STATISTICS)
    uint64_t count;
#endif
};

/* SPR load/store helpers */
static inline void gen_load_spr(TCGv t, int reg)
{
    tcg_gen_ld_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
}

static inline void gen_store_spr(int reg, TCGv t)
{
    tcg_gen_st_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
}

static inline void gen_set_access_type(DisasContext *ctx, int access_type)
{
    if (ctx->need_access_type && ctx->access_type != access_type) {
        tcg_gen_movi_i32(cpu_access_type, access_type);
        ctx->access_type = access_type;
    }
}

static inline void gen_update_nip(DisasContext *ctx, target_ulong nip)
{
    if (NARROW_MODE(ctx)) {
        nip = (uint32_t)nip;
    }
    tcg_gen_movi_tl(cpu_nip, nip);
}

static void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error)
{
    TCGv_i32 t0, t1;

    /* These are all synchronous exceptions, we set the PC back to
     * the faulting instruction
     */
    if (ctx->exception == POWERPC_EXCP_NONE) {
        gen_update_nip(ctx, ctx->base.pc_next - 4);
    }
    t0 = tcg_const_i32(excp);
    t1 = tcg_const_i32(error);
    gen_helper_raise_exception_err(cpu_env, t0, t1);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    ctx->exception = (excp);
}

static void gen_exception(DisasContext *ctx, uint32_t excp)
{
    TCGv_i32 t0;

    /* These are all synchronous exceptions, we set the PC back to
     * the faulting instruction
     */
    if (ctx->exception == POWERPC_EXCP_NONE) {
        gen_update_nip(ctx, ctx->base.pc_next - 4);
    }
    t0 = tcg_const_i32(excp);
    gen_helper_raise_exception(cpu_env, t0);
    tcg_temp_free_i32(t0);
    ctx->exception = (excp);
}

static void gen_exception_nip(DisasContext *ctx, uint32_t excp,
                              target_ulong nip)
{
    TCGv_i32 t0;

    gen_update_nip(ctx, nip);
    t0 = tcg_const_i32(excp);
    gen_helper_raise_exception(cpu_env, t0);
    tcg_temp_free_i32(t0);
    ctx->exception = (excp);
}

/* Translates the EXCP_TRACE/BRANCH exceptions used on most PowerPCs to
 * EXCP_DEBUG, if we are running on cores using the debug enable bit (e.g.
 * BookE).
 */
static uint32_t gen_prep_dbgex(DisasContext *ctx, uint32_t excp)
{
    if ((ctx->singlestep_enabled & CPU_SINGLE_STEP)
        && (excp == POWERPC_EXCP_BRANCH)) {
        /* Trace excpt. has priority */
        excp = POWERPC_EXCP_TRACE;
    }
    if (ctx->flags & POWERPC_FLAG_DE) {
        target_ulong dbsr = 0;
        switch (excp) {
        case POWERPC_EXCP_TRACE:
            dbsr = DBCR0_ICMP;
            break;
        case POWERPC_EXCP_BRANCH:
            dbsr = DBCR0_BRT;
            break;
        }
        TCGv t0 = tcg_temp_new();
        gen_load_spr(t0, SPR_BOOKE_DBSR);
        tcg_gen_ori_tl(t0, t0, dbsr);
        gen_store_spr(SPR_BOOKE_DBSR, t0);
        tcg_temp_free(t0);
        return POWERPC_EXCP_DEBUG;
    } else {
        return excp;
    }
}

static void gen_debug_exception(DisasContext *ctx)
{
    TCGv_i32 t0;

    /* These are all synchronous exceptions, we set the PC back to
     * the faulting instruction
     */
    if ((ctx->exception != POWERPC_EXCP_BRANCH) &&
        (ctx->exception != POWERPC_EXCP_SYNC)) {
        gen_update_nip(ctx, ctx->base.pc_next);
    }
    t0 = tcg_const_i32(EXCP_DEBUG);
    gen_helper_raise_exception(cpu_env, t0);
    tcg_temp_free_i32(t0);
}

static inline void gen_inval_exception(DisasContext *ctx, uint32_t error)
{
    /* Will be converted to program check if needed */
    gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_INVAL | error);
}

static inline void gen_priv_exception(DisasContext *ctx, uint32_t error)
{
    gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_PRIV | error);
}

static inline void gen_hvpriv_exception(DisasContext *ctx, uint32_t error)
{
    /* Will be converted to program check if needed */
    gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_PRIV | error);
}

/* Stop translation */
static inline void gen_stop_exception(DisasContext *ctx)
{
    gen_update_nip(ctx, ctx->base.pc_next);
    ctx->exception = POWERPC_EXCP_STOP;
}

#ifndef CONFIG_USER_ONLY
/* No need to update nip here, as execution flow will change */
static inline void gen_sync_exception(DisasContext *ctx)
{
    ctx->exception = POWERPC_EXCP_SYNC;
}
#endif

#define GEN_HANDLER(name, opc1, opc2, opc3, inval, type)                      \
GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE)

#define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2)             \
GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2)

#define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type)               \
GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE)

#define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2)      \
GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2)

#define GEN_HANDLER_E_2(name, opc1, opc2, opc3, opc4, inval, type, type2)     \
GEN_OPCODE3(name, opc1, opc2, opc3, opc4, inval, type, type2)

#define GEN_HANDLER2_E_2(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) \
GEN_OPCODE4(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2)

typedef struct opcode_t {
    unsigned char opc1, opc2, opc3, opc4;
#if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
    unsigned char pad[4];
#endif
    opc_handler_t handler;
    const char *oname;
} opcode_t;

/* Helpers for priv. check */
#define GEN_PRIV                                                \
    do {                                                        \
        gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); return; \
    } while (0)
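
/*
 * Privilege checks used by the handlers below: CHK_SV rejects problem
 * state (ctx->pr set), CHK_HV additionally requires hypervisor state,
 * and CHK_HVRM further requires data translation to be off (ctx->dr
 * clear, i.e. hypervisor real mode).  In user-only builds every check
 * raises the privileged-instruction exception via GEN_PRIV.
 */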

#if defined(CONFIG_USER_ONLY)
#define CHK_HV GEN_PRIV
#define CHK_SV GEN_PRIV
#define CHK_HVRM GEN_PRIV
#else
#define CHK_HV                                      \
    do {                                            \
        if (unlikely(ctx->pr || !ctx->hv)) {        \
            GEN_PRIV;                               \
        }                                           \
    } while (0)
#define CHK_SV                                      \
    do {                                            \
        if (unlikely(ctx->pr)) {                    \
            GEN_PRIV;                               \
        }                                           \
    } while (0)
#define CHK_HVRM                                            \
    do {                                                    \
        if (unlikely(ctx->pr || !ctx->hv || ctx->dr)) {     \
            GEN_PRIV;                                       \
        }                                                   \
    } while (0)
#endif

#define CHK_NONE

/*****************************************************************************/
/* PowerPC instructions table */

#if defined(DO_PPC_STATISTICS)
#define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2)                    \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = 0xff,                                                             \
    .handler = {                                                              \
        .inval1 = invl,                                                       \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
        .oname = stringify(name),                                             \
    },                                                                        \
    .oname = stringify(name),                                                 \
}
#define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2)       \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = 0xff,                                                             \
    .handler = {                                                              \
        .inval1 = invl1,                                                      \
        .inval2 = invl2,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
        .oname = stringify(name),                                             \
    },                                                                        \
    .oname = stringify(name),                                                 \
}
#define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2)             \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = 0xff,                                                             \
    .handler = {                                                              \
        .inval1 = invl,                                                       \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
        .oname = onam,                                                        \
    },                                                                        \
    .oname = onam,                                                            \
}
#define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2)              \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = op4,                                                              \
    .handler = {                                                              \
        .inval1 = invl,                                                       \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
        .oname = stringify(name),                                             \
    },                                                                        \
    .oname = stringify(name),                                                 \
}
#define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2)        \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = op4,                                                              \
    .handler = {                                                              \
        .inval1 = invl,                                                       \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
        .oname = onam,                                                        \
    },                                                                        \
    .oname = onam,                                                            \
}
#else
#define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2)                    \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = 0xff,                                                             \
    .handler = {                                                              \
        .inval1 = invl,                                                       \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = stringify(name),                                                 \
}
#define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2)       \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = 0xff,                                                             \
    .handler = {                                                              \
        .inval1 = invl1,                                                      \
        .inval2 = invl2,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = stringify(name),                                                 \
}
#define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2)             \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = 0xff,                                                             \
    .handler = {                                                              \
        .inval1 = invl,                                                       \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = onam,                                                            \
}
#define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2)              \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = op4,                                                              \
    .handler = {                                                              \
        .inval1 = invl,                                                       \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = stringify(name),                                                 \
}
#define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2)        \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = op4,                                                              \
    .handler = {                                                              \
        .inval1 = invl,                                                       \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = onam,                                                            \
}
#endif

/* Invalid instruction */
static void gen_invalid(DisasContext *ctx)
{
    gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
}

static opc_handler_t invalid_handler = {
    .inval1  = 0xFFFFFFFF,
    .inval2  = 0xFFFFFFFF,
    .type    = PPC_NONE,
    .type2   = PPC_NONE,
    .handler = gen_invalid,
};

/*** Integer comparison ***/

static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv_i32 t = tcg_temp_new_i32();

    tcg_gen_movi_tl(t0, CRF_EQ);
    tcg_gen_movi_tl(t1, CRF_LT);
    tcg_gen_movcond_tl((s ? TCG_COND_LT : TCG_COND_LTU), t0, arg0, arg1, t1, t0);
    tcg_gen_movi_tl(t1, CRF_GT);
    tcg_gen_movcond_tl((s ? TCG_COND_GT : TCG_COND_GTU), t0, arg0, arg1, t1, t0);

    tcg_gen_trunc_tl_i32(t, t0);
    tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so);
    tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free_i32(t);
}

static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
{
    TCGv t0 = tcg_const_tl(arg1);
    gen_op_cmp(arg0, t0, s, crf);
    tcg_temp_free(t0);
}

static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
{
    TCGv t0, t1;
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    if (s) {
        tcg_gen_ext32s_tl(t0, arg0);
        tcg_gen_ext32s_tl(t1, arg1);
    } else {
        tcg_gen_ext32u_tl(t0, arg0);
        tcg_gen_ext32u_tl(t1, arg1);
    }
    gen_op_cmp(t0, t1, s, crf);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
}

static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
{
    TCGv t0 = tcg_const_tl(arg1);
    gen_op_cmp32(arg0, t0, s, crf);
    tcg_temp_free(t0);
}

static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
{
    if (NARROW_MODE(ctx)) {
        gen_op_cmpi32(reg, 0, 1, 0);
    } else {
        gen_op_cmpi(reg, 0, 1, 0);
    }
}

/* cmp */
static void gen_cmp(DisasContext *ctx)
{
    if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
        gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                   1, crfD(ctx->opcode));
    } else {
        gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                     1, crfD(ctx->opcode));
    }
}

/* cmpi */
static void gen_cmpi(DisasContext *ctx)
{
    if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
        gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
                    1, crfD(ctx->opcode));
    } else {
        gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
                      1, crfD(ctx->opcode));
    }
}

/* cmpl */
static void gen_cmpl(DisasContext *ctx)
{
    if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
        gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                   0, crfD(ctx->opcode));
    } else {
        gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                     0, crfD(ctx->opcode));
    }
}
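
/*
 * In the cmp/cmpl/cmpi/cmpli handlers, opcode bit 0x00200000 is the L
 * field: when it is set (and the CPU implements PPC_64B) the comparison
 * uses the full 64-bit registers, otherwise the sign- or zero-extended
 * low 32 bits.  gen_op_cmp() writes exactly one of LT/GT/EQ into the
 * target CR field and ORs in the current SO bit.
 */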

/* cmpli */
static void gen_cmpli(DisasContext *ctx)
{
    if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
        gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
                    0, crfD(ctx->opcode));
    } else {
        gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
                      0, crfD(ctx->opcode));
    }
}

/* cmprb - range comparison: isupper, isalpha, islower */
static void gen_cmprb(DisasContext *ctx)
{
    TCGv_i32 src1 = tcg_temp_new_i32();
    TCGv_i32 src2 = tcg_temp_new_i32();
    TCGv_i32 src2lo = tcg_temp_new_i32();
    TCGv_i32 src2hi = tcg_temp_new_i32();
    TCGv_i32 crf = cpu_crf[crfD(ctx->opcode)];

    tcg_gen_trunc_tl_i32(src1, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_trunc_tl_i32(src2, cpu_gpr[rB(ctx->opcode)]);

    tcg_gen_andi_i32(src1, src1, 0xFF);
    tcg_gen_ext8u_i32(src2lo, src2);
    tcg_gen_shri_i32(src2, src2, 8);
    tcg_gen_ext8u_i32(src2hi, src2);

    tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
    tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
    tcg_gen_and_i32(crf, src2lo, src2hi);

    if (ctx->opcode & 0x00200000) {
        tcg_gen_shri_i32(src2, src2, 8);
        tcg_gen_ext8u_i32(src2lo, src2);
        tcg_gen_shri_i32(src2, src2, 8);
        tcg_gen_ext8u_i32(src2hi, src2);
        tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
        tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
        tcg_gen_and_i32(src2lo, src2lo, src2hi);
        tcg_gen_or_i32(crf, crf, src2lo);
    }
    tcg_gen_shli_i32(crf, crf, CRF_GT_BIT);
    tcg_temp_free_i32(src1);
    tcg_temp_free_i32(src2);
    tcg_temp_free_i32(src2lo);
    tcg_temp_free_i32(src2hi);
}

#if defined(TARGET_PPC64)
/* cmpeqb */
static void gen_cmpeqb(DisasContext *ctx)
{
    gen_helper_cmpeqb(cpu_crf[crfD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                      cpu_gpr[rB(ctx->opcode)]);
}
#endif
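
/*
 * isel selects between rA (or zero when rA is 0) and rB based on a single
 * CR bit: rC is the CR bit number 0..31, so rC >> 2 picks the CR field
 * and 0x08 >> (rC & 3) is that bit's position inside the 4-bit field
 * (LT is kept in bit 3, GT in bit 2, EQ in bit 1 and SO in bit 0 of the
 * cpu_crf[] globals).
 */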

/* isel (PowerPC 2.03 specification) */
static void gen_isel(DisasContext *ctx)
{
    uint32_t bi = rC(ctx->opcode);
    uint32_t mask = 0x08 >> (bi & 0x03);
    TCGv t0 = tcg_temp_new();
    TCGv zr;

    tcg_gen_extu_i32_tl(t0, cpu_crf[bi >> 2]);
    tcg_gen_andi_tl(t0, t0, mask);

    zr = tcg_const_tl(0);
    tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rD(ctx->opcode)], t0, zr,
                       rA(ctx->opcode) ? cpu_gpr[rA(ctx->opcode)] : zr,
                       cpu_gpr[rB(ctx->opcode)]);
    tcg_temp_free(zr);
    tcg_temp_free(t0);
}

/* cmpb: PowerPC 2.05 specification */
static void gen_cmpb(DisasContext *ctx)
{
    gen_helper_cmpb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                    cpu_gpr[rB(ctx->opcode)]);
}

/*** Integer arithmetic ***/

static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0,
                                           TCGv arg1, TCGv arg2, int sub)
{
    TCGv t0 = tcg_temp_new();

    tcg_gen_xor_tl(cpu_ov, arg0, arg2);
    tcg_gen_xor_tl(t0, arg1, arg2);
    if (sub) {
        tcg_gen_and_tl(cpu_ov, cpu_ov, t0);
    } else {
        tcg_gen_andc_tl(cpu_ov, cpu_ov, t0);
    }
    tcg_temp_free(t0);
    if (NARROW_MODE(ctx)) {
        tcg_gen_extract_tl(cpu_ov, cpu_ov, 31, 1);
        if (is_isa300(ctx)) {
            tcg_gen_mov_tl(cpu_ov32, cpu_ov);
        }
    } else {
        if (is_isa300(ctx)) {
            tcg_gen_extract_tl(cpu_ov32, cpu_ov, 31, 1);
        }
        tcg_gen_extract_tl(cpu_ov, cpu_ov, TARGET_LONG_BITS - 1, 1);
    }
    tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
}

static inline void gen_op_arith_compute_ca32(DisasContext *ctx,
                                             TCGv res, TCGv arg0, TCGv arg1,
                                             TCGv ca32, int sub)
{
    TCGv t0;

    if (!is_isa300(ctx)) {
        return;
    }

    t0 = tcg_temp_new();
    if (sub) {
        tcg_gen_eqv_tl(t0, arg0, arg1);
    } else {
        tcg_gen_xor_tl(t0, arg0, arg1);
    }
    tcg_gen_xor_tl(t0, t0, res);
    tcg_gen_extract_tl(ca32, t0, 32, 1);
    tcg_temp_free(t0);
}

/* Common add function */
static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1,
                                    TCGv arg2, TCGv ca, TCGv ca32,
                                    bool add_ca, bool compute_ca,
                                    bool compute_ov, bool compute_rc0)
{
    TCGv t0 = ret;

    if (compute_ca || compute_ov) {
        t0 = tcg_temp_new();
    }

    if (compute_ca) {
        if (NARROW_MODE(ctx)) {
            /* Caution: a non-obvious corner case of the spec is that we
               must produce the *entire* 64-bit addition, but produce the
               carry into bit 32. */
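            /* The xor trick below works because bit k of a sum equals
               a_k ^ b_k ^ carry_in_k, so t0 ^ (arg1 ^ arg2) holds, at
               every bit position, the carry that arrived into that
               position; extracting bit 32 yields the 32-bit carry-out.
               Example: 0xffffffff + 1: sum = 0x1'00000000,
               arg1 ^ arg2 = 0xfffffffe, xor = 0x1'fffffffe,
               bit 32 = 1 => CA = 1. */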
            TCGv t1 = tcg_temp_new();
            tcg_gen_xor_tl(t1, arg1, arg2);        /* add without carry */
            tcg_gen_add_tl(t0, arg1, arg2);
            if (add_ca) {
                tcg_gen_add_tl(t0, t0, ca);
            }
            tcg_gen_xor_tl(ca, t0, t1);            /* bits changed w/ carry */
            tcg_temp_free(t1);
            tcg_gen_extract_tl(ca, ca, 32, 1);
            if (is_isa300(ctx)) {
                tcg_gen_mov_tl(ca32, ca);
            }
        } else {
            TCGv zero = tcg_const_tl(0);
            if (add_ca) {
                tcg_gen_add2_tl(t0, ca, arg1, zero, ca, zero);
                tcg_gen_add2_tl(t0, ca, t0, ca, arg2, zero);
            } else {
                tcg_gen_add2_tl(t0, ca, arg1, zero, arg2, zero);
            }
            gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, ca32, 0);
            tcg_temp_free(zero);
        }
    } else {
        tcg_gen_add_tl(t0, arg1, arg2);
        if (add_ca) {
            tcg_gen_add_tl(t0, t0, ca);
        }
    }

    if (compute_ov) {
        gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
    }
    if (unlikely(compute_rc0)) {
        gen_set_Rc0(ctx, t0);
    }

    if (t0 != ret) {
        tcg_gen_mov_tl(ret, t0);
        tcg_temp_free(t0);
    }
}
/* Add functions with two operands */
#define GEN_INT_ARITH_ADD(name, opc3, ca, add_ca, compute_ca, compute_ov)     \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)],                           \
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],      \
                     ca, glue(ca, 32),                                        \
                     add_ca, compute_ca, compute_ov, Rc(ctx->opcode));        \
}
/* Add functions with one operand and one immediate */
#define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, ca,                    \
                                add_ca, compute_ca, compute_ov)               \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    TCGv t0 = tcg_const_tl(const_val);                                        \
    gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)],                           \
                     cpu_gpr[rA(ctx->opcode)], t0,                            \
                     ca, glue(ca, 32),                                        \
                     add_ca, compute_ca, compute_ov, Rc(ctx->opcode));        \
    tcg_temp_free(t0);                                                        \
}

/* add add. addo addo. */
GEN_INT_ARITH_ADD(add, 0x08, cpu_ca, 0, 0, 0)
GEN_INT_ARITH_ADD(addo, 0x18, cpu_ca, 0, 0, 1)
/* addc addc. addco addco. */
GEN_INT_ARITH_ADD(addc, 0x00, cpu_ca, 0, 1, 0)
GEN_INT_ARITH_ADD(addco, 0x10, cpu_ca, 0, 1, 1)
/* adde adde. addeo addeo. */
GEN_INT_ARITH_ADD(adde, 0x04, cpu_ca, 1, 1, 0)
GEN_INT_ARITH_ADD(addeo, 0x14, cpu_ca, 1, 1, 1)
/* addme addme. addmeo addmeo. */
GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, cpu_ca, 1, 1, 0)
GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, cpu_ca, 1, 1, 1)
/* addex */
GEN_INT_ARITH_ADD(addex, 0x05, cpu_ov, 1, 1, 0);
/* addze addze. addzeo addzeo. */
GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, cpu_ca, 1, 1, 0)
GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, cpu_ca, 1, 1, 1)
/* addi */
static void gen_addi(DisasContext *ctx)
{
    target_long simm = SIMM(ctx->opcode);

    if (rA(ctx->opcode) == 0) {
        /* li case */
        tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm);
    } else {
        tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)],
                        cpu_gpr[rA(ctx->opcode)], simm);
    }
}
/* addic addic. */
static inline void gen_op_addic(DisasContext *ctx, bool compute_rc0)
{
    TCGv c = tcg_const_tl(SIMM(ctx->opcode));
    gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                     c, cpu_ca, cpu_ca32, 0, 1, 0, compute_rc0);
    tcg_temp_free(c);
}

static void gen_addic(DisasContext *ctx)
{
    gen_op_addic(ctx, 0);
}

static void gen_addic_(DisasContext *ctx)
{
    gen_op_addic(ctx, 1);
}

/* addis */
static void gen_addis(DisasContext *ctx)
{
    target_long simm = SIMM(ctx->opcode);

    if (rA(ctx->opcode) == 0) {
        /* lis case */
        tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm << 16);
    } else {
        tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)],
                        cpu_gpr[rA(ctx->opcode)], simm << 16);
    }
}

/* addpcis */
static void gen_addpcis(DisasContext *ctx)
{
    target_long d = DX(ctx->opcode);

    tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], ctx->base.pc_next + (d << 16));
}

static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign, int compute_ov)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();
    TCGv_i32 t2 = tcg_temp_new_i32();
    TCGv_i32 t3 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg1);
    tcg_gen_trunc_tl_i32(t1, arg2);
    if (sign) {
        tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i32(t2, t2, t3);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i32(t2, t2, t3);
        tcg_gen_movi_i32(t3, 0);
        tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_div_i32(t3, t0, t1);
        tcg_gen_extu_i32_tl(ret, t3);
    } else {
        tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t1, 0);
        tcg_gen_movi_i32(t3, 0);
        tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_divu_i32(t3, t0, t1);
        tcg_gen_extu_i32_tl(ret, t3);
    }
    if (compute_ov) {
        tcg_gen_extu_i32_tl(cpu_ov, t2);
        if (is_isa300(ctx)) {
            tcg_gen_extu_i32_tl(cpu_ov32, t2);
        }
        tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
    }
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
    tcg_temp_free_i32(t3);

    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, ret);
}
/* Div functions */
#define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov)                      \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      sign, compute_ov);                                      \
}
/* divwu divwu. divwuo divwuo. */
GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0);
GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1);
/* divw divw. divwo divwo. */
GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0);
GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1);
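
/*
 * Note: gen_op_arith_divw above (and gen_op_arith_divd below) force the
 * divisor to 1 whenever the ISA leaves the result undefined (division by
 * zero, or the most negative number divided by -1 for the signed forms),
 * so that the host-level division can never trap; t2 doubles as the
 * overflow flag copied into OV/OV32 for the o-forms.
 */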

/* div[wd]eu[o][.] */
#define GEN_DIVE(name, hlpr, compute_ov)                                      \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 t0 = tcg_const_i32(compute_ov);                                  \
    gen_helper_##hlpr(cpu_gpr[rD(ctx->opcode)], cpu_env,                      \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); \
    tcg_temp_free_i32(t0);                                                    \
    if (unlikely(Rc(ctx->opcode) != 0)) {                                     \
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);                           \
    }                                                                         \
}

GEN_DIVE(divweu, divweu, 0);
GEN_DIVE(divweuo, divweu, 1);
GEN_DIVE(divwe, divwe, 0);
GEN_DIVE(divweo, divwe, 1);

#if defined(TARGET_PPC64)
static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign, int compute_ov)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv_i64 t2 = tcg_temp_new_i64();
    TCGv_i64 t3 = tcg_temp_new_i64();

    tcg_gen_mov_i64(t0, arg1);
    tcg_gen_mov_i64(t1, arg2);
    if (sign) {
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i64(t2, t2, t3);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i64(t2, t2, t3);
        tcg_gen_movi_i64(t3, 0);
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_div_i64(ret, t0, t1);
    } else {
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t1, 0);
        tcg_gen_movi_i64(t3, 0);
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_divu_i64(ret, t0, t1);
    }
    if (compute_ov) {
        tcg_gen_mov_tl(cpu_ov, t2);
        if (is_isa300(ctx)) {
            tcg_gen_mov_tl(cpu_ov32, t2);
        }
        tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
    tcg_temp_free_i64(t3);

    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, ret);
}

#define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov)                      \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      sign, compute_ov);                                      \
}
/* divdu divdu. divduo divduo. */
GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
/* divd divd. divdo divdo. */
GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0);
GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1);

GEN_DIVE(divdeu, divdeu, 0);
GEN_DIVE(divdeuo, divdeu, 1);
GEN_DIVE(divde, divde, 0);
GEN_DIVE(divdeo, divde, 1);
#endif

static inline void gen_op_arith_modw(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg1);
    tcg_gen_trunc_tl_i32(t1, arg2);
    if (sign) {
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_i32 t3 = tcg_temp_new_i32();
        tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i32(t2, t2, t3);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i32(t2, t2, t3);
        tcg_gen_movi_i32(t3, 0);
        tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_rem_i32(t3, t0, t1);
        tcg_gen_ext_i32_tl(ret, t3);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        TCGv_i32 t2 = tcg_const_i32(1);
        TCGv_i32 t3 = tcg_const_i32(0);
        tcg_gen_movcond_i32(TCG_COND_EQ, t1, t1, t3, t2, t1);
        tcg_gen_remu_i32(t3, t0, t1);
        tcg_gen_extu_i32_tl(ret, t3);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    }
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
}

#define GEN_INT_ARITH_MODW(name, opc3, sign)                                  \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_modw(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      sign);                                                  \
}

GEN_INT_ARITH_MODW(moduw, 0x08, 0);
GEN_INT_ARITH_MODW(modsw, 0x18, 1);

#if defined(TARGET_PPC64)
static inline void gen_op_arith_modd(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_mov_i64(t0, arg1);
    tcg_gen_mov_i64(t1, arg2);
    if (sign) {
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i64 t3 = tcg_temp_new_i64();
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i64(t2, t2, t3);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i64(t2, t2, t3);
        tcg_gen_movi_i64(t3, 0);
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_rem_i64(ret, t0, t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        TCGv_i64 t2 = tcg_const_i64(1);
        TCGv_i64 t3 = tcg_const_i64(0);
        tcg_gen_movcond_i64(TCG_COND_EQ, t1, t1, t3, t2, t1);
        tcg_gen_remu_i64(ret, t0, t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

#define GEN_INT_ARITH_MODD(name, opc3, sign)                                  \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_modd(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      sign);                                                  \
}

GEN_INT_ARITH_MODD(modud, 0x08, 0);
GEN_INT_ARITH_MODD(modsd, 0x18, 1);
#endif
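
/*
 * The 32-bit multiply-high handlers below use tcg_gen_muls2_i32/mulu2_i32
 * to obtain both halves of the 32x32 product and keep only the upper word;
 * mullwo additionally flags overflow when the upper word is not simply
 * the sign extension of the lower word.
 */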
/* mulhw mulhw. */
static void gen_mulhw(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_muls2_i32(t0, t1, t0, t1);
    tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* mulhwu mulhwu. */
static void gen_mulhwu(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mulu2_i32(t0, t1, t0, t1);
    tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* mullw mullw. */
static void gen_mullw(DisasContext *ctx)
{
#if defined(TARGET_PPC64)
    TCGv_i64 t0, t1;
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mul_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
#else
    tcg_gen_mul_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                    cpu_gpr[rB(ctx->opcode)]);
#endif
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* mullwo mullwo. */
static void gen_mullwo(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_muls2_i32(t0, t1, t0, t1);
#if defined(TARGET_PPC64)
    tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
#else
    tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], t0);
#endif

    tcg_gen_sari_i32(t0, t0, 31);
    tcg_gen_setcond_i32(TCG_COND_NE, t0, t0, t1);
    tcg_gen_extu_i32_tl(cpu_ov, t0);
    if (is_isa300(ctx)) {
        tcg_gen_mov_tl(cpu_ov32, cpu_ov);
    }
    tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);

    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* mulli */
static void gen_mulli(DisasContext *ctx)
{
    tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                    SIMM(ctx->opcode));
}

#if defined(TARGET_PPC64)
/* mulhd mulhd. */
static void gen_mulhd(DisasContext *ctx)
{
    TCGv lo = tcg_temp_new();
    tcg_gen_muls2_tl(lo, cpu_gpr[rD(ctx->opcode)],
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    tcg_temp_free(lo);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}

/* mulhdu mulhdu. */
static void gen_mulhdu(DisasContext *ctx)
{
    TCGv lo = tcg_temp_new();
    tcg_gen_mulu2_tl(lo, cpu_gpr[rD(ctx->opcode)],
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    tcg_temp_free(lo);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}
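
/*
 * mulldo below computes the full 128-bit product with tcg_gen_muls2_i64
 * and, as in mullwo, reports overflow when the high half differs from
 * the sign extension of the low half.
 */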
/* mulld mulld. */
static void gen_mulld(DisasContext *ctx)
{
    tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                   cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* mulldo mulldo. */
static void gen_mulldo(DisasContext *ctx)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_muls2_i64(t0, t1, cpu_gpr[rA(ctx->opcode)],
                      cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], t0);

    tcg_gen_sari_i64(t0, t0, 63);
    tcg_gen_setcond_i64(TCG_COND_NE, cpu_ov, t0, t1);
    if (is_isa300(ctx)) {
        tcg_gen_mov_tl(cpu_ov32, cpu_ov);
    }
    tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}
#endif

/* Common subf function */
static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, bool add_ca, bool compute_ca,
                                     bool compute_ov, bool compute_rc0)
{
    TCGv t0 = ret;

    if (compute_ca || compute_ov) {
        t0 = tcg_temp_new();
    }

    if (compute_ca) {
        /* dest = ~arg1 + arg2 [+ ca]. */
        if (NARROW_MODE(ctx)) {
            /* Caution: a non-obvious corner case of the spec is that we
               must produce the *entire* 64-bit addition, but produce the
               carry into bit 32. */
            TCGv inv1 = tcg_temp_new();
            TCGv t1 = tcg_temp_new();
            tcg_gen_not_tl(inv1, arg1);
            if (add_ca) {
                tcg_gen_add_tl(t0, arg2, cpu_ca);
            } else {
                tcg_gen_addi_tl(t0, arg2, 1);
            }
            tcg_gen_xor_tl(t1, arg2, inv1);        /* add without carry */
            tcg_gen_add_tl(t0, t0, inv1);
            tcg_temp_free(inv1);
            tcg_gen_xor_tl(cpu_ca, t0, t1);        /* bits changed w/ carry */
            tcg_temp_free(t1);
            tcg_gen_extract_tl(cpu_ca, cpu_ca, 32, 1);
            if (is_isa300(ctx)) {
                tcg_gen_mov_tl(cpu_ca32, cpu_ca);
            }
        } else if (add_ca) {
            TCGv zero, inv1 = tcg_temp_new();
            tcg_gen_not_tl(inv1, arg1);
            zero = tcg_const_tl(0);
            tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero);
            tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero);
            gen_op_arith_compute_ca32(ctx, t0, inv1, arg2, cpu_ca32, 0);
            tcg_temp_free(zero);
            tcg_temp_free(inv1);
        } else {
            tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1);
            tcg_gen_sub_tl(t0, arg2, arg1);
            gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, cpu_ca32, 1);
        }
    } else if (add_ca) {
        /* Since we're ignoring carry-out, we can simplify the
           standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1. */
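        /* (in two's complement ~arg1 == -arg1 - 1, hence the -1 below) */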
        tcg_gen_sub_tl(t0, arg2, arg1);
        tcg_gen_add_tl(t0, t0, cpu_ca);
        tcg_gen_subi_tl(t0, t0, 1);
    } else {
        tcg_gen_sub_tl(t0, arg2, arg1);
    }

    if (compute_ov) {
        gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
    }
    if (unlikely(compute_rc0)) {
        gen_set_Rc0(ctx, t0);
    }

    if (t0 != ret) {
        tcg_gen_mov_tl(ret, t0);
        tcg_temp_free(t0);
    }
}
/* Sub functions with two operands */
#define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov)        \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      add_ca, compute_ca, compute_ov, Rc(ctx->opcode));       \
}
/* Sub functions with one operand and one immediate */
#define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val,                       \
                                 add_ca, compute_ca, compute_ov)              \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    TCGv t0 = tcg_const_tl(const_val);                                        \
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], t0,                           \
                      add_ca, compute_ca, compute_ov, Rc(ctx->opcode));       \
    tcg_temp_free(t0);                                                        \
}
/* subf subf. subfo subfo. */
GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
/* subfc subfc. subfco subfco. */
GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
/* subfe subfe. subfeo subfeo. */
GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
/* subfme subfme. subfmeo subfmeo. */
GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
/* subfze subfze. subfzeo subfzeo. */
GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)

/* subfic */
static void gen_subfic(DisasContext *ctx)
{
    TCGv c = tcg_const_tl(SIMM(ctx->opcode));
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                      c, 0, 1, 0, 0);
    tcg_temp_free(c);
}

/* neg neg. nego nego. */
static inline void gen_op_arith_neg(DisasContext *ctx, bool compute_ov)
{
    TCGv zero = tcg_const_tl(0);
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                      zero, 0, 0, compute_ov, Rc(ctx->opcode));
    tcg_temp_free(zero);
}

static void gen_neg(DisasContext *ctx)
{
    tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}

static void gen_nego(DisasContext *ctx)
{
    gen_op_arith_neg(ctx, 1);
}

/*** Integer logical ***/
#define GEN_LOGICAL2(name, tcg_op, opc, type)                                 \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],                \
           cpu_gpr[rB(ctx->opcode)]);                                         \
    if (unlikely(Rc(ctx->opcode) != 0))                                       \
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);                           \
}

#define GEN_LOGICAL1(name, tcg_op, opc, type)                                 \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);               \
    if (unlikely(Rc(ctx->opcode) != 0))                                       \
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);                           \
}

/* and & and. */
GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER);
/* andc & andc. */
GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER);

/* andi. */
static void gen_andi_(DisasContext *ctx)
{
    tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                    UIMM(ctx->opcode));
    gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* andis. */
static void gen_andis_(DisasContext *ctx)
{
    tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                    UIMM(ctx->opcode) << 16);
    gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* cntlzw */
static void gen_cntlzw(DisasContext *ctx)
{
    TCGv_i32 t = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_clzi_i32(t, t, 32);
    tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t);
    tcg_temp_free_i32(t);

    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* cnttzw */
static void gen_cnttzw(DisasContext *ctx)
{
    TCGv_i32 t = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_ctzi_i32(t, t, 32);
    tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t);
    tcg_temp_free_i32(t);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* eqv & eqv. */
GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER);
/* extsb & extsb. */
GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER);
/* extsh & extsh. */
GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER);
/* nand & nand. */
GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER);
/* nor & nor. */
GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER);

#if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
static void gen_pause(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_const_i32(0);
    tcg_gen_st_i32(t0, cpu_env,
                   -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
    tcg_temp_free_i32(t0);

    /* Stop translation, this gives other CPUs a chance to run */
    gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
}
#endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */

/* or & or. */
static void gen_or(DisasContext *ctx)
{
    int rs, ra, rb;

    rs = rS(ctx->opcode);
    ra = rA(ctx->opcode);
    rb = rB(ctx->opcode);
    /* Optimisation for mr. ri case */
    if (rs != ra || rs != rb) {
        if (rs != rb)
            tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
        else
            tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);
        if (unlikely(Rc(ctx->opcode) != 0))
            gen_set_Rc0(ctx, cpu_gpr[ra]);
    } else if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rs]);
#if defined(TARGET_PPC64)
    } else if (rs != 0) { /* 0 is nop */
        int prio = 0;

        switch (rs) {
        case 1:
            /* Set process priority to low */
            prio = 2;
            break;
        case 6:
            /* Set process priority to medium-low */
            prio = 3;
            break;
        case 2:
            /* Set process priority to normal */
            prio = 4;
            break;
#if !defined(CONFIG_USER_ONLY)
        case 31:
            if (!ctx->pr) {
                /* Set process priority to very low */
                prio = 1;
            }
            break;
        case 5:
            if (!ctx->pr) {
                /* Set process priority to medium-high */
                prio = 5;
            }
            break;
        case 3:
            if (!ctx->pr) {
                /* Set process priority to high */
                prio = 6;
            }
            break;
        case 7:
            if (ctx->hv && !ctx->pr) {
                /* Set process priority to very high */
                prio = 7;
            }
            break;
#endif
        default:
            break;
        }
        if (prio) {
            TCGv t0 = tcg_temp_new();
            gen_load_spr(t0, SPR_PPR);
            tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
            tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
            gen_store_spr(SPR_PPR, t0);
            tcg_temp_free(t0);
        }
#if !defined(CONFIG_USER_ONLY)
        /* Pause out of TCG otherwise spin loops with smt_low eat too much
         * CPU and the kernel hangs. This applies to all encodings other
         * than no-op, e.g., miso(rs=26), yield(27), mdoio(29), mdoom(30),
         * and all currently undefined.
         */
        gen_pause(ctx);
#endif
#endif
    }
}
/* orc & orc. */
GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);

/* xor & xor. */
static void gen_xor(DisasContext *ctx)
{
    /* Optimisation for "set to zero" case */
    if (rS(ctx->opcode) != rB(ctx->opcode))
        tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                       cpu_gpr[rB(ctx->opcode)]);
    else
        tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* ori */
static void gen_ori(DisasContext *ctx)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        return;
    }
    tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
}

/* oris */
static void gen_oris(DisasContext *ctx)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        /* NOP */
        return;
    }
    tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                   uimm << 16);
}

/* xori */
static void gen_xori(DisasContext *ctx)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        /* NOP */
        return;
    }
    tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
}

/* xoris */
static void gen_xoris(DisasContext *ctx)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        /* NOP */
        return;
    }
    tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                    uimm << 16);
}

/* popcntb : PowerPC 2.03 specification */
static void gen_popcntb(DisasContext *ctx)
{
    gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
}

static void gen_popcntw(DisasContext *ctx)
{
#if defined(TARGET_PPC64)
    gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
#else
    tcg_gen_ctpop_i32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
#endif
}

#if defined(TARGET_PPC64)
/* popcntd: PowerPC 2.06 specification */
static void gen_popcntd(DisasContext *ctx)
{
    tcg_gen_ctpop_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
}
#endif

/* prtyw: PowerPC 2.05 specification */
static void gen_prtyw(DisasContext *ctx)
{
    TCGv ra = cpu_gpr[rA(ctx->opcode)];
    TCGv rs = cpu_gpr[rS(ctx->opcode)];
    TCGv t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, rs, 16);
    tcg_gen_xor_tl(ra, rs, t0);
    tcg_gen_shri_tl(t0, ra, 8);
    tcg_gen_xor_tl(ra, ra, t0);
    tcg_gen_andi_tl(ra, ra, (target_ulong)0x100000001ULL);
    tcg_temp_free(t0);
}

#if defined(TARGET_PPC64)
/* prtyd: PowerPC 2.05 specification */
static void gen_prtyd(DisasContext *ctx)
{
    TCGv ra = cpu_gpr[rA(ctx->opcode)];
    TCGv rs = cpu_gpr[rS(ctx->opcode)];
    TCGv t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, rs, 32);
    tcg_gen_xor_tl(ra, rs, t0);
    tcg_gen_shri_tl(t0, ra, 16);
    tcg_gen_xor_tl(ra, ra, t0);
    tcg_gen_shri_tl(t0, ra, 8);
    tcg_gen_xor_tl(ra, ra, t0);
    tcg_gen_andi_tl(ra, ra, 1);
    tcg_temp_free(t0);
}
#endif

#if defined(TARGET_PPC64)
/* bpermd */
static void gen_bpermd(DisasContext *ctx)
{
    gen_helper_bpermd(cpu_gpr[rA(ctx->opcode)],
                      cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
}
#endif

#if defined(TARGET_PPC64)
/* extsw & extsw. */
GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B);

/* cntlzd */
static void gen_cntlzd(DisasContext *ctx)
{
    tcg_gen_clzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* cnttzd */
static void gen_cnttzd(DisasContext *ctx)
{
    tcg_gen_ctzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* darn */
static void gen_darn(DisasContext *ctx)
{
    int l = L(ctx->opcode);

    if (l == 0) {
        gen_helper_darn32(cpu_gpr[rD(ctx->opcode)]);
    } else if (l <= 2) {
        /* Return 64-bit random for both CRN and RRN */
        gen_helper_darn64(cpu_gpr[rD(ctx->opcode)]);
    } else {
        tcg_gen_movi_i64(cpu_gpr[rD(ctx->opcode)], -1);
    }
}
#endif

/*** Integer rotate ***/

/* rlwimi & rlwimi. */
static void gen_rlwimi(DisasContext *ctx)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    uint32_t sh = SH(ctx->opcode);
    uint32_t mb = MB(ctx->opcode);
    uint32_t me = ME(ctx->opcode);

    if (sh == (31 - me) && mb <= me) {
        tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
    } else {
        target_ulong mask;
        TCGv t1;

#if defined(TARGET_PPC64)
        mb += 32;
        me += 32;
#endif
        mask = MASK(mb, me);

        t1 = tcg_temp_new();
        if (mask <= 0xffffffffu) {
            TCGv_i32 t0 = tcg_temp_new_i32();
            tcg_gen_trunc_tl_i32(t0, t_rs);
            tcg_gen_rotli_i32(t0, t0, sh);
            tcg_gen_extu_i32_tl(t1, t0);
            tcg_temp_free_i32(t0);
        } else {
#if defined(TARGET_PPC64)
            tcg_gen_deposit_i64(t1, t_rs, t_rs, 32, 32);
            tcg_gen_rotli_i64(t1, t1, sh);
#else
            g_assert_not_reached();
#endif
        }

        tcg_gen_andi_tl(t1, t1, mask);
        tcg_gen_andi_tl(t_ra, t_ra, ~mask);
        tcg_gen_or_tl(t_ra, t_ra, t1);
        tcg_temp_free(t1);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}
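
/*
 * MASK(mb, me) builds the PowerPC rotate mask: ones from IBM bit mb
 * through bit me inclusive, wrapping around when mb > me.  For the
 * 32-bit forms on a 64-bit target, mb and me are offset by 32 so the
 * mask lands in the low word.  E.g. rlwinm ra,rs,8,24,31 rotates left
 * by 8 and masks with 0x000000ff, i.e. it extracts the top byte of the
 * low word; gen_rlwinm() below recognises that case and emits a single
 * tcg_gen_extract_tl(ra, rs, 24, 8).
 */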
/* rlwinm & rlwinm. */
static void gen_rlwinm(DisasContext *ctx)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    int sh = SH(ctx->opcode);
    int mb = MB(ctx->opcode);
    int me = ME(ctx->opcode);
    int len = me - mb + 1;
    int rsh = (32 - sh) & 31;

    if (sh != 0 && len > 0 && me == (31 - sh)) {
        tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
    } else if (me == 31 && rsh + len <= 32) {
        tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
    } else {
        target_ulong mask;
#if defined(TARGET_PPC64)
        mb += 32;
        me += 32;
#endif
        mask = MASK(mb, me);
        if (sh == 0) {
            tcg_gen_andi_tl(t_ra, t_rs, mask);
        } else if (mask <= 0xffffffffu) {
            TCGv_i32 t0 = tcg_temp_new_i32();
            tcg_gen_trunc_tl_i32(t0, t_rs);
            tcg_gen_rotli_i32(t0, t0, sh);
            tcg_gen_andi_i32(t0, t0, mask);
            tcg_gen_extu_i32_tl(t_ra, t0);
            tcg_temp_free_i32(t0);
        } else {
#if defined(TARGET_PPC64)
            tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
            tcg_gen_rotli_i64(t_ra, t_ra, sh);
            tcg_gen_andi_i64(t_ra, t_ra, mask);
#else
            g_assert_not_reached();
#endif
        }
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}

/* rlwnm & rlwnm. */
static void gen_rlwnm(DisasContext *ctx)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
    uint32_t mb = MB(ctx->opcode);
    uint32_t me = ME(ctx->opcode);
    target_ulong mask;

#if defined(TARGET_PPC64)
    mb += 32;
    me += 32;
#endif
    mask = MASK(mb, me);

    if (mask <= 0xffffffffu) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(t0, t_rb);
        tcg_gen_trunc_tl_i32(t1, t_rs);
        tcg_gen_andi_i32(t0, t0, 0x1f);
        tcg_gen_rotl_i32(t1, t1, t0);
        tcg_gen_extu_i32_tl(t_ra, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else {
#if defined(TARGET_PPC64)
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_andi_i64(t0, t_rb, 0x1f);
        tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
        tcg_gen_rotl_i64(t_ra, t_ra, t0);
        tcg_temp_free_i64(t0);
#else
        g_assert_not_reached();
#endif
    }

    tcg_gen_andi_tl(t_ra, t_ra, mask);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}

#if defined(TARGET_PPC64)
#define GEN_PPC64_R2(name, opc1, opc2)                                        \
static void glue(gen_, name##0)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 0);                                                       \
}                                                                             \
                                                                              \
static void glue(gen_, name##1)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 1);                                                       \
}
#define GEN_PPC64_R4(name, opc1, opc2)                                        \
static void glue(gen_, name##0)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 0, 0);                                                    \
}                                                                             \
                                                                              \
static void glue(gen_, name##1)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 0, 1);                                                    \
}                                                                             \
                                                                              \
static void glue(gen_, name##2)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 1, 0);                                                    \
}                                                                             \
                                                                              \
static void glue(gen_, name##3)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 1, 1);                                                    \
}

static void gen_rldinm(DisasContext *ctx, int mb, int me, int sh)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    int len = me - mb + 1;
    int rsh = (64 - sh) & 63;

    if (sh != 0 && len > 0 && me == (63 - sh)) {
tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len); 2014 } else if (me == 63 && rsh + len <= 64) { 2015 tcg_gen_extract_tl(t_ra, t_rs, rsh, len); 2016 } else { 2017 tcg_gen_rotli_tl(t_ra, t_rs, sh); 2018 tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me)); 2019 } 2020 if (unlikely(Rc(ctx->opcode) != 0)) { 2021 gen_set_Rc0(ctx, t_ra); 2022 } 2023 } 2024 2025 /* rldicl - rldicl. */ 2026 static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn) 2027 { 2028 uint32_t sh, mb; 2029 2030 sh = SH(ctx->opcode) | (shn << 5); 2031 mb = MB(ctx->opcode) | (mbn << 5); 2032 gen_rldinm(ctx, mb, 63, sh); 2033 } 2034 GEN_PPC64_R4(rldicl, 0x1E, 0x00); 2035 2036 /* rldicr - rldicr. */ 2037 static inline void gen_rldicr(DisasContext *ctx, int men, int shn) 2038 { 2039 uint32_t sh, me; 2040 2041 sh = SH(ctx->opcode) | (shn << 5); 2042 me = MB(ctx->opcode) | (men << 5); 2043 gen_rldinm(ctx, 0, me, sh); 2044 } 2045 GEN_PPC64_R4(rldicr, 0x1E, 0x02); 2046 2047 /* rldic - rldic. */ 2048 static inline void gen_rldic(DisasContext *ctx, int mbn, int shn) 2049 { 2050 uint32_t sh, mb; 2051 2052 sh = SH(ctx->opcode) | (shn << 5); 2053 mb = MB(ctx->opcode) | (mbn << 5); 2054 gen_rldinm(ctx, mb, 63 - sh, sh); 2055 } 2056 GEN_PPC64_R4(rldic, 0x1E, 0x04); 2057 2058 static void gen_rldnm(DisasContext *ctx, int mb, int me) 2059 { 2060 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2061 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2062 TCGv t_rb = cpu_gpr[rB(ctx->opcode)]; 2063 TCGv t0; 2064 2065 t0 = tcg_temp_new(); 2066 tcg_gen_andi_tl(t0, t_rb, 0x3f); 2067 tcg_gen_rotl_tl(t_ra, t_rs, t0); 2068 tcg_temp_free(t0); 2069 2070 tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me)); 2071 if (unlikely(Rc(ctx->opcode) != 0)) { 2072 gen_set_Rc0(ctx, t_ra); 2073 } 2074 } 2075 2076 /* rldcl - rldcl. */ 2077 static inline void gen_rldcl(DisasContext *ctx, int mbn) 2078 { 2079 uint32_t mb; 2080 2081 mb = MB(ctx->opcode) | (mbn << 5); 2082 gen_rldnm(ctx, mb, 63); 2083 } 2084 GEN_PPC64_R2(rldcl, 0x1E, 0x08); 2085 2086 /* rldcr - rldcr. */ 2087 static inline void gen_rldcr(DisasContext *ctx, int men) 2088 { 2089 uint32_t me; 2090 2091 me = MB(ctx->opcode) | (men << 5); 2092 gen_rldnm(ctx, 0, me); 2093 } 2094 GEN_PPC64_R2(rldcr, 0x1E, 0x09); 2095 2096 /* rldimi - rldimi. */ 2097 static void gen_rldimi(DisasContext *ctx, int mbn, int shn) 2098 { 2099 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2100 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2101 uint32_t sh = SH(ctx->opcode) | (shn << 5); 2102 uint32_t mb = MB(ctx->opcode) | (mbn << 5); 2103 uint32_t me = 63 - sh; 2104 2105 if (mb <= me) { 2106 tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1); 2107 } else { 2108 target_ulong mask = MASK(mb, me); 2109 TCGv t1 = tcg_temp_new(); 2110 2111 tcg_gen_rotli_tl(t1, t_rs, sh); 2112 tcg_gen_andi_tl(t1, t1, mask); 2113 tcg_gen_andi_tl(t_ra, t_ra, ~mask); 2114 tcg_gen_or_tl(t_ra, t_ra, t1); 2115 tcg_temp_free(t1); 2116 } 2117 if (unlikely(Rc(ctx->opcode) != 0)) { 2118 gen_set_Rc0(ctx, t_ra); 2119 } 2120 } 2121 GEN_PPC64_R4(rldimi, 0x1E, 0x06); 2122 #endif 2123 2124 /*** Integer shift ***/ 2125 2126 /* slw & slw. 
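 * Shift amounts of 32..63 must yield zero; rather than branching, the code
 * below turns the 0x20 bit of rB into an all-ones/all-zeroes mask and uses
 * it to clear rS before the 5-bit shift.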
*/ 2127 static void gen_slw(DisasContext *ctx) 2128 { 2129 TCGv t0, t1; 2130 2131 t0 = tcg_temp_new(); 2132 /* AND rS with a mask that is 0 when rB >= 0x20 */ 2133 #if defined(TARGET_PPC64) 2134 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a); 2135 tcg_gen_sari_tl(t0, t0, 0x3f); 2136 #else 2137 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a); 2138 tcg_gen_sari_tl(t0, t0, 0x1f); 2139 #endif 2140 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 2141 t1 = tcg_temp_new(); 2142 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f); 2143 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 2144 tcg_temp_free(t1); 2145 tcg_temp_free(t0); 2146 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 2147 if (unlikely(Rc(ctx->opcode) != 0)) 2148 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2149 } 2150 2151 /* sraw & sraw. */ 2152 static void gen_sraw(DisasContext *ctx) 2153 { 2154 gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], cpu_env, 2155 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2156 if (unlikely(Rc(ctx->opcode) != 0)) 2157 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2158 } 2159 2160 /* srawi & srawi. */ 2161 static void gen_srawi(DisasContext *ctx) 2162 { 2163 int sh = SH(ctx->opcode); 2164 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 2165 TCGv src = cpu_gpr[rS(ctx->opcode)]; 2166 if (sh == 0) { 2167 tcg_gen_ext32s_tl(dst, src); 2168 tcg_gen_movi_tl(cpu_ca, 0); 2169 if (is_isa300(ctx)) { 2170 tcg_gen_movi_tl(cpu_ca32, 0); 2171 } 2172 } else { 2173 TCGv t0; 2174 tcg_gen_ext32s_tl(dst, src); 2175 tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1); 2176 t0 = tcg_temp_new(); 2177 tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1); 2178 tcg_gen_and_tl(cpu_ca, cpu_ca, t0); 2179 tcg_temp_free(t0); 2180 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0); 2181 if (is_isa300(ctx)) { 2182 tcg_gen_mov_tl(cpu_ca32, cpu_ca); 2183 } 2184 tcg_gen_sari_tl(dst, dst, sh); 2185 } 2186 if (unlikely(Rc(ctx->opcode) != 0)) { 2187 gen_set_Rc0(ctx, dst); 2188 } 2189 } 2190 2191 /* srw & srw. */ 2192 static void gen_srw(DisasContext *ctx) 2193 { 2194 TCGv t0, t1; 2195 2196 t0 = tcg_temp_new(); 2197 /* AND rS with a mask that is 0 when rB >= 0x20 */ 2198 #if defined(TARGET_PPC64) 2199 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a); 2200 tcg_gen_sari_tl(t0, t0, 0x3f); 2201 #else 2202 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a); 2203 tcg_gen_sari_tl(t0, t0, 0x1f); 2204 #endif 2205 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 2206 tcg_gen_ext32u_tl(t0, t0); 2207 t1 = tcg_temp_new(); 2208 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f); 2209 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 2210 tcg_temp_free(t1); 2211 tcg_temp_free(t0); 2212 if (unlikely(Rc(ctx->opcode) != 0)) 2213 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2214 } 2215 2216 #if defined(TARGET_PPC64) 2217 /* sld & sld. */ 2218 static void gen_sld(DisasContext *ctx) 2219 { 2220 TCGv t0, t1; 2221 2222 t0 = tcg_temp_new(); 2223 /* AND rS with a mask that is 0 when rB >= 0x40 */ 2224 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39); 2225 tcg_gen_sari_tl(t0, t0, 0x3f); 2226 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 2227 t1 = tcg_temp_new(); 2228 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f); 2229 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 2230 tcg_temp_free(t1); 2231 tcg_temp_free(t0); 2232 if (unlikely(Rc(ctx->opcode) != 0)) 2233 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2234 } 2235 2236 /* srad & srad. 
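 * Shift right algebraic doubleword: done in a helper because the carry-out
 * (CA, and CA32 on ISA v3.00) has to be computed as well.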
*/ 2237 static void gen_srad(DisasContext *ctx) 2238 { 2239 gen_helper_srad(cpu_gpr[rA(ctx->opcode)], cpu_env, 2240 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2241 if (unlikely(Rc(ctx->opcode) != 0)) 2242 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2243 } 2244 /* sradi & sradi. */ 2245 static inline void gen_sradi(DisasContext *ctx, int n) 2246 { 2247 int sh = SH(ctx->opcode) + (n << 5); 2248 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 2249 TCGv src = cpu_gpr[rS(ctx->opcode)]; 2250 if (sh == 0) { 2251 tcg_gen_mov_tl(dst, src); 2252 tcg_gen_movi_tl(cpu_ca, 0); 2253 if (is_isa300(ctx)) { 2254 tcg_gen_movi_tl(cpu_ca32, 0); 2255 } 2256 } else { 2257 TCGv t0; 2258 tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1); 2259 t0 = tcg_temp_new(); 2260 tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1); 2261 tcg_gen_and_tl(cpu_ca, cpu_ca, t0); 2262 tcg_temp_free(t0); 2263 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0); 2264 if (is_isa300(ctx)) { 2265 tcg_gen_mov_tl(cpu_ca32, cpu_ca); 2266 } 2267 tcg_gen_sari_tl(dst, src, sh); 2268 } 2269 if (unlikely(Rc(ctx->opcode) != 0)) { 2270 gen_set_Rc0(ctx, dst); 2271 } 2272 } 2273 2274 static void gen_sradi0(DisasContext *ctx) 2275 { 2276 gen_sradi(ctx, 0); 2277 } 2278 2279 static void gen_sradi1(DisasContext *ctx) 2280 { 2281 gen_sradi(ctx, 1); 2282 } 2283 2284 /* extswsli & extswsli. */ 2285 static inline void gen_extswsli(DisasContext *ctx, int n) 2286 { 2287 int sh = SH(ctx->opcode) + (n << 5); 2288 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 2289 TCGv src = cpu_gpr[rS(ctx->opcode)]; 2290 2291 tcg_gen_ext32s_tl(dst, src); 2292 tcg_gen_shli_tl(dst, dst, sh); 2293 if (unlikely(Rc(ctx->opcode) != 0)) { 2294 gen_set_Rc0(ctx, dst); 2295 } 2296 } 2297 2298 static void gen_extswsli0(DisasContext *ctx) 2299 { 2300 gen_extswsli(ctx, 0); 2301 } 2302 2303 static void gen_extswsli1(DisasContext *ctx) 2304 { 2305 gen_extswsli(ctx, 1); 2306 } 2307 2308 /* srd & srd. 
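 * Shift right doubleword: out-of-range shift amounts (rB & 0x40 set) are
 * zeroed with the same mask trick used for sld above.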
*/ 2309 static void gen_srd(DisasContext *ctx) 2310 { 2311 TCGv t0, t1; 2312 2313 t0 = tcg_temp_new(); 2314 /* AND rS with a mask that is 0 when rB >= 0x40 */ 2315 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39); 2316 tcg_gen_sari_tl(t0, t0, 0x3f); 2317 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 2318 t1 = tcg_temp_new(); 2319 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f); 2320 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 2321 tcg_temp_free(t1); 2322 tcg_temp_free(t0); 2323 if (unlikely(Rc(ctx->opcode) != 0)) 2324 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2325 } 2326 #endif 2327 2328 /*** Addressing modes ***/ 2329 /* Register indirect with immediate index : EA = (rA|0) + SIMM */ 2330 static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA, 2331 target_long maskl) 2332 { 2333 target_long simm = SIMM(ctx->opcode); 2334 2335 simm &= ~maskl; 2336 if (rA(ctx->opcode) == 0) { 2337 if (NARROW_MODE(ctx)) { 2338 simm = (uint32_t)simm; 2339 } 2340 tcg_gen_movi_tl(EA, simm); 2341 } else if (likely(simm != 0)) { 2342 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm); 2343 if (NARROW_MODE(ctx)) { 2344 tcg_gen_ext32u_tl(EA, EA); 2345 } 2346 } else { 2347 if (NARROW_MODE(ctx)) { 2348 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]); 2349 } else { 2350 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]); 2351 } 2352 } 2353 } 2354 2355 static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA) 2356 { 2357 if (rA(ctx->opcode) == 0) { 2358 if (NARROW_MODE(ctx)) { 2359 tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]); 2360 } else { 2361 tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]); 2362 } 2363 } else { 2364 tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2365 if (NARROW_MODE(ctx)) { 2366 tcg_gen_ext32u_tl(EA, EA); 2367 } 2368 } 2369 } 2370 2371 static inline void gen_addr_register(DisasContext *ctx, TCGv EA) 2372 { 2373 if (rA(ctx->opcode) == 0) { 2374 tcg_gen_movi_tl(EA, 0); 2375 } else if (NARROW_MODE(ctx)) { 2376 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]); 2377 } else { 2378 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]); 2379 } 2380 } 2381 2382 static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1, 2383 target_long val) 2384 { 2385 tcg_gen_addi_tl(ret, arg1, val); 2386 if (NARROW_MODE(ctx)) { 2387 tcg_gen_ext32u_tl(ret, ret); 2388 } 2389 } 2390 2391 static inline void gen_align_no_le(DisasContext *ctx) 2392 { 2393 gen_exception_err(ctx, POWERPC_EXCP_ALIGN, 2394 (ctx->opcode & 0x03FF0000) | POWERPC_EXCP_ALIGN_LE); 2395 } 2396 2397 /*** Integer load ***/ 2398 #define DEF_MEMOP(op) ((op) | ctx->default_tcg_memop_mask) 2399 #define BSWAP_MEMOP(op) ((op) | (ctx->default_tcg_memop_mask ^ MO_BSWAP)) 2400 2401 #define GEN_QEMU_LOAD_TL(ldop, op) \ 2402 static void glue(gen_qemu_, ldop)(DisasContext *ctx, \ 2403 TCGv val, \ 2404 TCGv addr) \ 2405 { \ 2406 tcg_gen_qemu_ld_tl(val, addr, ctx->mem_idx, op); \ 2407 } 2408 2409 GEN_QEMU_LOAD_TL(ld8u, DEF_MEMOP(MO_UB)) 2410 GEN_QEMU_LOAD_TL(ld16u, DEF_MEMOP(MO_UW)) 2411 GEN_QEMU_LOAD_TL(ld16s, DEF_MEMOP(MO_SW)) 2412 GEN_QEMU_LOAD_TL(ld32u, DEF_MEMOP(MO_UL)) 2413 GEN_QEMU_LOAD_TL(ld32s, DEF_MEMOP(MO_SL)) 2414 2415 GEN_QEMU_LOAD_TL(ld16ur, BSWAP_MEMOP(MO_UW)) 2416 GEN_QEMU_LOAD_TL(ld32ur, BSWAP_MEMOP(MO_UL)) 2417 2418 #define GEN_QEMU_LOAD_64(ldop, op) \ 2419 static void glue(gen_qemu_, glue(ldop, _i64))(DisasContext *ctx, \ 2420 TCGv_i64 val, \ 2421 TCGv addr) \ 2422 { \ 2423 tcg_gen_qemu_ld_i64(val, addr, ctx->mem_idx, op); \ 2424 } 2425 2426 GEN_QEMU_LOAD_64(ld8u, DEF_MEMOP(MO_UB)) 2427 
GEN_QEMU_LOAD_64(ld16u, DEF_MEMOP(MO_UW)) 2428 GEN_QEMU_LOAD_64(ld32u, DEF_MEMOP(MO_UL)) 2429 GEN_QEMU_LOAD_64(ld32s, DEF_MEMOP(MO_SL)) 2430 GEN_QEMU_LOAD_64(ld64, DEF_MEMOP(MO_Q)) 2431 2432 #if defined(TARGET_PPC64) 2433 GEN_QEMU_LOAD_64(ld64ur, BSWAP_MEMOP(MO_Q)) 2434 #endif 2435 2436 #define GEN_QEMU_STORE_TL(stop, op) \ 2437 static void glue(gen_qemu_, stop)(DisasContext *ctx, \ 2438 TCGv val, \ 2439 TCGv addr) \ 2440 { \ 2441 tcg_gen_qemu_st_tl(val, addr, ctx->mem_idx, op); \ 2442 } 2443 2444 GEN_QEMU_STORE_TL(st8, DEF_MEMOP(MO_UB)) 2445 GEN_QEMU_STORE_TL(st16, DEF_MEMOP(MO_UW)) 2446 GEN_QEMU_STORE_TL(st32, DEF_MEMOP(MO_UL)) 2447 2448 GEN_QEMU_STORE_TL(st16r, BSWAP_MEMOP(MO_UW)) 2449 GEN_QEMU_STORE_TL(st32r, BSWAP_MEMOP(MO_UL)) 2450 2451 #define GEN_QEMU_STORE_64(stop, op) \ 2452 static void glue(gen_qemu_, glue(stop, _i64))(DisasContext *ctx, \ 2453 TCGv_i64 val, \ 2454 TCGv addr) \ 2455 { \ 2456 tcg_gen_qemu_st_i64(val, addr, ctx->mem_idx, op); \ 2457 } 2458 2459 GEN_QEMU_STORE_64(st8, DEF_MEMOP(MO_UB)) 2460 GEN_QEMU_STORE_64(st16, DEF_MEMOP(MO_UW)) 2461 GEN_QEMU_STORE_64(st32, DEF_MEMOP(MO_UL)) 2462 GEN_QEMU_STORE_64(st64, DEF_MEMOP(MO_Q)) 2463 2464 #if defined(TARGET_PPC64) 2465 GEN_QEMU_STORE_64(st64r, BSWAP_MEMOP(MO_Q)) 2466 #endif 2467 2468 #define GEN_LD(name, ldop, opc, type) \ 2469 static void glue(gen_, name)(DisasContext *ctx) \ 2470 { \ 2471 TCGv EA; \ 2472 gen_set_access_type(ctx, ACCESS_INT); \ 2473 EA = tcg_temp_new(); \ 2474 gen_addr_imm_index(ctx, EA, 0); \ 2475 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 2476 tcg_temp_free(EA); \ 2477 } 2478 2479 #define GEN_LDU(name, ldop, opc, type) \ 2480 static void glue(gen_, name##u)(DisasContext *ctx) \ 2481 { \ 2482 TCGv EA; \ 2483 if (unlikely(rA(ctx->opcode) == 0 || \ 2484 rA(ctx->opcode) == rD(ctx->opcode))) { \ 2485 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 2486 return; \ 2487 } \ 2488 gen_set_access_type(ctx, ACCESS_INT); \ 2489 EA = tcg_temp_new(); \ 2490 if (type == PPC_64B) \ 2491 gen_addr_imm_index(ctx, EA, 0x03); \ 2492 else \ 2493 gen_addr_imm_index(ctx, EA, 0); \ 2494 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 2495 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 2496 tcg_temp_free(EA); \ 2497 } 2498 2499 #define GEN_LDUX(name, ldop, opc2, opc3, type) \ 2500 static void glue(gen_, name##ux)(DisasContext *ctx) \ 2501 { \ 2502 TCGv EA; \ 2503 if (unlikely(rA(ctx->opcode) == 0 || \ 2504 rA(ctx->opcode) == rD(ctx->opcode))) { \ 2505 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 2506 return; \ 2507 } \ 2508 gen_set_access_type(ctx, ACCESS_INT); \ 2509 EA = tcg_temp_new(); \ 2510 gen_addr_reg_index(ctx, EA); \ 2511 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 2512 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 2513 tcg_temp_free(EA); \ 2514 } 2515 2516 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \ 2517 static void glue(gen_, name##x)(DisasContext *ctx) \ 2518 { \ 2519 TCGv EA; \ 2520 chk; \ 2521 gen_set_access_type(ctx, ACCESS_INT); \ 2522 EA = tcg_temp_new(); \ 2523 gen_addr_reg_index(ctx, EA); \ 2524 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 2525 tcg_temp_free(EA); \ 2526 } 2527 2528 #define GEN_LDX(name, ldop, opc2, opc3, type) \ 2529 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_NONE) 2530 2531 #define GEN_LDX_HVRM(name, ldop, opc2, opc3, type) \ 2532 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_HVRM) 2533 2534 #define GEN_LDS(name, ldop, op, type) \ 2535 GEN_LD(name, ldop, op | 0x20, type); \ 2536 GEN_LDU(name, ldop, op 
| 0x21, type); \ 2537 GEN_LDUX(name, ldop, 0x17, op | 0x01, type); \ 2538 GEN_LDX(name, ldop, 0x17, op | 0x00, type) 2539 2540 /* lbz lbzu lbzux lbzx */ 2541 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER); 2542 /* lha lhau lhaux lhax */ 2543 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER); 2544 /* lhz lhzu lhzux lhzx */ 2545 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER); 2546 /* lwz lwzu lwzux lwzx */ 2547 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER); 2548 2549 #define GEN_LDEPX(name, ldop, opc2, opc3) \ 2550 static void glue(gen_, name##epx)(DisasContext *ctx) \ 2551 { \ 2552 TCGv EA; \ 2553 CHK_SV; \ 2554 gen_set_access_type(ctx, ACCESS_INT); \ 2555 EA = tcg_temp_new(); \ 2556 gen_addr_reg_index(ctx, EA); \ 2557 tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_LOAD, ldop);\ 2558 tcg_temp_free(EA); \ 2559 } 2560 2561 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02) 2562 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08) 2563 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00) 2564 #if defined(TARGET_PPC64) 2565 GEN_LDEPX(ld, DEF_MEMOP(MO_Q), 0x1D, 0x00) 2566 #endif 2567 2568 #if defined(TARGET_PPC64) 2569 /* lwaux */ 2570 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B); 2571 /* lwax */ 2572 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B); 2573 /* ldux */ 2574 GEN_LDUX(ld, ld64_i64, 0x15, 0x01, PPC_64B); 2575 /* ldx */ 2576 GEN_LDX(ld, ld64_i64, 0x15, 0x00, PPC_64B); 2577 2578 /* CI load/store variants */ 2579 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST) 2580 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x15, PPC_CILDST) 2581 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST) 2582 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST) 2583 2584 static void gen_ld(DisasContext *ctx) 2585 { 2586 TCGv EA; 2587 if (Rc(ctx->opcode)) { 2588 if (unlikely(rA(ctx->opcode) == 0 || 2589 rA(ctx->opcode) == rD(ctx->opcode))) { 2590 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2591 return; 2592 } 2593 } 2594 gen_set_access_type(ctx, ACCESS_INT); 2595 EA = tcg_temp_new(); 2596 gen_addr_imm_index(ctx, EA, 0x03); 2597 if (ctx->opcode & 0x02) { 2598 /* lwa (lwau is undefined) */ 2599 gen_qemu_ld32s(ctx, cpu_gpr[rD(ctx->opcode)], EA); 2600 } else { 2601 /* ld - ldu */ 2602 gen_qemu_ld64_i64(ctx, cpu_gpr[rD(ctx->opcode)], EA); 2603 } 2604 if (Rc(ctx->opcode)) 2605 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); 2606 tcg_temp_free(EA); 2607 } 2608 2609 /* lq */ 2610 static void gen_lq(DisasContext *ctx) 2611 { 2612 int ra, rd; 2613 TCGv EA, hi, lo; 2614 2615 /* lq is a legal user mode instruction starting in ISA 2.07 */ 2616 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2617 bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2618 2619 if (!legal_in_user_mode && ctx->pr) { 2620 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); 2621 return; 2622 } 2623 2624 if (!le_is_supported && ctx->le_mode) { 2625 gen_align_no_le(ctx); 2626 return; 2627 } 2628 ra = rA(ctx->opcode); 2629 rd = rD(ctx->opcode); 2630 if (unlikely((rd & 1) || rd == ra)) { 2631 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2632 return; 2633 } 2634 2635 gen_set_access_type(ctx, ACCESS_INT); 2636 EA = tcg_temp_new(); 2637 gen_addr_imm_index(ctx, EA, 0x0F); 2638 2639 /* Note that the low part is always in RD+1, even in LE mode. 
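 * When TCG runs in parallel (MTTCG), the 16-byte access goes through an
 * atomic helper if the host has 16-byte atomics; otherwise the translation
 * is restarted under the exclusive lock. The serial path simply issues two
 * 8-byte loads in guest endian order.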
*/ 2640 lo = cpu_gpr[rd + 1]; 2641 hi = cpu_gpr[rd]; 2642 2643 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 2644 if (HAVE_ATOMIC128) { 2645 TCGv_i32 oi = tcg_temp_new_i32(); 2646 if (ctx->le_mode) { 2647 tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ, ctx->mem_idx)); 2648 gen_helper_lq_le_parallel(lo, cpu_env, EA, oi); 2649 } else { 2650 tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ, ctx->mem_idx)); 2651 gen_helper_lq_be_parallel(lo, cpu_env, EA, oi); 2652 } 2653 tcg_temp_free_i32(oi); 2654 tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh)); 2655 } else { 2656 /* Restart with exclusive lock. */ 2657 gen_helper_exit_atomic(cpu_env); 2658 ctx->base.is_jmp = DISAS_NORETURN; 2659 } 2660 } else if (ctx->le_mode) { 2661 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEQ); 2662 gen_addr_add(ctx, EA, EA, 8); 2663 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEQ); 2664 } else { 2665 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEQ); 2666 gen_addr_add(ctx, EA, EA, 8); 2667 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEQ); 2668 } 2669 tcg_temp_free(EA); 2670 } 2671 #endif 2672 2673 /*** Integer store ***/ 2674 #define GEN_ST(name, stop, opc, type) \ 2675 static void glue(gen_, name)(DisasContext *ctx) \ 2676 { \ 2677 TCGv EA; \ 2678 gen_set_access_type(ctx, ACCESS_INT); \ 2679 EA = tcg_temp_new(); \ 2680 gen_addr_imm_index(ctx, EA, 0); \ 2681 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2682 tcg_temp_free(EA); \ 2683 } 2684 2685 #define GEN_STU(name, stop, opc, type) \ 2686 static void glue(gen_, stop##u)(DisasContext *ctx) \ 2687 { \ 2688 TCGv EA; \ 2689 if (unlikely(rA(ctx->opcode) == 0)) { \ 2690 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 2691 return; \ 2692 } \ 2693 gen_set_access_type(ctx, ACCESS_INT); \ 2694 EA = tcg_temp_new(); \ 2695 if (type == PPC_64B) \ 2696 gen_addr_imm_index(ctx, EA, 0x03); \ 2697 else \ 2698 gen_addr_imm_index(ctx, EA, 0); \ 2699 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2700 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 2701 tcg_temp_free(EA); \ 2702 } 2703 2704 #define GEN_STUX(name, stop, opc2, opc3, type) \ 2705 static void glue(gen_, name##ux)(DisasContext *ctx) \ 2706 { \ 2707 TCGv EA; \ 2708 if (unlikely(rA(ctx->opcode) == 0)) { \ 2709 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 2710 return; \ 2711 } \ 2712 gen_set_access_type(ctx, ACCESS_INT); \ 2713 EA = tcg_temp_new(); \ 2714 gen_addr_reg_index(ctx, EA); \ 2715 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2716 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 2717 tcg_temp_free(EA); \ 2718 } 2719 2720 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \ 2721 static void glue(gen_, name##x)(DisasContext *ctx) \ 2722 { \ 2723 TCGv EA; \ 2724 chk; \ 2725 gen_set_access_type(ctx, ACCESS_INT); \ 2726 EA = tcg_temp_new(); \ 2727 gen_addr_reg_index(ctx, EA); \ 2728 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2729 tcg_temp_free(EA); \ 2730 } 2731 #define GEN_STX(name, stop, opc2, opc3, type) \ 2732 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_NONE) 2733 2734 #define GEN_STX_HVRM(name, stop, opc2, opc3, type) \ 2735 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_HVRM) 2736 2737 #define GEN_STS(name, stop, op, type) \ 2738 GEN_ST(name, stop, op | 0x20, type); \ 2739 GEN_STU(name, stop, op | 0x21, type); \ 2740 GEN_STUX(name, stop, 0x17, op | 0x01, type); \ 2741 GEN_STX(name, stop, 0x17, op | 0x00, type) 2742 2743 /* stb stbu stbux stbx */ 2744 GEN_STS(stb, st8, 0x06, PPC_INTEGER); 2745 /* sth sthu sthux sthx */ 2746 GEN_STS(sth, st16, 
0x0C, PPC_INTEGER); 2747 /* stw stwu stwux stwx */ 2748 GEN_STS(stw, st32, 0x04, PPC_INTEGER); 2749 2750 #define GEN_STEPX(name, stop, opc2, opc3) \ 2751 static void glue(gen_, name##epx)(DisasContext *ctx) \ 2752 { \ 2753 TCGv EA; \ 2754 CHK_SV; \ 2755 gen_set_access_type(ctx, ACCESS_INT); \ 2756 EA = tcg_temp_new(); \ 2757 gen_addr_reg_index(ctx, EA); \ 2758 tcg_gen_qemu_st_tl( \ 2759 cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_STORE, stop); \ 2760 tcg_temp_free(EA); \ 2761 } 2762 2763 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06) 2764 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C) 2765 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04) 2766 #if defined(TARGET_PPC64) 2767 GEN_STEPX(std, DEF_MEMOP(MO_Q), 0x1d, 0x04) 2768 #endif 2769 2770 #if defined(TARGET_PPC64) 2771 GEN_STUX(std, st64_i64, 0x15, 0x05, PPC_64B); 2772 GEN_STX(std, st64_i64, 0x15, 0x04, PPC_64B); 2773 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST) 2774 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST) 2775 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST) 2776 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST) 2777 2778 static void gen_std(DisasContext *ctx) 2779 { 2780 int rs; 2781 TCGv EA; 2782 2783 rs = rS(ctx->opcode); 2784 if ((ctx->opcode & 0x3) == 0x2) { /* stq */ 2785 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2786 bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2787 TCGv hi, lo; 2788 2789 if (!(ctx->insns_flags & PPC_64BX)) { 2790 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2791 } 2792 2793 if (!legal_in_user_mode && ctx->pr) { 2794 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); 2795 return; 2796 } 2797 2798 if (!le_is_supported && ctx->le_mode) { 2799 gen_align_no_le(ctx); 2800 return; 2801 } 2802 2803 if (unlikely(rs & 1)) { 2804 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2805 return; 2806 } 2807 gen_set_access_type(ctx, ACCESS_INT); 2808 EA = tcg_temp_new(); 2809 gen_addr_imm_index(ctx, EA, 0x03); 2810 2811 /* Note that the low part is always in RS+1, even in LE mode. */ 2812 lo = cpu_gpr[rs + 1]; 2813 hi = cpu_gpr[rs]; 2814 2815 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 2816 if (HAVE_ATOMIC128) { 2817 TCGv_i32 oi = tcg_temp_new_i32(); 2818 if (ctx->le_mode) { 2819 tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ, ctx->mem_idx)); 2820 gen_helper_stq_le_parallel(cpu_env, EA, lo, hi, oi); 2821 } else { 2822 tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ, ctx->mem_idx)); 2823 gen_helper_stq_be_parallel(cpu_env, EA, lo, hi, oi); 2824 } 2825 tcg_temp_free_i32(oi); 2826 } else { 2827 /* Restart with exclusive lock. 
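 * gen_helper_exit_atomic raises EXCP_ATOMIC, which causes the instruction
 * to be re-executed with all other vCPUs stopped, so the two plain 8-byte
 * stores are then safe.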
*/ 2828 gen_helper_exit_atomic(cpu_env); 2829 ctx->base.is_jmp = DISAS_NORETURN; 2830 } 2831 } else if (ctx->le_mode) {
2832 tcg_gen_qemu_st_i64(lo, EA, ctx->mem_idx, MO_LEQ); 2833 gen_addr_add(ctx, EA, EA, 8);
2834 tcg_gen_qemu_st_i64(hi, EA, ctx->mem_idx, MO_LEQ); 2835 } else {
2836 tcg_gen_qemu_st_i64(hi, EA, ctx->mem_idx, MO_BEQ); 2837 gen_addr_add(ctx, EA, EA, 8);
2838 tcg_gen_qemu_st_i64(lo, EA, ctx->mem_idx, MO_BEQ); 2839 } 2840 tcg_temp_free(EA); 2841 } else {
2842 /* std / stdu */ 2843 if (Rc(ctx->opcode)) { 2844 if (unlikely(rA(ctx->opcode) == 0)) {
2845 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2846 return; 2847 } 2848 }
2849 gen_set_access_type(ctx, ACCESS_INT); 2850 EA = tcg_temp_new(); 2851 gen_addr_imm_index(ctx, EA, 0x03);
2852 gen_qemu_st64_i64(ctx, cpu_gpr[rs], EA); 2853 if (Rc(ctx->opcode))
2854 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); 2855 tcg_temp_free(EA); 2856 } 2857 } 2858 #endif
2859 /*** Integer load and store with byte reverse ***/ 2860 2861 /* lhbrx */
2862 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER); 2863 2864 /* lwbrx */
2865 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER); 2866 2867 #if defined(TARGET_PPC64) 2868 /* ldbrx */
2869 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE); 2870 /* stdbrx */
2871 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE); 2872 #endif /* TARGET_PPC64 */ 2873
2874 /* sthbrx */ 2875 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER); 2876 /* stwbrx */
2877 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER); 2878 2879 /*** Integer load and store multiple ***/ 2880
2881 /* lmw */ 2882 static void gen_lmw(DisasContext *ctx) 2883 { 2884 TCGv t0; 2885 TCGv_i32 t1; 2886
2887 if (ctx->le_mode) { 2888 gen_align_no_le(ctx); 2889 return; 2890 }
2891 gen_set_access_type(ctx, ACCESS_INT); 2892 t0 = tcg_temp_new(); 2893 t1 = tcg_const_i32(rD(ctx->opcode));
2894 gen_addr_imm_index(ctx, t0, 0); 2895 gen_helper_lmw(cpu_env, t0, t1); 2896 tcg_temp_free(t0);
2897 tcg_temp_free_i32(t1); 2898 } 2899 2900 /* stmw */ 2901 static void gen_stmw(DisasContext *ctx) 2902 {
2903 TCGv t0; 2904 TCGv_i32 t1; 2905 2906 if (ctx->le_mode) { 2907 gen_align_no_le(ctx); 2908 return; 2909 }
2910 gen_set_access_type(ctx, ACCESS_INT); 2911 t0 = tcg_temp_new(); 2912 t1 = tcg_const_i32(rS(ctx->opcode));
2913 gen_addr_imm_index(ctx, t0, 0); 2914 gen_helper_stmw(cpu_env, t0, t1); 2915 tcg_temp_free(t0);
2916 tcg_temp_free_i32(t1); 2917 } 2918 2919 /*** Integer load and store strings ***/ 2920 2921 /* lswi */
2922 /* The PowerPC32 specification says we must generate an exception if
2923 * rA is in the range of registers to be loaded.
2924 * On the other hand, IBM says this is valid, but rA won't be loaded.
2925 * For now, I'll follow the spec...
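 * (i.e. raise an invalid-instruction program exception when rA lies in the
 * range of registers to be loaded; lsw_reg_in_range() below does the check)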
2926 */ 2927 static void gen_lswi(DisasContext *ctx) 2928 { 2929 TCGv t0; 2930 TCGv_i32 t1, t2; 2931 int nb = NB(ctx->opcode); 2932 int start = rD(ctx->opcode); 2933 int ra = rA(ctx->opcode); 2934 int nr; 2935 2936 if (ctx->le_mode) { 2937 gen_align_no_le(ctx); 2938 return; 2939 } 2940 if (nb == 0) 2941 nb = 32; 2942 nr = DIV_ROUND_UP(nb, 4); 2943 if (unlikely(lsw_reg_in_range(start, nr, ra))) { 2944 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX); 2945 return; 2946 } 2947 gen_set_access_type(ctx, ACCESS_INT); 2948 t0 = tcg_temp_new(); 2949 gen_addr_register(ctx, t0); 2950 t1 = tcg_const_i32(nb); 2951 t2 = tcg_const_i32(start); 2952 gen_helper_lsw(cpu_env, t0, t1, t2); 2953 tcg_temp_free(t0); 2954 tcg_temp_free_i32(t1); 2955 tcg_temp_free_i32(t2); 2956 } 2957 2958 /* lswx */ 2959 static void gen_lswx(DisasContext *ctx) 2960 { 2961 TCGv t0; 2962 TCGv_i32 t1, t2, t3; 2963 2964 if (ctx->le_mode) { 2965 gen_align_no_le(ctx); 2966 return; 2967 } 2968 gen_set_access_type(ctx, ACCESS_INT); 2969 t0 = tcg_temp_new(); 2970 gen_addr_reg_index(ctx, t0); 2971 t1 = tcg_const_i32(rD(ctx->opcode)); 2972 t2 = tcg_const_i32(rA(ctx->opcode)); 2973 t3 = tcg_const_i32(rB(ctx->opcode)); 2974 gen_helper_lswx(cpu_env, t0, t1, t2, t3); 2975 tcg_temp_free(t0); 2976 tcg_temp_free_i32(t1); 2977 tcg_temp_free_i32(t2); 2978 tcg_temp_free_i32(t3); 2979 } 2980 2981 /* stswi */ 2982 static void gen_stswi(DisasContext *ctx) 2983 { 2984 TCGv t0; 2985 TCGv_i32 t1, t2; 2986 int nb = NB(ctx->opcode); 2987 2988 if (ctx->le_mode) { 2989 gen_align_no_le(ctx); 2990 return; 2991 } 2992 gen_set_access_type(ctx, ACCESS_INT); 2993 t0 = tcg_temp_new(); 2994 gen_addr_register(ctx, t0); 2995 if (nb == 0) 2996 nb = 32; 2997 t1 = tcg_const_i32(nb); 2998 t2 = tcg_const_i32(rS(ctx->opcode)); 2999 gen_helper_stsw(cpu_env, t0, t1, t2); 3000 tcg_temp_free(t0); 3001 tcg_temp_free_i32(t1); 3002 tcg_temp_free_i32(t2); 3003 } 3004 3005 /* stswx */ 3006 static void gen_stswx(DisasContext *ctx) 3007 { 3008 TCGv t0; 3009 TCGv_i32 t1, t2; 3010 3011 if (ctx->le_mode) { 3012 gen_align_no_le(ctx); 3013 return; 3014 } 3015 gen_set_access_type(ctx, ACCESS_INT); 3016 t0 = tcg_temp_new(); 3017 gen_addr_reg_index(ctx, t0); 3018 t1 = tcg_temp_new_i32(); 3019 tcg_gen_trunc_tl_i32(t1, cpu_xer); 3020 tcg_gen_andi_i32(t1, t1, 0x7F); 3021 t2 = tcg_const_i32(rS(ctx->opcode)); 3022 gen_helper_stsw(cpu_env, t0, t1, t2); 3023 tcg_temp_free(t0); 3024 tcg_temp_free_i32(t1); 3025 tcg_temp_free_i32(t2); 3026 } 3027 3028 /*** Memory synchronisation ***/ 3029 /* eieio */ 3030 static void gen_eieio(DisasContext *ctx) 3031 { 3032 TCGBar bar = TCG_MO_LD_ST; 3033 3034 /* 3035 * POWER9 has a eieio instruction variant using bit 6 as a hint to 3036 * tell the CPU it is a store-forwarding barrier. 3037 */ 3038 if (ctx->opcode & 0x2000000) { 3039 /* 3040 * ISA says that "Reserved fields in instructions are ignored 3041 * by the processor". So ignore the bit 6 on non-POWER9 CPU but 3042 * as this is not an instruction software should be using, 3043 * complain to the user. 
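 * On ISA v3.0 CPUs the hint selects a store-vs-load barrier (TCG_MO_ST_LD)
 * below instead of the default TCG_MO_LD_ST ordering.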
3044 */ 3045 if (!(ctx->insns_flags2 & PPC2_ISA300)) { 3046 qemu_log_mask(LOG_GUEST_ERROR, "invalid eieio using bit 6 at @" 3047 TARGET_FMT_lx "\n", ctx->base.pc_next - 4); 3048 } else { 3049 bar = TCG_MO_ST_LD; 3050 } 3051 } 3052 3053 tcg_gen_mb(bar | TCG_BAR_SC); 3054 } 3055 3056 #if !defined(CONFIG_USER_ONLY) 3057 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) 3058 { 3059 TCGv_i32 t; 3060 TCGLabel *l; 3061 3062 if (!ctx->lazy_tlb_flush) { 3063 return; 3064 } 3065 l = gen_new_label(); 3066 t = tcg_temp_new_i32(); 3067 tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, tlb_need_flush)); 3068 tcg_gen_brcondi_i32(TCG_COND_EQ, t, 0, l); 3069 if (global) { 3070 gen_helper_check_tlb_flush_global(cpu_env); 3071 } else { 3072 gen_helper_check_tlb_flush_local(cpu_env); 3073 } 3074 gen_set_label(l); 3075 tcg_temp_free_i32(t); 3076 } 3077 #else 3078 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) { } 3079 #endif 3080 3081 /* isync */ 3082 static void gen_isync(DisasContext *ctx) 3083 { 3084 /* 3085 * We need to check for a pending TLB flush. This can only happen in 3086 * kernel mode however so check MSR_PR 3087 */ 3088 if (!ctx->pr) { 3089 gen_check_tlb_flush(ctx, false); 3090 } 3091 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC); 3092 gen_stop_exception(ctx); 3093 } 3094 3095 #define MEMOP_GET_SIZE(x) (1 << ((x) & MO_SIZE)) 3096 3097 static void gen_load_locked(DisasContext *ctx, TCGMemOp memop) 3098 { 3099 TCGv gpr = cpu_gpr[rD(ctx->opcode)]; 3100 TCGv t0 = tcg_temp_new(); 3101 3102 gen_set_access_type(ctx, ACCESS_RES); 3103 gen_addr_reg_index(ctx, t0); 3104 tcg_gen_qemu_ld_tl(gpr, t0, ctx->mem_idx, memop | MO_ALIGN); 3105 tcg_gen_mov_tl(cpu_reserve, t0); 3106 tcg_gen_mov_tl(cpu_reserve_val, gpr); 3107 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_LDAQ); 3108 tcg_temp_free(t0); 3109 } 3110 3111 #define LARX(name, memop) \ 3112 static void gen_##name(DisasContext *ctx) \ 3113 { \ 3114 gen_load_locked(ctx, memop); \ 3115 } 3116 3117 /* lwarx */ 3118 LARX(lbarx, DEF_MEMOP(MO_UB)) 3119 LARX(lharx, DEF_MEMOP(MO_UW)) 3120 LARX(lwarx, DEF_MEMOP(MO_UL)) 3121 3122 static void gen_fetch_inc_conditional(DisasContext *ctx, TCGMemOp memop, 3123 TCGv EA, TCGCond cond, int addend) 3124 { 3125 TCGv t = tcg_temp_new(); 3126 TCGv t2 = tcg_temp_new(); 3127 TCGv u = tcg_temp_new(); 3128 3129 tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop); 3130 tcg_gen_addi_tl(t2, EA, MEMOP_GET_SIZE(memop)); 3131 tcg_gen_qemu_ld_tl(t2, t2, ctx->mem_idx, memop); 3132 tcg_gen_addi_tl(u, t, addend); 3133 3134 /* E.g. for fetch and increment bounded... */ 3135 /* mem(EA,s) = (t != t2 ? u = t + 1 : t) */ 3136 tcg_gen_movcond_tl(cond, u, t, t2, u, t); 3137 tcg_gen_qemu_st_tl(u, EA, ctx->mem_idx, memop); 3138 3139 /* RT = (t != t2 ? 
t : u = 1<<(s*8-1)) */ 3140 tcg_gen_movi_tl(u, 1 << (MEMOP_GET_SIZE(memop) * 8 - 1)); 3141 tcg_gen_movcond_tl(cond, cpu_gpr[rD(ctx->opcode)], t, t2, t, u); 3142 3143 tcg_temp_free(t); 3144 tcg_temp_free(t2); 3145 tcg_temp_free(u); 3146 } 3147 3148 static void gen_ld_atomic(DisasContext *ctx, TCGMemOp memop) 3149 { 3150 uint32_t gpr_FC = FC(ctx->opcode); 3151 TCGv EA = tcg_temp_new(); 3152 int rt = rD(ctx->opcode); 3153 bool need_serial; 3154 TCGv src, dst; 3155 3156 gen_addr_register(ctx, EA); 3157 dst = cpu_gpr[rt]; 3158 src = cpu_gpr[(rt + 1) & 31]; 3159 3160 need_serial = false; 3161 memop |= MO_ALIGN; 3162 switch (gpr_FC) { 3163 case 0: /* Fetch and add */ 3164 tcg_gen_atomic_fetch_add_tl(dst, EA, src, ctx->mem_idx, memop); 3165 break; 3166 case 1: /* Fetch and xor */ 3167 tcg_gen_atomic_fetch_xor_tl(dst, EA, src, ctx->mem_idx, memop); 3168 break; 3169 case 2: /* Fetch and or */ 3170 tcg_gen_atomic_fetch_or_tl(dst, EA, src, ctx->mem_idx, memop); 3171 break; 3172 case 3: /* Fetch and 'and' */ 3173 tcg_gen_atomic_fetch_and_tl(dst, EA, src, ctx->mem_idx, memop); 3174 break; 3175 case 4: /* Fetch and max unsigned */ 3176 tcg_gen_atomic_fetch_umax_tl(dst, EA, src, ctx->mem_idx, memop); 3177 break; 3178 case 5: /* Fetch and max signed */ 3179 tcg_gen_atomic_fetch_smax_tl(dst, EA, src, ctx->mem_idx, memop); 3180 break; 3181 case 6: /* Fetch and min unsigned */ 3182 tcg_gen_atomic_fetch_umin_tl(dst, EA, src, ctx->mem_idx, memop); 3183 break; 3184 case 7: /* Fetch and min signed */ 3185 tcg_gen_atomic_fetch_smin_tl(dst, EA, src, ctx->mem_idx, memop); 3186 break; 3187 case 8: /* Swap */ 3188 tcg_gen_atomic_xchg_tl(dst, EA, src, ctx->mem_idx, memop); 3189 break; 3190 3191 case 16: /* Compare and swap not equal */ 3192 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3193 need_serial = true; 3194 } else { 3195 TCGv t0 = tcg_temp_new(); 3196 TCGv t1 = tcg_temp_new(); 3197 3198 tcg_gen_qemu_ld_tl(t0, EA, ctx->mem_idx, memop); 3199 if ((memop & MO_SIZE) == MO_64 || TARGET_LONG_BITS == 32) { 3200 tcg_gen_mov_tl(t1, src); 3201 } else { 3202 tcg_gen_ext32u_tl(t1, src); 3203 } 3204 tcg_gen_movcond_tl(TCG_COND_NE, t1, t0, t1, 3205 cpu_gpr[(rt + 2) & 31], t0); 3206 tcg_gen_qemu_st_tl(t1, EA, ctx->mem_idx, memop); 3207 tcg_gen_mov_tl(dst, t0); 3208 3209 tcg_temp_free(t0); 3210 tcg_temp_free(t1); 3211 } 3212 break; 3213 3214 case 24: /* Fetch and increment bounded */ 3215 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3216 need_serial = true; 3217 } else { 3218 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, 1); 3219 } 3220 break; 3221 case 25: /* Fetch and increment equal */ 3222 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3223 need_serial = true; 3224 } else { 3225 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_EQ, 1); 3226 } 3227 break; 3228 case 28: /* Fetch and decrement bounded */ 3229 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3230 need_serial = true; 3231 } else { 3232 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, -1); 3233 } 3234 break; 3235 3236 default: 3237 /* invoke data storage error handler */ 3238 gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL); 3239 } 3240 tcg_temp_free(EA); 3241 3242 if (need_serial) { 3243 /* Restart with exclusive lock. 
*/ 3244 gen_helper_exit_atomic(cpu_env); 3245 ctx->base.is_jmp = DISAS_NORETURN; 3246 } 3247 } 3248 3249 static void gen_lwat(DisasContext *ctx) 3250 { 3251 gen_ld_atomic(ctx, DEF_MEMOP(MO_UL)); 3252 } 3253 3254 #ifdef TARGET_PPC64 3255 static void gen_ldat(DisasContext *ctx) 3256 { 3257 gen_ld_atomic(ctx, DEF_MEMOP(MO_Q)); 3258 } 3259 #endif 3260 3261 static void gen_st_atomic(DisasContext *ctx, TCGMemOp memop) 3262 { 3263 uint32_t gpr_FC = FC(ctx->opcode); 3264 TCGv EA = tcg_temp_new(); 3265 TCGv src, discard; 3266 3267 gen_addr_register(ctx, EA); 3268 src = cpu_gpr[rD(ctx->opcode)]; 3269 discard = tcg_temp_new(); 3270 3271 memop |= MO_ALIGN; 3272 switch (gpr_FC) { 3273 case 0: /* add and Store */ 3274 tcg_gen_atomic_add_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3275 break; 3276 case 1: /* xor and Store */ 3277 tcg_gen_atomic_xor_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3278 break; 3279 case 2: /* Or and Store */ 3280 tcg_gen_atomic_or_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3281 break; 3282 case 3: /* 'and' and Store */ 3283 tcg_gen_atomic_and_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3284 break; 3285 case 4: /* Store max unsigned */ 3286 tcg_gen_atomic_umax_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3287 break; 3288 case 5: /* Store max signed */ 3289 tcg_gen_atomic_smax_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3290 break; 3291 case 6: /* Store min unsigned */ 3292 tcg_gen_atomic_umin_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3293 break; 3294 case 7: /* Store min signed */ 3295 tcg_gen_atomic_smin_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3296 break; 3297 case 24: /* Store twin */ 3298 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3299 /* Restart with exclusive lock. */ 3300 gen_helper_exit_atomic(cpu_env); 3301 ctx->base.is_jmp = DISAS_NORETURN; 3302 } else { 3303 TCGv t = tcg_temp_new(); 3304 TCGv t2 = tcg_temp_new(); 3305 TCGv s = tcg_temp_new(); 3306 TCGv s2 = tcg_temp_new(); 3307 TCGv ea_plus_s = tcg_temp_new(); 3308 3309 tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop); 3310 tcg_gen_addi_tl(ea_plus_s, EA, MEMOP_GET_SIZE(memop)); 3311 tcg_gen_qemu_ld_tl(t2, ea_plus_s, ctx->mem_idx, memop); 3312 tcg_gen_movcond_tl(TCG_COND_EQ, s, t, t2, src, t); 3313 tcg_gen_movcond_tl(TCG_COND_EQ, s2, t, t2, src, t2); 3314 tcg_gen_qemu_st_tl(s, EA, ctx->mem_idx, memop); 3315 tcg_gen_qemu_st_tl(s2, ea_plus_s, ctx->mem_idx, memop); 3316 3317 tcg_temp_free(ea_plus_s); 3318 tcg_temp_free(s2); 3319 tcg_temp_free(s); 3320 tcg_temp_free(t2); 3321 tcg_temp_free(t); 3322 } 3323 break; 3324 default: 3325 /* invoke data storage error handler */ 3326 gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL); 3327 } 3328 tcg_temp_free(discard); 3329 tcg_temp_free(EA); 3330 } 3331 3332 static void gen_stwat(DisasContext *ctx) 3333 { 3334 gen_st_atomic(ctx, DEF_MEMOP(MO_UL)); 3335 } 3336 3337 #ifdef TARGET_PPC64 3338 static void gen_stdat(DisasContext *ctx) 3339 { 3340 gen_st_atomic(ctx, DEF_MEMOP(MO_Q)); 3341 } 3342 #endif 3343 3344 static void gen_conditional_store(DisasContext *ctx, TCGMemOp memop) 3345 { 3346 TCGLabel *l1 = gen_new_label(); 3347 TCGLabel *l2 = gen_new_label(); 3348 TCGv t0 = tcg_temp_new(); 3349 int reg = rS(ctx->opcode); 3350 3351 gen_set_access_type(ctx, ACCESS_RES); 3352 gen_addr_reg_index(ctx, t0); 3353 tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1); 3354 tcg_temp_free(t0); 3355 3356 t0 = tcg_temp_new(); 3357 tcg_gen_atomic_cmpxchg_tl(t0, cpu_reserve, cpu_reserve_val, 3358 cpu_gpr[reg], ctx->mem_idx, 3359 DEF_MEMOP(memop) | 
MO_ALIGN); 3360 tcg_gen_setcond_tl(TCG_COND_EQ, t0, t0, cpu_reserve_val); 3361 tcg_gen_shli_tl(t0, t0, CRF_EQ_BIT); 3362 tcg_gen_or_tl(t0, t0, cpu_so); 3363 tcg_gen_trunc_tl_i32(cpu_crf[0], t0); 3364 tcg_temp_free(t0); 3365 tcg_gen_br(l2); 3366 3367 gen_set_label(l1); 3368 3369 /* Address mismatch implies failure. But we still need to provide the 3370 memory barrier semantics of the instruction. */ 3371 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_STRL); 3372 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 3373 3374 gen_set_label(l2); 3375 tcg_gen_movi_tl(cpu_reserve, -1); 3376 } 3377 3378 #define STCX(name, memop) \ 3379 static void gen_##name(DisasContext *ctx) \ 3380 { \ 3381 gen_conditional_store(ctx, memop); \ 3382 } 3383 3384 STCX(stbcx_, DEF_MEMOP(MO_UB)) 3385 STCX(sthcx_, DEF_MEMOP(MO_UW)) 3386 STCX(stwcx_, DEF_MEMOP(MO_UL)) 3387 3388 #if defined(TARGET_PPC64) 3389 /* ldarx */ 3390 LARX(ldarx, DEF_MEMOP(MO_Q)) 3391 /* stdcx. */ 3392 STCX(stdcx_, DEF_MEMOP(MO_Q)) 3393 3394 /* lqarx */ 3395 static void gen_lqarx(DisasContext *ctx) 3396 { 3397 int rd = rD(ctx->opcode); 3398 TCGv EA, hi, lo; 3399 3400 if (unlikely((rd & 1) || (rd == rA(ctx->opcode)) || 3401 (rd == rB(ctx->opcode)))) { 3402 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3403 return; 3404 } 3405 3406 gen_set_access_type(ctx, ACCESS_RES); 3407 EA = tcg_temp_new(); 3408 gen_addr_reg_index(ctx, EA); 3409 3410 /* Note that the low part is always in RD+1, even in LE mode. */ 3411 lo = cpu_gpr[rd + 1]; 3412 hi = cpu_gpr[rd]; 3413 3414 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3415 if (HAVE_ATOMIC128) { 3416 TCGv_i32 oi = tcg_temp_new_i32(); 3417 if (ctx->le_mode) { 3418 tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ | MO_ALIGN_16, 3419 ctx->mem_idx)); 3420 gen_helper_lq_le_parallel(lo, cpu_env, EA, oi); 3421 } else { 3422 tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ | MO_ALIGN_16, 3423 ctx->mem_idx)); 3424 gen_helper_lq_be_parallel(lo, cpu_env, EA, oi); 3425 } 3426 tcg_temp_free_i32(oi); 3427 tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh)); 3428 } else { 3429 /* Restart with exclusive lock. */ 3430 gen_helper_exit_atomic(cpu_env); 3431 ctx->base.is_jmp = DISAS_NORETURN; 3432 tcg_temp_free(EA); 3433 return; 3434 } 3435 } else if (ctx->le_mode) { 3436 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEQ | MO_ALIGN_16); 3437 tcg_gen_mov_tl(cpu_reserve, EA); 3438 gen_addr_add(ctx, EA, EA, 8); 3439 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEQ); 3440 } else { 3441 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEQ | MO_ALIGN_16); 3442 tcg_gen_mov_tl(cpu_reserve, EA); 3443 gen_addr_add(ctx, EA, EA, 8); 3444 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEQ); 3445 } 3446 tcg_temp_free(EA); 3447 3448 tcg_gen_st_tl(hi, cpu_env, offsetof(CPUPPCState, reserve_val)); 3449 tcg_gen_st_tl(lo, cpu_env, offsetof(CPUPPCState, reserve_val2)); 3450 } 3451 3452 /* stqcx. */ 3453 static void gen_stqcx_(DisasContext *ctx) 3454 { 3455 int rs = rS(ctx->opcode); 3456 TCGv EA, hi, lo; 3457 3458 if (unlikely(rs & 1)) { 3459 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3460 return; 3461 } 3462 3463 gen_set_access_type(ctx, ACCESS_RES); 3464 EA = tcg_temp_new(); 3465 gen_addr_reg_index(ctx, EA); 3466 3467 /* Note that the low part is always in RS+1, even in LE mode. 
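 * In the parallel case the stqcx helpers set CR0 themselves; the serial
 * path below compares the reservation address and both reservation values
 * by hand before performing the two stores.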
*/ 3468 lo = cpu_gpr[rs + 1]; 3469 hi = cpu_gpr[rs]; 3470 3471 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3472 if (HAVE_CMPXCHG128) { 3473 TCGv_i32 oi = tcg_const_i32(DEF_MEMOP(MO_Q) | MO_ALIGN_16);
3474 if (ctx->le_mode) { 3475 gen_helper_stqcx_le_parallel(cpu_crf[0], cpu_env, 3476 EA, lo, hi, oi);
3477 } else { 3478 gen_helper_stqcx_be_parallel(cpu_crf[0], cpu_env, 3479 EA, lo, hi, oi); 3480 }
3481 tcg_temp_free_i32(oi); 3482 } else { 3483 /* Restart with exclusive lock. */
3484 gen_helper_exit_atomic(cpu_env); 3485 ctx->base.is_jmp = DISAS_NORETURN; 3486 }
3487 tcg_temp_free(EA); 3488 } else { 3489 TCGLabel *lab_fail = gen_new_label();
3490 TCGLabel *lab_over = gen_new_label(); 3491 TCGv_i64 t0 = tcg_temp_new_i64();
3492 TCGv_i64 t1 = tcg_temp_new_i64(); 3493 3494 tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, lab_fail);
3495 tcg_temp_free(EA); 3496 3497 gen_qemu_ld64_i64(ctx, t0, cpu_reserve);
3498 tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode 3499 ? offsetof(CPUPPCState, reserve_val2)
3500 : offsetof(CPUPPCState, reserve_val))); 3501 tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail); 3502
3503 tcg_gen_addi_i64(t0, cpu_reserve, 8); 3504 gen_qemu_ld64_i64(ctx, t0, t0);
3505 tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode 3506 ? offsetof(CPUPPCState, reserve_val)
3507 : offsetof(CPUPPCState, reserve_val2))); 3508 tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail); 3509
3510 /* Success */ 3511 gen_qemu_st64_i64(ctx, ctx->le_mode ? lo : hi, cpu_reserve);
3512 tcg_gen_addi_i64(t0, cpu_reserve, 8); 3513 gen_qemu_st64_i64(ctx, ctx->le_mode ? hi : lo, t0); 3514
3515 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 3516 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ);
3517 tcg_gen_br(lab_over); 3518 3519 gen_set_label(lab_fail); 3520 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 3521
3522 gen_set_label(lab_over); 3523 tcg_gen_movi_tl(cpu_reserve, -1); 3524 tcg_temp_free_i64(t0);
3525 tcg_temp_free_i64(t1); 3526 } 3527 } 3528 #endif /* defined(TARGET_PPC64) */ 3529 3530 /* sync */
3531 static void gen_sync(DisasContext *ctx) 3532 { 3533 uint32_t l = (ctx->opcode >> 21) & 3; 3534 3535 /*
3536 * We may need to check for a pending TLB flush. 3537 *
3538 * We do this on ptesync (l == 2) on ppc64 and any sync on ppc32. 3539 *
3540 * Additionally, this can only happen in kernel mode, so
3541 * check MSR_PR as well.
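 * The check is requested with global == true because ptesync is the point
 * at which outstanding broadcast tlbie invalidations must have completed.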
3542 */ 3543 if (((l == 2) || !(ctx->insns_flags & PPC_64B)) && !ctx->pr) { 3544 gen_check_tlb_flush(ctx, true); 3545 } 3546 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC); 3547 } 3548 3549 /* wait */ 3550 static void gen_wait(DisasContext *ctx) 3551 { 3552 TCGv_i32 t0 = tcg_const_i32(1); 3553 tcg_gen_st_i32(t0, cpu_env, 3554 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted)); 3555 tcg_temp_free_i32(t0); 3556 /* Stop translation, as the CPU is supposed to sleep from now */ 3557 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 3558 } 3559 3560 #if defined(TARGET_PPC64) 3561 static void gen_doze(DisasContext *ctx) 3562 { 3563 #if defined(CONFIG_USER_ONLY) 3564 GEN_PRIV; 3565 #else 3566 TCGv_i32 t; 3567 3568 CHK_HV; 3569 t = tcg_const_i32(PPC_PM_DOZE); 3570 gen_helper_pminsn(cpu_env, t); 3571 tcg_temp_free_i32(t); 3572 gen_stop_exception(ctx); 3573 #endif /* defined(CONFIG_USER_ONLY) */ 3574 } 3575 3576 static void gen_nap(DisasContext *ctx) 3577 { 3578 #if defined(CONFIG_USER_ONLY) 3579 GEN_PRIV; 3580 #else 3581 TCGv_i32 t; 3582 3583 CHK_HV; 3584 t = tcg_const_i32(PPC_PM_NAP); 3585 gen_helper_pminsn(cpu_env, t); 3586 tcg_temp_free_i32(t); 3587 gen_stop_exception(ctx); 3588 #endif /* defined(CONFIG_USER_ONLY) */ 3589 } 3590 3591 static void gen_stop(DisasContext *ctx) 3592 { 3593 gen_nap(ctx); 3594 } 3595 3596 static void gen_sleep(DisasContext *ctx) 3597 { 3598 #if defined(CONFIG_USER_ONLY) 3599 GEN_PRIV; 3600 #else 3601 TCGv_i32 t; 3602 3603 CHK_HV; 3604 t = tcg_const_i32(PPC_PM_SLEEP); 3605 gen_helper_pminsn(cpu_env, t); 3606 tcg_temp_free_i32(t); 3607 gen_stop_exception(ctx); 3608 #endif /* defined(CONFIG_USER_ONLY) */ 3609 } 3610 3611 static void gen_rvwinkle(DisasContext *ctx) 3612 { 3613 #if defined(CONFIG_USER_ONLY) 3614 GEN_PRIV; 3615 #else 3616 TCGv_i32 t; 3617 3618 CHK_HV; 3619 t = tcg_const_i32(PPC_PM_RVWINKLE); 3620 gen_helper_pminsn(cpu_env, t); 3621 tcg_temp_free_i32(t); 3622 gen_stop_exception(ctx); 3623 #endif /* defined(CONFIG_USER_ONLY) */ 3624 } 3625 #endif /* #if defined(TARGET_PPC64) */ 3626 3627 static inline void gen_update_cfar(DisasContext *ctx, target_ulong nip) 3628 { 3629 #if defined(TARGET_PPC64) 3630 if (ctx->has_cfar) 3631 tcg_gen_movi_tl(cpu_cfar, nip); 3632 #endif 3633 } 3634 3635 static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest) 3636 { 3637 if (unlikely(ctx->singlestep_enabled)) { 3638 return false; 3639 } 3640 3641 #ifndef CONFIG_USER_ONLY 3642 return (ctx->base.tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK); 3643 #else 3644 return true; 3645 #endif 3646 } 3647 3648 static void gen_lookup_and_goto_ptr(DisasContext *ctx) 3649 { 3650 int sse = ctx->singlestep_enabled; 3651 if (unlikely(sse)) { 3652 if (sse & GDBSTUB_SINGLE_STEP) { 3653 gen_debug_exception(ctx); 3654 } else if (sse & (CPU_SINGLE_STEP | CPU_BRANCH_STEP)) { 3655 uint32_t excp = gen_prep_dbgex(ctx, POWERPC_EXCP_BRANCH); 3656 if (excp != POWERPC_EXCP_NONE) { 3657 gen_exception(ctx, excp); 3658 } 3659 } 3660 tcg_gen_exit_tb(NULL, 0); 3661 } else { 3662 tcg_gen_lookup_and_goto_ptr(); 3663 } 3664 } 3665 3666 /*** Branch ***/ 3667 static void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest) 3668 { 3669 if (NARROW_MODE(ctx)) { 3670 dest = (uint32_t) dest; 3671 } 3672 if (use_goto_tb(ctx, dest)) { 3673 tcg_gen_goto_tb(n); 3674 tcg_gen_movi_tl(cpu_nip, dest & ~3); 3675 tcg_gen_exit_tb(ctx->base.tb, n); 3676 } else { 3677 tcg_gen_movi_tl(cpu_nip, dest & ~3); 3678 gen_lookup_and_goto_ptr(ctx); 3679 } 3680 } 3681 3682 static inline void gen_setlr(DisasContext *ctx, 
target_ulong nip) 3683 { 3684 if (NARROW_MODE(ctx)) { 3685 nip = (uint32_t)nip; 3686 } 3687 tcg_gen_movi_tl(cpu_lr, nip); 3688 } 3689 3690 /* b ba bl bla */ 3691 static void gen_b(DisasContext *ctx) 3692 { 3693 target_ulong li, target; 3694 3695 ctx->exception = POWERPC_EXCP_BRANCH; 3696 /* sign extend LI */ 3697 li = LI(ctx->opcode); 3698 li = (li ^ 0x02000000) - 0x02000000; 3699 if (likely(AA(ctx->opcode) == 0)) { 3700 target = ctx->base.pc_next + li - 4; 3701 } else { 3702 target = li; 3703 } 3704 if (LK(ctx->opcode)) { 3705 gen_setlr(ctx, ctx->base.pc_next); 3706 } 3707 gen_update_cfar(ctx, ctx->base.pc_next - 4); 3708 gen_goto_tb(ctx, 0, target); 3709 } 3710 3711 #define BCOND_IM 0 3712 #define BCOND_LR 1 3713 #define BCOND_CTR 2 3714 #define BCOND_TAR 3 3715 3716 static void gen_bcond(DisasContext *ctx, int type) 3717 { 3718 uint32_t bo = BO(ctx->opcode); 3719 TCGLabel *l1; 3720 TCGv target; 3721 ctx->exception = POWERPC_EXCP_BRANCH; 3722 3723 if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) { 3724 target = tcg_temp_local_new(); 3725 if (type == BCOND_CTR) 3726 tcg_gen_mov_tl(target, cpu_ctr); 3727 else if (type == BCOND_TAR) 3728 gen_load_spr(target, SPR_TAR); 3729 else 3730 tcg_gen_mov_tl(target, cpu_lr); 3731 } else { 3732 target = NULL; 3733 } 3734 if (LK(ctx->opcode)) 3735 gen_setlr(ctx, ctx->base.pc_next); 3736 l1 = gen_new_label(); 3737 if ((bo & 0x4) == 0) { 3738 /* Decrement and test CTR */ 3739 TCGv temp = tcg_temp_new(); 3740 if (unlikely(type == BCOND_CTR)) { 3741 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3742 return; 3743 } 3744 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1); 3745 if (NARROW_MODE(ctx)) { 3746 tcg_gen_ext32u_tl(temp, cpu_ctr); 3747 } else { 3748 tcg_gen_mov_tl(temp, cpu_ctr); 3749 } 3750 if (bo & 0x2) { 3751 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1); 3752 } else { 3753 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1); 3754 } 3755 tcg_temp_free(temp); 3756 } 3757 if ((bo & 0x10) == 0) { 3758 /* Test CR */ 3759 uint32_t bi = BI(ctx->opcode); 3760 uint32_t mask = 0x08 >> (bi & 0x03); 3761 TCGv_i32 temp = tcg_temp_new_i32(); 3762 3763 if (bo & 0x8) { 3764 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask); 3765 tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1); 3766 } else { 3767 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask); 3768 tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1); 3769 } 3770 tcg_temp_free_i32(temp); 3771 } 3772 gen_update_cfar(ctx, ctx->base.pc_next - 4); 3773 if (type == BCOND_IM) { 3774 target_ulong li = (target_long)((int16_t)(BD(ctx->opcode))); 3775 if (likely(AA(ctx->opcode) == 0)) { 3776 gen_goto_tb(ctx, 0, ctx->base.pc_next + li - 4); 3777 } else { 3778 gen_goto_tb(ctx, 0, li); 3779 } 3780 } else { 3781 if (NARROW_MODE(ctx)) { 3782 tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3); 3783 } else { 3784 tcg_gen_andi_tl(cpu_nip, target, ~3); 3785 } 3786 gen_lookup_and_goto_ptr(ctx); 3787 tcg_temp_free(target); 3788 } 3789 if ((bo & 0x14) != 0x14) { 3790 /* fallthrough case */ 3791 gen_set_label(l1); 3792 gen_goto_tb(ctx, 1, ctx->base.pc_next); 3793 } 3794 } 3795 3796 static void gen_bc(DisasContext *ctx) 3797 { 3798 gen_bcond(ctx, BCOND_IM); 3799 } 3800 3801 static void gen_bcctr(DisasContext *ctx) 3802 { 3803 gen_bcond(ctx, BCOND_CTR); 3804 } 3805 3806 static void gen_bclr(DisasContext *ctx) 3807 { 3808 gen_bcond(ctx, BCOND_LR); 3809 } 3810 3811 static void gen_bctar(DisasContext *ctx) 3812 { 3813 gen_bcond(ctx, BCOND_TAR); 3814 } 3815 3816 /*** Condition register logical ***/ 3817 #define GEN_CRLOGIC(name, tcg_op, opc) \ 3818 static 
void glue(gen_, name)(DisasContext *ctx) \ 3819 { \ 3820 uint8_t bitmask; \ 3821 int sh; \ 3822 TCGv_i32 t0, t1; \ 3823 sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03); \ 3824 t0 = tcg_temp_new_i32(); \ 3825 if (sh > 0) \ 3826 tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh); \ 3827 else if (sh < 0) \ 3828 tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh); \ 3829 else \ 3830 tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]); \ 3831 t1 = tcg_temp_new_i32(); \ 3832 sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03); \ 3833 if (sh > 0) \ 3834 tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh); \ 3835 else if (sh < 0) \ 3836 tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh); \ 3837 else \ 3838 tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]); \ 3839 tcg_op(t0, t0, t1); \ 3840 bitmask = 0x08 >> (crbD(ctx->opcode) & 0x03); \ 3841 tcg_gen_andi_i32(t0, t0, bitmask); \ 3842 tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask); \ 3843 tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1); \ 3844 tcg_temp_free_i32(t0); \ 3845 tcg_temp_free_i32(t1); \ 3846 } 3847 3848 /* crand */ 3849 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08); 3850 /* crandc */ 3851 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04); 3852 /* creqv */ 3853 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09); 3854 /* crnand */ 3855 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07); 3856 /* crnor */ 3857 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01); 3858 /* cror */ 3859 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E); 3860 /* crorc */ 3861 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D); 3862 /* crxor */ 3863 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06); 3864 3865 /* mcrf */ 3866 static void gen_mcrf(DisasContext *ctx) 3867 { 3868 tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]); 3869 } 3870 3871 /*** System linkage ***/ 3872 3873 /* rfi (supervisor only) */ 3874 static void gen_rfi(DisasContext *ctx) 3875 { 3876 #if defined(CONFIG_USER_ONLY) 3877 GEN_PRIV; 3878 #else 3879 /* This instruction doesn't exist anymore on 64-bit server 3880 * processors compliant with arch 2.x 3881 */ 3882 if (ctx->insns_flags & PPC_SEGMENT_64B) { 3883 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3884 return; 3885 } 3886 /* Restore CPU state */ 3887 CHK_SV; 3888 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) { 3889 gen_io_start(); 3890 } 3891 gen_update_cfar(ctx, ctx->base.pc_next - 4); 3892 gen_helper_rfi(cpu_env); 3893 gen_sync_exception(ctx); 3894 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) { 3895 gen_io_end(); 3896 } 3897 #endif 3898 } 3899 3900 #if defined(TARGET_PPC64) 3901 static void gen_rfid(DisasContext *ctx) 3902 { 3903 #if defined(CONFIG_USER_ONLY) 3904 GEN_PRIV; 3905 #else 3906 /* Restore CPU state */ 3907 CHK_SV; 3908 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) { 3909 gen_io_start(); 3910 } 3911 gen_update_cfar(ctx, ctx->base.pc_next - 4); 3912 gen_helper_rfid(cpu_env); 3913 gen_sync_exception(ctx); 3914 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) { 3915 gen_io_end(); 3916 } 3917 #endif 3918 } 3919 3920 static void gen_hrfid(DisasContext *ctx) 3921 { 3922 #if defined(CONFIG_USER_ONLY) 3923 GEN_PRIV; 3924 #else 3925 /* Restore CPU state */ 3926 CHK_HV; 3927 gen_helper_hrfid(cpu_env); 3928 gen_sync_exception(ctx); 3929 #endif 3930 } 3931 #endif 3932 3933 /* sc */ 3934 #if defined(CONFIG_USER_ONLY) 3935 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER 3936 #else 3937 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL 3938 #endif 3939 static void gen_sc(DisasContext *ctx) 3940 { 3941 
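/* The LEV field is taken from opcode bits 5..11 and passed to the exception
   code as the error value; among other things it distinguishes hypervisor
   system calls (LEV = 1) on server CPUs. */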
uint32_t lev; 3942 3943 lev = (ctx->opcode >> 5) & 0x7F; 3944 gen_exception_err(ctx, POWERPC_SYSCALL, lev); 3945 } 3946 3947 /*** Trap ***/ 3948 3949 /* Check for unconditional traps (always or never) */ 3950 static bool check_unconditional_trap(DisasContext *ctx) 3951 { 3952 /* Trap never */ 3953 if (TO(ctx->opcode) == 0) { 3954 return true; 3955 } 3956 /* Trap always */ 3957 if (TO(ctx->opcode) == 31) { 3958 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_TRAP); 3959 return true; 3960 } 3961 return false; 3962 } 3963 3964 /* tw */ 3965 static void gen_tw(DisasContext *ctx) 3966 { 3967 TCGv_i32 t0; 3968 3969 if (check_unconditional_trap(ctx)) { 3970 return; 3971 } 3972 t0 = tcg_const_i32(TO(ctx->opcode)); 3973 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 3974 t0); 3975 tcg_temp_free_i32(t0); 3976 } 3977 3978 /* twi */ 3979 static void gen_twi(DisasContext *ctx) 3980 { 3981 TCGv t0; 3982 TCGv_i32 t1; 3983 3984 if (check_unconditional_trap(ctx)) { 3985 return; 3986 } 3987 t0 = tcg_const_tl(SIMM(ctx->opcode)); 3988 t1 = tcg_const_i32(TO(ctx->opcode)); 3989 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1); 3990 tcg_temp_free(t0); 3991 tcg_temp_free_i32(t1); 3992 } 3993 3994 #if defined(TARGET_PPC64) 3995 /* td */ 3996 static void gen_td(DisasContext *ctx) 3997 { 3998 TCGv_i32 t0; 3999 4000 if (check_unconditional_trap(ctx)) { 4001 return; 4002 } 4003 t0 = tcg_const_i32(TO(ctx->opcode)); 4004 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 4005 t0); 4006 tcg_temp_free_i32(t0); 4007 } 4008 4009 /* tdi */ 4010 static void gen_tdi(DisasContext *ctx) 4011 { 4012 TCGv t0; 4013 TCGv_i32 t1; 4014 4015 if (check_unconditional_trap(ctx)) { 4016 return; 4017 } 4018 t0 = tcg_const_tl(SIMM(ctx->opcode)); 4019 t1 = tcg_const_i32(TO(ctx->opcode)); 4020 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1); 4021 tcg_temp_free(t0); 4022 tcg_temp_free_i32(t1); 4023 } 4024 #endif 4025 4026 /*** Processor control ***/ 4027 4028 static void gen_read_xer(DisasContext *ctx, TCGv dst) 4029 { 4030 TCGv t0 = tcg_temp_new(); 4031 TCGv t1 = tcg_temp_new(); 4032 TCGv t2 = tcg_temp_new(); 4033 tcg_gen_mov_tl(dst, cpu_xer); 4034 tcg_gen_shli_tl(t0, cpu_so, XER_SO); 4035 tcg_gen_shli_tl(t1, cpu_ov, XER_OV); 4036 tcg_gen_shli_tl(t2, cpu_ca, XER_CA); 4037 tcg_gen_or_tl(t0, t0, t1); 4038 tcg_gen_or_tl(dst, dst, t2); 4039 tcg_gen_or_tl(dst, dst, t0); 4040 if (is_isa300(ctx)) { 4041 tcg_gen_shli_tl(t0, cpu_ov32, XER_OV32); 4042 tcg_gen_or_tl(dst, dst, t0); 4043 tcg_gen_shli_tl(t0, cpu_ca32, XER_CA32); 4044 tcg_gen_or_tl(dst, dst, t0); 4045 } 4046 tcg_temp_free(t0); 4047 tcg_temp_free(t1); 4048 tcg_temp_free(t2); 4049 } 4050 4051 static void gen_write_xer(TCGv src) 4052 { 4053 /* Write all flags, while reading back check for isa300 */ 4054 tcg_gen_andi_tl(cpu_xer, src, 4055 ~((1u << XER_SO) | 4056 (1u << XER_OV) | (1u << XER_OV32) | 4057 (1u << XER_CA) | (1u << XER_CA32))); 4058 tcg_gen_extract_tl(cpu_ov32, src, XER_OV32, 1); 4059 tcg_gen_extract_tl(cpu_ca32, src, XER_CA32, 1); 4060 tcg_gen_extract_tl(cpu_so, src, XER_SO, 1); 4061 tcg_gen_extract_tl(cpu_ov, src, XER_OV, 1); 4062 tcg_gen_extract_tl(cpu_ca, src, XER_CA, 1); 4063 } 4064 4065 /* mcrxr */ 4066 static void gen_mcrxr(DisasContext *ctx) 4067 { 4068 TCGv_i32 t0 = tcg_temp_new_i32(); 4069 TCGv_i32 t1 = tcg_temp_new_i32(); 4070 TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)]; 4071 4072 tcg_gen_trunc_tl_i32(t0, cpu_so); 4073 tcg_gen_trunc_tl_i32(t1, cpu_ov); 4074 tcg_gen_trunc_tl_i32(dst, cpu_ca); 4075 
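/* Pack SO, OV and CA into bits 3, 2 and 1 of the target CR field (bit 0
   stays zero), then clear the corresponding XER bits below. */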
tcg_gen_shli_i32(t0, t0, 3); 4076 tcg_gen_shli_i32(t1, t1, 2); 4077 tcg_gen_shli_i32(dst, dst, 1); 4078 tcg_gen_or_i32(dst, dst, t0); 4079 tcg_gen_or_i32(dst, dst, t1); 4080 tcg_temp_free_i32(t0); 4081 tcg_temp_free_i32(t1); 4082 4083 tcg_gen_movi_tl(cpu_so, 0); 4084 tcg_gen_movi_tl(cpu_ov, 0); 4085 tcg_gen_movi_tl(cpu_ca, 0); 4086 } 4087 4088 #ifdef TARGET_PPC64 4089 /* mcrxrx */ 4090 static void gen_mcrxrx(DisasContext *ctx) 4091 { 4092 TCGv t0 = tcg_temp_new(); 4093 TCGv t1 = tcg_temp_new(); 4094 TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)]; 4095 4096 /* copy OV and OV32 */ 4097 tcg_gen_shli_tl(t0, cpu_ov, 1); 4098 tcg_gen_or_tl(t0, t0, cpu_ov32); 4099 tcg_gen_shli_tl(t0, t0, 2); 4100 /* copy CA and CA32 */ 4101 tcg_gen_shli_tl(t1, cpu_ca, 1); 4102 tcg_gen_or_tl(t1, t1, cpu_ca32); 4103 tcg_gen_or_tl(t0, t0, t1); 4104 tcg_gen_trunc_tl_i32(dst, t0); 4105 tcg_temp_free(t0); 4106 tcg_temp_free(t1); 4107 } 4108 #endif 4109 4110 /* mfcr mfocrf */ 4111 static void gen_mfcr(DisasContext *ctx) 4112 { 4113 uint32_t crm, crn; 4114 4115 if (likely(ctx->opcode & 0x00100000)) { 4116 crm = CRM(ctx->opcode); 4117 if (likely(crm && ((crm & (crm - 1)) == 0))) { 4118 crn = ctz32 (crm); 4119 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]); 4120 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], 4121 cpu_gpr[rD(ctx->opcode)], crn * 4); 4122 } 4123 } else { 4124 TCGv_i32 t0 = tcg_temp_new_i32(); 4125 tcg_gen_mov_i32(t0, cpu_crf[0]); 4126 tcg_gen_shli_i32(t0, t0, 4); 4127 tcg_gen_or_i32(t0, t0, cpu_crf[1]); 4128 tcg_gen_shli_i32(t0, t0, 4); 4129 tcg_gen_or_i32(t0, t0, cpu_crf[2]); 4130 tcg_gen_shli_i32(t0, t0, 4); 4131 tcg_gen_or_i32(t0, t0, cpu_crf[3]); 4132 tcg_gen_shli_i32(t0, t0, 4); 4133 tcg_gen_or_i32(t0, t0, cpu_crf[4]); 4134 tcg_gen_shli_i32(t0, t0, 4); 4135 tcg_gen_or_i32(t0, t0, cpu_crf[5]); 4136 tcg_gen_shli_i32(t0, t0, 4); 4137 tcg_gen_or_i32(t0, t0, cpu_crf[6]); 4138 tcg_gen_shli_i32(t0, t0, 4); 4139 tcg_gen_or_i32(t0, t0, cpu_crf[7]); 4140 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0); 4141 tcg_temp_free_i32(t0); 4142 } 4143 } 4144 4145 /* mfmsr */ 4146 static void gen_mfmsr(DisasContext *ctx) 4147 { 4148 CHK_SV; 4149 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr); 4150 } 4151 4152 static void spr_noaccess(DisasContext *ctx, int gprn, int sprn) 4153 { 4154 #if 0 4155 sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5); 4156 printf("ERROR: try to access SPR %d !\n", sprn); 4157 #endif 4158 } 4159 #define SPR_NOACCESS (&spr_noaccess) 4160 4161 /* mfspr */ 4162 static inline void gen_op_mfspr(DisasContext *ctx) 4163 { 4164 void (*read_cb)(DisasContext *ctx, int gprn, int sprn); 4165 uint32_t sprn = SPR(ctx->opcode); 4166 4167 #if defined(CONFIG_USER_ONLY) 4168 read_cb = ctx->spr_cb[sprn].uea_read; 4169 #else 4170 if (ctx->pr) { 4171 read_cb = ctx->spr_cb[sprn].uea_read; 4172 } else if (ctx->hv) { 4173 read_cb = ctx->spr_cb[sprn].hea_read; 4174 } else { 4175 read_cb = ctx->spr_cb[sprn].oea_read; 4176 } 4177 #endif 4178 if (likely(read_cb != NULL)) { 4179 if (likely(read_cb != SPR_NOACCESS)) { 4180 (*read_cb)(ctx, rD(ctx->opcode), sprn); 4181 } else { 4182 /* Privilege exception */ 4183 /* This is a hack to avoid warnings when running Linux: 4184 * this OS breaks the PowerPC virtualisation model, 4185 * allowing userland application to read the PVR 4186 */ 4187 if (sprn != SPR_PVR) { 4188 qemu_log_mask(LOG_GUEST_ERROR, "Trying to read privileged spr " 4189 "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn, 4190 ctx->base.pc_next - 4); 4191 } 4192 gen_priv_exception(ctx, 
POWERPC_EXCP_PRIV_REG); 4193 } 4194 } else { 4195 /* ISA 2.07 defines these as no-ops */ 4196 if ((ctx->insns_flags2 & PPC2_ISA207S) && 4197 (sprn >= 808 && sprn <= 811)) { 4198 /* This is a nop */ 4199 return; 4200 } 4201 /* Not defined */ 4202 qemu_log_mask(LOG_GUEST_ERROR, 4203 "Trying to read invalid spr %d (0x%03x) at " 4204 TARGET_FMT_lx "\n", sprn, sprn, ctx->base.pc_next - 4); 4205 4206 /* The behaviour depends on MSR:PR and SPR# bit 0x10, 4207 * it can generate a priv, a hv emu or a no-op 4208 */ 4209 if (sprn & 0x10) { 4210 if (ctx->pr) { 4211 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 4212 } 4213 } else { 4214 if (ctx->pr || sprn == 0 || sprn == 4 || sprn == 5 || sprn == 6) { 4215 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 4216 } 4217 } 4218 } 4219 } 4220 4221 static void gen_mfspr(DisasContext *ctx) 4222 { 4223 gen_op_mfspr(ctx); 4224 } 4225 4226 /* mftb */ 4227 static void gen_mftb(DisasContext *ctx) 4228 { 4229 gen_op_mfspr(ctx); 4230 } 4231 4232 /* mtcrf mtocrf*/ 4233 static void gen_mtcrf(DisasContext *ctx) 4234 { 4235 uint32_t crm, crn; 4236 4237 crm = CRM(ctx->opcode); 4238 if (likely((ctx->opcode & 0x00100000))) { 4239 if (crm && ((crm & (crm - 1)) == 0)) { 4240 TCGv_i32 temp = tcg_temp_new_i32(); 4241 crn = ctz32 (crm); 4242 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]); 4243 tcg_gen_shri_i32(temp, temp, crn * 4); 4244 tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf); 4245 tcg_temp_free_i32(temp); 4246 } 4247 } else { 4248 TCGv_i32 temp = tcg_temp_new_i32(); 4249 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]); 4250 for (crn = 0 ; crn < 8 ; crn++) { 4251 if (crm & (1 << crn)) { 4252 tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4); 4253 tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf); 4254 } 4255 } 4256 tcg_temp_free_i32(temp); 4257 } 4258 } 4259 4260 /* mtmsr */ 4261 #if defined(TARGET_PPC64) 4262 static void gen_mtmsrd(DisasContext *ctx) 4263 { 4264 CHK_SV; 4265 4266 #if !defined(CONFIG_USER_ONLY) 4267 if (ctx->opcode & 0x00010000) { 4268 /* Special form that does not need any synchronisation */ 4269 TCGv t0 = tcg_temp_new(); 4270 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE)); 4271 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE))); 4272 tcg_gen_or_tl(cpu_msr, cpu_msr, t0); 4273 tcg_temp_free(t0); 4274 } else { 4275 /* XXX: we need to update nip before the store 4276 * if we enter power saving mode, we will exit the loop 4277 * directly from ppc_store_msr 4278 */ 4279 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) { 4280 gen_io_start(); 4281 } 4282 gen_update_nip(ctx, ctx->base.pc_next); 4283 gen_helper_store_msr(cpu_env, cpu_gpr[rS(ctx->opcode)]); 4284 /* Must stop the translation as machine state (may have) changed */ 4285 /* Note that mtmsr is not always defined as context-synchronizing */ 4286 gen_stop_exception(ctx); 4287 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) { 4288 gen_io_end(); 4289 } 4290 } 4291 #endif /* !defined(CONFIG_USER_ONLY) */ 4292 } 4293 #endif /* defined(TARGET_PPC64) */ 4294 4295 static void gen_mtmsr(DisasContext *ctx) 4296 { 4297 CHK_SV; 4298 4299 #if !defined(CONFIG_USER_ONLY) 4300 if (ctx->opcode & 0x00010000) { 4301 /* Special form that does not need any synchronisation */ 4302 TCGv t0 = tcg_temp_new(); 4303 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE)); 4304 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE))); 4305 tcg_gen_or_tl(cpu_msr, cpu_msr, t0); 4306 tcg_temp_free(t0); 4307 } 
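    /* Full form below: on 64-bit CPUs only the low 32 MSR bits are
     * replaced; the helper may change machine state, so the TB is ended
     * afterwards.
     */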
else { 4308 TCGv msr = tcg_temp_new(); 4309 4310 /* XXX: we need to update nip before the store 4311 * if we enter power saving mode, we will exit the loop 4312 * directly from ppc_store_msr 4313 */ 4314 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) { 4315 gen_io_start(); 4316 } 4317 gen_update_nip(ctx, ctx->base.pc_next); 4318 #if defined(TARGET_PPC64) 4319 tcg_gen_deposit_tl(msr, cpu_msr, cpu_gpr[rS(ctx->opcode)], 0, 32); 4320 #else 4321 tcg_gen_mov_tl(msr, cpu_gpr[rS(ctx->opcode)]); 4322 #endif 4323 gen_helper_store_msr(cpu_env, msr); 4324 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) { 4325 gen_io_end(); 4326 } 4327 tcg_temp_free(msr); 4328 /* Must stop the translation as machine state (may have) changed */ 4329 /* Note that mtmsr is not always defined as context-synchronizing */ 4330 gen_stop_exception(ctx); 4331 } 4332 #endif 4333 } 4334 4335 /* mtspr */ 4336 static void gen_mtspr(DisasContext *ctx) 4337 { 4338 void (*write_cb)(DisasContext *ctx, int sprn, int gprn); 4339 uint32_t sprn = SPR(ctx->opcode); 4340 4341 #if defined(CONFIG_USER_ONLY) 4342 write_cb = ctx->spr_cb[sprn].uea_write; 4343 #else 4344 if (ctx->pr) { 4345 write_cb = ctx->spr_cb[sprn].uea_write; 4346 } else if (ctx->hv) { 4347 write_cb = ctx->spr_cb[sprn].hea_write; 4348 } else { 4349 write_cb = ctx->spr_cb[sprn].oea_write; 4350 } 4351 #endif 4352 if (likely(write_cb != NULL)) { 4353 if (likely(write_cb != SPR_NOACCESS)) { 4354 (*write_cb)(ctx, sprn, rS(ctx->opcode)); 4355 } else { 4356 /* Privilege exception */ 4357 qemu_log_mask(LOG_GUEST_ERROR, "Trying to write privileged spr " 4358 "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn, 4359 ctx->base.pc_next - 4); 4360 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG); 4361 } 4362 } else { 4363 /* ISA 2.07 defines these as no-ops */ 4364 if ((ctx->insns_flags2 & PPC2_ISA207S) && 4365 (sprn >= 808 && sprn <= 811)) { 4366 /* This is a nop */ 4367 return; 4368 } 4369 4370 /* Not defined */ 4371 qemu_log_mask(LOG_GUEST_ERROR, 4372 "Trying to write invalid spr %d (0x%03x) at " 4373 TARGET_FMT_lx "\n", sprn, sprn, ctx->base.pc_next - 4); 4374 4375 4376 /* The behaviour depends on MSR:PR and SPR# bit 0x10, 4377 * it can generate a priv, a hv emu or a no-op 4378 */ 4379 if (sprn & 0x10) { 4380 if (ctx->pr) { 4381 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 4382 } 4383 } else { 4384 if (ctx->pr || sprn == 0) { 4385 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 4386 } 4387 } 4388 } 4389 } 4390 4391 #if defined(TARGET_PPC64) 4392 /* setb */ 4393 static void gen_setb(DisasContext *ctx) 4394 { 4395 TCGv_i32 t0 = tcg_temp_new_i32(); 4396 TCGv_i32 t8 = tcg_temp_new_i32(); 4397 TCGv_i32 tm1 = tcg_temp_new_i32(); 4398 int crf = crfS(ctx->opcode); 4399 4400 tcg_gen_setcondi_i32(TCG_COND_GEU, t0, cpu_crf[crf], 4); 4401 tcg_gen_movi_i32(t8, 8); 4402 tcg_gen_movi_i32(tm1, -1); 4403 tcg_gen_movcond_i32(TCG_COND_GEU, t0, cpu_crf[crf], t8, tm1, t0); 4404 tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0); 4405 4406 tcg_temp_free_i32(t0); 4407 tcg_temp_free_i32(t8); 4408 tcg_temp_free_i32(tm1); 4409 } 4410 #endif 4411 4412 /*** Cache management ***/ 4413 4414 /* dcbf */ 4415 static void gen_dcbf(DisasContext *ctx) 4416 { 4417 /* XXX: specification says this is treated as a load by the MMU */ 4418 TCGv t0; 4419 gen_set_access_type(ctx, ACCESS_CACHE); 4420 t0 = tcg_temp_new(); 4421 gen_addr_reg_index(ctx, t0); 4422 gen_qemu_ld8u(ctx, t0, t0); 4423 tcg_temp_free(t0); 4424 } 4425 4426 /* dcbfep (external PID dcbf) */ 4427 static void gen_dcbfep(DisasContext *ctx) 4428 { 4429 /* XXX: 
    specification says this is treated as a load by the MMU */
    TCGv t0;
    CHK_SV;
    gen_set_access_type(ctx, ACCESS_CACHE);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB));
    tcg_temp_free(t0);
}

/* dcbi (Supervisor only) */
static void gen_dcbi(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv EA, val;

    CHK_SV;
    EA = tcg_temp_new();
    gen_set_access_type(ctx, ACCESS_CACHE);
    gen_addr_reg_index(ctx, EA);
    val = tcg_temp_new();
    /* XXX: specification says this should be treated as a store by the MMU */
    gen_qemu_ld8u(ctx, val, EA);
    gen_qemu_st8(ctx, val, EA);
    tcg_temp_free(val);
    tcg_temp_free(EA);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* dcbst */
static void gen_dcbst(DisasContext *ctx)
{
    /* XXX: specification says this is treated as a load by the MMU */
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_CACHE);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_qemu_ld8u(ctx, t0, t0);
    tcg_temp_free(t0);
}

/* dcbstep (dcbstep External PID version) */
static void gen_dcbstep(DisasContext *ctx)
{
    /* XXX: specification says this is treated as a load by the MMU */
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_CACHE);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB));
    tcg_temp_free(t0);
}

/* dcbt */
static void gen_dcbt(DisasContext *ctx)
{
    /* interpreted as no-op */
    /* XXX: specification says this is treated as a load by the MMU
     * but does not generate any exception
     */
}

/* dcbtep */
static void gen_dcbtep(DisasContext *ctx)
{
    /* interpreted as no-op */
    /* XXX: specification says this is treated as a load by the MMU
     * but does not generate any exception
     */
}

/* dcbtst */
static void gen_dcbtst(DisasContext *ctx)
{
    /* interpreted as no-op */
    /* XXX: specification says this is treated as a load by the MMU
     * but does not generate any exception
     */
}

/* dcbtstep */
static void gen_dcbtstep(DisasContext *ctx)
{
    /* interpreted as no-op */
    /* XXX: specification says this is treated as a load by the MMU
     * but does not generate any exception
     */
}

/* dcbtls */
static void gen_dcbtls(DisasContext *ctx)
{
    /* Always fails locking the cache */
    TCGv t0 = tcg_temp_new();
    gen_load_spr(t0, SPR_Exxx_L1CSR0);
    tcg_gen_ori_tl(t0, t0, L1CSR0_CUL);
    gen_store_spr(SPR_Exxx_L1CSR0, t0);
    tcg_temp_free(t0);
}

/* dcbz */
static void gen_dcbz(DisasContext *ctx)
{
    TCGv tcgv_addr;
    TCGv_i32 tcgv_op;

    gen_set_access_type(ctx, ACCESS_CACHE);
    tcgv_addr = tcg_temp_new();
    tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000);
    gen_addr_reg_index(ctx, tcgv_addr);
    gen_helper_dcbz(cpu_env, tcgv_addr, tcgv_op);
    tcg_temp_free(tcgv_addr);
    tcg_temp_free_i32(tcgv_op);
}

/* dcbzep */
static void gen_dcbzep(DisasContext *ctx)
{
    TCGv tcgv_addr;
    TCGv_i32 tcgv_op;

    gen_set_access_type(ctx, ACCESS_CACHE);
    tcgv_addr = tcg_temp_new();
    tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000);
    gen_addr_reg_index(ctx, tcgv_addr);
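    /* The helper zeroes a full cache line at the computed EA using the
     * external-PID translation; the masked opcode bits are passed along,
     * presumably so it can distinguish dcbz variants.
     */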
4556 gen_helper_dcbzep(cpu_env, tcgv_addr, tcgv_op); 4557 tcg_temp_free(tcgv_addr); 4558 tcg_temp_free_i32(tcgv_op); 4559 } 4560 4561 /* dst / dstt */ 4562 static void gen_dst(DisasContext *ctx) 4563 { 4564 if (rA(ctx->opcode) == 0) { 4565 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 4566 } else { 4567 /* interpreted as no-op */ 4568 } 4569 } 4570 4571 /* dstst /dststt */ 4572 static void gen_dstst(DisasContext *ctx) 4573 { 4574 if (rA(ctx->opcode) == 0) { 4575 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 4576 } else { 4577 /* interpreted as no-op */ 4578 } 4579 4580 } 4581 4582 /* dss / dssall */ 4583 static void gen_dss(DisasContext *ctx) 4584 { 4585 /* interpreted as no-op */ 4586 } 4587 4588 /* icbi */ 4589 static void gen_icbi(DisasContext *ctx) 4590 { 4591 TCGv t0; 4592 gen_set_access_type(ctx, ACCESS_CACHE); 4593 t0 = tcg_temp_new(); 4594 gen_addr_reg_index(ctx, t0); 4595 gen_helper_icbi(cpu_env, t0); 4596 tcg_temp_free(t0); 4597 } 4598 4599 /* icbiep */ 4600 static void gen_icbiep(DisasContext *ctx) 4601 { 4602 TCGv t0; 4603 gen_set_access_type(ctx, ACCESS_CACHE); 4604 t0 = tcg_temp_new(); 4605 gen_addr_reg_index(ctx, t0); 4606 gen_helper_icbiep(cpu_env, t0); 4607 tcg_temp_free(t0); 4608 } 4609 4610 /* Optional: */ 4611 /* dcba */ 4612 static void gen_dcba(DisasContext *ctx) 4613 { 4614 /* interpreted as no-op */ 4615 /* XXX: specification say this is treated as a store by the MMU 4616 * but does not generate any exception 4617 */ 4618 } 4619 4620 /*** Segment register manipulation ***/ 4621 /* Supervisor only: */ 4622 4623 /* mfsr */ 4624 static void gen_mfsr(DisasContext *ctx) 4625 { 4626 #if defined(CONFIG_USER_ONLY) 4627 GEN_PRIV; 4628 #else 4629 TCGv t0; 4630 4631 CHK_SV; 4632 t0 = tcg_const_tl(SR(ctx->opcode)); 4633 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 4634 tcg_temp_free(t0); 4635 #endif /* defined(CONFIG_USER_ONLY) */ 4636 } 4637 4638 /* mfsrin */ 4639 static void gen_mfsrin(DisasContext *ctx) 4640 { 4641 #if defined(CONFIG_USER_ONLY) 4642 GEN_PRIV; 4643 #else 4644 TCGv t0; 4645 4646 CHK_SV; 4647 t0 = tcg_temp_new(); 4648 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 4649 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 4650 tcg_temp_free(t0); 4651 #endif /* defined(CONFIG_USER_ONLY) */ 4652 } 4653 4654 /* mtsr */ 4655 static void gen_mtsr(DisasContext *ctx) 4656 { 4657 #if defined(CONFIG_USER_ONLY) 4658 GEN_PRIV; 4659 #else 4660 TCGv t0; 4661 4662 CHK_SV; 4663 t0 = tcg_const_tl(SR(ctx->opcode)); 4664 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 4665 tcg_temp_free(t0); 4666 #endif /* defined(CONFIG_USER_ONLY) */ 4667 } 4668 4669 /* mtsrin */ 4670 static void gen_mtsrin(DisasContext *ctx) 4671 { 4672 #if defined(CONFIG_USER_ONLY) 4673 GEN_PRIV; 4674 #else 4675 TCGv t0; 4676 CHK_SV; 4677 4678 t0 = tcg_temp_new(); 4679 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 4680 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rD(ctx->opcode)]); 4681 tcg_temp_free(t0); 4682 #endif /* defined(CONFIG_USER_ONLY) */ 4683 } 4684 4685 #if defined(TARGET_PPC64) 4686 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */ 4687 4688 /* mfsr */ 4689 static void gen_mfsr_64b(DisasContext *ctx) 4690 { 4691 #if defined(CONFIG_USER_ONLY) 4692 GEN_PRIV; 4693 #else 4694 TCGv t0; 4695 4696 CHK_SV; 4697 t0 = tcg_const_tl(SR(ctx->opcode)); 4698 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 4699 tcg_temp_free(t0); 4700 #endif /* defined(CONFIG_USER_ONLY) */ 4701 } 4702 4703 /* mfsrin */ 4704 static void 
gen_mfsrin_64b(DisasContext *ctx) 4705 { 4706 #if defined(CONFIG_USER_ONLY) 4707 GEN_PRIV; 4708 #else 4709 TCGv t0; 4710 4711 CHK_SV; 4712 t0 = tcg_temp_new(); 4713 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 4714 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 4715 tcg_temp_free(t0); 4716 #endif /* defined(CONFIG_USER_ONLY) */ 4717 } 4718 4719 /* mtsr */ 4720 static void gen_mtsr_64b(DisasContext *ctx) 4721 { 4722 #if defined(CONFIG_USER_ONLY) 4723 GEN_PRIV; 4724 #else 4725 TCGv t0; 4726 4727 CHK_SV; 4728 t0 = tcg_const_tl(SR(ctx->opcode)); 4729 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 4730 tcg_temp_free(t0); 4731 #endif /* defined(CONFIG_USER_ONLY) */ 4732 } 4733 4734 /* mtsrin */ 4735 static void gen_mtsrin_64b(DisasContext *ctx) 4736 { 4737 #if defined(CONFIG_USER_ONLY) 4738 GEN_PRIV; 4739 #else 4740 TCGv t0; 4741 4742 CHK_SV; 4743 t0 = tcg_temp_new(); 4744 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 4745 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 4746 tcg_temp_free(t0); 4747 #endif /* defined(CONFIG_USER_ONLY) */ 4748 } 4749 4750 /* slbmte */ 4751 static void gen_slbmte(DisasContext *ctx) 4752 { 4753 #if defined(CONFIG_USER_ONLY) 4754 GEN_PRIV; 4755 #else 4756 CHK_SV; 4757 4758 gen_helper_store_slb(cpu_env, cpu_gpr[rB(ctx->opcode)], 4759 cpu_gpr[rS(ctx->opcode)]); 4760 #endif /* defined(CONFIG_USER_ONLY) */ 4761 } 4762 4763 static void gen_slbmfee(DisasContext *ctx) 4764 { 4765 #if defined(CONFIG_USER_ONLY) 4766 GEN_PRIV; 4767 #else 4768 CHK_SV; 4769 4770 gen_helper_load_slb_esid(cpu_gpr[rS(ctx->opcode)], cpu_env, 4771 cpu_gpr[rB(ctx->opcode)]); 4772 #endif /* defined(CONFIG_USER_ONLY) */ 4773 } 4774 4775 static void gen_slbmfev(DisasContext *ctx) 4776 { 4777 #if defined(CONFIG_USER_ONLY) 4778 GEN_PRIV; 4779 #else 4780 CHK_SV; 4781 4782 gen_helper_load_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env, 4783 cpu_gpr[rB(ctx->opcode)]); 4784 #endif /* defined(CONFIG_USER_ONLY) */ 4785 } 4786 4787 static void gen_slbfee_(DisasContext *ctx) 4788 { 4789 #if defined(CONFIG_USER_ONLY) 4790 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); 4791 #else 4792 TCGLabel *l1, *l2; 4793 4794 if (unlikely(ctx->pr)) { 4795 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); 4796 return; 4797 } 4798 gen_helper_find_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env, 4799 cpu_gpr[rB(ctx->opcode)]); 4800 l1 = gen_new_label(); 4801 l2 = gen_new_label(); 4802 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 4803 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rS(ctx->opcode)], -1, l1); 4804 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ); 4805 tcg_gen_br(l2); 4806 gen_set_label(l1); 4807 tcg_gen_movi_tl(cpu_gpr[rS(ctx->opcode)], 0); 4808 gen_set_label(l2); 4809 #endif 4810 } 4811 #endif /* defined(TARGET_PPC64) */ 4812 4813 /*** Lookaside buffer management ***/ 4814 /* Optional & supervisor only: */ 4815 4816 /* tlbia */ 4817 static void gen_tlbia(DisasContext *ctx) 4818 { 4819 #if defined(CONFIG_USER_ONLY) 4820 GEN_PRIV; 4821 #else 4822 CHK_HV; 4823 4824 gen_helper_tlbia(cpu_env); 4825 #endif /* defined(CONFIG_USER_ONLY) */ 4826 } 4827 4828 /* tlbiel */ 4829 static void gen_tlbiel(DisasContext *ctx) 4830 { 4831 #if defined(CONFIG_USER_ONLY) 4832 GEN_PRIV; 4833 #else 4834 CHK_SV; 4835 4836 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]); 4837 #endif /* defined(CONFIG_USER_ONLY) */ 4838 } 4839 4840 /* tlbie */ 4841 static void gen_tlbie(DisasContext *ctx) 4842 { 4843 #if defined(CONFIG_USER_ONLY) 4844 GEN_PRIV; 4845 #else 4846 TCGv_i32 t1; 4847 4848 if (ctx->gtse) { 
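        /* GTSE (Guest Translation Shootdown Enable) lets the guest OS run
         * its own TLB invalidations without trapping to the hypervisor.
         */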
4849 CHK_SV; /* If gtse is set then tlbie is supervisor privileged */ 4850 } else { 4851 CHK_HV; /* Else hypervisor privileged */ 4852 } 4853 4854 if (NARROW_MODE(ctx)) { 4855 TCGv t0 = tcg_temp_new(); 4856 tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]); 4857 gen_helper_tlbie(cpu_env, t0); 4858 tcg_temp_free(t0); 4859 } else { 4860 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]); 4861 } 4862 t1 = tcg_temp_new_i32(); 4863 tcg_gen_ld_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush)); 4864 tcg_gen_ori_i32(t1, t1, TLB_NEED_GLOBAL_FLUSH); 4865 tcg_gen_st_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush)); 4866 tcg_temp_free_i32(t1); 4867 #endif /* defined(CONFIG_USER_ONLY) */ 4868 } 4869 4870 /* tlbsync */ 4871 static void gen_tlbsync(DisasContext *ctx) 4872 { 4873 #if defined(CONFIG_USER_ONLY) 4874 GEN_PRIV; 4875 #else 4876 4877 if (ctx->gtse) { 4878 CHK_SV; /* If gtse is set then tlbsync is supervisor privileged */ 4879 } else { 4880 CHK_HV; /* Else hypervisor privileged */ 4881 } 4882 4883 /* BookS does both ptesync and tlbsync make tlbsync a nop for server */ 4884 if (ctx->insns_flags & PPC_BOOKE) { 4885 gen_check_tlb_flush(ctx, true); 4886 } 4887 #endif /* defined(CONFIG_USER_ONLY) */ 4888 } 4889 4890 #if defined(TARGET_PPC64) 4891 /* slbia */ 4892 static void gen_slbia(DisasContext *ctx) 4893 { 4894 #if defined(CONFIG_USER_ONLY) 4895 GEN_PRIV; 4896 #else 4897 CHK_SV; 4898 4899 gen_helper_slbia(cpu_env); 4900 #endif /* defined(CONFIG_USER_ONLY) */ 4901 } 4902 4903 /* slbie */ 4904 static void gen_slbie(DisasContext *ctx) 4905 { 4906 #if defined(CONFIG_USER_ONLY) 4907 GEN_PRIV; 4908 #else 4909 CHK_SV; 4910 4911 gen_helper_slbie(cpu_env, cpu_gpr[rB(ctx->opcode)]); 4912 #endif /* defined(CONFIG_USER_ONLY) */ 4913 } 4914 4915 /* slbieg */ 4916 static void gen_slbieg(DisasContext *ctx) 4917 { 4918 #if defined(CONFIG_USER_ONLY) 4919 GEN_PRIV; 4920 #else 4921 CHK_SV; 4922 4923 gen_helper_slbieg(cpu_env, cpu_gpr[rB(ctx->opcode)]); 4924 #endif /* defined(CONFIG_USER_ONLY) */ 4925 } 4926 4927 /* slbsync */ 4928 static void gen_slbsync(DisasContext *ctx) 4929 { 4930 #if defined(CONFIG_USER_ONLY) 4931 GEN_PRIV; 4932 #else 4933 CHK_SV; 4934 gen_check_tlb_flush(ctx, true); 4935 #endif /* defined(CONFIG_USER_ONLY) */ 4936 } 4937 4938 #endif /* defined(TARGET_PPC64) */ 4939 4940 /*** External control ***/ 4941 /* Optional: */ 4942 4943 /* eciwx */ 4944 static void gen_eciwx(DisasContext *ctx) 4945 { 4946 TCGv t0; 4947 /* Should check EAR[E] ! */ 4948 gen_set_access_type(ctx, ACCESS_EXT); 4949 t0 = tcg_temp_new(); 4950 gen_addr_reg_index(ctx, t0); 4951 tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx, 4952 DEF_MEMOP(MO_UL | MO_ALIGN)); 4953 tcg_temp_free(t0); 4954 } 4955 4956 /* ecowx */ 4957 static void gen_ecowx(DisasContext *ctx) 4958 { 4959 TCGv t0; 4960 /* Should check EAR[E] ! */ 4961 gen_set_access_type(ctx, ACCESS_EXT); 4962 t0 = tcg_temp_new(); 4963 gen_addr_reg_index(ctx, t0); 4964 tcg_gen_qemu_st_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx, 4965 DEF_MEMOP(MO_UL | MO_ALIGN)); 4966 tcg_temp_free(t0); 4967 } 4968 4969 /* PowerPC 601 specific instructions */ 4970 4971 /* abs - abs. 
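 * PowerPC 601: rD = |rA|; the Rc form also sets CR0 from the result.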
*/ 4972 static void gen_abs(DisasContext *ctx) 4973 { 4974 TCGLabel *l1 = gen_new_label(); 4975 TCGLabel *l2 = gen_new_label(); 4976 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l1); 4977 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4978 tcg_gen_br(l2); 4979 gen_set_label(l1); 4980 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4981 gen_set_label(l2); 4982 if (unlikely(Rc(ctx->opcode) != 0)) 4983 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4984 } 4985 4986 /* abso - abso. */ 4987 static void gen_abso(DisasContext *ctx) 4988 { 4989 TCGLabel *l1 = gen_new_label(); 4990 TCGLabel *l2 = gen_new_label(); 4991 TCGLabel *l3 = gen_new_label(); 4992 /* Start with XER OV disabled, the most likely case */ 4993 tcg_gen_movi_tl(cpu_ov, 0); 4994 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l2); 4995 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[rA(ctx->opcode)], 0x80000000, l1); 4996 tcg_gen_movi_tl(cpu_ov, 1); 4997 tcg_gen_movi_tl(cpu_so, 1); 4998 tcg_gen_br(l2); 4999 gen_set_label(l1); 5000 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5001 tcg_gen_br(l3); 5002 gen_set_label(l2); 5003 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5004 gen_set_label(l3); 5005 if (unlikely(Rc(ctx->opcode) != 0)) 5006 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5007 } 5008 5009 /* clcs */ 5010 static void gen_clcs(DisasContext *ctx) 5011 { 5012 TCGv_i32 t0 = tcg_const_i32(rA(ctx->opcode)); 5013 gen_helper_clcs(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5014 tcg_temp_free_i32(t0); 5015 /* Rc=1 sets CR0 to an undefined state */ 5016 } 5017 5018 /* div - div. */ 5019 static void gen_div(DisasContext *ctx) 5020 { 5021 gen_helper_div(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 5022 cpu_gpr[rB(ctx->opcode)]); 5023 if (unlikely(Rc(ctx->opcode) != 0)) 5024 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5025 } 5026 5027 /* divo - divo. */ 5028 static void gen_divo(DisasContext *ctx) 5029 { 5030 gen_helper_divo(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 5031 cpu_gpr[rB(ctx->opcode)]); 5032 if (unlikely(Rc(ctx->opcode) != 0)) 5033 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5034 } 5035 5036 /* divs - divs. */ 5037 static void gen_divs(DisasContext *ctx) 5038 { 5039 gen_helper_divs(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 5040 cpu_gpr[rB(ctx->opcode)]); 5041 if (unlikely(Rc(ctx->opcode) != 0)) 5042 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5043 } 5044 5045 /* divso - divso. */ 5046 static void gen_divso(DisasContext *ctx) 5047 { 5048 gen_helper_divso(cpu_gpr[rD(ctx->opcode)], cpu_env, 5049 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 5050 if (unlikely(Rc(ctx->opcode) != 0)) 5051 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5052 } 5053 5054 /* doz - doz. */ 5055 static void gen_doz(DisasContext *ctx) 5056 { 5057 TCGLabel *l1 = gen_new_label(); 5058 TCGLabel *l2 = gen_new_label(); 5059 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1); 5060 tcg_gen_sub_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5061 tcg_gen_br(l2); 5062 gen_set_label(l1); 5063 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 5064 gen_set_label(l2); 5065 if (unlikely(Rc(ctx->opcode) != 0)) 5066 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5067 } 5068 5069 /* dozo - dozo. 
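 * Like doz, but also sets XER[OV] and XER[SO] when the signed
 * subtraction overflows.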
*/ 5070 static void gen_dozo(DisasContext *ctx) 5071 { 5072 TCGLabel *l1 = gen_new_label(); 5073 TCGLabel *l2 = gen_new_label(); 5074 TCGv t0 = tcg_temp_new(); 5075 TCGv t1 = tcg_temp_new(); 5076 TCGv t2 = tcg_temp_new(); 5077 /* Start with XER OV disabled, the most likely case */ 5078 tcg_gen_movi_tl(cpu_ov, 0); 5079 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1); 5080 tcg_gen_sub_tl(t0, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5081 tcg_gen_xor_tl(t1, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5082 tcg_gen_xor_tl(t2, cpu_gpr[rA(ctx->opcode)], t0); 5083 tcg_gen_andc_tl(t1, t1, t2); 5084 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0); 5085 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2); 5086 tcg_gen_movi_tl(cpu_ov, 1); 5087 tcg_gen_movi_tl(cpu_so, 1); 5088 tcg_gen_br(l2); 5089 gen_set_label(l1); 5090 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 5091 gen_set_label(l2); 5092 tcg_temp_free(t0); 5093 tcg_temp_free(t1); 5094 tcg_temp_free(t2); 5095 if (unlikely(Rc(ctx->opcode) != 0)) 5096 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5097 } 5098 5099 /* dozi */ 5100 static void gen_dozi(DisasContext *ctx) 5101 { 5102 target_long simm = SIMM(ctx->opcode); 5103 TCGLabel *l1 = gen_new_label(); 5104 TCGLabel *l2 = gen_new_label(); 5105 tcg_gen_brcondi_tl(TCG_COND_LT, cpu_gpr[rA(ctx->opcode)], simm, l1); 5106 tcg_gen_subfi_tl(cpu_gpr[rD(ctx->opcode)], simm, cpu_gpr[rA(ctx->opcode)]); 5107 tcg_gen_br(l2); 5108 gen_set_label(l1); 5109 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 5110 gen_set_label(l2); 5111 if (unlikely(Rc(ctx->opcode) != 0)) 5112 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5113 } 5114 5115 /* lscbx - lscbx. */ 5116 static void gen_lscbx(DisasContext *ctx) 5117 { 5118 TCGv t0 = tcg_temp_new(); 5119 TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode)); 5120 TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode)); 5121 TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode)); 5122 5123 gen_addr_reg_index(ctx, t0); 5124 gen_helper_lscbx(t0, cpu_env, t0, t1, t2, t3); 5125 tcg_temp_free_i32(t1); 5126 tcg_temp_free_i32(t2); 5127 tcg_temp_free_i32(t3); 5128 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F); 5129 tcg_gen_or_tl(cpu_xer, cpu_xer, t0); 5130 if (unlikely(Rc(ctx->opcode) != 0)) 5131 gen_set_Rc0(ctx, t0); 5132 tcg_temp_free(t0); 5133 } 5134 5135 /* maskg - maskg. */ 5136 static void gen_maskg(DisasContext *ctx) 5137 { 5138 TCGLabel *l1 = gen_new_label(); 5139 TCGv t0 = tcg_temp_new(); 5140 TCGv t1 = tcg_temp_new(); 5141 TCGv t2 = tcg_temp_new(); 5142 TCGv t3 = tcg_temp_new(); 5143 tcg_gen_movi_tl(t3, 0xFFFFFFFF); 5144 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5145 tcg_gen_andi_tl(t1, cpu_gpr[rS(ctx->opcode)], 0x1F); 5146 tcg_gen_addi_tl(t2, t0, 1); 5147 tcg_gen_shr_tl(t2, t3, t2); 5148 tcg_gen_shr_tl(t3, t3, t1); 5149 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], t2, t3); 5150 tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1); 5151 tcg_gen_neg_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5152 gen_set_label(l1); 5153 tcg_temp_free(t0); 5154 tcg_temp_free(t1); 5155 tcg_temp_free(t2); 5156 tcg_temp_free(t3); 5157 if (unlikely(Rc(ctx->opcode) != 0)) 5158 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5159 } 5160 5161 /* maskir - maskir. 
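 * rA = (rS & rB) | (rA & ~rB): rB selects which bits of rS are inserted
 * into rA.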
*/ 5162 static void gen_maskir(DisasContext *ctx) 5163 { 5164 TCGv t0 = tcg_temp_new(); 5165 TCGv t1 = tcg_temp_new(); 5166 tcg_gen_and_tl(t0, cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 5167 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 5168 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5169 tcg_temp_free(t0); 5170 tcg_temp_free(t1); 5171 if (unlikely(Rc(ctx->opcode) != 0)) 5172 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5173 } 5174 5175 /* mul - mul. */ 5176 static void gen_mul(DisasContext *ctx) 5177 { 5178 TCGv_i64 t0 = tcg_temp_new_i64(); 5179 TCGv_i64 t1 = tcg_temp_new_i64(); 5180 TCGv t2 = tcg_temp_new(); 5181 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]); 5182 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]); 5183 tcg_gen_mul_i64(t0, t0, t1); 5184 tcg_gen_trunc_i64_tl(t2, t0); 5185 gen_store_spr(SPR_MQ, t2); 5186 tcg_gen_shri_i64(t1, t0, 32); 5187 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1); 5188 tcg_temp_free_i64(t0); 5189 tcg_temp_free_i64(t1); 5190 tcg_temp_free(t2); 5191 if (unlikely(Rc(ctx->opcode) != 0)) 5192 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5193 } 5194 5195 /* mulo - mulo. */ 5196 static void gen_mulo(DisasContext *ctx) 5197 { 5198 TCGLabel *l1 = gen_new_label(); 5199 TCGv_i64 t0 = tcg_temp_new_i64(); 5200 TCGv_i64 t1 = tcg_temp_new_i64(); 5201 TCGv t2 = tcg_temp_new(); 5202 /* Start with XER OV disabled, the most likely case */ 5203 tcg_gen_movi_tl(cpu_ov, 0); 5204 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]); 5205 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]); 5206 tcg_gen_mul_i64(t0, t0, t1); 5207 tcg_gen_trunc_i64_tl(t2, t0); 5208 gen_store_spr(SPR_MQ, t2); 5209 tcg_gen_shri_i64(t1, t0, 32); 5210 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1); 5211 tcg_gen_ext32s_i64(t1, t0); 5212 tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1); 5213 tcg_gen_movi_tl(cpu_ov, 1); 5214 tcg_gen_movi_tl(cpu_so, 1); 5215 gen_set_label(l1); 5216 tcg_temp_free_i64(t0); 5217 tcg_temp_free_i64(t1); 5218 tcg_temp_free(t2); 5219 if (unlikely(Rc(ctx->opcode) != 0)) 5220 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5221 } 5222 5223 /* nabs - nabs. */ 5224 static void gen_nabs(DisasContext *ctx) 5225 { 5226 TCGLabel *l1 = gen_new_label(); 5227 TCGLabel *l2 = gen_new_label(); 5228 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1); 5229 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5230 tcg_gen_br(l2); 5231 gen_set_label(l1); 5232 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5233 gen_set_label(l2); 5234 if (unlikely(Rc(ctx->opcode) != 0)) 5235 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5236 } 5237 5238 /* nabso - nabso. */ 5239 static void gen_nabso(DisasContext *ctx) 5240 { 5241 TCGLabel *l1 = gen_new_label(); 5242 TCGLabel *l2 = gen_new_label(); 5243 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1); 5244 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5245 tcg_gen_br(l2); 5246 gen_set_label(l1); 5247 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5248 gen_set_label(l2); 5249 /* nabs never overflows */ 5250 tcg_gen_movi_tl(cpu_ov, 0); 5251 if (unlikely(Rc(ctx->opcode) != 0)) 5252 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5253 } 5254 5255 /* rlmi - rlmi. 
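 * Rotate rS left by the amount in the low 5 bits of rB, then insert the
 * result into rA under the MB..ME mask.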
*/ 5256 static void gen_rlmi(DisasContext *ctx) 5257 { 5258 uint32_t mb = MB(ctx->opcode); 5259 uint32_t me = ME(ctx->opcode); 5260 TCGv t0 = tcg_temp_new(); 5261 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5262 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 5263 tcg_gen_andi_tl(t0, t0, MASK(mb, me)); 5264 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~MASK(mb, me)); 5265 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], t0); 5266 tcg_temp_free(t0); 5267 if (unlikely(Rc(ctx->opcode) != 0)) 5268 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5269 } 5270 5271 /* rrib - rrib. */ 5272 static void gen_rrib(DisasContext *ctx) 5273 { 5274 TCGv t0 = tcg_temp_new(); 5275 TCGv t1 = tcg_temp_new(); 5276 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5277 tcg_gen_movi_tl(t1, 0x80000000); 5278 tcg_gen_shr_tl(t1, t1, t0); 5279 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 5280 tcg_gen_and_tl(t0, t0, t1); 5281 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], t1); 5282 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5283 tcg_temp_free(t0); 5284 tcg_temp_free(t1); 5285 if (unlikely(Rc(ctx->opcode) != 0)) 5286 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5287 } 5288 5289 /* sle - sle. */ 5290 static void gen_sle(DisasContext *ctx) 5291 { 5292 TCGv t0 = tcg_temp_new(); 5293 TCGv t1 = tcg_temp_new(); 5294 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5295 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5296 tcg_gen_subfi_tl(t1, 32, t1); 5297 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 5298 tcg_gen_or_tl(t1, t0, t1); 5299 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5300 gen_store_spr(SPR_MQ, t1); 5301 tcg_temp_free(t0); 5302 tcg_temp_free(t1); 5303 if (unlikely(Rc(ctx->opcode) != 0)) 5304 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5305 } 5306 5307 /* sleq - sleq. */ 5308 static void gen_sleq(DisasContext *ctx) 5309 { 5310 TCGv t0 = tcg_temp_new(); 5311 TCGv t1 = tcg_temp_new(); 5312 TCGv t2 = tcg_temp_new(); 5313 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5314 tcg_gen_movi_tl(t2, 0xFFFFFFFF); 5315 tcg_gen_shl_tl(t2, t2, t0); 5316 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 5317 gen_load_spr(t1, SPR_MQ); 5318 gen_store_spr(SPR_MQ, t0); 5319 tcg_gen_and_tl(t0, t0, t2); 5320 tcg_gen_andc_tl(t1, t1, t2); 5321 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5322 tcg_temp_free(t0); 5323 tcg_temp_free(t1); 5324 tcg_temp_free(t2); 5325 if (unlikely(Rc(ctx->opcode) != 0)) 5326 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5327 } 5328 5329 /* sliq - sliq. */ 5330 static void gen_sliq(DisasContext *ctx) 5331 { 5332 int sh = SH(ctx->opcode); 5333 TCGv t0 = tcg_temp_new(); 5334 TCGv t1 = tcg_temp_new(); 5335 tcg_gen_shli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5336 tcg_gen_shri_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 5337 tcg_gen_or_tl(t1, t0, t1); 5338 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5339 gen_store_spr(SPR_MQ, t1); 5340 tcg_temp_free(t0); 5341 tcg_temp_free(t1); 5342 if (unlikely(Rc(ctx->opcode) != 0)) 5343 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5344 } 5345 5346 /* slliq - slliq. 
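 * Shift left immediate with MQ: MQ receives the rotated value and the
 * vacated low bits of rA are filled from the previous MQ.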
*/ 5347 static void gen_slliq(DisasContext *ctx) 5348 { 5349 int sh = SH(ctx->opcode); 5350 TCGv t0 = tcg_temp_new(); 5351 TCGv t1 = tcg_temp_new(); 5352 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5353 gen_load_spr(t1, SPR_MQ); 5354 gen_store_spr(SPR_MQ, t0); 5355 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU << sh)); 5356 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU << sh)); 5357 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5358 tcg_temp_free(t0); 5359 tcg_temp_free(t1); 5360 if (unlikely(Rc(ctx->opcode) != 0)) 5361 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5362 } 5363 5364 /* sllq - sllq. */ 5365 static void gen_sllq(DisasContext *ctx) 5366 { 5367 TCGLabel *l1 = gen_new_label(); 5368 TCGLabel *l2 = gen_new_label(); 5369 TCGv t0 = tcg_temp_local_new(); 5370 TCGv t1 = tcg_temp_local_new(); 5371 TCGv t2 = tcg_temp_local_new(); 5372 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 5373 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 5374 tcg_gen_shl_tl(t1, t1, t2); 5375 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 5376 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 5377 gen_load_spr(t0, SPR_MQ); 5378 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5379 tcg_gen_br(l2); 5380 gen_set_label(l1); 5381 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 5382 gen_load_spr(t2, SPR_MQ); 5383 tcg_gen_andc_tl(t1, t2, t1); 5384 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5385 gen_set_label(l2); 5386 tcg_temp_free(t0); 5387 tcg_temp_free(t1); 5388 tcg_temp_free(t2); 5389 if (unlikely(Rc(ctx->opcode) != 0)) 5390 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5391 } 5392 5393 /* slq - slq. */ 5394 static void gen_slq(DisasContext *ctx) 5395 { 5396 TCGLabel *l1 = gen_new_label(); 5397 TCGv t0 = tcg_temp_new(); 5398 TCGv t1 = tcg_temp_new(); 5399 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5400 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5401 tcg_gen_subfi_tl(t1, 32, t1); 5402 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 5403 tcg_gen_or_tl(t1, t0, t1); 5404 gen_store_spr(SPR_MQ, t1); 5405 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20); 5406 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5407 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1); 5408 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 5409 gen_set_label(l1); 5410 tcg_temp_free(t0); 5411 tcg_temp_free(t1); 5412 if (unlikely(Rc(ctx->opcode) != 0)) 5413 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5414 } 5415 5416 /* sraiq - sraiq. */ 5417 static void gen_sraiq(DisasContext *ctx) 5418 { 5419 int sh = SH(ctx->opcode); 5420 TCGLabel *l1 = gen_new_label(); 5421 TCGv t0 = tcg_temp_new(); 5422 TCGv t1 = tcg_temp_new(); 5423 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5424 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 5425 tcg_gen_or_tl(t0, t0, t1); 5426 gen_store_spr(SPR_MQ, t0); 5427 tcg_gen_movi_tl(cpu_ca, 0); 5428 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1); 5429 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1); 5430 tcg_gen_movi_tl(cpu_ca, 1); 5431 gen_set_label(l1); 5432 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh); 5433 tcg_temp_free(t0); 5434 tcg_temp_free(t1); 5435 if (unlikely(Rc(ctx->opcode) != 0)) 5436 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5437 } 5438 5439 /* sraq - sraq. 
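 * Shift right algebraic with MQ: MQ gets the rotated source, rA the
 * arithmetic shift, and CA is set when the (negative) result had one
 * bits shifted out.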
*/ 5440 static void gen_sraq(DisasContext *ctx) 5441 { 5442 TCGLabel *l1 = gen_new_label(); 5443 TCGLabel *l2 = gen_new_label(); 5444 TCGv t0 = tcg_temp_new(); 5445 TCGv t1 = tcg_temp_local_new(); 5446 TCGv t2 = tcg_temp_local_new(); 5447 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 5448 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 5449 tcg_gen_sar_tl(t1, cpu_gpr[rS(ctx->opcode)], t2); 5450 tcg_gen_subfi_tl(t2, 32, t2); 5451 tcg_gen_shl_tl(t2, cpu_gpr[rS(ctx->opcode)], t2); 5452 tcg_gen_or_tl(t0, t0, t2); 5453 gen_store_spr(SPR_MQ, t0); 5454 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 5455 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l1); 5456 tcg_gen_mov_tl(t2, cpu_gpr[rS(ctx->opcode)]); 5457 tcg_gen_sari_tl(t1, cpu_gpr[rS(ctx->opcode)], 31); 5458 gen_set_label(l1); 5459 tcg_temp_free(t0); 5460 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t1); 5461 tcg_gen_movi_tl(cpu_ca, 0); 5462 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2); 5463 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l2); 5464 tcg_gen_movi_tl(cpu_ca, 1); 5465 gen_set_label(l2); 5466 tcg_temp_free(t1); 5467 tcg_temp_free(t2); 5468 if (unlikely(Rc(ctx->opcode) != 0)) 5469 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5470 } 5471 5472 /* sre - sre. */ 5473 static void gen_sre(DisasContext *ctx) 5474 { 5475 TCGv t0 = tcg_temp_new(); 5476 TCGv t1 = tcg_temp_new(); 5477 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5478 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5479 tcg_gen_subfi_tl(t1, 32, t1); 5480 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 5481 tcg_gen_or_tl(t1, t0, t1); 5482 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5483 gen_store_spr(SPR_MQ, t1); 5484 tcg_temp_free(t0); 5485 tcg_temp_free(t1); 5486 if (unlikely(Rc(ctx->opcode) != 0)) 5487 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5488 } 5489 5490 /* srea - srea. 
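 * Shift right extended algebraic: rA = rS >> (rB & 0x1F) arithmetically,
 * with the rotated source saved in MQ.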
*/ 5491 static void gen_srea(DisasContext *ctx) 5492 { 5493 TCGv t0 = tcg_temp_new(); 5494 TCGv t1 = tcg_temp_new(); 5495 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5496 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5497 gen_store_spr(SPR_MQ, t0); 5498 tcg_gen_sar_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t1); 5499 tcg_temp_free(t0); 5500 tcg_temp_free(t1); 5501 if (unlikely(Rc(ctx->opcode) != 0)) 5502 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5503 } 5504 5505 /* sreq */ 5506 static void gen_sreq(DisasContext *ctx) 5507 { 5508 TCGv t0 = tcg_temp_new(); 5509 TCGv t1 = tcg_temp_new(); 5510 TCGv t2 = tcg_temp_new(); 5511 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5512 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 5513 tcg_gen_shr_tl(t1, t1, t0); 5514 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 5515 gen_load_spr(t2, SPR_MQ); 5516 gen_store_spr(SPR_MQ, t0); 5517 tcg_gen_and_tl(t0, t0, t1); 5518 tcg_gen_andc_tl(t2, t2, t1); 5519 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t2); 5520 tcg_temp_free(t0); 5521 tcg_temp_free(t1); 5522 tcg_temp_free(t2); 5523 if (unlikely(Rc(ctx->opcode) != 0)) 5524 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5525 } 5526 5527 /* sriq */ 5528 static void gen_sriq(DisasContext *ctx) 5529 { 5530 int sh = SH(ctx->opcode); 5531 TCGv t0 = tcg_temp_new(); 5532 TCGv t1 = tcg_temp_new(); 5533 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5534 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 5535 tcg_gen_or_tl(t1, t0, t1); 5536 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5537 gen_store_spr(SPR_MQ, t1); 5538 tcg_temp_free(t0); 5539 tcg_temp_free(t1); 5540 if (unlikely(Rc(ctx->opcode) != 0)) 5541 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5542 } 5543 5544 /* srliq */ 5545 static void gen_srliq(DisasContext *ctx) 5546 { 5547 int sh = SH(ctx->opcode); 5548 TCGv t0 = tcg_temp_new(); 5549 TCGv t1 = tcg_temp_new(); 5550 tcg_gen_rotri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5551 gen_load_spr(t1, SPR_MQ); 5552 gen_store_spr(SPR_MQ, t0); 5553 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU >> sh)); 5554 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU >> sh)); 5555 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5556 tcg_temp_free(t0); 5557 tcg_temp_free(t1); 5558 if (unlikely(Rc(ctx->opcode) != 0)) 5559 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5560 } 5561 5562 /* srlq */ 5563 static void gen_srlq(DisasContext *ctx) 5564 { 5565 TCGLabel *l1 = gen_new_label(); 5566 TCGLabel *l2 = gen_new_label(); 5567 TCGv t0 = tcg_temp_local_new(); 5568 TCGv t1 = tcg_temp_local_new(); 5569 TCGv t2 = tcg_temp_local_new(); 5570 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 5571 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 5572 tcg_gen_shr_tl(t2, t1, t2); 5573 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 5574 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 5575 gen_load_spr(t0, SPR_MQ); 5576 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t2); 5577 tcg_gen_br(l2); 5578 gen_set_label(l1); 5579 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 5580 tcg_gen_and_tl(t0, t0, t2); 5581 gen_load_spr(t1, SPR_MQ); 5582 tcg_gen_andc_tl(t1, t1, t2); 5583 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5584 gen_set_label(l2); 5585 tcg_temp_free(t0); 5586 tcg_temp_free(t1); 5587 tcg_temp_free(t2); 5588 if (unlikely(Rc(ctx->opcode) != 0)) 5589 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5590 } 5591 5592 /* srq */ 5593 static void gen_srq(DisasContext *ctx) 5594 { 5595 TCGLabel *l1 = gen_new_label(); 5596 TCGv t0 = tcg_temp_new(); 5597 TCGv t1 = tcg_temp_new(); 5598 tcg_gen_andi_tl(t1, 
cpu_gpr[rB(ctx->opcode)], 0x1F); 5599 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5600 tcg_gen_subfi_tl(t1, 32, t1); 5601 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 5602 tcg_gen_or_tl(t1, t0, t1); 5603 gen_store_spr(SPR_MQ, t1); 5604 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20); 5605 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5606 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 5607 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 5608 gen_set_label(l1); 5609 tcg_temp_free(t0); 5610 tcg_temp_free(t1); 5611 if (unlikely(Rc(ctx->opcode) != 0)) 5612 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5613 } 5614 5615 /* PowerPC 602 specific instructions */ 5616 5617 /* dsa */ 5618 static void gen_dsa(DisasContext *ctx) 5619 { 5620 /* XXX: TODO */ 5621 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5622 } 5623 5624 /* esa */ 5625 static void gen_esa(DisasContext *ctx) 5626 { 5627 /* XXX: TODO */ 5628 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5629 } 5630 5631 /* mfrom */ 5632 static void gen_mfrom(DisasContext *ctx) 5633 { 5634 #if defined(CONFIG_USER_ONLY) 5635 GEN_PRIV; 5636 #else 5637 CHK_SV; 5638 gen_helper_602_mfrom(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5639 #endif /* defined(CONFIG_USER_ONLY) */ 5640 } 5641 5642 /* 602 - 603 - G2 TLB management */ 5643 5644 /* tlbld */ 5645 static void gen_tlbld_6xx(DisasContext *ctx) 5646 { 5647 #if defined(CONFIG_USER_ONLY) 5648 GEN_PRIV; 5649 #else 5650 CHK_SV; 5651 gen_helper_6xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5652 #endif /* defined(CONFIG_USER_ONLY) */ 5653 } 5654 5655 /* tlbli */ 5656 static void gen_tlbli_6xx(DisasContext *ctx) 5657 { 5658 #if defined(CONFIG_USER_ONLY) 5659 GEN_PRIV; 5660 #else 5661 CHK_SV; 5662 gen_helper_6xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5663 #endif /* defined(CONFIG_USER_ONLY) */ 5664 } 5665 5666 /* 74xx TLB management */ 5667 5668 /* tlbld */ 5669 static void gen_tlbld_74xx(DisasContext *ctx) 5670 { 5671 #if defined(CONFIG_USER_ONLY) 5672 GEN_PRIV; 5673 #else 5674 CHK_SV; 5675 gen_helper_74xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5676 #endif /* defined(CONFIG_USER_ONLY) */ 5677 } 5678 5679 /* tlbli */ 5680 static void gen_tlbli_74xx(DisasContext *ctx) 5681 { 5682 #if defined(CONFIG_USER_ONLY) 5683 GEN_PRIV; 5684 #else 5685 CHK_SV; 5686 gen_helper_74xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5687 #endif /* defined(CONFIG_USER_ONLY) */ 5688 } 5689 5690 /* POWER instructions not in PowerPC 601 */ 5691 5692 /* clf */ 5693 static void gen_clf(DisasContext *ctx) 5694 { 5695 /* Cache line flush: implemented as no-op */ 5696 } 5697 5698 /* cli */ 5699 static void gen_cli(DisasContext *ctx) 5700 { 5701 #if defined(CONFIG_USER_ONLY) 5702 GEN_PRIV; 5703 #else 5704 /* Cache line invalidate: privileged and treated as no-op */ 5705 CHK_SV; 5706 #endif /* defined(CONFIG_USER_ONLY) */ 5707 } 5708 5709 /* dclst */ 5710 static void gen_dclst(DisasContext *ctx) 5711 { 5712 /* Data cache line store: treated as no-op */ 5713 } 5714 5715 static void gen_mfsri(DisasContext *ctx) 5716 { 5717 #if defined(CONFIG_USER_ONLY) 5718 GEN_PRIV; 5719 #else 5720 int ra = rA(ctx->opcode); 5721 int rd = rD(ctx->opcode); 5722 TCGv t0; 5723 5724 CHK_SV; 5725 t0 = tcg_temp_new(); 5726 gen_addr_reg_index(ctx, t0); 5727 tcg_gen_extract_tl(t0, t0, 28, 4); 5728 gen_helper_load_sr(cpu_gpr[rd], cpu_env, t0); 5729 tcg_temp_free(t0); 5730 if (ra != 0 && ra != rd) 5731 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rd]); 5732 #endif /* defined(CONFIG_USER_ONLY) */ 5733 } 5734 5735 static void gen_rac(DisasContext *ctx) 5736 
{ 5737 #if defined(CONFIG_USER_ONLY) 5738 GEN_PRIV; 5739 #else 5740 TCGv t0; 5741 5742 CHK_SV; 5743 t0 = tcg_temp_new(); 5744 gen_addr_reg_index(ctx, t0); 5745 gen_helper_rac(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5746 tcg_temp_free(t0); 5747 #endif /* defined(CONFIG_USER_ONLY) */ 5748 } 5749 5750 static void gen_rfsvc(DisasContext *ctx) 5751 { 5752 #if defined(CONFIG_USER_ONLY) 5753 GEN_PRIV; 5754 #else 5755 CHK_SV; 5756 5757 gen_helper_rfsvc(cpu_env); 5758 gen_sync_exception(ctx); 5759 #endif /* defined(CONFIG_USER_ONLY) */ 5760 } 5761 5762 /* svc is not implemented for now */ 5763 5764 /* BookE specific instructions */ 5765 5766 /* XXX: not implemented on 440 ? */ 5767 static void gen_mfapidi(DisasContext *ctx) 5768 { 5769 /* XXX: TODO */ 5770 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5771 } 5772 5773 /* XXX: not implemented on 440 ? */ 5774 static void gen_tlbiva(DisasContext *ctx) 5775 { 5776 #if defined(CONFIG_USER_ONLY) 5777 GEN_PRIV; 5778 #else 5779 TCGv t0; 5780 5781 CHK_SV; 5782 t0 = tcg_temp_new(); 5783 gen_addr_reg_index(ctx, t0); 5784 gen_helper_tlbiva(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5785 tcg_temp_free(t0); 5786 #endif /* defined(CONFIG_USER_ONLY) */ 5787 } 5788 5789 /* All 405 MAC instructions are translated here */ 5790 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3, 5791 int ra, int rb, int rt, int Rc) 5792 { 5793 TCGv t0, t1; 5794 5795 t0 = tcg_temp_local_new(); 5796 t1 = tcg_temp_local_new(); 5797 5798 switch (opc3 & 0x0D) { 5799 case 0x05: 5800 /* macchw - macchw. - macchwo - macchwo. */ 5801 /* macchws - macchws. - macchwso - macchwso. */ 5802 /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */ 5803 /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */ 5804 /* mulchw - mulchw. */ 5805 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]); 5806 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16); 5807 tcg_gen_ext16s_tl(t1, t1); 5808 break; 5809 case 0x04: 5810 /* macchwu - macchwu. - macchwuo - macchwuo. */ 5811 /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */ 5812 /* mulchwu - mulchwu. */ 5813 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]); 5814 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16); 5815 tcg_gen_ext16u_tl(t1, t1); 5816 break; 5817 case 0x01: 5818 /* machhw - machhw. - machhwo - machhwo. */ 5819 /* machhws - machhws. - machhwso - machhwso. */ 5820 /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */ 5821 /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */ 5822 /* mulhhw - mulhhw. */ 5823 tcg_gen_sari_tl(t0, cpu_gpr[ra], 16); 5824 tcg_gen_ext16s_tl(t0, t0); 5825 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16); 5826 tcg_gen_ext16s_tl(t1, t1); 5827 break; 5828 case 0x00: 5829 /* machhwu - machhwu. - machhwuo - machhwuo. */ 5830 /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */ 5831 /* mulhhwu - mulhhwu. */ 5832 tcg_gen_shri_tl(t0, cpu_gpr[ra], 16); 5833 tcg_gen_ext16u_tl(t0, t0); 5834 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16); 5835 tcg_gen_ext16u_tl(t1, t1); 5836 break; 5837 case 0x0D: 5838 /* maclhw - maclhw. - maclhwo - maclhwo. */ 5839 /* maclhws - maclhws. - maclhwso - maclhwso. */ 5840 /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */ 5841 /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */ 5842 /* mullhw - mullhw. */ 5843 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]); 5844 tcg_gen_ext16s_tl(t1, cpu_gpr[rb]); 5845 break; 5846 case 0x0C: 5847 /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */ 5848 /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */ 5849 /* mullhwu - mullhwu. 
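 * (unsigned low halfwords of rA and rB)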
*/ 5850 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]); 5851 tcg_gen_ext16u_tl(t1, cpu_gpr[rb]); 5852 break; 5853 } 5854 if (opc2 & 0x04) { 5855 /* (n)multiply-and-accumulate (0x0C / 0x0E) */ 5856 tcg_gen_mul_tl(t1, t0, t1); 5857 if (opc2 & 0x02) { 5858 /* nmultiply-and-accumulate (0x0E) */ 5859 tcg_gen_sub_tl(t0, cpu_gpr[rt], t1); 5860 } else { 5861 /* multiply-and-accumulate (0x0C) */ 5862 tcg_gen_add_tl(t0, cpu_gpr[rt], t1); 5863 } 5864 5865 if (opc3 & 0x12) { 5866 /* Check overflow and/or saturate */ 5867 TCGLabel *l1 = gen_new_label(); 5868 5869 if (opc3 & 0x10) { 5870 /* Start with XER OV disabled, the most likely case */ 5871 tcg_gen_movi_tl(cpu_ov, 0); 5872 } 5873 if (opc3 & 0x01) { 5874 /* Signed */ 5875 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1); 5876 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1); 5877 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0); 5878 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1); 5879 if (opc3 & 0x02) { 5880 /* Saturate */ 5881 tcg_gen_sari_tl(t0, cpu_gpr[rt], 31); 5882 tcg_gen_xori_tl(t0, t0, 0x7fffffff); 5883 } 5884 } else { 5885 /* Unsigned */ 5886 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1); 5887 if (opc3 & 0x02) { 5888 /* Saturate */ 5889 tcg_gen_movi_tl(t0, UINT32_MAX); 5890 } 5891 } 5892 if (opc3 & 0x10) { 5893 /* Check overflow */ 5894 tcg_gen_movi_tl(cpu_ov, 1); 5895 tcg_gen_movi_tl(cpu_so, 1); 5896 } 5897 gen_set_label(l1); 5898 tcg_gen_mov_tl(cpu_gpr[rt], t0); 5899 } 5900 } else { 5901 tcg_gen_mul_tl(cpu_gpr[rt], t0, t1); 5902 } 5903 tcg_temp_free(t0); 5904 tcg_temp_free(t1); 5905 if (unlikely(Rc) != 0) { 5906 /* Update Rc0 */ 5907 gen_set_Rc0(ctx, cpu_gpr[rt]); 5908 } 5909 } 5910 5911 #define GEN_MAC_HANDLER(name, opc2, opc3) \ 5912 static void glue(gen_, name)(DisasContext *ctx) \ 5913 { \ 5914 gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode), \ 5915 rD(ctx->opcode), Rc(ctx->opcode)); \ 5916 } 5917 5918 /* macchw - macchw. */ 5919 GEN_MAC_HANDLER(macchw, 0x0C, 0x05); 5920 /* macchwo - macchwo. */ 5921 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15); 5922 /* macchws - macchws. */ 5923 GEN_MAC_HANDLER(macchws, 0x0C, 0x07); 5924 /* macchwso - macchwso. */ 5925 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17); 5926 /* macchwsu - macchwsu. */ 5927 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06); 5928 /* macchwsuo - macchwsuo. */ 5929 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16); 5930 /* macchwu - macchwu. */ 5931 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04); 5932 /* macchwuo - macchwuo. */ 5933 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14); 5934 /* machhw - machhw. */ 5935 GEN_MAC_HANDLER(machhw, 0x0C, 0x01); 5936 /* machhwo - machhwo. */ 5937 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11); 5938 /* machhws - machhws. */ 5939 GEN_MAC_HANDLER(machhws, 0x0C, 0x03); 5940 /* machhwso - machhwso. */ 5941 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13); 5942 /* machhwsu - machhwsu. */ 5943 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02); 5944 /* machhwsuo - machhwsuo. */ 5945 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12); 5946 /* machhwu - machhwu. */ 5947 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00); 5948 /* machhwuo - machhwuo. */ 5949 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10); 5950 /* maclhw - maclhw. */ 5951 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D); 5952 /* maclhwo - maclhwo. */ 5953 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D); 5954 /* maclhws - maclhws. */ 5955 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F); 5956 /* maclhwso - maclhwso. */ 5957 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F); 5958 /* maclhwu - maclhwu. */ 5959 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C); 5960 /* maclhwuo - maclhwuo. */ 5961 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C); 5962 /* maclhwsu - maclhwsu. 
*/ 5963 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E); 5964 /* maclhwsuo - maclhwsuo. */ 5965 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E); 5966 /* nmacchw - nmacchw. */ 5967 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05); 5968 /* nmacchwo - nmacchwo. */ 5969 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15); 5970 /* nmacchws - nmacchws. */ 5971 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07); 5972 /* nmacchwso - nmacchwso. */ 5973 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17); 5974 /* nmachhw - nmachhw. */ 5975 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01); 5976 /* nmachhwo - nmachhwo. */ 5977 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11); 5978 /* nmachhws - nmachhws. */ 5979 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03); 5980 /* nmachhwso - nmachhwso. */ 5981 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13); 5982 /* nmaclhw - nmaclhw. */ 5983 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D); 5984 /* nmaclhwo - nmaclhwo. */ 5985 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D); 5986 /* nmaclhws - nmaclhws. */ 5987 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F); 5988 /* nmaclhwso - nmaclhwso. */ 5989 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F); 5990 5991 /* mulchw - mulchw. */ 5992 GEN_MAC_HANDLER(mulchw, 0x08, 0x05); 5993 /* mulchwu - mulchwu. */ 5994 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04); 5995 /* mulhhw - mulhhw. */ 5996 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01); 5997 /* mulhhwu - mulhhwu. */ 5998 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00); 5999 /* mullhw - mullhw. */ 6000 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D); 6001 /* mullhwu - mullhwu. */ 6002 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C); 6003 6004 /* mfdcr */ 6005 static void gen_mfdcr(DisasContext *ctx) 6006 { 6007 #if defined(CONFIG_USER_ONLY) 6008 GEN_PRIV; 6009 #else 6010 TCGv dcrn; 6011 6012 CHK_SV; 6013 dcrn = tcg_const_tl(SPR(ctx->opcode)); 6014 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, dcrn); 6015 tcg_temp_free(dcrn); 6016 #endif /* defined(CONFIG_USER_ONLY) */ 6017 } 6018 6019 /* mtdcr */ 6020 static void gen_mtdcr(DisasContext *ctx) 6021 { 6022 #if defined(CONFIG_USER_ONLY) 6023 GEN_PRIV; 6024 #else 6025 TCGv dcrn; 6026 6027 CHK_SV; 6028 dcrn = tcg_const_tl(SPR(ctx->opcode)); 6029 gen_helper_store_dcr(cpu_env, dcrn, cpu_gpr[rS(ctx->opcode)]); 6030 tcg_temp_free(dcrn); 6031 #endif /* defined(CONFIG_USER_ONLY) */ 6032 } 6033 6034 /* mfdcrx */ 6035 /* XXX: not implemented on 440 ? */ 6036 static void gen_mfdcrx(DisasContext *ctx) 6037 { 6038 #if defined(CONFIG_USER_ONLY) 6039 GEN_PRIV; 6040 #else 6041 CHK_SV; 6042 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, 6043 cpu_gpr[rA(ctx->opcode)]); 6044 /* Note: Rc update flag set leads to undefined state of Rc0 */ 6045 #endif /* defined(CONFIG_USER_ONLY) */ 6046 } 6047 6048 /* mtdcrx */ 6049 /* XXX: not implemented on 440 ? 
*/ 6050 static void gen_mtdcrx(DisasContext *ctx) 6051 { 6052 #if defined(CONFIG_USER_ONLY) 6053 GEN_PRIV; 6054 #else 6055 CHK_SV; 6056 gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)], 6057 cpu_gpr[rS(ctx->opcode)]); 6058 /* Note: Rc update flag set leads to undefined state of Rc0 */ 6059 #endif /* defined(CONFIG_USER_ONLY) */ 6060 } 6061 6062 /* mfdcrux (PPC 460) : user-mode access to DCR */ 6063 static void gen_mfdcrux(DisasContext *ctx) 6064 { 6065 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, 6066 cpu_gpr[rA(ctx->opcode)]); 6067 /* Note: Rc update flag set leads to undefined state of Rc0 */ 6068 } 6069 6070 /* mtdcrux (PPC 460) : user-mode access to DCR */ 6071 static void gen_mtdcrux(DisasContext *ctx) 6072 { 6073 gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)], 6074 cpu_gpr[rS(ctx->opcode)]); 6075 /* Note: Rc update flag set leads to undefined state of Rc0 */ 6076 } 6077 6078 /* dccci */ 6079 static void gen_dccci(DisasContext *ctx) 6080 { 6081 CHK_SV; 6082 /* interpreted as no-op */ 6083 } 6084 6085 /* dcread */ 6086 static void gen_dcread(DisasContext *ctx) 6087 { 6088 #if defined(CONFIG_USER_ONLY) 6089 GEN_PRIV; 6090 #else 6091 TCGv EA, val; 6092 6093 CHK_SV; 6094 gen_set_access_type(ctx, ACCESS_CACHE); 6095 EA = tcg_temp_new(); 6096 gen_addr_reg_index(ctx, EA); 6097 val = tcg_temp_new(); 6098 gen_qemu_ld32u(ctx, val, EA); 6099 tcg_temp_free(val); 6100 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA); 6101 tcg_temp_free(EA); 6102 #endif /* defined(CONFIG_USER_ONLY) */ 6103 } 6104 6105 /* icbt */ 6106 static void gen_icbt_40x(DisasContext *ctx) 6107 { 6108 /* interpreted as no-op */ 6109 /* XXX: specification say this is treated as a load by the MMU 6110 * but does not generate any exception 6111 */ 6112 } 6113 6114 /* iccci */ 6115 static void gen_iccci(DisasContext *ctx) 6116 { 6117 CHK_SV; 6118 /* interpreted as no-op */ 6119 } 6120 6121 /* icread */ 6122 static void gen_icread(DisasContext *ctx) 6123 { 6124 CHK_SV; 6125 /* interpreted as no-op */ 6126 } 6127 6128 /* rfci (supervisor only) */ 6129 static void gen_rfci_40x(DisasContext *ctx) 6130 { 6131 #if defined(CONFIG_USER_ONLY) 6132 GEN_PRIV; 6133 #else 6134 CHK_SV; 6135 /* Restore CPU state */ 6136 gen_helper_40x_rfci(cpu_env); 6137 gen_sync_exception(ctx); 6138 #endif /* defined(CONFIG_USER_ONLY) */ 6139 } 6140 6141 static void gen_rfci(DisasContext *ctx) 6142 { 6143 #if defined(CONFIG_USER_ONLY) 6144 GEN_PRIV; 6145 #else 6146 CHK_SV; 6147 /* Restore CPU state */ 6148 gen_helper_rfci(cpu_env); 6149 gen_sync_exception(ctx); 6150 #endif /* defined(CONFIG_USER_ONLY) */ 6151 } 6152 6153 /* BookE specific */ 6154 6155 /* XXX: not implemented on 440 ? */ 6156 static void gen_rfdi(DisasContext *ctx) 6157 { 6158 #if defined(CONFIG_USER_ONLY) 6159 GEN_PRIV; 6160 #else 6161 CHK_SV; 6162 /* Restore CPU state */ 6163 gen_helper_rfdi(cpu_env); 6164 gen_sync_exception(ctx); 6165 #endif /* defined(CONFIG_USER_ONLY) */ 6166 } 6167 6168 /* XXX: not implemented on 440 ? 
*/ 6169 static void gen_rfmci(DisasContext *ctx) 6170 { 6171 #if defined(CONFIG_USER_ONLY) 6172 GEN_PRIV; 6173 #else 6174 CHK_SV; 6175 /* Restore CPU state */ 6176 gen_helper_rfmci(cpu_env); 6177 gen_sync_exception(ctx); 6178 #endif /* defined(CONFIG_USER_ONLY) */ 6179 } 6180 6181 /* TLB management - PowerPC 405 implementation */ 6182 6183 /* tlbre */ 6184 static void gen_tlbre_40x(DisasContext *ctx) 6185 { 6186 #if defined(CONFIG_USER_ONLY) 6187 GEN_PRIV; 6188 #else 6189 CHK_SV; 6190 switch (rB(ctx->opcode)) { 6191 case 0: 6192 gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_env, 6193 cpu_gpr[rA(ctx->opcode)]); 6194 break; 6195 case 1: 6196 gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_env, 6197 cpu_gpr[rA(ctx->opcode)]); 6198 break; 6199 default: 6200 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6201 break; 6202 } 6203 #endif /* defined(CONFIG_USER_ONLY) */ 6204 } 6205 6206 /* tlbsx - tlbsx. */ 6207 static void gen_tlbsx_40x(DisasContext *ctx) 6208 { 6209 #if defined(CONFIG_USER_ONLY) 6210 GEN_PRIV; 6211 #else 6212 TCGv t0; 6213 6214 CHK_SV; 6215 t0 = tcg_temp_new(); 6216 gen_addr_reg_index(ctx, t0); 6217 gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 6218 tcg_temp_free(t0); 6219 if (Rc(ctx->opcode)) { 6220 TCGLabel *l1 = gen_new_label(); 6221 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 6222 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1); 6223 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02); 6224 gen_set_label(l1); 6225 } 6226 #endif /* defined(CONFIG_USER_ONLY) */ 6227 } 6228 6229 /* tlbwe */ 6230 static void gen_tlbwe_40x(DisasContext *ctx) 6231 { 6232 #if defined(CONFIG_USER_ONLY) 6233 GEN_PRIV; 6234 #else 6235 CHK_SV; 6236 6237 switch (rB(ctx->opcode)) { 6238 case 0: 6239 gen_helper_4xx_tlbwe_hi(cpu_env, cpu_gpr[rA(ctx->opcode)], 6240 cpu_gpr[rS(ctx->opcode)]); 6241 break; 6242 case 1: 6243 gen_helper_4xx_tlbwe_lo(cpu_env, cpu_gpr[rA(ctx->opcode)], 6244 cpu_gpr[rS(ctx->opcode)]); 6245 break; 6246 default: 6247 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6248 break; 6249 } 6250 #endif /* defined(CONFIG_USER_ONLY) */ 6251 } 6252 6253 /* TLB management - PowerPC 440 implementation */ 6254 6255 /* tlbre */ 6256 static void gen_tlbre_440(DisasContext *ctx) 6257 { 6258 #if defined(CONFIG_USER_ONLY) 6259 GEN_PRIV; 6260 #else 6261 CHK_SV; 6262 6263 switch (rB(ctx->opcode)) { 6264 case 0: 6265 case 1: 6266 case 2: 6267 { 6268 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode)); 6269 gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], cpu_env, 6270 t0, cpu_gpr[rA(ctx->opcode)]); 6271 tcg_temp_free_i32(t0); 6272 } 6273 break; 6274 default: 6275 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6276 break; 6277 } 6278 #endif /* defined(CONFIG_USER_ONLY) */ 6279 } 6280 6281 /* tlbsx - tlbsx. 
*/ 6282 static void gen_tlbsx_440(DisasContext *ctx) 6283 { 6284 #if defined(CONFIG_USER_ONLY) 6285 GEN_PRIV; 6286 #else 6287 TCGv t0; 6288 6289 CHK_SV; 6290 t0 = tcg_temp_new(); 6291 gen_addr_reg_index(ctx, t0); 6292 gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 6293 tcg_temp_free(t0); 6294 if (Rc(ctx->opcode)) { 6295 TCGLabel *l1 = gen_new_label(); 6296 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 6297 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1); 6298 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02); 6299 gen_set_label(l1); 6300 } 6301 #endif /* defined(CONFIG_USER_ONLY) */ 6302 } 6303 6304 /* tlbwe */ 6305 static void gen_tlbwe_440(DisasContext *ctx) 6306 { 6307 #if defined(CONFIG_USER_ONLY) 6308 GEN_PRIV; 6309 #else 6310 CHK_SV; 6311 switch (rB(ctx->opcode)) { 6312 case 0: 6313 case 1: 6314 case 2: 6315 { 6316 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode)); 6317 gen_helper_440_tlbwe(cpu_env, t0, cpu_gpr[rA(ctx->opcode)], 6318 cpu_gpr[rS(ctx->opcode)]); 6319 tcg_temp_free_i32(t0); 6320 } 6321 break; 6322 default: 6323 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6324 break; 6325 } 6326 #endif /* defined(CONFIG_USER_ONLY) */ 6327 } 6328 6329 /* TLB management - PowerPC BookE 2.06 implementation */ 6330 6331 /* tlbre */ 6332 static void gen_tlbre_booke206(DisasContext *ctx) 6333 { 6334 #if defined(CONFIG_USER_ONLY) 6335 GEN_PRIV; 6336 #else 6337 CHK_SV; 6338 gen_helper_booke206_tlbre(cpu_env); 6339 #endif /* defined(CONFIG_USER_ONLY) */ 6340 } 6341 6342 /* tlbsx - tlbsx. */ 6343 static void gen_tlbsx_booke206(DisasContext *ctx) 6344 { 6345 #if defined(CONFIG_USER_ONLY) 6346 GEN_PRIV; 6347 #else 6348 TCGv t0; 6349 6350 CHK_SV; 6351 if (rA(ctx->opcode)) { 6352 t0 = tcg_temp_new(); 6353 tcg_gen_mov_tl(t0, cpu_gpr[rD(ctx->opcode)]); 6354 } else { 6355 t0 = tcg_const_tl(0); 6356 } 6357 6358 tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]); 6359 gen_helper_booke206_tlbsx(cpu_env, t0); 6360 tcg_temp_free(t0); 6361 #endif /* defined(CONFIG_USER_ONLY) */ 6362 } 6363 6364 /* tlbwe */ 6365 static void gen_tlbwe_booke206(DisasContext *ctx) 6366 { 6367 #if defined(CONFIG_USER_ONLY) 6368 GEN_PRIV; 6369 #else 6370 CHK_SV; 6371 gen_helper_booke206_tlbwe(cpu_env); 6372 #endif /* defined(CONFIG_USER_ONLY) */ 6373 } 6374 6375 static void gen_tlbivax_booke206(DisasContext *ctx) 6376 { 6377 #if defined(CONFIG_USER_ONLY) 6378 GEN_PRIV; 6379 #else 6380 TCGv t0; 6381 6382 CHK_SV; 6383 t0 = tcg_temp_new(); 6384 gen_addr_reg_index(ctx, t0); 6385 gen_helper_booke206_tlbivax(cpu_env, t0); 6386 tcg_temp_free(t0); 6387 #endif /* defined(CONFIG_USER_ONLY) */ 6388 } 6389 6390 static void gen_tlbilx_booke206(DisasContext *ctx) 6391 { 6392 #if defined(CONFIG_USER_ONLY) 6393 GEN_PRIV; 6394 #else 6395 TCGv t0; 6396 6397 CHK_SV; 6398 t0 = tcg_temp_new(); 6399 gen_addr_reg_index(ctx, t0); 6400 6401 switch((ctx->opcode >> 21) & 0x3) { 6402 case 0: 6403 gen_helper_booke206_tlbilx0(cpu_env, t0); 6404 break; 6405 case 1: 6406 gen_helper_booke206_tlbilx1(cpu_env, t0); 6407 break; 6408 case 3: 6409 gen_helper_booke206_tlbilx3(cpu_env, t0); 6410 break; 6411 default: 6412 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6413 break; 6414 } 6415 6416 tcg_temp_free(t0); 6417 #endif /* defined(CONFIG_USER_ONLY) */ 6418 } 6419 6420 6421 /* wrtee */ 6422 static void gen_wrtee(DisasContext *ctx) 6423 { 6424 #if defined(CONFIG_USER_ONLY) 6425 GEN_PRIV; 6426 #else 6427 TCGv t0; 6428 6429 CHK_SV; 6430 t0 = tcg_temp_new(); 6431 tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE)); 6432 
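    /*
     * Copy the EE bit from the source GPR into MSR: clear MSR[EE] below,
     * then OR back in the bit extracted above.
     */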
tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE)); 6433 tcg_gen_or_tl(cpu_msr, cpu_msr, t0); 6434 tcg_temp_free(t0); 6435 /* Stop translation to have a chance to raise an exception 6436 * if we just set msr_ee to 1 6437 */ 6438 gen_stop_exception(ctx); 6439 #endif /* defined(CONFIG_USER_ONLY) */ 6440 } 6441 6442 /* wrteei */ 6443 static void gen_wrteei(DisasContext *ctx) 6444 { 6445 #if defined(CONFIG_USER_ONLY) 6446 GEN_PRIV; 6447 #else 6448 CHK_SV; 6449 if (ctx->opcode & 0x00008000) { 6450 tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE)); 6451 /* Stop translation to have a chance to raise an exception */ 6452 gen_stop_exception(ctx); 6453 } else { 6454 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE)); 6455 } 6456 #endif /* defined(CONFIG_USER_ONLY) */ 6457 } 6458 6459 /* PowerPC 440 specific instructions */ 6460 6461 /* dlmzb */ 6462 static void gen_dlmzb(DisasContext *ctx) 6463 { 6464 TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode)); 6465 gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_env, 6466 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); 6467 tcg_temp_free_i32(t0); 6468 } 6469 6470 /* mbar replaces eieio on 440 */ 6471 static void gen_mbar(DisasContext *ctx) 6472 { 6473 /* interpreted as no-op */ 6474 } 6475 6476 /* msync replaces sync on 440 */ 6477 static void gen_msync_4xx(DisasContext *ctx) 6478 { 6479 /* interpreted as no-op */ 6480 } 6481 6482 /* icbt */ 6483 static void gen_icbt_440(DisasContext *ctx) 6484 { 6485 /* interpreted as no-op */ 6486 /* XXX: specification say this is treated as a load by the MMU 6487 * but does not generate any exception 6488 */ 6489 } 6490 6491 /* Embedded.Processor Control */ 6492 6493 static void gen_msgclr(DisasContext *ctx) 6494 { 6495 #if defined(CONFIG_USER_ONLY) 6496 GEN_PRIV; 6497 #else 6498 CHK_HV; 6499 /* 64-bit server processors compliant with arch 2.x */ 6500 if (ctx->insns_flags & PPC_SEGMENT_64B) { 6501 gen_helper_book3s_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]); 6502 } else { 6503 gen_helper_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]); 6504 } 6505 #endif /* defined(CONFIG_USER_ONLY) */ 6506 } 6507 6508 static void gen_msgsnd(DisasContext *ctx) 6509 { 6510 #if defined(CONFIG_USER_ONLY) 6511 GEN_PRIV; 6512 #else 6513 CHK_HV; 6514 /* 64-bit server processors compliant with arch 2.x */ 6515 if (ctx->insns_flags & PPC_SEGMENT_64B) { 6516 gen_helper_book3s_msgsnd(cpu_gpr[rB(ctx->opcode)]); 6517 } else { 6518 gen_helper_msgsnd(cpu_gpr[rB(ctx->opcode)]); 6519 } 6520 #endif /* defined(CONFIG_USER_ONLY) */ 6521 } 6522 6523 static void gen_msgsync(DisasContext *ctx) 6524 { 6525 #if defined(CONFIG_USER_ONLY) 6526 GEN_PRIV; 6527 #else 6528 CHK_HV; 6529 #endif /* defined(CONFIG_USER_ONLY) */ 6530 /* interpreted as no-op */ 6531 } 6532 6533 #if defined(TARGET_PPC64) 6534 static void gen_maddld(DisasContext *ctx) 6535 { 6536 TCGv_i64 t1 = tcg_temp_new_i64(); 6537 6538 tcg_gen_mul_i64(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 6539 tcg_gen_add_i64(cpu_gpr[rD(ctx->opcode)], t1, cpu_gpr[rC(ctx->opcode)]); 6540 tcg_temp_free_i64(t1); 6541 } 6542 6543 /* maddhd maddhdu */ 6544 static void gen_maddhd_maddhdu(DisasContext *ctx) 6545 { 6546 TCGv_i64 lo = tcg_temp_new_i64(); 6547 TCGv_i64 hi = tcg_temp_new_i64(); 6548 TCGv_i64 t1 = tcg_temp_new_i64(); 6549 6550 if (Rc(ctx->opcode)) { 6551 tcg_gen_mulu2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)], 6552 cpu_gpr[rB(ctx->opcode)]); 6553 tcg_gen_movi_i64(t1, 0); 6554 } else { 6555 tcg_gen_muls2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)], 6556 cpu_gpr[rB(ctx->opcode)]); 6557 tcg_gen_sari_i64(t1, 
cpu_gpr[rC(ctx->opcode)], 63); 6558 } 6559 tcg_gen_add2_i64(t1, cpu_gpr[rD(ctx->opcode)], lo, hi, 6560 cpu_gpr[rC(ctx->opcode)], t1); 6561 tcg_temp_free_i64(lo); 6562 tcg_temp_free_i64(hi); 6563 tcg_temp_free_i64(t1); 6564 } 6565 #endif /* defined(TARGET_PPC64) */ 6566 6567 static void gen_tbegin(DisasContext *ctx) 6568 { 6569 if (unlikely(!ctx->tm_enabled)) { 6570 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); 6571 return; 6572 } 6573 gen_helper_tbegin(cpu_env); 6574 } 6575 6576 #define GEN_TM_NOOP(name) \ 6577 static inline void gen_##name(DisasContext *ctx) \ 6578 { \ 6579 if (unlikely(!ctx->tm_enabled)) { \ 6580 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \ 6581 return; \ 6582 } \ 6583 /* Because tbegin always fails in QEMU, these user \ 6584 * space instructions all have a simple implementation: \ 6585 * \ 6586 * CR[0] = 0b0 || MSR[TS] || 0b0 \ 6587 * = 0b0 || 0b00 || 0b0 \ 6588 */ \ 6589 tcg_gen_movi_i32(cpu_crf[0], 0); \ 6590 } 6591 6592 GEN_TM_NOOP(tend); 6593 GEN_TM_NOOP(tabort); 6594 GEN_TM_NOOP(tabortwc); 6595 GEN_TM_NOOP(tabortwci); 6596 GEN_TM_NOOP(tabortdc); 6597 GEN_TM_NOOP(tabortdci); 6598 GEN_TM_NOOP(tsr); 6599 static inline void gen_cp_abort(DisasContext *ctx) 6600 { 6601 // Do Nothing 6602 } 6603 6604 #define GEN_CP_PASTE_NOOP(name) \ 6605 static inline void gen_##name(DisasContext *ctx) \ 6606 { \ 6607 /* Generate invalid exception until \ 6608 * we have an implementation of the copy \ 6609 * paste facility \ 6610 */ \ 6611 gen_invalid(ctx); \ 6612 } 6613 6614 GEN_CP_PASTE_NOOP(copy) 6615 GEN_CP_PASTE_NOOP(paste) 6616 6617 static void gen_tcheck(DisasContext *ctx) 6618 { 6619 if (unlikely(!ctx->tm_enabled)) { 6620 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); 6621 return; 6622 } 6623 /* Because tbegin always fails, the tcheck implementation 6624 * is simple: 6625 * 6626 * CR[CRF] = TDOOMED || MSR[TS] || 0b0 6627 * = 0b1 || 0b00 || 0b0 6628 */ 6629 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0x8); 6630 } 6631 6632 #if defined(CONFIG_USER_ONLY) 6633 #define GEN_TM_PRIV_NOOP(name) \ 6634 static inline void gen_##name(DisasContext *ctx) \ 6635 { \ 6636 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); \ 6637 } 6638 6639 #else 6640 6641 #define GEN_TM_PRIV_NOOP(name) \ 6642 static inline void gen_##name(DisasContext *ctx) \ 6643 { \ 6644 CHK_SV; \ 6645 if (unlikely(!ctx->tm_enabled)) { \ 6646 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \ 6647 return; \ 6648 } \ 6649 /* Because tbegin always fails, the implementation is \ 6650 * simple: \ 6651 * \ 6652 * CR[0] = 0b0 || MSR[TS] || 0b0 \ 6653 * = 0b0 || 0b00 | 0b0 \ 6654 */ \ 6655 tcg_gen_movi_i32(cpu_crf[0], 0); \ 6656 } 6657 6658 #endif 6659 6660 GEN_TM_PRIV_NOOP(treclaim); 6661 GEN_TM_PRIV_NOOP(trechkpt); 6662 6663 static inline void get_fpr(TCGv_i64 dst, int regno) 6664 { 6665 tcg_gen_ld_i64(dst, cpu_env, offsetof(CPUPPCState, vsr[regno].u64[0])); 6666 } 6667 6668 static inline void set_fpr(int regno, TCGv_i64 src) 6669 { 6670 tcg_gen_st_i64(src, cpu_env, offsetof(CPUPPCState, vsr[regno].u64[0])); 6671 } 6672 6673 static inline void get_avr64(TCGv_i64 dst, int regno, bool high) 6674 { 6675 #ifdef HOST_WORDS_BIGENDIAN 6676 tcg_gen_ld_i64(dst, cpu_env, offsetof(CPUPPCState, 6677 vsr[32 + regno].u64[(high ? 0 : 1)])); 6678 #else 6679 tcg_gen_ld_i64(dst, cpu_env, offsetof(CPUPPCState, 6680 vsr[32 + regno].u64[(high ? 
1 : 0)])); 6681 #endif 6682 } 6683 6684 static inline void set_avr64(int regno, TCGv_i64 src, bool high) 6685 { 6686 #ifdef HOST_WORDS_BIGENDIAN 6687 tcg_gen_st_i64(src, cpu_env, offsetof(CPUPPCState, 6688 vsr[32 + regno].u64[(high ? 0 : 1)])); 6689 #else 6690 tcg_gen_st_i64(src, cpu_env, offsetof(CPUPPCState, 6691 vsr[32 + regno].u64[(high ? 1 : 0)])); 6692 #endif 6693 } 6694 6695 #include "translate/fp-impl.inc.c" 6696 6697 #include "translate/vmx-impl.inc.c" 6698 6699 #include "translate/vsx-impl.inc.c" 6700 6701 #include "translate/dfp-impl.inc.c" 6702 6703 #include "translate/spe-impl.inc.c" 6704 6705 /* Handles lfdp, lxsd, lxssp */ 6706 static void gen_dform39(DisasContext *ctx) 6707 { 6708 switch (ctx->opcode & 0x3) { 6709 case 0: /* lfdp */ 6710 if (ctx->insns_flags2 & PPC2_ISA205) { 6711 return gen_lfdp(ctx); 6712 } 6713 break; 6714 case 2: /* lxsd */ 6715 if (ctx->insns_flags2 & PPC2_ISA300) { 6716 return gen_lxsd(ctx); 6717 } 6718 break; 6719 case 3: /* lxssp */ 6720 if (ctx->insns_flags2 & PPC2_ISA300) { 6721 return gen_lxssp(ctx); 6722 } 6723 break; 6724 } 6725 return gen_invalid(ctx); 6726 } 6727 6728 /* handles stfdp, lxv, stxsd, stxssp lxvx */ 6729 static void gen_dform3D(DisasContext *ctx) 6730 { 6731 if ((ctx->opcode & 3) == 1) { /* DQ-FORM */ 6732 switch (ctx->opcode & 0x7) { 6733 case 1: /* lxv */ 6734 if (ctx->insns_flags2 & PPC2_ISA300) { 6735 return gen_lxv(ctx); 6736 } 6737 break; 6738 case 5: /* stxv */ 6739 if (ctx->insns_flags2 & PPC2_ISA300) { 6740 return gen_stxv(ctx); 6741 } 6742 break; 6743 } 6744 } else { /* DS-FORM */ 6745 switch (ctx->opcode & 0x3) { 6746 case 0: /* stfdp */ 6747 if (ctx->insns_flags2 & PPC2_ISA205) { 6748 return gen_stfdp(ctx); 6749 } 6750 break; 6751 case 2: /* stxsd */ 6752 if (ctx->insns_flags2 & PPC2_ISA300) { 6753 return gen_stxsd(ctx); 6754 } 6755 break; 6756 case 3: /* stxssp */ 6757 if (ctx->insns_flags2 & PPC2_ISA300) { 6758 return gen_stxssp(ctx); 6759 } 6760 break; 6761 } 6762 } 6763 return gen_invalid(ctx); 6764 } 6765 6766 static opcode_t opcodes[] = { 6767 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE), 6768 GEN_HANDLER(cmp, 0x1F, 0x00, 0x00, 0x00400000, PPC_INTEGER), 6769 GEN_HANDLER(cmpi, 0x0B, 0xFF, 0xFF, 0x00400000, PPC_INTEGER), 6770 GEN_HANDLER(cmpl, 0x1F, 0x00, 0x01, 0x00400001, PPC_INTEGER), 6771 GEN_HANDLER(cmpli, 0x0A, 0xFF, 0xFF, 0x00400000, PPC_INTEGER), 6772 #if defined(TARGET_PPC64) 6773 GEN_HANDLER_E(cmpeqb, 0x1F, 0x00, 0x07, 0x00600000, PPC_NONE, PPC2_ISA300), 6774 #endif 6775 GEN_HANDLER_E(cmpb, 0x1F, 0x1C, 0x0F, 0x00000001, PPC_NONE, PPC2_ISA205), 6776 GEN_HANDLER_E(cmprb, 0x1F, 0x00, 0x06, 0x00400001, PPC_NONE, PPC2_ISA300), 6777 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL), 6778 GEN_HANDLER(addi, 0x0E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6779 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6780 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6781 GEN_HANDLER(addis, 0x0F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6782 GEN_HANDLER_E(addpcis, 0x13, 0x2, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300), 6783 GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER), 6784 GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER), 6785 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER), 6786 GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER), 6787 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6788 #if defined(TARGET_PPC64) 6789 GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B), 6790 #endif 6791 
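/* Each entry is GEN_HANDLER(name, opc1, opc2, opc3, inval, type): opc1 is the
 * primary opcode, opc2/opc3 index the indirect sub-tables used for extended
 * opcodes, inval is the mask of opcode bits that must be zero for the
 * encoding to be valid, and type selects the PPC_* instruction class the
 * handler is registered under.
 */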
GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER), 6792 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER), 6793 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6794 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6795 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6796 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER), 6797 GEN_HANDLER_E(cnttzw, 0x1F, 0x1A, 0x10, 0x00000000, PPC_NONE, PPC2_ISA300), 6798 GEN_HANDLER_E(copy, 0x1F, 0x06, 0x18, 0x03C00001, PPC_NONE, PPC2_ISA300), 6799 GEN_HANDLER_E(cp_abort, 0x1F, 0x06, 0x1A, 0x03FFF801, PPC_NONE, PPC2_ISA300), 6800 GEN_HANDLER_E(paste, 0x1F, 0x06, 0x1C, 0x03C00000, PPC_NONE, PPC2_ISA300), 6801 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER), 6802 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER), 6803 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6804 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6805 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6806 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6807 GEN_HANDLER(popcntb, 0x1F, 0x1A, 0x03, 0x0000F801, PPC_POPCNTB), 6808 GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD), 6809 GEN_HANDLER_E(prtyw, 0x1F, 0x1A, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA205), 6810 #if defined(TARGET_PPC64) 6811 GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD), 6812 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B), 6813 GEN_HANDLER_E(cnttzd, 0x1F, 0x1A, 0x11, 0x00000000, PPC_NONE, PPC2_ISA300), 6814 GEN_HANDLER_E(darn, 0x1F, 0x13, 0x17, 0x001CF801, PPC_NONE, PPC2_ISA300), 6815 GEN_HANDLER_E(prtyd, 0x1F, 0x1A, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA205), 6816 GEN_HANDLER_E(bpermd, 0x1F, 0x1C, 0x07, 0x00000001, PPC_NONE, PPC2_PERM_ISA206), 6817 #endif 6818 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6819 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6820 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6821 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER), 6822 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER), 6823 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER), 6824 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER), 6825 #if defined(TARGET_PPC64) 6826 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B), 6827 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B), 6828 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B), 6829 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B), 6830 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B), 6831 GEN_HANDLER2_E(extswsli0, "extswsli", 0x1F, 0x1A, 0x1B, 0x00000000, 6832 PPC_NONE, PPC2_ISA300), 6833 GEN_HANDLER2_E(extswsli1, "extswsli", 0x1F, 0x1B, 0x1B, 0x00000000, 6834 PPC_NONE, PPC2_ISA300), 6835 #endif 6836 #if defined(TARGET_PPC64) 6837 GEN_HANDLER(ld, 0x3A, 0xFF, 0xFF, 0x00000000, PPC_64B), 6838 GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX), 6839 GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B), 6840 #endif 6841 /* handles lfdp, lxsd, lxssp */ 6842 GEN_HANDLER_E(dform39, 0x39, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205), 6843 /* handles stfdp, lxv, stxsd, stxssp, stxv */ 6844 GEN_HANDLER_E(dform3D, 0x3D, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205), 6845 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6846 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6847 
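/* String, storage-ordering and reservation (atomic) instructions */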
GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING), 6848 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING), 6849 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING), 6850 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING), 6851 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x01FFF801, PPC_MEM_EIEIO), 6852 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM), 6853 GEN_HANDLER_E(lbarx, 0x1F, 0x14, 0x01, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6854 GEN_HANDLER_E(lharx, 0x1F, 0x14, 0x03, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6855 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES), 6856 GEN_HANDLER_E(lwat, 0x1F, 0x06, 0x12, 0x00000001, PPC_NONE, PPC2_ISA300), 6857 GEN_HANDLER_E(stwat, 0x1F, 0x06, 0x16, 0x00000001, PPC_NONE, PPC2_ISA300), 6858 GEN_HANDLER_E(stbcx_, 0x1F, 0x16, 0x15, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6859 GEN_HANDLER_E(sthcx_, 0x1F, 0x16, 0x16, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6860 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES), 6861 #if defined(TARGET_PPC64) 6862 GEN_HANDLER_E(ldat, 0x1F, 0x06, 0x13, 0x00000001, PPC_NONE, PPC2_ISA300), 6863 GEN_HANDLER_E(stdat, 0x1F, 0x06, 0x17, 0x00000001, PPC_NONE, PPC2_ISA300), 6864 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B), 6865 GEN_HANDLER_E(lqarx, 0x1F, 0x14, 0x08, 0, PPC_NONE, PPC2_LSQ_ISA207), 6866 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B), 6867 GEN_HANDLER_E(stqcx_, 0x1F, 0x16, 0x05, 0, PPC_NONE, PPC2_LSQ_ISA207), 6868 #endif 6869 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC), 6870 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT), 6871 GEN_HANDLER_E(wait, 0x1F, 0x1E, 0x00, 0x039FF801, PPC_NONE, PPC2_ISA300), 6872 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 6873 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 6874 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW), 6875 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW), 6876 GEN_HANDLER_E(bctar, 0x13, 0x10, 0x11, 0x0000E000, PPC_NONE, PPC2_BCTAR_ISA207), 6877 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER), 6878 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW), 6879 #if defined(TARGET_PPC64) 6880 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B), 6881 GEN_HANDLER_E(stop, 0x13, 0x12, 0x0b, 0x03FFF801, PPC_NONE, PPC2_ISA300), 6882 GEN_HANDLER_E(doze, 0x13, 0x12, 0x0c, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6883 GEN_HANDLER_E(nap, 0x13, 0x12, 0x0d, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6884 GEN_HANDLER_E(sleep, 0x13, 0x12, 0x0e, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6885 GEN_HANDLER_E(rvwinkle, 0x13, 0x12, 0x0f, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6886 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H), 6887 #endif 6888 GEN_HANDLER(sc, 0x11, 0xFF, 0xFF, 0x03FFF01D, PPC_FLOW), 6889 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW), 6890 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 6891 #if defined(TARGET_PPC64) 6892 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B), 6893 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B), 6894 #endif 6895 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC), 6896 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC), 6897 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC), 6898 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC), 6899 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB), 6900 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC), 6901 #if defined(TARGET_PPC64) 6902 
GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B), 6903 GEN_HANDLER_E(setb, 0x1F, 0x00, 0x04, 0x0003F801, PPC_NONE, PPC2_ISA300), 6904 GEN_HANDLER_E(mcrxrx, 0x1F, 0x00, 0x12, 0x007FF801, PPC_NONE, PPC2_ISA300), 6905 #endif 6906 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001EF801, PPC_MISC), 6907 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000000, PPC_MISC), 6908 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE), 6909 GEN_HANDLER_E(dcbfep, 0x1F, 0x1F, 0x03, 0x03C00001, PPC_NONE, PPC2_BOOKE206), 6910 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE), 6911 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE), 6912 GEN_HANDLER_E(dcbstep, 0x1F, 0x1F, 0x01, 0x03E00001, PPC_NONE, PPC2_BOOKE206), 6913 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x00000001, PPC_CACHE), 6914 GEN_HANDLER_E(dcbtep, 0x1F, 0x1F, 0x09, 0x00000001, PPC_NONE, PPC2_BOOKE206), 6915 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x00000001, PPC_CACHE), 6916 GEN_HANDLER_E(dcbtstep, 0x1F, 0x1F, 0x07, 0x00000001, PPC_NONE, PPC2_BOOKE206), 6917 GEN_HANDLER_E(dcbtls, 0x1F, 0x06, 0x05, 0x02000001, PPC_BOOKE, PPC2_BOOKE206), 6918 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ), 6919 GEN_HANDLER_E(dcbzep, 0x1F, 0x1F, 0x1F, 0x03C00001, PPC_NONE, PPC2_BOOKE206), 6920 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC), 6921 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x01800001, PPC_ALTIVEC), 6922 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC), 6923 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI), 6924 GEN_HANDLER_E(icbiep, 0x1F, 0x1F, 0x1E, 0x03E00001, PPC_NONE, PPC2_BOOKE206), 6925 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA), 6926 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT), 6927 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT), 6928 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT), 6929 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT), 6930 #if defined(TARGET_PPC64) 6931 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B), 6932 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001, 6933 PPC_SEGMENT_64B), 6934 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B), 6935 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001, 6936 PPC_SEGMENT_64B), 6937 GEN_HANDLER2(slbmte, "slbmte", 0x1F, 0x12, 0x0C, 0x001F0001, PPC_SEGMENT_64B), 6938 GEN_HANDLER2(slbmfee, "slbmfee", 0x1F, 0x13, 0x1C, 0x001F0001, PPC_SEGMENT_64B), 6939 GEN_HANDLER2(slbmfev, "slbmfev", 0x1F, 0x13, 0x1A, 0x001F0001, PPC_SEGMENT_64B), 6940 GEN_HANDLER2(slbfee_, "slbfee.", 0x1F, 0x13, 0x1E, 0x001F0000, PPC_SEGMENT_64B), 6941 #endif 6942 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA), 6943 /* XXX Those instructions will need to be handled differently for 6944 * different ISA versions */ 6945 GEN_HANDLER(tlbiel, 0x1F, 0x12, 0x08, 0x001F0001, PPC_MEM_TLBIE), 6946 GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x001F0001, PPC_MEM_TLBIE), 6947 GEN_HANDLER_E(tlbiel, 0x1F, 0x12, 0x08, 0x00100001, PPC_NONE, PPC2_ISA300), 6948 GEN_HANDLER_E(tlbie, 0x1F, 0x12, 0x09, 0x00100001, PPC_NONE, PPC2_ISA300), 6949 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC), 6950 #if defined(TARGET_PPC64) 6951 GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x031FFC01, PPC_SLBI), 6952 GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI), 6953 GEN_HANDLER_E(slbieg, 0x1F, 0x12, 0x0E, 0x001F0001, PPC_NONE, PPC2_ISA300), 6954 GEN_HANDLER_E(slbsync, 0x1F, 0x12, 0x0A, 0x03FFF801, 
PPC_NONE, PPC2_ISA300), 6955 #endif 6956 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN), 6957 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN), 6958 GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR), 6959 GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR), 6960 GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR), 6961 GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR), 6962 GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR), 6963 GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR), 6964 GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR), 6965 GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR), 6966 GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR), 6967 GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR), 6968 GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR), 6969 GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR), 6970 GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR), 6971 GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR), 6972 GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR), 6973 GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR), 6974 GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR), 6975 GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR), 6976 GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR), 6977 GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR), 6978 GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR), 6979 GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR), 6980 GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR), 6981 GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR), 6982 GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR), 6983 GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR), 6984 GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR), 6985 GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR), 6986 GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR), 6987 GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR), 6988 GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR), 6989 GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR), 6990 GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR), 6991 GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR), 6992 GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC), 6993 GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC), 6994 GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC), 6995 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB), 6996 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB), 6997 GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB), 6998 GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB), 6999 GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER), 7000 GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER), 7001 GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER), 7002 GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER), 7003 GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER), 7004 GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER), 7005 GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 7006 GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 7007 
GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2), 7008 GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2), 7009 GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 7010 GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 7011 GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2), 7012 GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2), 7013 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI), 7014 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA), 7015 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR), 7016 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR), 7017 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX), 7018 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX), 7019 GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX), 7020 GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX), 7021 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON), 7022 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON), 7023 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT), 7024 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON), 7025 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON), 7026 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP), 7027 GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206), 7028 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI), 7029 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI), 7030 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB), 7031 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB), 7032 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB), 7033 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE), 7034 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE), 7035 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE), 7036 GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, 7037 PPC_NONE, PPC2_BOOKE206), 7038 GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, 7039 PPC_NONE, PPC2_BOOKE206), 7040 GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, 7041 PPC_NONE, PPC2_BOOKE206), 7042 GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001, 7043 PPC_NONE, PPC2_BOOKE206), 7044 GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001, 7045 PPC_NONE, PPC2_BOOKE206), 7046 GEN_HANDLER2_E(msgsnd, "msgsnd", 0x1F, 0x0E, 0x06, 0x03ff0001, 7047 PPC_NONE, PPC2_PRCNTL), 7048 GEN_HANDLER2_E(msgclr, "msgclr", 0x1F, 0x0E, 0x07, 0x03ff0001, 7049 PPC_NONE, PPC2_PRCNTL), 7050 GEN_HANDLER2_E(msgsync, "msgsync", 0x1F, 0x16, 0x1B, 0x00000000, 7051 PPC_NONE, PPC2_PRCNTL), 7052 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE), 7053 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE), 7054 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC), 7055 GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801, 7056 PPC_BOOKE, PPC2_BOOKE206), 7057 GEN_HANDLER(msync_4xx, 0x1F, 0x16, 0x12, 0x03FFF801, PPC_BOOKE), 7058 GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001, 7059 PPC_BOOKE, PPC2_BOOKE206), 7060 GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, 7061 PPC_440_SPEC), 7062 GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC), 7063 GEN_HANDLER(lvsr, 0x1f, 0x06, 
0x01, 0x00000001, PPC_ALTIVEC), 7064 GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC), 7065 GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC), 7066 GEN_HANDLER(vmladduhm, 0x04, 0x11, 0xFF, 0x00000000, PPC_ALTIVEC), 7067 #if defined(TARGET_PPC64) 7068 GEN_HANDLER_E(maddhd_maddhdu, 0x04, 0x18, 0xFF, 0x00000000, PPC_NONE, 7069 PPC2_ISA300), 7070 GEN_HANDLER_E(maddld, 0x04, 0x19, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300), 7071 #endif 7072 7073 #undef GEN_INT_ARITH_ADD 7074 #undef GEN_INT_ARITH_ADD_CONST 7075 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \ 7076 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER), 7077 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \ 7078 add_ca, compute_ca, compute_ov) \ 7079 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER), 7080 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0) 7081 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1) 7082 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0) 7083 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1) 7084 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0) 7085 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1) 7086 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0) 7087 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1) 7088 GEN_HANDLER_E(addex, 0x1F, 0x0A, 0x05, 0x00000000, PPC_NONE, PPC2_ISA300), 7089 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0) 7090 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1) 7091 7092 #undef GEN_INT_ARITH_DIVW 7093 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \ 7094 GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER) 7095 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0), 7096 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1), 7097 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0), 7098 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1), 7099 GEN_HANDLER_E(divwe, 0x1F, 0x0B, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206), 7100 GEN_HANDLER_E(divweo, 0x1F, 0x0B, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206), 7101 GEN_HANDLER_E(divweu, 0x1F, 0x0B, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206), 7102 GEN_HANDLER_E(divweuo, 0x1F, 0x0B, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206), 7103 GEN_HANDLER_E(modsw, 0x1F, 0x0B, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300), 7104 GEN_HANDLER_E(moduw, 0x1F, 0x0B, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300), 7105 7106 #if defined(TARGET_PPC64) 7107 #undef GEN_INT_ARITH_DIVD 7108 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \ 7109 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B) 7110 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0), 7111 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1), 7112 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0), 7113 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1), 7114 7115 GEN_HANDLER_E(divdeu, 0x1F, 0x09, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206), 7116 GEN_HANDLER_E(divdeuo, 0x1F, 0x09, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206), 7117 GEN_HANDLER_E(divde, 0x1F, 0x09, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206), 7118 GEN_HANDLER_E(divdeo, 0x1F, 0x09, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206), 7119 GEN_HANDLER_E(modsd, 0x1F, 0x09, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300), 7120 GEN_HANDLER_E(modud, 0x1F, 0x09, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300), 7121 7122 #undef GEN_INT_ARITH_MUL_HELPER 7123 #define GEN_INT_ARITH_MUL_HELPER(name, opc3) \ 7124 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B) 7125 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00), 7126 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02), 7127 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17), 7128 #endif 7129 7130 #undef GEN_INT_ARITH_SUBF 7131 #undef GEN_INT_ARITH_SUBF_CONST 7132 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \ 7133 GEN_HANDLER(name, 0x1F, 0x08, 
opc3, 0x00000000, PPC_INTEGER), 7134 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \ 7135 add_ca, compute_ca, compute_ov) \ 7136 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER), 7137 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0) 7138 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1) 7139 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0) 7140 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1) 7141 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0) 7142 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1) 7143 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0) 7144 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1) 7145 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0) 7146 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1) 7147 7148 #undef GEN_LOGICAL1 7149 #undef GEN_LOGICAL2 7150 #define GEN_LOGICAL2(name, tcg_op, opc, type) \ 7151 GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type) 7152 #define GEN_LOGICAL1(name, tcg_op, opc, type) \ 7153 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type) 7154 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER), 7155 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER), 7156 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER), 7157 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER), 7158 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER), 7159 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER), 7160 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER), 7161 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER), 7162 #if defined(TARGET_PPC64) 7163 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B), 7164 #endif 7165 7166 #if defined(TARGET_PPC64) 7167 #undef GEN_PPC64_R2 7168 #undef GEN_PPC64_R4 7169 #define GEN_PPC64_R2(name, opc1, opc2) \ 7170 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\ 7171 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \ 7172 PPC_64B) 7173 #define GEN_PPC64_R4(name, opc1, opc2) \ 7174 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\ 7175 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \ 7176 PPC_64B), \ 7177 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \ 7178 PPC_64B), \ 7179 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \ 7180 PPC_64B) 7181 GEN_PPC64_R4(rldicl, 0x1E, 0x00), 7182 GEN_PPC64_R4(rldicr, 0x1E, 0x02), 7183 GEN_PPC64_R4(rldic, 0x1E, 0x04), 7184 GEN_PPC64_R2(rldcl, 0x1E, 0x08), 7185 GEN_PPC64_R2(rldcr, 0x1E, 0x09), 7186 GEN_PPC64_R4(rldimi, 0x1E, 0x06), 7187 #endif 7188 7189 #undef GEN_LD 7190 #undef GEN_LDU 7191 #undef GEN_LDUX 7192 #undef GEN_LDX_E 7193 #undef GEN_LDS 7194 #define GEN_LD(name, ldop, opc, type) \ 7195 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type), 7196 #define GEN_LDU(name, ldop, opc, type) \ 7197 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type), 7198 #define GEN_LDUX(name, ldop, opc2, opc3, type) \ 7199 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type), 7200 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \ 7201 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2), 7202 #define GEN_LDS(name, ldop, op, type) \ 7203 GEN_LD(name, ldop, op | 0x20, type) \ 7204 GEN_LDU(name, ldop, op | 0x21, type) \ 7205 GEN_LDUX(name, ldop, 0x17, op | 0x01, type) \ 7206 GEN_LDX(name, ldop, 0x17, op | 0x00, type) 7207 7208 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER) 7209 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER) 7210 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER) 7211 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER) 7212 
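/* Each GEN_LDS invocation above registers the base, update, indexed and
 * update-indexed forms of the load (e.g. lbz, lbzu, lbzx, lbzux).
 */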
#if defined(TARGET_PPC64) 7213 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B) 7214 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B) 7215 GEN_LDUX(ld, ld64_i64, 0x15, 0x01, PPC_64B) 7216 GEN_LDX(ld, ld64_i64, 0x15, 0x00, PPC_64B) 7217 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE) 7218 7219 /* HV/P7 and later only */ 7220 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST) 7221 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x18, PPC_CILDST) 7222 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST) 7223 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST) 7224 #endif 7225 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER) 7226 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER) 7227 7228 /* External PID based load */ 7229 #undef GEN_LDEPX 7230 #define GEN_LDEPX(name, ldop, opc2, opc3) \ 7231 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3, \ 7232 0x00000001, PPC_NONE, PPC2_BOOKE206), 7233 7234 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02) 7235 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08) 7236 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00) 7237 #if defined(TARGET_PPC64) 7238 GEN_LDEPX(ld, DEF_MEMOP(MO_Q), 0x1D, 0x00) 7239 #endif 7240 7241 #undef GEN_ST 7242 #undef GEN_STU 7243 #undef GEN_STUX 7244 #undef GEN_STX_E 7245 #undef GEN_STS 7246 #define GEN_ST(name, stop, opc, type) \ 7247 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type), 7248 #define GEN_STU(name, stop, opc, type) \ 7249 GEN_HANDLER(stop##u, opc, 0xFF, 0xFF, 0x00000000, type), 7250 #define GEN_STUX(name, stop, opc2, opc3, type) \ 7251 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type), 7252 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \ 7253 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000000, type, type2), 7254 #define GEN_STS(name, stop, op, type) \ 7255 GEN_ST(name, stop, op | 0x20, type) \ 7256 GEN_STU(name, stop, op | 0x21, type) \ 7257 GEN_STUX(name, stop, 0x17, op | 0x01, type) \ 7258 GEN_STX(name, stop, 0x17, op | 0x00, type) 7259 7260 GEN_STS(stb, st8, 0x06, PPC_INTEGER) 7261 GEN_STS(sth, st16, 0x0C, PPC_INTEGER) 7262 GEN_STS(stw, st32, 0x04, PPC_INTEGER) 7263 #if defined(TARGET_PPC64) 7264 GEN_STUX(std, st64_i64, 0x15, 0x05, PPC_64B) 7265 GEN_STX(std, st64_i64, 0x15, 0x04, PPC_64B) 7266 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE) 7267 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST) 7268 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST) 7269 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST) 7270 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST) 7271 #endif 7272 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER) 7273 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER) 7274 7275 #undef GEN_STEPX 7276 #define GEN_STEPX(name, ldop, opc2, opc3) \ 7277 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3, \ 7278 0x00000001, PPC_NONE, PPC2_BOOKE206), 7279 7280 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06) 7281 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C) 7282 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04) 7283 #if defined(TARGET_PPC64) 7284 GEN_STEPX(std, DEF_MEMOP(MO_Q), 0x1D, 0x04) 7285 #endif 7286 7287 #undef GEN_CRLOGIC 7288 #define GEN_CRLOGIC(name, tcg_op, opc) \ 7289 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER) 7290 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08), 7291 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04), 7292 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09), 7293 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07), 7294 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01), 7295 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E), 7296 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D), 7297 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06), 
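/* The PowerPC 405 MAC instructions implemented earlier are re-listed as
 * opcode table entries by redefining GEN_MAC_HANDLER to expand to
 * GEN_HANDLER lines.
 */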
7298 7299 #undef GEN_MAC_HANDLER 7300 #define GEN_MAC_HANDLER(name, opc2, opc3) \ 7301 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC) 7302 GEN_MAC_HANDLER(macchw, 0x0C, 0x05), 7303 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15), 7304 GEN_MAC_HANDLER(macchws, 0x0C, 0x07), 7305 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17), 7306 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06), 7307 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16), 7308 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04), 7309 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14), 7310 GEN_MAC_HANDLER(machhw, 0x0C, 0x01), 7311 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11), 7312 GEN_MAC_HANDLER(machhws, 0x0C, 0x03), 7313 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13), 7314 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02), 7315 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12), 7316 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00), 7317 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10), 7318 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D), 7319 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D), 7320 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F), 7321 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F), 7322 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C), 7323 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C), 7324 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E), 7325 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E), 7326 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05), 7327 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15), 7328 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07), 7329 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17), 7330 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01), 7331 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11), 7332 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03), 7333 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13), 7334 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D), 7335 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D), 7336 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F), 7337 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F), 7338 GEN_MAC_HANDLER(mulchw, 0x08, 0x05), 7339 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04), 7340 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01), 7341 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00), 7342 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D), 7343 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C), 7344 7345 GEN_HANDLER2_E(tbegin, "tbegin", 0x1F, 0x0E, 0x14, 0x01DFF800, \ 7346 PPC_NONE, PPC2_TM), 7347 GEN_HANDLER2_E(tend, "tend", 0x1F, 0x0E, 0x15, 0x01FFF800, \ 7348 PPC_NONE, PPC2_TM), 7349 GEN_HANDLER2_E(tabort, "tabort", 0x1F, 0x0E, 0x1C, 0x03E0F800, \ 7350 PPC_NONE, PPC2_TM), 7351 GEN_HANDLER2_E(tabortwc, "tabortwc", 0x1F, 0x0E, 0x18, 0x00000000, \ 7352 PPC_NONE, PPC2_TM), 7353 GEN_HANDLER2_E(tabortwci, "tabortwci", 0x1F, 0x0E, 0x1A, 0x00000000, \ 7354 PPC_NONE, PPC2_TM), 7355 GEN_HANDLER2_E(tabortdc, "tabortdc", 0x1F, 0x0E, 0x19, 0x00000000, \ 7356 PPC_NONE, PPC2_TM), 7357 GEN_HANDLER2_E(tabortdci, "tabortdci", 0x1F, 0x0E, 0x1B, 0x00000000, \ 7358 PPC_NONE, PPC2_TM), 7359 GEN_HANDLER2_E(tsr, "tsr", 0x1F, 0x0E, 0x17, 0x03DFF800, \ 7360 PPC_NONE, PPC2_TM), 7361 GEN_HANDLER2_E(tcheck, "tcheck", 0x1F, 0x0E, 0x16, 0x007FF800, \ 7362 PPC_NONE, PPC2_TM), 7363 GEN_HANDLER2_E(treclaim, "treclaim", 0x1F, 0x0E, 0x1D, 0x03E0F800, \ 7364 PPC_NONE, PPC2_TM), 7365 GEN_HANDLER2_E(trechkpt, "trechkpt", 0x1F, 0x0E, 0x1F, 0x03FFF800, \ 7366 PPC_NONE, PPC2_TM), 7367 7368 #include "translate/fp-ops.inc.c" 7369 7370 #include "translate/vmx-ops.inc.c" 7371 7372 #include "translate/vsx-ops.inc.c" 7373 7374 #include "translate/dfp-ops.inc.c" 7375 7376 #include "translate/spe-ops.inc.c" 7377 }; 7378 7379 #include "helper_regs.h" 7380 #include "translate_init.inc.c" 7381 7382 /*****************************************************************************/ 7383 /* Misc PowerPC helpers */ 7384 void ppc_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf, 
7385 int flags) 7386 { 7387 #define RGPL 4 7388 #define RFPL 4 7389 7390 PowerPCCPU *cpu = POWERPC_CPU(cs); 7391 CPUPPCState *env = &cpu->env; 7392 int i; 7393 7394 cpu_fprintf(f, "NIP " TARGET_FMT_lx " LR " TARGET_FMT_lx " CTR " 7395 TARGET_FMT_lx " XER " TARGET_FMT_lx " CPU#%d\n", 7396 env->nip, env->lr, env->ctr, cpu_read_xer(env), 7397 cs->cpu_index); 7398 cpu_fprintf(f, "MSR " TARGET_FMT_lx " HID0 " TARGET_FMT_lx " HF " 7399 TARGET_FMT_lx " iidx %d didx %d\n", 7400 env->msr, env->spr[SPR_HID0], 7401 env->hflags, env->immu_idx, env->dmmu_idx); 7402 #if !defined(NO_TIMER_DUMP) 7403 cpu_fprintf(f, "TB %08" PRIu32 " %08" PRIu64 7404 #if !defined(CONFIG_USER_ONLY) 7405 " DECR %08" PRIu32 7406 #endif 7407 "\n", 7408 cpu_ppc_load_tbu(env), cpu_ppc_load_tbl(env) 7409 #if !defined(CONFIG_USER_ONLY) 7410 , cpu_ppc_load_decr(env) 7411 #endif 7412 ); 7413 #endif 7414 for (i = 0; i < 32; i++) { 7415 if ((i & (RGPL - 1)) == 0) 7416 cpu_fprintf(f, "GPR%02d", i); 7417 cpu_fprintf(f, " %016" PRIx64, ppc_dump_gpr(env, i)); 7418 if ((i & (RGPL - 1)) == (RGPL - 1)) 7419 cpu_fprintf(f, "\n"); 7420 } 7421 cpu_fprintf(f, "CR "); 7422 for (i = 0; i < 8; i++) 7423 cpu_fprintf(f, "%01x", env->crf[i]); 7424 cpu_fprintf(f, " ["); 7425 for (i = 0; i < 8; i++) { 7426 char a = '-'; 7427 if (env->crf[i] & 0x08) 7428 a = 'L'; 7429 else if (env->crf[i] & 0x04) 7430 a = 'G'; 7431 else if (env->crf[i] & 0x02) 7432 a = 'E'; 7433 cpu_fprintf(f, " %c%c", a, env->crf[i] & 0x01 ? 'O' : ' '); 7434 } 7435 cpu_fprintf(f, " ] RES " TARGET_FMT_lx "\n", 7436 env->reserve_addr); 7437 7438 if (flags & CPU_DUMP_FPU) { 7439 for (i = 0; i < 32; i++) { 7440 if ((i & (RFPL - 1)) == 0) { 7441 cpu_fprintf(f, "FPR%02d", i); 7442 } 7443 cpu_fprintf(f, " %016" PRIx64, *cpu_fpr_ptr(env, i)); 7444 if ((i & (RFPL - 1)) == (RFPL - 1)) { 7445 cpu_fprintf(f, "\n"); 7446 } 7447 } 7448 cpu_fprintf(f, "FPSCR " TARGET_FMT_lx "\n", env->fpscr); 7449 } 7450 7451 #if !defined(CONFIG_USER_ONLY) 7452 cpu_fprintf(f, " SRR0 " TARGET_FMT_lx " SRR1 " TARGET_FMT_lx 7453 " PVR " TARGET_FMT_lx " VRSAVE " TARGET_FMT_lx "\n", 7454 env->spr[SPR_SRR0], env->spr[SPR_SRR1], 7455 env->spr[SPR_PVR], env->spr[SPR_VRSAVE]); 7456 7457 cpu_fprintf(f, "SPRG0 " TARGET_FMT_lx " SPRG1 " TARGET_FMT_lx 7458 " SPRG2 " TARGET_FMT_lx " SPRG3 " TARGET_FMT_lx "\n", 7459 env->spr[SPR_SPRG0], env->spr[SPR_SPRG1], 7460 env->spr[SPR_SPRG2], env->spr[SPR_SPRG3]); 7461 7462 cpu_fprintf(f, "SPRG4 " TARGET_FMT_lx " SPRG5 " TARGET_FMT_lx 7463 " SPRG6 " TARGET_FMT_lx " SPRG7 " TARGET_FMT_lx "\n", 7464 env->spr[SPR_SPRG4], env->spr[SPR_SPRG5], 7465 env->spr[SPR_SPRG6], env->spr[SPR_SPRG7]); 7466 7467 #if defined(TARGET_PPC64) 7468 if (env->excp_model == POWERPC_EXCP_POWER7 || 7469 env->excp_model == POWERPC_EXCP_POWER8) { 7470 cpu_fprintf(f, "HSRR0 " TARGET_FMT_lx " HSRR1 " TARGET_FMT_lx "\n", 7471 env->spr[SPR_HSRR0], env->spr[SPR_HSRR1]); 7472 } 7473 #endif 7474 if (env->excp_model == POWERPC_EXCP_BOOKE) { 7475 cpu_fprintf(f, "CSRR0 " TARGET_FMT_lx " CSRR1 " TARGET_FMT_lx 7476 " MCSRR0 " TARGET_FMT_lx " MCSRR1 " TARGET_FMT_lx "\n", 7477 env->spr[SPR_BOOKE_CSRR0], env->spr[SPR_BOOKE_CSRR1], 7478 env->spr[SPR_BOOKE_MCSRR0], env->spr[SPR_BOOKE_MCSRR1]); 7479 7480 cpu_fprintf(f, " TCR " TARGET_FMT_lx " TSR " TARGET_FMT_lx 7481 " ESR " TARGET_FMT_lx " DEAR " TARGET_FMT_lx "\n", 7482 env->spr[SPR_BOOKE_TCR], env->spr[SPR_BOOKE_TSR], 7483 env->spr[SPR_BOOKE_ESR], env->spr[SPR_BOOKE_DEAR]); 7484 7485 cpu_fprintf(f, " PIR " TARGET_FMT_lx " DECAR " TARGET_FMT_lx 7486 " IVPR " TARGET_FMT_lx " EPCR " 
void ppc_cpu_dump_statistics(CPUState *cs, FILE *f,
                             fprintf_function cpu_fprintf, int flags)
{
#if defined(DO_PPC_STATISTICS)
    PowerPCCPU *cpu = POWERPC_CPU(cs);
    opc_handler_t **t1, **t2, **t3, *handler;
    int op1, op2, op3;

    t1 = cpu->env.opcodes;
    for (op1 = 0; op1 < 64; op1++) {
        handler = t1[op1];
        if (is_indirect_opcode(handler)) {
            t2 = ind_table(handler);
            for (op2 = 0; op2 < 32; op2++) {
                handler = t2[op2];
                if (is_indirect_opcode(handler)) {
                    t3 = ind_table(handler);
                    for (op3 = 0; op3 < 32; op3++) {
                        handler = t3[op3];
                        if (handler->count == 0) {
                            continue;
                        }
                        cpu_fprintf(f, "%02x %02x %02x (%02x %04d) %16s: "
                                    "%016" PRIx64 " %" PRId64 "\n",
                                    op1, op2, op3, op1, (op3 << 5) | op2,
                                    handler->oname,
                                    handler->count, handler->count);
                    }
                } else {
                    if (handler->count == 0) {
                        continue;
                    }
                    cpu_fprintf(f, "%02x %02x (%02x %04d) %16s: "
                                "%016" PRIx64 " %" PRId64 "\n",
                                op1, op2, op1, op2, handler->oname,
                                handler->count, handler->count);
                }
            }
        } else {
            if (handler->count == 0) {
                continue;
            }
            cpu_fprintf(f, "%02x (%02x ) %16s: %016" PRIx64
                        " %" PRId64 "\n",
                        op1, op1, handler->oname,
                        handler->count, handler->count);
        }
    }
#endif
}

static void ppc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    CPUPPCState *env = cs->env_ptr;
    int bound;

    ctx->exception = POWERPC_EXCP_NONE;
    ctx->spr_cb = env->spr_cb;
    ctx->pr = msr_pr;
    ctx->mem_idx = env->dmmu_idx;
    ctx->dr = msr_dr;
#if !defined(CONFIG_USER_ONLY)
    ctx->hv = msr_hv || !env->has_hv_mode;
#endif
    ctx->insns_flags = env->insns_flags;
    ctx->insns_flags2 = env->insns_flags2;
    ctx->access_type = -1;
    ctx->need_access_type = !(env->mmu_model & POWERPC_MMU_64B);
    ctx->le_mode = !!(env->hflags & (1 << MSR_LE));
    ctx->default_tcg_memop_mask = ctx->le_mode ? MO_LE : MO_BE;
    ctx->flags = env->flags;
#if defined(TARGET_PPC64)
    ctx->sf_mode = msr_is_64bit(env, env->msr);
    ctx->has_cfar = !!(env->flags & POWERPC_FLAG_CFAR);
#endif
    ctx->lazy_tlb_flush = env->mmu_model == POWERPC_MMU_32B
        || env->mmu_model == POWERPC_MMU_601
        || (env->mmu_model & POWERPC_MMU_64B);

    ctx->fpu_enabled = !!msr_fp;
    if ((env->flags & POWERPC_FLAG_SPE) && msr_spe) {
        ctx->spe_enabled = !!msr_spe;
    } else {
        ctx->spe_enabled = false;
    }
    if ((env->flags & POWERPC_FLAG_VRE) && msr_vr) {
        ctx->altivec_enabled = !!msr_vr;
    } else {
        ctx->altivec_enabled = false;
    }
    if ((env->flags & POWERPC_FLAG_VSX) && msr_vsx) {
        ctx->vsx_enabled = !!msr_vsx;
    } else {
        ctx->vsx_enabled = false;
    }
#if defined(TARGET_PPC64)
    if ((env->flags & POWERPC_FLAG_TM) && msr_tm) {
        ctx->tm_enabled = !!msr_tm;
    } else {
        ctx->tm_enabled = false;
    }
#endif
    ctx->gtse = !!(env->spr[SPR_LPCR] & LPCR_GTSE);
    if ((env->flags & POWERPC_FLAG_SE) && msr_se) {
        ctx->singlestep_enabled = CPU_SINGLE_STEP;
    } else {
        ctx->singlestep_enabled = 0;
    }
    if ((env->flags & POWERPC_FLAG_BE) && msr_be) {
        ctx->singlestep_enabled |= CPU_BRANCH_STEP;
    }
    if ((env->flags & POWERPC_FLAG_DE) && msr_de) {
        target_ulong dbcr0 = env->spr[SPR_BOOKE_DBCR0];

        ctx->singlestep_enabled = 0;
        if (dbcr0 & DBCR0_ICMP) {
            ctx->singlestep_enabled |= CPU_SINGLE_STEP;
        }
        if (dbcr0 & DBCR0_BRT) {
            ctx->singlestep_enabled |= CPU_BRANCH_STEP;
        }
    }
    if (unlikely(ctx->base.singlestep_enabled)) {
        ctx->singlestep_enabled |= GDBSTUB_SINGLE_STEP;
    }
#if defined(DO_SINGLE_STEP) && 0
    /* Single step trace mode */
    msr_se = 1;
#endif

    bound = -(ctx->base.pc_first | TARGET_PAGE_MASK) / 4;
    ctx->base.max_insns = MIN(ctx->base.max_insns, bound);
}

static void ppc_tr_tb_start(DisasContextBase *db, CPUState *cs)
{
}

static void ppc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
{
    tcg_gen_insn_start(dcbase->pc_next);
}
static bool ppc_tr_breakpoint_check(DisasContextBase *dcbase, CPUState *cs,
                                    const CPUBreakpoint *bp)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    gen_debug_exception(ctx);
    dcbase->is_jmp = DISAS_NORETURN;
    /*
     * The address covered by the breakpoint must be included in
     * [tb->pc, tb->pc + tb->size) in order for it to be properly
     * cleared -- thus we increment the PC here so that the logic
     * setting tb->size below does the right thing.
     */
    ctx->base.pc_next += 4;
    return true;
}

static void ppc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    CPUPPCState *env = cs->env_ptr;
    opc_handler_t **table, *handler;

    LOG_DISAS("----------------\n");
    LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
              ctx->base.pc_next, ctx->mem_idx, (int)msr_ir);

    if (unlikely(need_byteswap(ctx))) {
        ctx->opcode = bswap32(cpu_ldl_code(env, ctx->base.pc_next));
    } else {
        ctx->opcode = cpu_ldl_code(env, ctx->base.pc_next);
    }
    LOG_DISAS("translate opcode %08x (%02x %02x %02x %02x) (%s)\n",
              ctx->opcode, opc1(ctx->opcode), opc2(ctx->opcode),
              opc3(ctx->opcode), opc4(ctx->opcode),
              ctx->le_mode ? "little" : "big");
    ctx->base.pc_next += 4;
    table = env->opcodes;
    handler = table[opc1(ctx->opcode)];
    if (is_indirect_opcode(handler)) {
        table = ind_table(handler);
        handler = table[opc2(ctx->opcode)];
        if (is_indirect_opcode(handler)) {
            table = ind_table(handler);
            handler = table[opc3(ctx->opcode)];
            if (is_indirect_opcode(handler)) {
                table = ind_table(handler);
                handler = table[opc4(ctx->opcode)];
            }
        }
    }
    /* Is opcode *REALLY* valid ? */
    if (unlikely(handler->handler == &gen_invalid)) {
        qemu_log_mask(LOG_GUEST_ERROR, "invalid/unsupported opcode: "
                      "%02x - %02x - %02x - %02x (%08x) "
                      TARGET_FMT_lx " %d\n",
                      opc1(ctx->opcode), opc2(ctx->opcode),
                      opc3(ctx->opcode), opc4(ctx->opcode),
                      ctx->opcode, ctx->base.pc_next - 4, (int)msr_ir);
    } else {
        uint32_t inval;

        if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE)
                     && Rc(ctx->opcode))) {
            inval = handler->inval2;
        } else {
            inval = handler->inval1;
        }

        if (unlikely((ctx->opcode & inval) != 0)) {
            qemu_log_mask(LOG_GUEST_ERROR, "invalid bits: %08x for opcode: "
                          "%02x - %02x - %02x - %02x (%08x) "
                          TARGET_FMT_lx "\n", ctx->opcode & inval,
                          opc1(ctx->opcode), opc2(ctx->opcode),
                          opc3(ctx->opcode), opc4(ctx->opcode),
                          ctx->opcode, ctx->base.pc_next - 4);
            gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
            ctx->base.is_jmp = DISAS_NORETURN;
            return;
        }
    }
    (*(handler->handler))(ctx);
#if defined(DO_PPC_STATISTICS)
    handler->count++;
#endif
    /* Check trace mode exceptions */
    if (unlikely(ctx->singlestep_enabled & CPU_SINGLE_STEP &&
                 (ctx->base.pc_next <= 0x100 || ctx->base.pc_next > 0xF00) &&
                 ctx->exception != POWERPC_SYSCALL &&
                 ctx->exception != POWERPC_EXCP_TRAP &&
                 ctx->exception != POWERPC_EXCP_BRANCH)) {
        uint32_t excp = gen_prep_dbgex(ctx, POWERPC_EXCP_TRACE);
        if (excp != POWERPC_EXCP_NONE) {
            gen_exception_nip(ctx, excp, ctx->base.pc_next);
        }
    }

    if (tcg_check_temp_count()) {
        qemu_log("Opcode %02x %02x %02x %02x (%08x) leaked "
                 "temporaries\n", opc1(ctx->opcode), opc2(ctx->opcode),
                 opc3(ctx->opcode), opc4(ctx->opcode), ctx->opcode);
    }

    ctx->base.is_jmp = ctx->exception == POWERPC_EXCP_NONE ?
        DISAS_NEXT : DISAS_NORETURN;
}
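
/*
 * End-of-block hook.  When translation stopped without raising an
 * exception, chain to the next block at pc_next.  POWERPC_EXCP_BRANCH
 * means the branch emitters already generated the block exit, so only
 * the remaining exception cases need an explicit tcg_gen_exit_tb() here.
 */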
static void ppc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    if (ctx->exception == POWERPC_EXCP_NONE) {
        gen_goto_tb(ctx, 0, ctx->base.pc_next);
    } else if (ctx->exception != POWERPC_EXCP_BRANCH) {
        if (unlikely(ctx->base.singlestep_enabled)) {
            gen_debug_exception(ctx);
        }
        /* Generate the return instruction */
        tcg_gen_exit_tb(NULL, 0);
    }
}

static void ppc_tr_disas_log(const DisasContextBase *dcbase, CPUState *cs)
{
    qemu_log("IN: %s\n", lookup_symbol(dcbase->pc_first));
    log_target_disas(cs, dcbase->pc_first, dcbase->tb->size);
}

static const TranslatorOps ppc_tr_ops = {
    .init_disas_context = ppc_tr_init_disas_context,
    .tb_start           = ppc_tr_tb_start,
    .insn_start         = ppc_tr_insn_start,
    .breakpoint_check   = ppc_tr_breakpoint_check,
    .translate_insn     = ppc_tr_translate_insn,
    .tb_stop            = ppc_tr_tb_stop,
    .disas_log          = ppc_tr_disas_log,
};

void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb)
{
    DisasContext ctx;

    translator_loop(&ppc_tr_ops, &ctx.base, cs, tb);
}

void restore_state_to_opc(CPUPPCState *env, TranslationBlock *tb,
                          target_ulong *data)
{
    env->nip = data[0];
}