1 /* 2 * PowerPC emulation for qemu: main translation routines. 3 * 4 * Copyright (c) 2003-2007 Jocelyn Mayer 5 * Copyright (C) 2011 Freescale Semiconductor, Inc. 6 * 7 * This library is free software; you can redistribute it and/or 8 * modify it under the terms of the GNU Lesser General Public 9 * License as published by the Free Software Foundation; either 10 * version 2 of the License, or (at your option) any later version. 11 * 12 * This library is distributed in the hope that it will be useful, 13 * but WITHOUT ANY WARRANTY; without even the implied warranty of 14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 15 * Lesser General Public License for more details. 16 * 17 * You should have received a copy of the GNU Lesser General Public 18 * License along with this library; if not, see <http://www.gnu.org/licenses/>. 19 */ 20 21 #include "qemu/osdep.h" 22 #include "cpu.h" 23 #include "internal.h" 24 #include "disas/disas.h" 25 #include "exec/exec-all.h" 26 #include "tcg-op.h" 27 #include "qemu/host-utils.h" 28 #include "exec/cpu_ldst.h" 29 30 #include "exec/helper-proto.h" 31 #include "exec/helper-gen.h" 32 33 #include "trace-tcg.h" 34 #include "exec/translator.h" 35 #include "exec/log.h" 36 #include "qemu/atomic128.h" 37 38 39 #define CPU_SINGLE_STEP 0x1 40 #define CPU_BRANCH_STEP 0x2 41 #define GDBSTUB_SINGLE_STEP 0x4 42 43 /* Include definitions for instructions classes and implementations flags */ 44 //#define PPC_DEBUG_DISAS 45 //#define DO_PPC_STATISTICS 46 47 #ifdef PPC_DEBUG_DISAS 48 # define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__) 49 #else 50 # define LOG_DISAS(...) do { } while (0) 51 #endif 52 /*****************************************************************************/ 53 /* Code translation helpers */ 54 55 /* global register indexes */ 56 static char cpu_reg_names[10*3 + 22*4 /* GPR */ 57 + 10*4 + 22*5 /* SPE GPRh */ 58 + 8*5 /* CRF */]; 59 static TCGv cpu_gpr[32]; 60 static TCGv cpu_gprh[32]; 61 static TCGv_i32 cpu_crf[8]; 62 static TCGv cpu_nip; 63 static TCGv cpu_msr; 64 static TCGv cpu_ctr; 65 static TCGv cpu_lr; 66 #if defined(TARGET_PPC64) 67 static TCGv cpu_cfar; 68 #endif 69 static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca, cpu_ov32, cpu_ca32; 70 static TCGv cpu_reserve; 71 static TCGv cpu_reserve_val; 72 static TCGv cpu_fpscr; 73 static TCGv_i32 cpu_access_type; 74 75 #include "exec/gen-icount.h" 76 77 void ppc_translate_init(void) 78 { 79 int i; 80 char* p; 81 size_t cpu_reg_names_size; 82 83 p = cpu_reg_names; 84 cpu_reg_names_size = sizeof(cpu_reg_names); 85 86 for (i = 0; i < 8; i++) { 87 snprintf(p, cpu_reg_names_size, "crf%d", i); 88 cpu_crf[i] = tcg_global_mem_new_i32(cpu_env, 89 offsetof(CPUPPCState, crf[i]), p); 90 p += 5; 91 cpu_reg_names_size -= 5; 92 } 93 94 for (i = 0; i < 32; i++) { 95 snprintf(p, cpu_reg_names_size, "r%d", i); 96 cpu_gpr[i] = tcg_global_mem_new(cpu_env, 97 offsetof(CPUPPCState, gpr[i]), p); 98 p += (i < 10) ? 3 : 4; 99 cpu_reg_names_size -= (i < 10) ? 3 : 4; 100 snprintf(p, cpu_reg_names_size, "r%dH", i); 101 cpu_gprh[i] = tcg_global_mem_new(cpu_env, 102 offsetof(CPUPPCState, gprh[i]), p); 103 p += (i < 10) ? 4 : 5; 104 cpu_reg_names_size -= (i < 10) ? 
4 : 5; 105 } 106 107 cpu_nip = tcg_global_mem_new(cpu_env, 108 offsetof(CPUPPCState, nip), "nip"); 109 110 cpu_msr = tcg_global_mem_new(cpu_env, 111 offsetof(CPUPPCState, msr), "msr"); 112 113 cpu_ctr = tcg_global_mem_new(cpu_env, 114 offsetof(CPUPPCState, ctr), "ctr"); 115 116 cpu_lr = tcg_global_mem_new(cpu_env, 117 offsetof(CPUPPCState, lr), "lr"); 118 119 #if defined(TARGET_PPC64) 120 cpu_cfar = tcg_global_mem_new(cpu_env, 121 offsetof(CPUPPCState, cfar), "cfar"); 122 #endif 123 124 cpu_xer = tcg_global_mem_new(cpu_env, 125 offsetof(CPUPPCState, xer), "xer"); 126 cpu_so = tcg_global_mem_new(cpu_env, 127 offsetof(CPUPPCState, so), "SO"); 128 cpu_ov = tcg_global_mem_new(cpu_env, 129 offsetof(CPUPPCState, ov), "OV"); 130 cpu_ca = tcg_global_mem_new(cpu_env, 131 offsetof(CPUPPCState, ca), "CA"); 132 cpu_ov32 = tcg_global_mem_new(cpu_env, 133 offsetof(CPUPPCState, ov32), "OV32"); 134 cpu_ca32 = tcg_global_mem_new(cpu_env, 135 offsetof(CPUPPCState, ca32), "CA32"); 136 137 cpu_reserve = tcg_global_mem_new(cpu_env, 138 offsetof(CPUPPCState, reserve_addr), 139 "reserve_addr"); 140 cpu_reserve_val = tcg_global_mem_new(cpu_env, 141 offsetof(CPUPPCState, reserve_val), 142 "reserve_val"); 143 144 cpu_fpscr = tcg_global_mem_new(cpu_env, 145 offsetof(CPUPPCState, fpscr), "fpscr"); 146 147 cpu_access_type = tcg_global_mem_new_i32(cpu_env, 148 offsetof(CPUPPCState, access_type), "access_type"); 149 } 150 151 /* internal defines */ 152 struct DisasContext { 153 DisasContextBase base; 154 uint32_t opcode; 155 uint32_t exception; 156 /* Routine used to access memory */ 157 bool pr, hv, dr, le_mode; 158 bool lazy_tlb_flush; 159 bool need_access_type; 160 int mem_idx; 161 int access_type; 162 /* Translation flags */ 163 TCGMemOp default_tcg_memop_mask; 164 #if defined(TARGET_PPC64) 165 bool sf_mode; 166 bool has_cfar; 167 #endif 168 bool fpu_enabled; 169 bool altivec_enabled; 170 bool vsx_enabled; 171 bool spe_enabled; 172 bool tm_enabled; 173 bool gtse; 174 ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */ 175 int singlestep_enabled; 176 uint32_t flags; 177 uint64_t insns_flags; 178 uint64_t insns_flags2; 179 }; 180 181 /* Return true iff byteswap is needed in a scalar memop */ 182 static inline bool need_byteswap(const DisasContext *ctx) 183 { 184 #if defined(TARGET_WORDS_BIGENDIAN) 185 return ctx->le_mode; 186 #else 187 return !ctx->le_mode; 188 #endif 189 } 190 191 /* True when active word size < size of target_long. 
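   For example, a 64-bit implementation running with MSR[SF] = 0 executes in
   32-bit mode: effective addresses and CR-recording comparisons must then be
   truncated to 32 bits even though target_long is 64 bits wide.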
*/ 192 #ifdef TARGET_PPC64 193 # define NARROW_MODE(C) (!(C)->sf_mode) 194 #else 195 # define NARROW_MODE(C) 0 196 #endif 197 198 struct opc_handler_t { 199 /* invalid bits for instruction 1 (Rc(opcode) == 0) */ 200 uint32_t inval1; 201 /* invalid bits for instruction 2 (Rc(opcode) == 1) */ 202 uint32_t inval2; 203 /* instruction type */ 204 uint64_t type; 205 /* extended instruction type */ 206 uint64_t type2; 207 /* handler */ 208 void (*handler)(DisasContext *ctx); 209 #if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU) 210 const char *oname; 211 #endif 212 #if defined(DO_PPC_STATISTICS) 213 uint64_t count; 214 #endif 215 }; 216 217 /* SPR load/store helpers */ 218 static inline void gen_load_spr(TCGv t, int reg) 219 { 220 tcg_gen_ld_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg])); 221 } 222 223 static inline void gen_store_spr(int reg, TCGv t) 224 { 225 tcg_gen_st_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg])); 226 } 227 228 static inline void gen_set_access_type(DisasContext *ctx, int access_type) 229 { 230 if (ctx->need_access_type && ctx->access_type != access_type) { 231 tcg_gen_movi_i32(cpu_access_type, access_type); 232 ctx->access_type = access_type; 233 } 234 } 235 236 static inline void gen_update_nip(DisasContext *ctx, target_ulong nip) 237 { 238 if (NARROW_MODE(ctx)) { 239 nip = (uint32_t)nip; 240 } 241 tcg_gen_movi_tl(cpu_nip, nip); 242 } 243 244 static void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error) 245 { 246 TCGv_i32 t0, t1; 247 248 /* These are all synchronous exceptions, we set the PC back to 249 * the faulting instruction 250 */ 251 if (ctx->exception == POWERPC_EXCP_NONE) { 252 gen_update_nip(ctx, ctx->base.pc_next - 4); 253 } 254 t0 = tcg_const_i32(excp); 255 t1 = tcg_const_i32(error); 256 gen_helper_raise_exception_err(cpu_env, t0, t1); 257 tcg_temp_free_i32(t0); 258 tcg_temp_free_i32(t1); 259 ctx->exception = (excp); 260 } 261 262 static void gen_exception(DisasContext *ctx, uint32_t excp) 263 { 264 TCGv_i32 t0; 265 266 /* These are all synchronous exceptions, we set the PC back to 267 * the faulting instruction 268 */ 269 if (ctx->exception == POWERPC_EXCP_NONE) { 270 gen_update_nip(ctx, ctx->base.pc_next - 4); 271 } 272 t0 = tcg_const_i32(excp); 273 gen_helper_raise_exception(cpu_env, t0); 274 tcg_temp_free_i32(t0); 275 ctx->exception = (excp); 276 } 277 278 static void gen_exception_nip(DisasContext *ctx, uint32_t excp, 279 target_ulong nip) 280 { 281 TCGv_i32 t0; 282 283 gen_update_nip(ctx, nip); 284 t0 = tcg_const_i32(excp); 285 gen_helper_raise_exception(cpu_env, t0); 286 tcg_temp_free_i32(t0); 287 ctx->exception = (excp); 288 } 289 290 /* 291 * Tells the caller what is the appropriate exception to generate and prepares 292 * SPR registers for this exception. 293 * 294 * The exception can be either POWERPC_EXCP_TRACE (on most PowerPCs) or 295 * POWERPC_EXCP_DEBUG (on BookE). 
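   In the BookE case the cause is also recorded in DBSR before the exception
   is raised: ICMP for a completed single step, BRT for a traced branch.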
296 */ 297 static uint32_t gen_prep_dbgex(DisasContext *ctx) 298 { 299 if (ctx->flags & POWERPC_FLAG_DE) { 300 target_ulong dbsr = 0; 301 if (ctx->singlestep_enabled & CPU_SINGLE_STEP) { 302 dbsr = DBCR0_ICMP; 303 } else { 304 /* Must have been branch */ 305 dbsr = DBCR0_BRT; 306 } 307 TCGv t0 = tcg_temp_new(); 308 gen_load_spr(t0, SPR_BOOKE_DBSR); 309 tcg_gen_ori_tl(t0, t0, dbsr); 310 gen_store_spr(SPR_BOOKE_DBSR, t0); 311 tcg_temp_free(t0); 312 return POWERPC_EXCP_DEBUG; 313 } else { 314 return POWERPC_EXCP_TRACE; 315 } 316 } 317 318 static void gen_debug_exception(DisasContext *ctx) 319 { 320 TCGv_i32 t0; 321 322 /* These are all synchronous exceptions, we set the PC back to 323 * the faulting instruction 324 */ 325 if ((ctx->exception != POWERPC_EXCP_BRANCH) && 326 (ctx->exception != POWERPC_EXCP_SYNC)) { 327 gen_update_nip(ctx, ctx->base.pc_next); 328 } 329 t0 = tcg_const_i32(EXCP_DEBUG); 330 gen_helper_raise_exception(cpu_env, t0); 331 tcg_temp_free_i32(t0); 332 } 333 334 static inline void gen_inval_exception(DisasContext *ctx, uint32_t error) 335 { 336 /* Will be converted to program check if needed */ 337 gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_INVAL | error); 338 } 339 340 static inline void gen_priv_exception(DisasContext *ctx, uint32_t error) 341 { 342 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_PRIV | error); 343 } 344 345 static inline void gen_hvpriv_exception(DisasContext *ctx, uint32_t error) 346 { 347 /* Will be converted to program check if needed */ 348 gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_PRIV | error); 349 } 350 351 /* Stop translation */ 352 static inline void gen_stop_exception(DisasContext *ctx) 353 { 354 gen_update_nip(ctx, ctx->base.pc_next); 355 ctx->exception = POWERPC_EXCP_STOP; 356 } 357 358 #ifndef CONFIG_USER_ONLY 359 /* No need to update nip here, as execution flow will change */ 360 static inline void gen_sync_exception(DisasContext *ctx) 361 { 362 ctx->exception = POWERPC_EXCP_SYNC; 363 } 364 #endif 365 366 #define GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \ 367 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE) 368 369 #define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2) \ 370 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2) 371 372 #define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type) \ 373 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE) 374 375 #define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2) \ 376 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2) 377 378 #define GEN_HANDLER_E_2(name, opc1, opc2, opc3, opc4, inval, type, type2) \ 379 GEN_OPCODE3(name, opc1, opc2, opc3, opc4, inval, type, type2) 380 381 #define GEN_HANDLER2_E_2(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) \ 382 GEN_OPCODE4(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) 383 384 typedef struct opcode_t { 385 unsigned char opc1, opc2, opc3, opc4; 386 #if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */ 387 unsigned char pad[4]; 388 #endif 389 opc_handler_t handler; 390 const char *oname; 391 } opcode_t; 392 393 /* Helpers for priv. 
check */ 394 #define GEN_PRIV \ 395 do { \ 396 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); return; \ 397 } while (0) 398 399 #if defined(CONFIG_USER_ONLY) 400 #define CHK_HV GEN_PRIV 401 #define CHK_SV GEN_PRIV 402 #define CHK_HVRM GEN_PRIV 403 #else 404 #define CHK_HV \ 405 do { \ 406 if (unlikely(ctx->pr || !ctx->hv)) { \ 407 GEN_PRIV; \ 408 } \ 409 } while (0) 410 #define CHK_SV \ 411 do { \ 412 if (unlikely(ctx->pr)) { \ 413 GEN_PRIV; \ 414 } \ 415 } while (0) 416 #define CHK_HVRM \ 417 do { \ 418 if (unlikely(ctx->pr || !ctx->hv || ctx->dr)) { \ 419 GEN_PRIV; \ 420 } \ 421 } while (0) 422 #endif 423 424 #define CHK_NONE 425 426 /*****************************************************************************/ 427 /* PowerPC instructions table */ 428 429 #if defined(DO_PPC_STATISTICS) 430 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \ 431 { \ 432 .opc1 = op1, \ 433 .opc2 = op2, \ 434 .opc3 = op3, \ 435 .opc4 = 0xff, \ 436 .handler = { \ 437 .inval1 = invl, \ 438 .type = _typ, \ 439 .type2 = _typ2, \ 440 .handler = &gen_##name, \ 441 .oname = stringify(name), \ 442 }, \ 443 .oname = stringify(name), \ 444 } 445 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \ 446 { \ 447 .opc1 = op1, \ 448 .opc2 = op2, \ 449 .opc3 = op3, \ 450 .opc4 = 0xff, \ 451 .handler = { \ 452 .inval1 = invl1, \ 453 .inval2 = invl2, \ 454 .type = _typ, \ 455 .type2 = _typ2, \ 456 .handler = &gen_##name, \ 457 .oname = stringify(name), \ 458 }, \ 459 .oname = stringify(name), \ 460 } 461 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \ 462 { \ 463 .opc1 = op1, \ 464 .opc2 = op2, \ 465 .opc3 = op3, \ 466 .opc4 = 0xff, \ 467 .handler = { \ 468 .inval1 = invl, \ 469 .type = _typ, \ 470 .type2 = _typ2, \ 471 .handler = &gen_##name, \ 472 .oname = onam, \ 473 }, \ 474 .oname = onam, \ 475 } 476 #define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2) \ 477 { \ 478 .opc1 = op1, \ 479 .opc2 = op2, \ 480 .opc3 = op3, \ 481 .opc4 = op4, \ 482 .handler = { \ 483 .inval1 = invl, \ 484 .type = _typ, \ 485 .type2 = _typ2, \ 486 .handler = &gen_##name, \ 487 .oname = stringify(name), \ 488 }, \ 489 .oname = stringify(name), \ 490 } 491 #define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2) \ 492 { \ 493 .opc1 = op1, \ 494 .opc2 = op2, \ 495 .opc3 = op3, \ 496 .opc4 = op4, \ 497 .handler = { \ 498 .inval1 = invl, \ 499 .type = _typ, \ 500 .type2 = _typ2, \ 501 .handler = &gen_##name, \ 502 .oname = onam, \ 503 }, \ 504 .oname = onam, \ 505 } 506 #else 507 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \ 508 { \ 509 .opc1 = op1, \ 510 .opc2 = op2, \ 511 .opc3 = op3, \ 512 .opc4 = 0xff, \ 513 .handler = { \ 514 .inval1 = invl, \ 515 .type = _typ, \ 516 .type2 = _typ2, \ 517 .handler = &gen_##name, \ 518 }, \ 519 .oname = stringify(name), \ 520 } 521 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \ 522 { \ 523 .opc1 = op1, \ 524 .opc2 = op2, \ 525 .opc3 = op3, \ 526 .opc4 = 0xff, \ 527 .handler = { \ 528 .inval1 = invl1, \ 529 .inval2 = invl2, \ 530 .type = _typ, \ 531 .type2 = _typ2, \ 532 .handler = &gen_##name, \ 533 }, \ 534 .oname = stringify(name), \ 535 } 536 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \ 537 { \ 538 .opc1 = op1, \ 539 .opc2 = op2, \ 540 .opc3 = op3, \ 541 .opc4 = 0xff, \ 542 .handler = { \ 543 .inval1 = invl, \ 544 .type = _typ, \ 545 .type2 = _typ2, \ 546 .handler = &gen_##name, \ 547 }, \ 548 .oname = onam, \ 549 } 550 #define GEN_OPCODE3(name, op1, op2, op3, op4, 
invl, _typ, _typ2) \ 551 { \ 552 .opc1 = op1, \ 553 .opc2 = op2, \ 554 .opc3 = op3, \ 555 .opc4 = op4, \ 556 .handler = { \ 557 .inval1 = invl, \ 558 .type = _typ, \ 559 .type2 = _typ2, \ 560 .handler = &gen_##name, \ 561 }, \ 562 .oname = stringify(name), \ 563 } 564 #define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2) \ 565 { \ 566 .opc1 = op1, \ 567 .opc2 = op2, \ 568 .opc3 = op3, \ 569 .opc4 = op4, \ 570 .handler = { \ 571 .inval1 = invl, \ 572 .type = _typ, \ 573 .type2 = _typ2, \ 574 .handler = &gen_##name, \ 575 }, \ 576 .oname = onam, \ 577 } 578 #endif 579 580 /* Invalid instruction */ 581 static void gen_invalid(DisasContext *ctx) 582 { 583 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 584 } 585 586 static opc_handler_t invalid_handler = { 587 .inval1 = 0xFFFFFFFF, 588 .inval2 = 0xFFFFFFFF, 589 .type = PPC_NONE, 590 .type2 = PPC_NONE, 591 .handler = gen_invalid, 592 }; 593 594 /*** Integer comparison ***/ 595 596 static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf) 597 { 598 TCGv t0 = tcg_temp_new(); 599 TCGv t1 = tcg_temp_new(); 600 TCGv_i32 t = tcg_temp_new_i32(); 601 602 tcg_gen_movi_tl(t0, CRF_EQ); 603 tcg_gen_movi_tl(t1, CRF_LT); 604 tcg_gen_movcond_tl((s ? TCG_COND_LT : TCG_COND_LTU), t0, arg0, arg1, t1, t0); 605 tcg_gen_movi_tl(t1, CRF_GT); 606 tcg_gen_movcond_tl((s ? TCG_COND_GT : TCG_COND_GTU), t0, arg0, arg1, t1, t0); 607 608 tcg_gen_trunc_tl_i32(t, t0); 609 tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so); 610 tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t); 611 612 tcg_temp_free(t0); 613 tcg_temp_free(t1); 614 tcg_temp_free_i32(t); 615 } 616 617 static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf) 618 { 619 TCGv t0 = tcg_const_tl(arg1); 620 gen_op_cmp(arg0, t0, s, crf); 621 tcg_temp_free(t0); 622 } 623 624 static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf) 625 { 626 TCGv t0, t1; 627 t0 = tcg_temp_new(); 628 t1 = tcg_temp_new(); 629 if (s) { 630 tcg_gen_ext32s_tl(t0, arg0); 631 tcg_gen_ext32s_tl(t1, arg1); 632 } else { 633 tcg_gen_ext32u_tl(t0, arg0); 634 tcg_gen_ext32u_tl(t1, arg1); 635 } 636 gen_op_cmp(t0, t1, s, crf); 637 tcg_temp_free(t1); 638 tcg_temp_free(t0); 639 } 640 641 static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf) 642 { 643 TCGv t0 = tcg_const_tl(arg1); 644 gen_op_cmp32(arg0, t0, s, crf); 645 tcg_temp_free(t0); 646 } 647 648 static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg) 649 { 650 if (NARROW_MODE(ctx)) { 651 gen_op_cmpi32(reg, 0, 1, 0); 652 } else { 653 gen_op_cmpi(reg, 0, 1, 0); 654 } 655 } 656 657 /* cmp */ 658 static void gen_cmp(DisasContext *ctx) 659 { 660 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) { 661 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 662 1, crfD(ctx->opcode)); 663 } else { 664 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 665 1, crfD(ctx->opcode)); 666 } 667 } 668 669 /* cmpi */ 670 static void gen_cmpi(DisasContext *ctx) 671 { 672 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) { 673 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode), 674 1, crfD(ctx->opcode)); 675 } else { 676 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode), 677 1, crfD(ctx->opcode)); 678 } 679 } 680 681 /* cmpl */ 682 static void gen_cmpl(DisasContext *ctx) 683 { 684 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) { 685 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 686 0, crfD(ctx->opcode)); 687 } else { 688 
gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 689 0, crfD(ctx->opcode)); 690 } 691 } 692 693 /* cmpli */ 694 static void gen_cmpli(DisasContext *ctx) 695 { 696 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) { 697 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode), 698 0, crfD(ctx->opcode)); 699 } else { 700 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode), 701 0, crfD(ctx->opcode)); 702 } 703 } 704 705 /* cmprb - range comparison: isupper, isalpha, islower */ 706 static void gen_cmprb(DisasContext *ctx) 707 { 708 TCGv_i32 src1 = tcg_temp_new_i32(); 709 TCGv_i32 src2 = tcg_temp_new_i32(); 710 TCGv_i32 src2lo = tcg_temp_new_i32(); 711 TCGv_i32 src2hi = tcg_temp_new_i32(); 712 TCGv_i32 crf = cpu_crf[crfD(ctx->opcode)]; 713 714 tcg_gen_trunc_tl_i32(src1, cpu_gpr[rA(ctx->opcode)]); 715 tcg_gen_trunc_tl_i32(src2, cpu_gpr[rB(ctx->opcode)]); 716 717 tcg_gen_andi_i32(src1, src1, 0xFF); 718 tcg_gen_ext8u_i32(src2lo, src2); 719 tcg_gen_shri_i32(src2, src2, 8); 720 tcg_gen_ext8u_i32(src2hi, src2); 721 722 tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1); 723 tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi); 724 tcg_gen_and_i32(crf, src2lo, src2hi); 725 726 if (ctx->opcode & 0x00200000) { 727 tcg_gen_shri_i32(src2, src2, 8); 728 tcg_gen_ext8u_i32(src2lo, src2); 729 tcg_gen_shri_i32(src2, src2, 8); 730 tcg_gen_ext8u_i32(src2hi, src2); 731 tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1); 732 tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi); 733 tcg_gen_and_i32(src2lo, src2lo, src2hi); 734 tcg_gen_or_i32(crf, crf, src2lo); 735 } 736 tcg_gen_shli_i32(crf, crf, CRF_GT_BIT); 737 tcg_temp_free_i32(src1); 738 tcg_temp_free_i32(src2); 739 tcg_temp_free_i32(src2lo); 740 tcg_temp_free_i32(src2hi); 741 } 742 743 #if defined(TARGET_PPC64) 744 /* cmpeqb */ 745 static void gen_cmpeqb(DisasContext *ctx) 746 { 747 gen_helper_cmpeqb(cpu_crf[crfD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 748 cpu_gpr[rB(ctx->opcode)]); 749 } 750 #endif 751 752 /* isel (PowerPC 2.03 specification) */ 753 static void gen_isel(DisasContext *ctx) 754 { 755 uint32_t bi = rC(ctx->opcode); 756 uint32_t mask = 0x08 >> (bi & 0x03); 757 TCGv t0 = tcg_temp_new(); 758 TCGv zr; 759 760 tcg_gen_extu_i32_tl(t0, cpu_crf[bi >> 2]); 761 tcg_gen_andi_tl(t0, t0, mask); 762 763 zr = tcg_const_tl(0); 764 tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rD(ctx->opcode)], t0, zr, 765 rA(ctx->opcode) ?
cpu_gpr[rA(ctx->opcode)] : zr, 766 cpu_gpr[rB(ctx->opcode)]); 767 tcg_temp_free(zr); 768 tcg_temp_free(t0); 769 } 770 771 /* cmpb: PowerPC 2.05 specification */ 772 static void gen_cmpb(DisasContext *ctx) 773 { 774 gen_helper_cmpb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 775 cpu_gpr[rB(ctx->opcode)]); 776 } 777 778 /*** Integer arithmetic ***/ 779 780 static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0, 781 TCGv arg1, TCGv arg2, int sub) 782 { 783 TCGv t0 = tcg_temp_new(); 784 785 tcg_gen_xor_tl(cpu_ov, arg0, arg2); 786 tcg_gen_xor_tl(t0, arg1, arg2); 787 if (sub) { 788 tcg_gen_and_tl(cpu_ov, cpu_ov, t0); 789 } else { 790 tcg_gen_andc_tl(cpu_ov, cpu_ov, t0); 791 } 792 tcg_temp_free(t0); 793 if (NARROW_MODE(ctx)) { 794 tcg_gen_extract_tl(cpu_ov, cpu_ov, 31, 1); 795 if (is_isa300(ctx)) { 796 tcg_gen_mov_tl(cpu_ov32, cpu_ov); 797 } 798 } else { 799 if (is_isa300(ctx)) { 800 tcg_gen_extract_tl(cpu_ov32, cpu_ov, 31, 1); 801 } 802 tcg_gen_extract_tl(cpu_ov, cpu_ov, TARGET_LONG_BITS - 1, 1); 803 } 804 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 805 } 806 807 static inline void gen_op_arith_compute_ca32(DisasContext *ctx, 808 TCGv res, TCGv arg0, TCGv arg1, 809 TCGv ca32, int sub) 810 { 811 TCGv t0; 812 813 if (!is_isa300(ctx)) { 814 return; 815 } 816 817 t0 = tcg_temp_new(); 818 if (sub) { 819 tcg_gen_eqv_tl(t0, arg0, arg1); 820 } else { 821 tcg_gen_xor_tl(t0, arg0, arg1); 822 } 823 tcg_gen_xor_tl(t0, t0, res); 824 tcg_gen_extract_tl(ca32, t0, 32, 1); 825 tcg_temp_free(t0); 826 } 827 828 /* Common add function */ 829 static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1, 830 TCGv arg2, TCGv ca, TCGv ca32, 831 bool add_ca, bool compute_ca, 832 bool compute_ov, bool compute_rc0) 833 { 834 TCGv t0 = ret; 835 836 if (compute_ca || compute_ov) { 837 t0 = tcg_temp_new(); 838 } 839 840 if (compute_ca) { 841 if (NARROW_MODE(ctx)) { 842 /* Caution: a non-obvious corner case of the spec is that we 843 must produce the *entire* 64-bit addition, but produce the 844 carry into bit 32. 
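   The XOR trick below relies on the identity that (arg1 + arg2) ^ (arg1 ^ arg2)
   is the vector of carries generated by the addition (any carry-in is folded
   into the sum first), so bit 32 of it is exactly the carry out of bit 31.
   E.g. for 0xFFFFFFFF + 1 the 64-bit sum has bit 32 set while the carry-less
   XOR leaves it clear.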
*/ 845 TCGv t1 = tcg_temp_new(); 846 tcg_gen_xor_tl(t1, arg1, arg2); /* add without carry */ 847 tcg_gen_add_tl(t0, arg1, arg2); 848 if (add_ca) { 849 tcg_gen_add_tl(t0, t0, ca); 850 } 851 tcg_gen_xor_tl(ca, t0, t1); /* bits changed w/ carry */ 852 tcg_temp_free(t1); 853 tcg_gen_extract_tl(ca, ca, 32, 1); 854 if (is_isa300(ctx)) { 855 tcg_gen_mov_tl(ca32, ca); 856 } 857 } else { 858 TCGv zero = tcg_const_tl(0); 859 if (add_ca) { 860 tcg_gen_add2_tl(t0, ca, arg1, zero, ca, zero); 861 tcg_gen_add2_tl(t0, ca, t0, ca, arg2, zero); 862 } else { 863 tcg_gen_add2_tl(t0, ca, arg1, zero, arg2, zero); 864 } 865 gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, ca32, 0); 866 tcg_temp_free(zero); 867 } 868 } else { 869 tcg_gen_add_tl(t0, arg1, arg2); 870 if (add_ca) { 871 tcg_gen_add_tl(t0, t0, ca); 872 } 873 } 874 875 if (compute_ov) { 876 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0); 877 } 878 if (unlikely(compute_rc0)) { 879 gen_set_Rc0(ctx, t0); 880 } 881 882 if (t0 != ret) { 883 tcg_gen_mov_tl(ret, t0); 884 tcg_temp_free(t0); 885 } 886 } 887 /* Add functions with two operands */ 888 #define GEN_INT_ARITH_ADD(name, opc3, ca, add_ca, compute_ca, compute_ov) \ 889 static void glue(gen_, name)(DisasContext *ctx) \ 890 { \ 891 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \ 892 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 893 ca, glue(ca, 32), \ 894 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 895 } 896 /* Add functions with one operand and one immediate */ 897 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, ca, \ 898 add_ca, compute_ca, compute_ov) \ 899 static void glue(gen_, name)(DisasContext *ctx) \ 900 { \ 901 TCGv t0 = tcg_const_tl(const_val); \ 902 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \ 903 cpu_gpr[rA(ctx->opcode)], t0, \ 904 ca, glue(ca, 32), \ 905 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 906 tcg_temp_free(t0); \ 907 } 908 909 /* add add. addo addo. */ 910 GEN_INT_ARITH_ADD(add, 0x08, cpu_ca, 0, 0, 0) 911 GEN_INT_ARITH_ADD(addo, 0x18, cpu_ca, 0, 0, 1) 912 /* addc addc. addco addco. */ 913 GEN_INT_ARITH_ADD(addc, 0x00, cpu_ca, 0, 1, 0) 914 GEN_INT_ARITH_ADD(addco, 0x10, cpu_ca, 0, 1, 1) 915 /* adde adde. addeo addeo. */ 916 GEN_INT_ARITH_ADD(adde, 0x04, cpu_ca, 1, 1, 0) 917 GEN_INT_ARITH_ADD(addeo, 0x14, cpu_ca, 1, 1, 1) 918 /* addme addme. addmeo addmeo. */ 919 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, cpu_ca, 1, 1, 0) 920 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, cpu_ca, 1, 1, 1) 921 /* addex */ 922 GEN_INT_ARITH_ADD(addex, 0x05, cpu_ov, 1, 1, 0); 923 /* addze addze. 
addzeo addzeo.*/ 924 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, cpu_ca, 1, 1, 0) 925 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, cpu_ca, 1, 1, 1) 926 /* addi */ 927 static void gen_addi(DisasContext *ctx) 928 { 929 target_long simm = SIMM(ctx->opcode); 930 931 if (rA(ctx->opcode) == 0) { 932 /* li case */ 933 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm); 934 } else { 935 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], 936 cpu_gpr[rA(ctx->opcode)], simm); 937 } 938 } 939 /* addic addic.*/ 940 static inline void gen_op_addic(DisasContext *ctx, bool compute_rc0) 941 { 942 TCGv c = tcg_const_tl(SIMM(ctx->opcode)); 943 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 944 c, cpu_ca, cpu_ca32, 0, 1, 0, compute_rc0); 945 tcg_temp_free(c); 946 } 947 948 static void gen_addic(DisasContext *ctx) 949 { 950 gen_op_addic(ctx, 0); 951 } 952 953 static void gen_addic_(DisasContext *ctx) 954 { 955 gen_op_addic(ctx, 1); 956 } 957 958 /* addis */ 959 static void gen_addis(DisasContext *ctx) 960 { 961 target_long simm = SIMM(ctx->opcode); 962 963 if (rA(ctx->opcode) == 0) { 964 /* lis case */ 965 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm << 16); 966 } else { 967 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], 968 cpu_gpr[rA(ctx->opcode)], simm << 16); 969 } 970 } 971 972 /* addpcis */ 973 static void gen_addpcis(DisasContext *ctx) 974 { 975 target_long d = DX(ctx->opcode); 976 977 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], ctx->base.pc_next + (d << 16)); 978 } 979 980 static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1, 981 TCGv arg2, int sign, int compute_ov) 982 { 983 TCGv_i32 t0 = tcg_temp_new_i32(); 984 TCGv_i32 t1 = tcg_temp_new_i32(); 985 TCGv_i32 t2 = tcg_temp_new_i32(); 986 TCGv_i32 t3 = tcg_temp_new_i32(); 987 988 tcg_gen_trunc_tl_i32(t0, arg1); 989 tcg_gen_trunc_tl_i32(t1, arg2); 990 if (sign) { 991 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN); 992 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1); 993 tcg_gen_and_i32(t2, t2, t3); 994 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0); 995 tcg_gen_or_i32(t2, t2, t3); 996 tcg_gen_movi_i32(t3, 0); 997 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1); 998 tcg_gen_div_i32(t3, t0, t1); 999 tcg_gen_extu_i32_tl(ret, t3); 1000 } else { 1001 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t1, 0); 1002 tcg_gen_movi_i32(t3, 0); 1003 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1); 1004 tcg_gen_divu_i32(t3, t0, t1); 1005 tcg_gen_extu_i32_tl(ret, t3); 1006 } 1007 if (compute_ov) { 1008 tcg_gen_extu_i32_tl(cpu_ov, t2); 1009 if (is_isa300(ctx)) { 1010 tcg_gen_extu_i32_tl(cpu_ov32, t2); 1011 } 1012 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 1013 } 1014 tcg_temp_free_i32(t0); 1015 tcg_temp_free_i32(t1); 1016 tcg_temp_free_i32(t2); 1017 tcg_temp_free_i32(t3); 1018 1019 if (unlikely(Rc(ctx->opcode) != 0)) 1020 gen_set_Rc0(ctx, ret); 1021 } 1022 /* Div functions */ 1023 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \ 1024 static void glue(gen_, name)(DisasContext *ctx) \ 1025 { \ 1026 gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)], \ 1027 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 1028 sign, compute_ov); \ 1029 } 1030 /* divwu divwu. divwuo divwuo. */ 1031 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0); 1032 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1); 1033 /* divw divw. divwo divwo. */ 1034 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0); 1035 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1); 1036 1037 /* div[wd]eu[o][.] 
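   Divide word/doubleword extended: roughly (RA << 32) / RB for the word forms
   and the 128-by-64-bit analogue for the doubleword forms, with the u variants
   unsigned and the o forms setting OV on overflow or division by zero.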
*/ 1038 #define GEN_DIVE(name, hlpr, compute_ov) \ 1039 static void gen_##name(DisasContext *ctx) \ 1040 { \ 1041 TCGv_i32 t0 = tcg_const_i32(compute_ov); \ 1042 gen_helper_##hlpr(cpu_gpr[rD(ctx->opcode)], cpu_env, \ 1043 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); \ 1044 tcg_temp_free_i32(t0); \ 1045 if (unlikely(Rc(ctx->opcode) != 0)) { \ 1046 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); \ 1047 } \ 1048 } 1049 1050 GEN_DIVE(divweu, divweu, 0); 1051 GEN_DIVE(divweuo, divweu, 1); 1052 GEN_DIVE(divwe, divwe, 0); 1053 GEN_DIVE(divweo, divwe, 1); 1054 1055 #if defined(TARGET_PPC64) 1056 static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1, 1057 TCGv arg2, int sign, int compute_ov) 1058 { 1059 TCGv_i64 t0 = tcg_temp_new_i64(); 1060 TCGv_i64 t1 = tcg_temp_new_i64(); 1061 TCGv_i64 t2 = tcg_temp_new_i64(); 1062 TCGv_i64 t3 = tcg_temp_new_i64(); 1063 1064 tcg_gen_mov_i64(t0, arg1); 1065 tcg_gen_mov_i64(t1, arg2); 1066 if (sign) { 1067 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN); 1068 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1); 1069 tcg_gen_and_i64(t2, t2, t3); 1070 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0); 1071 tcg_gen_or_i64(t2, t2, t3); 1072 tcg_gen_movi_i64(t3, 0); 1073 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1); 1074 tcg_gen_div_i64(ret, t0, t1); 1075 } else { 1076 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t1, 0); 1077 tcg_gen_movi_i64(t3, 0); 1078 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1); 1079 tcg_gen_divu_i64(ret, t0, t1); 1080 } 1081 if (compute_ov) { 1082 tcg_gen_mov_tl(cpu_ov, t2); 1083 if (is_isa300(ctx)) { 1084 tcg_gen_mov_tl(cpu_ov32, t2); 1085 } 1086 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 1087 } 1088 tcg_temp_free_i64(t0); 1089 tcg_temp_free_i64(t1); 1090 tcg_temp_free_i64(t2); 1091 tcg_temp_free_i64(t3); 1092 1093 if (unlikely(Rc(ctx->opcode) != 0)) 1094 gen_set_Rc0(ctx, ret); 1095 } 1096 1097 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \ 1098 static void glue(gen_, name)(DisasContext *ctx) \ 1099 { \ 1100 gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)], \ 1101 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 1102 sign, compute_ov); \ 1103 } 1104 /* divdu divdu. divduo divduo. */ 1105 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0); 1106 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1); 1107 /* divd divd. divdo divdo. 
*/ 1108 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0); 1109 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1); 1110 1111 GEN_DIVE(divdeu, divdeu, 0); 1112 GEN_DIVE(divdeuo, divdeu, 1); 1113 GEN_DIVE(divde, divde, 0); 1114 GEN_DIVE(divdeo, divde, 1); 1115 #endif 1116 1117 static inline void gen_op_arith_modw(DisasContext *ctx, TCGv ret, TCGv arg1, 1118 TCGv arg2, int sign) 1119 { 1120 TCGv_i32 t0 = tcg_temp_new_i32(); 1121 TCGv_i32 t1 = tcg_temp_new_i32(); 1122 1123 tcg_gen_trunc_tl_i32(t0, arg1); 1124 tcg_gen_trunc_tl_i32(t1, arg2); 1125 if (sign) { 1126 TCGv_i32 t2 = tcg_temp_new_i32(); 1127 TCGv_i32 t3 = tcg_temp_new_i32(); 1128 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN); 1129 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1); 1130 tcg_gen_and_i32(t2, t2, t3); 1131 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0); 1132 tcg_gen_or_i32(t2, t2, t3); 1133 tcg_gen_movi_i32(t3, 0); 1134 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1); 1135 tcg_gen_rem_i32(t3, t0, t1); 1136 tcg_gen_ext_i32_tl(ret, t3); 1137 tcg_temp_free_i32(t2); 1138 tcg_temp_free_i32(t3); 1139 } else { 1140 TCGv_i32 t2 = tcg_const_i32(1); 1141 TCGv_i32 t3 = tcg_const_i32(0); 1142 tcg_gen_movcond_i32(TCG_COND_EQ, t1, t1, t3, t2, t1); 1143 tcg_gen_remu_i32(t3, t0, t1); 1144 tcg_gen_extu_i32_tl(ret, t3); 1145 tcg_temp_free_i32(t2); 1146 tcg_temp_free_i32(t3); 1147 } 1148 tcg_temp_free_i32(t0); 1149 tcg_temp_free_i32(t1); 1150 } 1151 1152 #define GEN_INT_ARITH_MODW(name, opc3, sign) \ 1153 static void glue(gen_, name)(DisasContext *ctx) \ 1154 { \ 1155 gen_op_arith_modw(ctx, cpu_gpr[rD(ctx->opcode)], \ 1156 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 1157 sign); \ 1158 } 1159 1160 GEN_INT_ARITH_MODW(moduw, 0x08, 0); 1161 GEN_INT_ARITH_MODW(modsw, 0x18, 1); 1162 1163 #if defined(TARGET_PPC64) 1164 static inline void gen_op_arith_modd(DisasContext *ctx, TCGv ret, TCGv arg1, 1165 TCGv arg2, int sign) 1166 { 1167 TCGv_i64 t0 = tcg_temp_new_i64(); 1168 TCGv_i64 t1 = tcg_temp_new_i64(); 1169 1170 tcg_gen_mov_i64(t0, arg1); 1171 tcg_gen_mov_i64(t1, arg2); 1172 if (sign) { 1173 TCGv_i64 t2 = tcg_temp_new_i64(); 1174 TCGv_i64 t3 = tcg_temp_new_i64(); 1175 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN); 1176 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1); 1177 tcg_gen_and_i64(t2, t2, t3); 1178 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0); 1179 tcg_gen_or_i64(t2, t2, t3); 1180 tcg_gen_movi_i64(t3, 0); 1181 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1); 1182 tcg_gen_rem_i64(ret, t0, t1); 1183 tcg_temp_free_i64(t2); 1184 tcg_temp_free_i64(t3); 1185 } else { 1186 TCGv_i64 t2 = tcg_const_i64(1); 1187 TCGv_i64 t3 = tcg_const_i64(0); 1188 tcg_gen_movcond_i64(TCG_COND_EQ, t1, t1, t3, t2, t1); 1189 tcg_gen_remu_i64(ret, t0, t1); 1190 tcg_temp_free_i64(t2); 1191 tcg_temp_free_i64(t3); 1192 } 1193 tcg_temp_free_i64(t0); 1194 tcg_temp_free_i64(t1); 1195 } 1196 1197 #define GEN_INT_ARITH_MODD(name, opc3, sign) \ 1198 static void glue(gen_, name)(DisasContext *ctx) \ 1199 { \ 1200 gen_op_arith_modd(ctx, cpu_gpr[rD(ctx->opcode)], \ 1201 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 1202 sign); \ 1203 } 1204 1205 GEN_INT_ARITH_MODD(modud, 0x08, 0); 1206 GEN_INT_ARITH_MODD(modsd, 0x18, 1); 1207 #endif 1208 1209 /* mulhw mulhw. 
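   Multiply high word: rD gets the upper 32 bits of the 64-bit signed product
   of the low 32 bits of rA and rB (mulhwu below is the unsigned variant).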
*/ 1210 static void gen_mulhw(DisasContext *ctx) 1211 { 1212 TCGv_i32 t0 = tcg_temp_new_i32(); 1213 TCGv_i32 t1 = tcg_temp_new_i32(); 1214 1215 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); 1216 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); 1217 tcg_gen_muls2_i32(t0, t1, t0, t1); 1218 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1); 1219 tcg_temp_free_i32(t0); 1220 tcg_temp_free_i32(t1); 1221 if (unlikely(Rc(ctx->opcode) != 0)) 1222 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1223 } 1224 1225 /* mulhwu mulhwu. */ 1226 static void gen_mulhwu(DisasContext *ctx) 1227 { 1228 TCGv_i32 t0 = tcg_temp_new_i32(); 1229 TCGv_i32 t1 = tcg_temp_new_i32(); 1230 1231 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); 1232 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); 1233 tcg_gen_mulu2_i32(t0, t1, t0, t1); 1234 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1); 1235 tcg_temp_free_i32(t0); 1236 tcg_temp_free_i32(t1); 1237 if (unlikely(Rc(ctx->opcode) != 0)) 1238 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1239 } 1240 1241 /* mullw mullw. */ 1242 static void gen_mullw(DisasContext *ctx) 1243 { 1244 #if defined(TARGET_PPC64) 1245 TCGv_i64 t0, t1; 1246 t0 = tcg_temp_new_i64(); 1247 t1 = tcg_temp_new_i64(); 1248 tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]); 1249 tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]); 1250 tcg_gen_mul_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); 1251 tcg_temp_free(t0); 1252 tcg_temp_free(t1); 1253 #else 1254 tcg_gen_mul_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1255 cpu_gpr[rB(ctx->opcode)]); 1256 #endif 1257 if (unlikely(Rc(ctx->opcode) != 0)) 1258 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1259 } 1260 1261 /* mullwo mullwo. */ 1262 static void gen_mullwo(DisasContext *ctx) 1263 { 1264 TCGv_i32 t0 = tcg_temp_new_i32(); 1265 TCGv_i32 t1 = tcg_temp_new_i32(); 1266 1267 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); 1268 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); 1269 tcg_gen_muls2_i32(t0, t1, t0, t1); 1270 #if defined(TARGET_PPC64) 1271 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); 1272 #else 1273 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], t0); 1274 #endif 1275 1276 tcg_gen_sari_i32(t0, t0, 31); 1277 tcg_gen_setcond_i32(TCG_COND_NE, t0, t0, t1); 1278 tcg_gen_extu_i32_tl(cpu_ov, t0); 1279 if (is_isa300(ctx)) { 1280 tcg_gen_mov_tl(cpu_ov32, cpu_ov); 1281 } 1282 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 1283 1284 tcg_temp_free_i32(t0); 1285 tcg_temp_free_i32(t1); 1286 if (unlikely(Rc(ctx->opcode) != 0)) 1287 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1288 } 1289 1290 /* mulli */ 1291 static void gen_mulli(DisasContext *ctx) 1292 { 1293 tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1294 SIMM(ctx->opcode)); 1295 } 1296 1297 #if defined(TARGET_PPC64) 1298 /* mulhd mulhd. */ 1299 static void gen_mulhd(DisasContext *ctx) 1300 { 1301 TCGv lo = tcg_temp_new(); 1302 tcg_gen_muls2_tl(lo, cpu_gpr[rD(ctx->opcode)], 1303 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 1304 tcg_temp_free(lo); 1305 if (unlikely(Rc(ctx->opcode) != 0)) { 1306 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1307 } 1308 } 1309 1310 /* mulhdu mulhdu. */ 1311 static void gen_mulhdu(DisasContext *ctx) 1312 { 1313 TCGv lo = tcg_temp_new(); 1314 tcg_gen_mulu2_tl(lo, cpu_gpr[rD(ctx->opcode)], 1315 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 1316 tcg_temp_free(lo); 1317 if (unlikely(Rc(ctx->opcode) != 0)) { 1318 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1319 } 1320 } 1321 1322 /* mulld mulld. 
*/ 1323 static void gen_mulld(DisasContext *ctx) 1324 { 1325 tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1326 cpu_gpr[rB(ctx->opcode)]); 1327 if (unlikely(Rc(ctx->opcode) != 0)) 1328 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1329 } 1330 1331 /* mulldo mulldo. */ 1332 static void gen_mulldo(DisasContext *ctx) 1333 { 1334 TCGv_i64 t0 = tcg_temp_new_i64(); 1335 TCGv_i64 t1 = tcg_temp_new_i64(); 1336 1337 tcg_gen_muls2_i64(t0, t1, cpu_gpr[rA(ctx->opcode)], 1338 cpu_gpr[rB(ctx->opcode)]); 1339 tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], t0); 1340 1341 tcg_gen_sari_i64(t0, t0, 63); 1342 tcg_gen_setcond_i64(TCG_COND_NE, cpu_ov, t0, t1); 1343 if (is_isa300(ctx)) { 1344 tcg_gen_mov_tl(cpu_ov32, cpu_ov); 1345 } 1346 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 1347 1348 tcg_temp_free_i64(t0); 1349 tcg_temp_free_i64(t1); 1350 1351 if (unlikely(Rc(ctx->opcode) != 0)) { 1352 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1353 } 1354 } 1355 #endif 1356 1357 /* Common subf function */ 1358 static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1, 1359 TCGv arg2, bool add_ca, bool compute_ca, 1360 bool compute_ov, bool compute_rc0) 1361 { 1362 TCGv t0 = ret; 1363 1364 if (compute_ca || compute_ov) { 1365 t0 = tcg_temp_new(); 1366 } 1367 1368 if (compute_ca) { 1369 /* dest = ~arg1 + arg2 [+ ca]. */ 1370 if (NARROW_MODE(ctx)) { 1371 /* Caution: a non-obvious corner case of the spec is that we 1372 must produce the *entire* 64-bit addition, but produce the 1373 carry into bit 32. */ 1374 TCGv inv1 = tcg_temp_new(); 1375 TCGv t1 = tcg_temp_new(); 1376 tcg_gen_not_tl(inv1, arg1); 1377 if (add_ca) { 1378 tcg_gen_add_tl(t0, arg2, cpu_ca); 1379 } else { 1380 tcg_gen_addi_tl(t0, arg2, 1); 1381 } 1382 tcg_gen_xor_tl(t1, arg2, inv1); /* add without carry */ 1383 tcg_gen_add_tl(t0, t0, inv1); 1384 tcg_temp_free(inv1); 1385 tcg_gen_xor_tl(cpu_ca, t0, t1); /* bits changes w/ carry */ 1386 tcg_temp_free(t1); 1387 tcg_gen_extract_tl(cpu_ca, cpu_ca, 32, 1); 1388 if (is_isa300(ctx)) { 1389 tcg_gen_mov_tl(cpu_ca32, cpu_ca); 1390 } 1391 } else if (add_ca) { 1392 TCGv zero, inv1 = tcg_temp_new(); 1393 tcg_gen_not_tl(inv1, arg1); 1394 zero = tcg_const_tl(0); 1395 tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero); 1396 tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero); 1397 gen_op_arith_compute_ca32(ctx, t0, inv1, arg2, cpu_ca32, 0); 1398 tcg_temp_free(zero); 1399 tcg_temp_free(inv1); 1400 } else { 1401 tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1); 1402 tcg_gen_sub_tl(t0, arg2, arg1); 1403 gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, cpu_ca32, 1); 1404 } 1405 } else if (add_ca) { 1406 /* Since we're ignoring carry-out, we can simplify the 1407 standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1. 
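   (In two's complement ~arg1 == -arg1 - 1, so ~arg1 + arg2 + ca reduces to
   arg2 - arg1 + ca - 1, which is the sequence emitted below.)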
*/ 1408 tcg_gen_sub_tl(t0, arg2, arg1); 1409 tcg_gen_add_tl(t0, t0, cpu_ca); 1410 tcg_gen_subi_tl(t0, t0, 1); 1411 } else { 1412 tcg_gen_sub_tl(t0, arg2, arg1); 1413 } 1414 1415 if (compute_ov) { 1416 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1); 1417 } 1418 if (unlikely(compute_rc0)) { 1419 gen_set_Rc0(ctx, t0); 1420 } 1421 1422 if (t0 != ret) { 1423 tcg_gen_mov_tl(ret, t0); 1424 tcg_temp_free(t0); 1425 } 1426 } 1427 /* Sub functions with two operands */ 1428 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \ 1429 static void glue(gen_, name)(DisasContext *ctx) \ 1430 { \ 1431 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \ 1432 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 1433 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 1434 } 1435 /* Sub functions with one operand and one immediate */ 1436 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \ 1437 add_ca, compute_ca, compute_ov) \ 1438 static void glue(gen_, name)(DisasContext *ctx) \ 1439 { \ 1440 TCGv t0 = tcg_const_tl(const_val); \ 1441 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \ 1442 cpu_gpr[rA(ctx->opcode)], t0, \ 1443 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 1444 tcg_temp_free(t0); \ 1445 } 1446 /* subf subf. subfo subfo. */ 1447 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0) 1448 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1) 1449 /* subfc subfc. subfco subfco. */ 1450 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0) 1451 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1) 1452 /* subfe subfe. subfeo subfeo. */ 1453 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0) 1454 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1) 1455 /* subfme subfme. subfmeo subfmeo. */ 1456 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0) 1457 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1) 1458 /* subfze subfze. subfzeo subfzeo. */ 1459 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0) 1460 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1) 1461 1462 /* subfic */ 1463 static void gen_subfic(DisasContext *ctx) 1464 { 1465 TCGv c = tcg_const_tl(SIMM(ctx->opcode)); 1466 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1467 c, 0, 1, 0, 0); 1468 tcg_temp_free(c); 1469 } 1470 1471 /* neg neg. nego nego. */ 1472 static inline void gen_op_arith_neg(DisasContext *ctx, bool compute_ov) 1473 { 1474 TCGv zero = tcg_const_tl(0); 1475 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1476 zero, 0, 0, compute_ov, Rc(ctx->opcode)); 1477 tcg_temp_free(zero); 1478 } 1479 1480 static void gen_neg(DisasContext *ctx) 1481 { 1482 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 1483 if (unlikely(Rc(ctx->opcode))) { 1484 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1485 } 1486 } 1487 1488 static void gen_nego(DisasContext *ctx) 1489 { 1490 gen_op_arith_neg(ctx, 1); 1491 } 1492 1493 /*** Integer logical ***/ 1494 #define GEN_LOGICAL2(name, tcg_op, opc, type) \ 1495 static void glue(gen_, name)(DisasContext *ctx) \ 1496 { \ 1497 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], \ 1498 cpu_gpr[rB(ctx->opcode)]); \ 1499 if (unlikely(Rc(ctx->opcode) != 0)) \ 1500 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \ 1501 } 1502 1503 #define GEN_LOGICAL1(name, tcg_op, opc, type) \ 1504 static void glue(gen_, name)(DisasContext *ctx) \ 1505 { \ 1506 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); \ 1507 if (unlikely(Rc(ctx->opcode) != 0)) \ 1508 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \ 1509 } 1510 1511 /* and & and.
*/ 1512 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER); 1513 /* andc & andc. */ 1514 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER); 1515 1516 /* andi. */ 1517 static void gen_andi_(DisasContext *ctx) 1518 { 1519 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode)); 1520 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 1521 } 1522 1523 /* andis. */ 1524 static void gen_andis_(DisasContext *ctx) 1525 { 1526 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode) << 16); 1527 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 1528 } 1529 1530 /* cntlzw */ 1531 static void gen_cntlzw(DisasContext *ctx) 1532 { 1533 TCGv_i32 t = tcg_temp_new_i32(); 1534 1535 tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]); 1536 tcg_gen_clzi_i32(t, t, 32); 1537 tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t); 1538 tcg_temp_free_i32(t); 1539 1540 if (unlikely(Rc(ctx->opcode) != 0)) 1541 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 1542 } 1543 1544 /* cnttzw */ 1545 static void gen_cnttzw(DisasContext *ctx) 1546 { 1547 TCGv_i32 t = tcg_temp_new_i32(); 1548 1549 tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]); 1550 tcg_gen_ctzi_i32(t, t, 32); 1551 tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t); 1552 tcg_temp_free_i32(t); 1553 1554 if (unlikely(Rc(ctx->opcode) != 0)) { 1555 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 1556 } 1557 } 1558 1559 /* eqv & eqv. */ 1560 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER); 1561 /* extsb & extsb. */ 1562 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER); 1563 /* extsh & extsh. */ 1564 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER); 1565 /* nand & nand. */ 1566 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER); 1567 /* nor & nor. */ 1568 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER); 1569 1570 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) 1571 static void gen_pause(DisasContext *ctx) 1572 { 1573 TCGv_i32 t0 = tcg_const_i32(0); 1574 tcg_gen_st_i32(t0, cpu_env, 1575 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted)); 1576 tcg_temp_free_i32(t0); 1577 1578 /* Stop translation, this gives other CPUs a chance to run */ 1579 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 1580 } 1581 #endif /* defined(TARGET_PPC64) */ 1582 1583 /* or & or. */ 1584 static void gen_or(DisasContext *ctx) 1585 { 1586 int rs, ra, rb; 1587 1588 rs = rS(ctx->opcode); 1589 ra = rA(ctx->opcode); 1590 rb = rB(ctx->opcode); 1591 /* Optimisation for mr. 
ri case */ 1592 if (rs != ra || rs != rb) { 1593 if (rs != rb) 1594 tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]); 1595 else 1596 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]); 1597 if (unlikely(Rc(ctx->opcode) != 0)) 1598 gen_set_Rc0(ctx, cpu_gpr[ra]); 1599 } else if (unlikely(Rc(ctx->opcode) != 0)) { 1600 gen_set_Rc0(ctx, cpu_gpr[rs]); 1601 #if defined(TARGET_PPC64) 1602 } else if (rs != 0) { /* 0 is nop */ 1603 int prio = 0; 1604 1605 switch (rs) { 1606 case 1: 1607 /* Set process priority to low */ 1608 prio = 2; 1609 break; 1610 case 6: 1611 /* Set process priority to medium-low */ 1612 prio = 3; 1613 break; 1614 case 2: 1615 /* Set process priority to normal */ 1616 prio = 4; 1617 break; 1618 #if !defined(CONFIG_USER_ONLY) 1619 case 31: 1620 if (!ctx->pr) { 1621 /* Set process priority to very low */ 1622 prio = 1; 1623 } 1624 break; 1625 case 5: 1626 if (!ctx->pr) { 1627 /* Set process priority to medium-high */ 1628 prio = 5; 1629 } 1630 break; 1631 case 3: 1632 if (!ctx->pr) { 1633 /* Set process priority to high */ 1634 prio = 6; 1635 } 1636 break; 1637 case 7: 1638 if (ctx->hv && !ctx->pr) { 1639 /* Set process priority to very high */ 1640 prio = 7; 1641 } 1642 break; 1643 #endif 1644 default: 1645 break; 1646 } 1647 if (prio) { 1648 TCGv t0 = tcg_temp_new(); 1649 gen_load_spr(t0, SPR_PPR); 1650 tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL); 1651 tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50); 1652 gen_store_spr(SPR_PPR, t0); 1653 tcg_temp_free(t0); 1654 } 1655 #if !defined(CONFIG_USER_ONLY) 1656 /* Pause out of TCG, otherwise spin loops with smt_low eat too much 1657 * CPU and the kernel hangs. This applies to all encodings other 1658 * than no-op, e.g., miso(rs=26), yield(27), mdoio(29), mdoom(30), 1659 * and all currently undefined. 1660 */ 1661 gen_pause(ctx); 1662 #endif 1663 #endif 1664 } 1665 } 1666 /* orc & orc. */ 1667 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER); 1668 1669 /* xor & xor.
*/ 1670 static void gen_xor(DisasContext *ctx) 1671 { 1672 /* Optimisation for "set to zero" case */ 1673 if (rS(ctx->opcode) != rB(ctx->opcode)) 1674 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 1675 else 1676 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 1677 if (unlikely(Rc(ctx->opcode) != 0)) 1678 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 1679 } 1680 1681 /* ori */ 1682 static void gen_ori(DisasContext *ctx) 1683 { 1684 target_ulong uimm = UIMM(ctx->opcode); 1685 1686 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 1687 return; 1688 } 1689 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm); 1690 } 1691 1692 /* oris */ 1693 static void gen_oris(DisasContext *ctx) 1694 { 1695 target_ulong uimm = UIMM(ctx->opcode); 1696 1697 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 1698 /* NOP */ 1699 return; 1700 } 1701 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16); 1702 } 1703 1704 /* xori */ 1705 static void gen_xori(DisasContext *ctx) 1706 { 1707 target_ulong uimm = UIMM(ctx->opcode); 1708 1709 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 1710 /* NOP */ 1711 return; 1712 } 1713 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm); 1714 } 1715 1716 /* xoris */ 1717 static void gen_xoris(DisasContext *ctx) 1718 { 1719 target_ulong uimm = UIMM(ctx->opcode); 1720 1721 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 1722 /* NOP */ 1723 return; 1724 } 1725 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16); 1726 } 1727 1728 /* popcntb : PowerPC 2.03 specification */ 1729 static void gen_popcntb(DisasContext *ctx) 1730 { 1731 gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 1732 } 1733 1734 static void gen_popcntw(DisasContext *ctx) 1735 { 1736 #if defined(TARGET_PPC64) 1737 gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 1738 #else 1739 tcg_gen_ctpop_i32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 1740 #endif 1741 } 1742 1743 #if defined(TARGET_PPC64) 1744 /* popcntd: PowerPC 2.06 specification */ 1745 static void gen_popcntd(DisasContext *ctx) 1746 { 1747 tcg_gen_ctpop_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 1748 } 1749 #endif 1750 1751 /* prtyw: PowerPC 2.05 specification */ 1752 static void gen_prtyw(DisasContext *ctx) 1753 { 1754 TCGv ra = cpu_gpr[rA(ctx->opcode)]; 1755 TCGv rs = cpu_gpr[rS(ctx->opcode)]; 1756 TCGv t0 = tcg_temp_new(); 1757 tcg_gen_shri_tl(t0, rs, 16); 1758 tcg_gen_xor_tl(ra, rs, t0); 1759 tcg_gen_shri_tl(t0, ra, 8); 1760 tcg_gen_xor_tl(ra, ra, t0); 1761 tcg_gen_andi_tl(ra, ra, (target_ulong)0x100000001ULL); 1762 tcg_temp_free(t0); 1763 } 1764 1765 #if defined(TARGET_PPC64) 1766 /* prtyd: PowerPC 2.05 specification */ 1767 static void gen_prtyd(DisasContext *ctx) 1768 { 1769 TCGv ra = cpu_gpr[rA(ctx->opcode)]; 1770 TCGv rs = cpu_gpr[rS(ctx->opcode)]; 1771 TCGv t0 = tcg_temp_new(); 1772 tcg_gen_shri_tl(t0, rs, 32); 1773 tcg_gen_xor_tl(ra, rs, t0); 1774 tcg_gen_shri_tl(t0, ra, 16); 1775 tcg_gen_xor_tl(ra, ra, t0); 1776 tcg_gen_shri_tl(t0, ra, 8); 1777 tcg_gen_xor_tl(ra, ra, t0); 1778 tcg_gen_andi_tl(ra, ra, 1); 1779 tcg_temp_free(t0); 1780 } 1781 #endif 1782 1783 #if defined(TARGET_PPC64) 1784 /* bpermd */ 1785 static void gen_bpermd(DisasContext *ctx) 1786 { 1787 gen_helper_bpermd(cpu_gpr[rA(ctx->opcode)], 1788 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 1789 } 1790 #endif 1791 1792 #if defined(TARGET_PPC64) 1793 /* 
extsw & extsw. */ 1794 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B); 1795 1796 /* cntlzd */ 1797 static void gen_cntlzd(DisasContext *ctx) 1798 { 1799 tcg_gen_clzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64); 1800 if (unlikely(Rc(ctx->opcode) != 0)) 1801 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 1802 } 1803 1804 /* cnttzd */ 1805 static void gen_cnttzd(DisasContext *ctx) 1806 { 1807 tcg_gen_ctzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64); 1808 if (unlikely(Rc(ctx->opcode) != 0)) { 1809 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 1810 } 1811 } 1812 1813 /* darn */ 1814 static void gen_darn(DisasContext *ctx) 1815 { 1816 int l = L(ctx->opcode); 1817 1818 if (l == 0) { 1819 gen_helper_darn32(cpu_gpr[rD(ctx->opcode)]); 1820 } else if (l <= 2) { 1821 /* Return 64-bit random for both CRN and RRN */ 1822 gen_helper_darn64(cpu_gpr[rD(ctx->opcode)]); 1823 } else { 1824 tcg_gen_movi_i64(cpu_gpr[rD(ctx->opcode)], -1); 1825 } 1826 } 1827 #endif 1828 1829 /*** Integer rotate ***/ 1830 1831 /* rlwimi & rlwimi. */ 1832 static void gen_rlwimi(DisasContext *ctx) 1833 { 1834 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 1835 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 1836 uint32_t sh = SH(ctx->opcode); 1837 uint32_t mb = MB(ctx->opcode); 1838 uint32_t me = ME(ctx->opcode); 1839 1840 if (sh == (31-me) && mb <= me) { 1841 tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1); 1842 } else { 1843 target_ulong mask; 1844 TCGv t1; 1845 1846 #if defined(TARGET_PPC64) 1847 mb += 32; 1848 me += 32; 1849 #endif 1850 mask = MASK(mb, me); 1851 1852 t1 = tcg_temp_new(); 1853 if (mask <= 0xffffffffu) { 1854 TCGv_i32 t0 = tcg_temp_new_i32(); 1855 tcg_gen_trunc_tl_i32(t0, t_rs); 1856 tcg_gen_rotli_i32(t0, t0, sh); 1857 tcg_gen_extu_i32_tl(t1, t0); 1858 tcg_temp_free_i32(t0); 1859 } else { 1860 #if defined(TARGET_PPC64) 1861 tcg_gen_deposit_i64(t1, t_rs, t_rs, 32, 32); 1862 tcg_gen_rotli_i64(t1, t1, sh); 1863 #else 1864 g_assert_not_reached(); 1865 #endif 1866 } 1867 1868 tcg_gen_andi_tl(t1, t1, mask); 1869 tcg_gen_andi_tl(t_ra, t_ra, ~mask); 1870 tcg_gen_or_tl(t_ra, t_ra, t1); 1871 tcg_temp_free(t1); 1872 } 1873 if (unlikely(Rc(ctx->opcode) != 0)) { 1874 gen_set_Rc0(ctx, t_ra); 1875 } 1876 } 1877 1878 /* rlwinm & rlwinm. 
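   Semantics: rA = ROTL32(rS, SH) & MASK(MB, ME).  The fast paths below map
   common idioms onto single TCG ops: a mask with ME == 31 - SH (e.g. slwi)
   becomes a deposit into a zeroed destination, and a mask with ME == 31
   (e.g. srwi, clrlwi) becomes a plain extract.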
*/ 1879 static void gen_rlwinm(DisasContext *ctx) 1880 { 1881 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 1882 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 1883 int sh = SH(ctx->opcode); 1884 int mb = MB(ctx->opcode); 1885 int me = ME(ctx->opcode); 1886 int len = me - mb + 1; 1887 int rsh = (32 - sh) & 31; 1888 1889 if (sh != 0 && len > 0 && me == (31 - sh)) { 1890 tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len); 1891 } else if (me == 31 && rsh + len <= 32) { 1892 tcg_gen_extract_tl(t_ra, t_rs, rsh, len); 1893 } else { 1894 target_ulong mask; 1895 #if defined(TARGET_PPC64) 1896 mb += 32; 1897 me += 32; 1898 #endif 1899 mask = MASK(mb, me); 1900 if (sh == 0) { 1901 tcg_gen_andi_tl(t_ra, t_rs, mask); 1902 } else if (mask <= 0xffffffffu) { 1903 TCGv_i32 t0 = tcg_temp_new_i32(); 1904 tcg_gen_trunc_tl_i32(t0, t_rs); 1905 tcg_gen_rotli_i32(t0, t0, sh); 1906 tcg_gen_andi_i32(t0, t0, mask); 1907 tcg_gen_extu_i32_tl(t_ra, t0); 1908 tcg_temp_free_i32(t0); 1909 } else { 1910 #if defined(TARGET_PPC64) 1911 tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32); 1912 tcg_gen_rotli_i64(t_ra, t_ra, sh); 1913 tcg_gen_andi_i64(t_ra, t_ra, mask); 1914 #else 1915 g_assert_not_reached(); 1916 #endif 1917 } 1918 } 1919 if (unlikely(Rc(ctx->opcode) != 0)) { 1920 gen_set_Rc0(ctx, t_ra); 1921 } 1922 } 1923 1924 /* rlwnm & rlwnm. */ 1925 static void gen_rlwnm(DisasContext *ctx) 1926 { 1927 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 1928 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 1929 TCGv t_rb = cpu_gpr[rB(ctx->opcode)]; 1930 uint32_t mb = MB(ctx->opcode); 1931 uint32_t me = ME(ctx->opcode); 1932 target_ulong mask; 1933 1934 #if defined(TARGET_PPC64) 1935 mb += 32; 1936 me += 32; 1937 #endif 1938 mask = MASK(mb, me); 1939 1940 if (mask <= 0xffffffffu) { 1941 TCGv_i32 t0 = tcg_temp_new_i32(); 1942 TCGv_i32 t1 = tcg_temp_new_i32(); 1943 tcg_gen_trunc_tl_i32(t0, t_rb); 1944 tcg_gen_trunc_tl_i32(t1, t_rs); 1945 tcg_gen_andi_i32(t0, t0, 0x1f); 1946 tcg_gen_rotl_i32(t1, t1, t0); 1947 tcg_gen_extu_i32_tl(t_ra, t1); 1948 tcg_temp_free_i32(t0); 1949 tcg_temp_free_i32(t1); 1950 } else { 1951 #if defined(TARGET_PPC64) 1952 TCGv_i64 t0 = tcg_temp_new_i64(); 1953 tcg_gen_andi_i64(t0, t_rb, 0x1f); 1954 tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32); 1955 tcg_gen_rotl_i64(t_ra, t_ra, t0); 1956 tcg_temp_free_i64(t0); 1957 #else 1958 g_assert_not_reached(); 1959 #endif 1960 } 1961 1962 tcg_gen_andi_tl(t_ra, t_ra, mask); 1963 1964 if (unlikely(Rc(ctx->opcode) != 0)) { 1965 gen_set_Rc0(ctx, t_ra); 1966 } 1967 } 1968 1969 #if defined(TARGET_PPC64) 1970 #define GEN_PPC64_R2(name, opc1, opc2) \ 1971 static void glue(gen_, name##0)(DisasContext *ctx) \ 1972 { \ 1973 gen_##name(ctx, 0); \ 1974 } \ 1975 \ 1976 static void glue(gen_, name##1)(DisasContext *ctx) \ 1977 { \ 1978 gen_##name(ctx, 1); \ 1979 } 1980 #define GEN_PPC64_R4(name, opc1, opc2) \ 1981 static void glue(gen_, name##0)(DisasContext *ctx) \ 1982 { \ 1983 gen_##name(ctx, 0, 0); \ 1984 } \ 1985 \ 1986 static void glue(gen_, name##1)(DisasContext *ctx) \ 1987 { \ 1988 gen_##name(ctx, 0, 1); \ 1989 } \ 1990 \ 1991 static void glue(gen_, name##2)(DisasContext *ctx) \ 1992 { \ 1993 gen_##name(ctx, 1, 0); \ 1994 } \ 1995 \ 1996 static void glue(gen_, name##3)(DisasContext *ctx) \ 1997 { \ 1998 gen_##name(ctx, 1, 1); \ 1999 } 2000 2001 static void gen_rldinm(DisasContext *ctx, int mb, int me, int sh) 2002 { 2003 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2004 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2005 int len = me - mb + 1; 2006 int rsh = (64 - sh) & 63; 2007 2008 if (sh != 0 && len > 0 && me == (63 - sh)) { 2009 
tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len); 2010 } else if (me == 63 && rsh + len <= 64) { 2011 tcg_gen_extract_tl(t_ra, t_rs, rsh, len); 2012 } else { 2013 tcg_gen_rotli_tl(t_ra, t_rs, sh); 2014 tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me)); 2015 } 2016 if (unlikely(Rc(ctx->opcode) != 0)) { 2017 gen_set_Rc0(ctx, t_ra); 2018 } 2019 } 2020 2021 /* rldicl - rldicl. */ 2022 static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn) 2023 { 2024 uint32_t sh, mb; 2025 2026 sh = SH(ctx->opcode) | (shn << 5); 2027 mb = MB(ctx->opcode) | (mbn << 5); 2028 gen_rldinm(ctx, mb, 63, sh); 2029 } 2030 GEN_PPC64_R4(rldicl, 0x1E, 0x00); 2031 2032 /* rldicr - rldicr. */ 2033 static inline void gen_rldicr(DisasContext *ctx, int men, int shn) 2034 { 2035 uint32_t sh, me; 2036 2037 sh = SH(ctx->opcode) | (shn << 5); 2038 me = MB(ctx->opcode) | (men << 5); 2039 gen_rldinm(ctx, 0, me, sh); 2040 } 2041 GEN_PPC64_R4(rldicr, 0x1E, 0x02); 2042 2043 /* rldic - rldic. */ 2044 static inline void gen_rldic(DisasContext *ctx, int mbn, int shn) 2045 { 2046 uint32_t sh, mb; 2047 2048 sh = SH(ctx->opcode) | (shn << 5); 2049 mb = MB(ctx->opcode) | (mbn << 5); 2050 gen_rldinm(ctx, mb, 63 - sh, sh); 2051 } 2052 GEN_PPC64_R4(rldic, 0x1E, 0x04); 2053 2054 static void gen_rldnm(DisasContext *ctx, int mb, int me) 2055 { 2056 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2057 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2058 TCGv t_rb = cpu_gpr[rB(ctx->opcode)]; 2059 TCGv t0; 2060 2061 t0 = tcg_temp_new(); 2062 tcg_gen_andi_tl(t0, t_rb, 0x3f); 2063 tcg_gen_rotl_tl(t_ra, t_rs, t0); 2064 tcg_temp_free(t0); 2065 2066 tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me)); 2067 if (unlikely(Rc(ctx->opcode) != 0)) { 2068 gen_set_Rc0(ctx, t_ra); 2069 } 2070 } 2071 2072 /* rldcl - rldcl. */ 2073 static inline void gen_rldcl(DisasContext *ctx, int mbn) 2074 { 2075 uint32_t mb; 2076 2077 mb = MB(ctx->opcode) | (mbn << 5); 2078 gen_rldnm(ctx, mb, 63); 2079 } 2080 GEN_PPC64_R2(rldcl, 0x1E, 0x08); 2081 2082 /* rldcr - rldcr. */ 2083 static inline void gen_rldcr(DisasContext *ctx, int men) 2084 { 2085 uint32_t me; 2086 2087 me = MB(ctx->opcode) | (men << 5); 2088 gen_rldnm(ctx, 0, me); 2089 } 2090 GEN_PPC64_R2(rldcr, 0x1E, 0x09); 2091 2092 /* rldimi - rldimi. */ 2093 static void gen_rldimi(DisasContext *ctx, int mbn, int shn) 2094 { 2095 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2096 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2097 uint32_t sh = SH(ctx->opcode) | (shn << 5); 2098 uint32_t mb = MB(ctx->opcode) | (mbn << 5); 2099 uint32_t me = 63 - sh; 2100 2101 if (mb <= me) { 2102 tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1); 2103 } else { 2104 target_ulong mask = MASK(mb, me); 2105 TCGv t1 = tcg_temp_new(); 2106 2107 tcg_gen_rotli_tl(t1, t_rs, sh); 2108 tcg_gen_andi_tl(t1, t1, mask); 2109 tcg_gen_andi_tl(t_ra, t_ra, ~mask); 2110 tcg_gen_or_tl(t_ra, t_ra, t1); 2111 tcg_temp_free(t1); 2112 } 2113 if (unlikely(Rc(ctx->opcode) != 0)) { 2114 gen_set_Rc0(ctx, t_ra); 2115 } 2116 } 2117 GEN_PPC64_R4(rldimi, 0x1E, 0x06); 2118 #endif 2119 2120 /*** Integer shift ***/ 2121 2122 /* slw & slw. 
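 *
 * The shift amount is the low six bits of rB, and any amount with the
 * 0x20 bit set must produce 0.  Instead of branching, the code below
 * smears that bit of rB into a full-width mask, clears rS with it, and
 * then shifts the survivor by rB & 0x1f.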
*/ 2123 static void gen_slw(DisasContext *ctx) 2124 { 2125 TCGv t0, t1; 2126 2127 t0 = tcg_temp_new(); 2128 /* AND rS with a mask that is 0 when rB >= 0x20 */ 2129 #if defined(TARGET_PPC64) 2130 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a); 2131 tcg_gen_sari_tl(t0, t0, 0x3f); 2132 #else 2133 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a); 2134 tcg_gen_sari_tl(t0, t0, 0x1f); 2135 #endif 2136 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 2137 t1 = tcg_temp_new(); 2138 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f); 2139 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 2140 tcg_temp_free(t1); 2141 tcg_temp_free(t0); 2142 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 2143 if (unlikely(Rc(ctx->opcode) != 0)) 2144 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2145 } 2146 2147 /* sraw & sraw. */ 2148 static void gen_sraw(DisasContext *ctx) 2149 { 2150 gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], cpu_env, 2151 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2152 if (unlikely(Rc(ctx->opcode) != 0)) 2153 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2154 } 2155 2156 /* srawi & srawi. */ 2157 static void gen_srawi(DisasContext *ctx) 2158 { 2159 int sh = SH(ctx->opcode); 2160 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 2161 TCGv src = cpu_gpr[rS(ctx->opcode)]; 2162 if (sh == 0) { 2163 tcg_gen_ext32s_tl(dst, src); 2164 tcg_gen_movi_tl(cpu_ca, 0); 2165 if (is_isa300(ctx)) { 2166 tcg_gen_movi_tl(cpu_ca32, 0); 2167 } 2168 } else { 2169 TCGv t0; 2170 tcg_gen_ext32s_tl(dst, src); 2171 tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1); 2172 t0 = tcg_temp_new(); 2173 tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1); 2174 tcg_gen_and_tl(cpu_ca, cpu_ca, t0); 2175 tcg_temp_free(t0); 2176 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0); 2177 if (is_isa300(ctx)) { 2178 tcg_gen_mov_tl(cpu_ca32, cpu_ca); 2179 } 2180 tcg_gen_sari_tl(dst, dst, sh); 2181 } 2182 if (unlikely(Rc(ctx->opcode) != 0)) { 2183 gen_set_Rc0(ctx, dst); 2184 } 2185 } 2186 2187 /* srw & srw. */ 2188 static void gen_srw(DisasContext *ctx) 2189 { 2190 TCGv t0, t1; 2191 2192 t0 = tcg_temp_new(); 2193 /* AND rS with a mask that is 0 when rB >= 0x20 */ 2194 #if defined(TARGET_PPC64) 2195 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a); 2196 tcg_gen_sari_tl(t0, t0, 0x3f); 2197 #else 2198 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a); 2199 tcg_gen_sari_tl(t0, t0, 0x1f); 2200 #endif 2201 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 2202 tcg_gen_ext32u_tl(t0, t0); 2203 t1 = tcg_temp_new(); 2204 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f); 2205 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 2206 tcg_temp_free(t1); 2207 tcg_temp_free(t0); 2208 if (unlikely(Rc(ctx->opcode) != 0)) 2209 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2210 } 2211 2212 #if defined(TARGET_PPC64) 2213 /* sld & sld. */ 2214 static void gen_sld(DisasContext *ctx) 2215 { 2216 TCGv t0, t1; 2217 2218 t0 = tcg_temp_new(); 2219 /* AND rS with a mask that is 0 when rB >= 0x40 */ 2220 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39); 2221 tcg_gen_sari_tl(t0, t0, 0x3f); 2222 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 2223 t1 = tcg_temp_new(); 2224 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f); 2225 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 2226 tcg_temp_free(t1); 2227 tcg_temp_free(t0); 2228 if (unlikely(Rc(ctx->opcode) != 0)) 2229 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2230 } 2231 2232 /* srad & srad. 
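 *
 * Like sraw above, this goes through a helper rather than inline TCG
 * ops: besides the shift itself it has to compute XER.CA from the bits
 * shifted out of a negative operand, and it has to cope with shift
 * counts of 64 and above, which plain TCG shifts do not allow.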
*/ 2233 static void gen_srad(DisasContext *ctx) 2234 { 2235 gen_helper_srad(cpu_gpr[rA(ctx->opcode)], cpu_env, 2236 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2237 if (unlikely(Rc(ctx->opcode) != 0)) 2238 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2239 } 2240 /* sradi & sradi. */ 2241 static inline void gen_sradi(DisasContext *ctx, int n) 2242 { 2243 int sh = SH(ctx->opcode) + (n << 5); 2244 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 2245 TCGv src = cpu_gpr[rS(ctx->opcode)]; 2246 if (sh == 0) { 2247 tcg_gen_mov_tl(dst, src); 2248 tcg_gen_movi_tl(cpu_ca, 0); 2249 if (is_isa300(ctx)) { 2250 tcg_gen_movi_tl(cpu_ca32, 0); 2251 } 2252 } else { 2253 TCGv t0; 2254 tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1); 2255 t0 = tcg_temp_new(); 2256 tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1); 2257 tcg_gen_and_tl(cpu_ca, cpu_ca, t0); 2258 tcg_temp_free(t0); 2259 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0); 2260 if (is_isa300(ctx)) { 2261 tcg_gen_mov_tl(cpu_ca32, cpu_ca); 2262 } 2263 tcg_gen_sari_tl(dst, src, sh); 2264 } 2265 if (unlikely(Rc(ctx->opcode) != 0)) { 2266 gen_set_Rc0(ctx, dst); 2267 } 2268 } 2269 2270 static void gen_sradi0(DisasContext *ctx) 2271 { 2272 gen_sradi(ctx, 0); 2273 } 2274 2275 static void gen_sradi1(DisasContext *ctx) 2276 { 2277 gen_sradi(ctx, 1); 2278 } 2279 2280 /* extswsli & extswsli. */ 2281 static inline void gen_extswsli(DisasContext *ctx, int n) 2282 { 2283 int sh = SH(ctx->opcode) + (n << 5); 2284 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 2285 TCGv src = cpu_gpr[rS(ctx->opcode)]; 2286 2287 tcg_gen_ext32s_tl(dst, src); 2288 tcg_gen_shli_tl(dst, dst, sh); 2289 if (unlikely(Rc(ctx->opcode) != 0)) { 2290 gen_set_Rc0(ctx, dst); 2291 } 2292 } 2293 2294 static void gen_extswsli0(DisasContext *ctx) 2295 { 2296 gen_extswsli(ctx, 0); 2297 } 2298 2299 static void gen_extswsli1(DisasContext *ctx) 2300 { 2301 gen_extswsli(ctx, 1); 2302 } 2303 2304 /* srd & srd. 
*/ 2305 static void gen_srd(DisasContext *ctx) 2306 { 2307 TCGv t0, t1; 2308 2309 t0 = tcg_temp_new(); 2310 /* AND rS with a mask that is 0 when rB >= 0x40 */ 2311 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39); 2312 tcg_gen_sari_tl(t0, t0, 0x3f); 2313 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 2314 t1 = tcg_temp_new(); 2315 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f); 2316 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 2317 tcg_temp_free(t1); 2318 tcg_temp_free(t0); 2319 if (unlikely(Rc(ctx->opcode) != 0)) 2320 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2321 } 2322 #endif 2323 2324 /*** Addressing modes ***/ 2325 /* Register indirect with immediate index : EA = (rA|0) + SIMM */ 2326 static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA, 2327 target_long maskl) 2328 { 2329 target_long simm = SIMM(ctx->opcode); 2330 2331 simm &= ~maskl; 2332 if (rA(ctx->opcode) == 0) { 2333 if (NARROW_MODE(ctx)) { 2334 simm = (uint32_t)simm; 2335 } 2336 tcg_gen_movi_tl(EA, simm); 2337 } else if (likely(simm != 0)) { 2338 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm); 2339 if (NARROW_MODE(ctx)) { 2340 tcg_gen_ext32u_tl(EA, EA); 2341 } 2342 } else { 2343 if (NARROW_MODE(ctx)) { 2344 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]); 2345 } else { 2346 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]); 2347 } 2348 } 2349 } 2350 2351 static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA) 2352 { 2353 if (rA(ctx->opcode) == 0) { 2354 if (NARROW_MODE(ctx)) { 2355 tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]); 2356 } else { 2357 tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]); 2358 } 2359 } else { 2360 tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2361 if (NARROW_MODE(ctx)) { 2362 tcg_gen_ext32u_tl(EA, EA); 2363 } 2364 } 2365 } 2366 2367 static inline void gen_addr_register(DisasContext *ctx, TCGv EA) 2368 { 2369 if (rA(ctx->opcode) == 0) { 2370 tcg_gen_movi_tl(EA, 0); 2371 } else if (NARROW_MODE(ctx)) { 2372 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]); 2373 } else { 2374 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]); 2375 } 2376 } 2377 2378 static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1, 2379 target_long val) 2380 { 2381 tcg_gen_addi_tl(ret, arg1, val); 2382 if (NARROW_MODE(ctx)) { 2383 tcg_gen_ext32u_tl(ret, ret); 2384 } 2385 } 2386 2387 static inline void gen_align_no_le(DisasContext *ctx) 2388 { 2389 gen_exception_err(ctx, POWERPC_EXCP_ALIGN, 2390 (ctx->opcode & 0x03FF0000) | POWERPC_EXCP_ALIGN_LE); 2391 } 2392 2393 /*** Integer load ***/ 2394 #define DEF_MEMOP(op) ((op) | ctx->default_tcg_memop_mask) 2395 #define BSWAP_MEMOP(op) ((op) | (ctx->default_tcg_memop_mask ^ MO_BSWAP)) 2396 2397 #define GEN_QEMU_LOAD_TL(ldop, op) \ 2398 static void glue(gen_qemu_, ldop)(DisasContext *ctx, \ 2399 TCGv val, \ 2400 TCGv addr) \ 2401 { \ 2402 tcg_gen_qemu_ld_tl(val, addr, ctx->mem_idx, op); \ 2403 } 2404 2405 GEN_QEMU_LOAD_TL(ld8u, DEF_MEMOP(MO_UB)) 2406 GEN_QEMU_LOAD_TL(ld16u, DEF_MEMOP(MO_UW)) 2407 GEN_QEMU_LOAD_TL(ld16s, DEF_MEMOP(MO_SW)) 2408 GEN_QEMU_LOAD_TL(ld32u, DEF_MEMOP(MO_UL)) 2409 GEN_QEMU_LOAD_TL(ld32s, DEF_MEMOP(MO_SL)) 2410 2411 GEN_QEMU_LOAD_TL(ld16ur, BSWAP_MEMOP(MO_UW)) 2412 GEN_QEMU_LOAD_TL(ld32ur, BSWAP_MEMOP(MO_UL)) 2413 2414 #define GEN_QEMU_LOAD_64(ldop, op) \ 2415 static void glue(gen_qemu_, glue(ldop, _i64))(DisasContext *ctx, \ 2416 TCGv_i64 val, \ 2417 TCGv addr) \ 2418 { \ 2419 tcg_gen_qemu_ld_i64(val, addr, ctx->mem_idx, op); \ 2420 } 2421 2422 GEN_QEMU_LOAD_64(ld8u, DEF_MEMOP(MO_UB)) 2423 
GEN_QEMU_LOAD_64(ld16u, DEF_MEMOP(MO_UW)) 2424 GEN_QEMU_LOAD_64(ld32u, DEF_MEMOP(MO_UL)) 2425 GEN_QEMU_LOAD_64(ld32s, DEF_MEMOP(MO_SL)) 2426 GEN_QEMU_LOAD_64(ld64, DEF_MEMOP(MO_Q)) 2427 2428 #if defined(TARGET_PPC64) 2429 GEN_QEMU_LOAD_64(ld64ur, BSWAP_MEMOP(MO_Q)) 2430 #endif 2431 2432 #define GEN_QEMU_STORE_TL(stop, op) \ 2433 static void glue(gen_qemu_, stop)(DisasContext *ctx, \ 2434 TCGv val, \ 2435 TCGv addr) \ 2436 { \ 2437 tcg_gen_qemu_st_tl(val, addr, ctx->mem_idx, op); \ 2438 } 2439 2440 GEN_QEMU_STORE_TL(st8, DEF_MEMOP(MO_UB)) 2441 GEN_QEMU_STORE_TL(st16, DEF_MEMOP(MO_UW)) 2442 GEN_QEMU_STORE_TL(st32, DEF_MEMOP(MO_UL)) 2443 2444 GEN_QEMU_STORE_TL(st16r, BSWAP_MEMOP(MO_UW)) 2445 GEN_QEMU_STORE_TL(st32r, BSWAP_MEMOP(MO_UL)) 2446 2447 #define GEN_QEMU_STORE_64(stop, op) \ 2448 static void glue(gen_qemu_, glue(stop, _i64))(DisasContext *ctx, \ 2449 TCGv_i64 val, \ 2450 TCGv addr) \ 2451 { \ 2452 tcg_gen_qemu_st_i64(val, addr, ctx->mem_idx, op); \ 2453 } 2454 2455 GEN_QEMU_STORE_64(st8, DEF_MEMOP(MO_UB)) 2456 GEN_QEMU_STORE_64(st16, DEF_MEMOP(MO_UW)) 2457 GEN_QEMU_STORE_64(st32, DEF_MEMOP(MO_UL)) 2458 GEN_QEMU_STORE_64(st64, DEF_MEMOP(MO_Q)) 2459 2460 #if defined(TARGET_PPC64) 2461 GEN_QEMU_STORE_64(st64r, BSWAP_MEMOP(MO_Q)) 2462 #endif 2463 2464 #define GEN_LD(name, ldop, opc, type) \ 2465 static void glue(gen_, name)(DisasContext *ctx) \ 2466 { \ 2467 TCGv EA; \ 2468 gen_set_access_type(ctx, ACCESS_INT); \ 2469 EA = tcg_temp_new(); \ 2470 gen_addr_imm_index(ctx, EA, 0); \ 2471 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 2472 tcg_temp_free(EA); \ 2473 } 2474 2475 #define GEN_LDU(name, ldop, opc, type) \ 2476 static void glue(gen_, name##u)(DisasContext *ctx) \ 2477 { \ 2478 TCGv EA; \ 2479 if (unlikely(rA(ctx->opcode) == 0 || \ 2480 rA(ctx->opcode) == rD(ctx->opcode))) { \ 2481 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 2482 return; \ 2483 } \ 2484 gen_set_access_type(ctx, ACCESS_INT); \ 2485 EA = tcg_temp_new(); \ 2486 if (type == PPC_64B) \ 2487 gen_addr_imm_index(ctx, EA, 0x03); \ 2488 else \ 2489 gen_addr_imm_index(ctx, EA, 0); \ 2490 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 2491 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 2492 tcg_temp_free(EA); \ 2493 } 2494 2495 #define GEN_LDUX(name, ldop, opc2, opc3, type) \ 2496 static void glue(gen_, name##ux)(DisasContext *ctx) \ 2497 { \ 2498 TCGv EA; \ 2499 if (unlikely(rA(ctx->opcode) == 0 || \ 2500 rA(ctx->opcode) == rD(ctx->opcode))) { \ 2501 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 2502 return; \ 2503 } \ 2504 gen_set_access_type(ctx, ACCESS_INT); \ 2505 EA = tcg_temp_new(); \ 2506 gen_addr_reg_index(ctx, EA); \ 2507 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 2508 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 2509 tcg_temp_free(EA); \ 2510 } 2511 2512 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \ 2513 static void glue(gen_, name##x)(DisasContext *ctx) \ 2514 { \ 2515 TCGv EA; \ 2516 chk; \ 2517 gen_set_access_type(ctx, ACCESS_INT); \ 2518 EA = tcg_temp_new(); \ 2519 gen_addr_reg_index(ctx, EA); \ 2520 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 2521 tcg_temp_free(EA); \ 2522 } 2523 2524 #define GEN_LDX(name, ldop, opc2, opc3, type) \ 2525 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_NONE) 2526 2527 #define GEN_LDX_HVRM(name, ldop, opc2, opc3, type) \ 2528 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_HVRM) 2529 2530 #define GEN_LDS(name, ldop, op, type) \ 2531 GEN_LD(name, ldop, op | 0x20, type); \ 2532 GEN_LDU(name, ldop, op 
| 0x21, type); \ 2533 GEN_LDUX(name, ldop, 0x17, op | 0x01, type); \ 2534 GEN_LDX(name, ldop, 0x17, op | 0x00, type) 2535 2536 /* lbz lbzu lbzux lbzx */ 2537 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER); 2538 /* lha lhau lhaux lhax */ 2539 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER); 2540 /* lhz lhzu lhzux lhzx */ 2541 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER); 2542 /* lwz lwzu lwzux lwzx */ 2543 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER); 2544 2545 #define GEN_LDEPX(name, ldop, opc2, opc3) \ 2546 static void glue(gen_, name##epx)(DisasContext *ctx) \ 2547 { \ 2548 TCGv EA; \ 2549 CHK_SV; \ 2550 gen_set_access_type(ctx, ACCESS_INT); \ 2551 EA = tcg_temp_new(); \ 2552 gen_addr_reg_index(ctx, EA); \ 2553 tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_LOAD, ldop);\ 2554 tcg_temp_free(EA); \ 2555 } 2556 2557 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02) 2558 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08) 2559 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00) 2560 #if defined(TARGET_PPC64) 2561 GEN_LDEPX(ld, DEF_MEMOP(MO_Q), 0x1D, 0x00) 2562 #endif 2563 2564 #if defined(TARGET_PPC64) 2565 /* lwaux */ 2566 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B); 2567 /* lwax */ 2568 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B); 2569 /* ldux */ 2570 GEN_LDUX(ld, ld64_i64, 0x15, 0x01, PPC_64B); 2571 /* ldx */ 2572 GEN_LDX(ld, ld64_i64, 0x15, 0x00, PPC_64B); 2573 2574 /* CI load/store variants */ 2575 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST) 2576 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x15, PPC_CILDST) 2577 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST) 2578 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST) 2579 2580 static void gen_ld(DisasContext *ctx) 2581 { 2582 TCGv EA; 2583 if (Rc(ctx->opcode)) { 2584 if (unlikely(rA(ctx->opcode) == 0 || 2585 rA(ctx->opcode) == rD(ctx->opcode))) { 2586 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2587 return; 2588 } 2589 } 2590 gen_set_access_type(ctx, ACCESS_INT); 2591 EA = tcg_temp_new(); 2592 gen_addr_imm_index(ctx, EA, 0x03); 2593 if (ctx->opcode & 0x02) { 2594 /* lwa (lwau is undefined) */ 2595 gen_qemu_ld32s(ctx, cpu_gpr[rD(ctx->opcode)], EA); 2596 } else { 2597 /* ld - ldu */ 2598 gen_qemu_ld64_i64(ctx, cpu_gpr[rD(ctx->opcode)], EA); 2599 } 2600 if (Rc(ctx->opcode)) 2601 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); 2602 tcg_temp_free(EA); 2603 } 2604 2605 /* lq */ 2606 static void gen_lq(DisasContext *ctx) 2607 { 2608 int ra, rd; 2609 TCGv EA, hi, lo; 2610 2611 /* lq is a legal user mode instruction starting in ISA 2.07 */ 2612 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2613 bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2614 2615 if (!legal_in_user_mode && ctx->pr) { 2616 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); 2617 return; 2618 } 2619 2620 if (!le_is_supported && ctx->le_mode) { 2621 gen_align_no_le(ctx); 2622 return; 2623 } 2624 ra = rA(ctx->opcode); 2625 rd = rD(ctx->opcode); 2626 if (unlikely((rd & 1) || rd == ra)) { 2627 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2628 return; 2629 } 2630 2631 gen_set_access_type(ctx, ACCESS_INT); 2632 EA = tcg_temp_new(); 2633 gen_addr_imm_index(ctx, EA, 0x0F); 2634 2635 /* Note that the low part is always in RD+1, even in LE mode. 
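     *
     * I.e. RD always holds the most-significant doubleword of the
     * quadword and RD+1 the least-significant one; endianness only
     * decides which half sits at EA and which at EA + 8, hence the
     * swapped order of the two 8-byte loads in the LE and BE paths
     * below.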
*/ 2636 lo = cpu_gpr[rd + 1]; 2637 hi = cpu_gpr[rd]; 2638 2639 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 2640 if (HAVE_ATOMIC128) { 2641 TCGv_i32 oi = tcg_temp_new_i32(); 2642 if (ctx->le_mode) { 2643 tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ, ctx->mem_idx)); 2644 gen_helper_lq_le_parallel(lo, cpu_env, EA, oi); 2645 } else { 2646 tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ, ctx->mem_idx)); 2647 gen_helper_lq_be_parallel(lo, cpu_env, EA, oi); 2648 } 2649 tcg_temp_free_i32(oi); 2650 tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh)); 2651 } else { 2652 /* Restart with exclusive lock. */ 2653 gen_helper_exit_atomic(cpu_env); 2654 ctx->base.is_jmp = DISAS_NORETURN; 2655 } 2656 } else if (ctx->le_mode) { 2657 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEQ); 2658 gen_addr_add(ctx, EA, EA, 8); 2659 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEQ); 2660 } else { 2661 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEQ); 2662 gen_addr_add(ctx, EA, EA, 8); 2663 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEQ); 2664 } 2665 tcg_temp_free(EA); 2666 } 2667 #endif 2668 2669 /*** Integer store ***/ 2670 #define GEN_ST(name, stop, opc, type) \ 2671 static void glue(gen_, name)(DisasContext *ctx) \ 2672 { \ 2673 TCGv EA; \ 2674 gen_set_access_type(ctx, ACCESS_INT); \ 2675 EA = tcg_temp_new(); \ 2676 gen_addr_imm_index(ctx, EA, 0); \ 2677 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2678 tcg_temp_free(EA); \ 2679 } 2680 2681 #define GEN_STU(name, stop, opc, type) \ 2682 static void glue(gen_, stop##u)(DisasContext *ctx) \ 2683 { \ 2684 TCGv EA; \ 2685 if (unlikely(rA(ctx->opcode) == 0)) { \ 2686 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 2687 return; \ 2688 } \ 2689 gen_set_access_type(ctx, ACCESS_INT); \ 2690 EA = tcg_temp_new(); \ 2691 if (type == PPC_64B) \ 2692 gen_addr_imm_index(ctx, EA, 0x03); \ 2693 else \ 2694 gen_addr_imm_index(ctx, EA, 0); \ 2695 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2696 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 2697 tcg_temp_free(EA); \ 2698 } 2699 2700 #define GEN_STUX(name, stop, opc2, opc3, type) \ 2701 static void glue(gen_, name##ux)(DisasContext *ctx) \ 2702 { \ 2703 TCGv EA; \ 2704 if (unlikely(rA(ctx->opcode) == 0)) { \ 2705 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 2706 return; \ 2707 } \ 2708 gen_set_access_type(ctx, ACCESS_INT); \ 2709 EA = tcg_temp_new(); \ 2710 gen_addr_reg_index(ctx, EA); \ 2711 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2712 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 2713 tcg_temp_free(EA); \ 2714 } 2715 2716 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \ 2717 static void glue(gen_, name##x)(DisasContext *ctx) \ 2718 { \ 2719 TCGv EA; \ 2720 chk; \ 2721 gen_set_access_type(ctx, ACCESS_INT); \ 2722 EA = tcg_temp_new(); \ 2723 gen_addr_reg_index(ctx, EA); \ 2724 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2725 tcg_temp_free(EA); \ 2726 } 2727 #define GEN_STX(name, stop, opc2, opc3, type) \ 2728 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_NONE) 2729 2730 #define GEN_STX_HVRM(name, stop, opc2, opc3, type) \ 2731 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_HVRM) 2732 2733 #define GEN_STS(name, stop, op, type) \ 2734 GEN_ST(name, stop, op | 0x20, type); \ 2735 GEN_STU(name, stop, op | 0x21, type); \ 2736 GEN_STUX(name, stop, 0x17, op | 0x01, type); \ 2737 GEN_STX(name, stop, 0x17, op | 0x00, type) 2738 2739 /* stb stbu stbux stbx */ 2740 GEN_STS(stb, st8, 0x06, PPC_INTEGER); 2741 /* sth sthu sthux sthx */ 2742 GEN_STS(sth, st16, 
0x0C, PPC_INTEGER); 2743 /* stw stwu stwux stwx */ 2744 GEN_STS(stw, st32, 0x04, PPC_INTEGER); 2745 2746 #define GEN_STEPX(name, stop, opc2, opc3) \ 2747 static void glue(gen_, name##epx)(DisasContext *ctx) \ 2748 { \ 2749 TCGv EA; \ 2750 CHK_SV; \ 2751 gen_set_access_type(ctx, ACCESS_INT); \ 2752 EA = tcg_temp_new(); \ 2753 gen_addr_reg_index(ctx, EA); \ 2754 tcg_gen_qemu_st_tl( \ 2755 cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_STORE, stop); \ 2756 tcg_temp_free(EA); \ 2757 } 2758 2759 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06) 2760 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C) 2761 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04) 2762 #if defined(TARGET_PPC64) 2763 GEN_STEPX(std, DEF_MEMOP(MO_Q), 0x1d, 0x04) 2764 #endif 2765 2766 #if defined(TARGET_PPC64) 2767 GEN_STUX(std, st64_i64, 0x15, 0x05, PPC_64B); 2768 GEN_STX(std, st64_i64, 0x15, 0x04, PPC_64B); 2769 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST) 2770 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST) 2771 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST) 2772 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST) 2773 2774 static void gen_std(DisasContext *ctx) 2775 { 2776 int rs; 2777 TCGv EA; 2778 2779 rs = rS(ctx->opcode); 2780 if ((ctx->opcode & 0x3) == 0x2) { /* stq */ 2781 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2782 bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2783 TCGv hi, lo; 2784 2785 if (!(ctx->insns_flags & PPC_64BX)) { 2786 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2787 } 2788 2789 if (!legal_in_user_mode && ctx->pr) { 2790 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); 2791 return; 2792 } 2793 2794 if (!le_is_supported && ctx->le_mode) { 2795 gen_align_no_le(ctx); 2796 return; 2797 } 2798 2799 if (unlikely(rs & 1)) { 2800 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2801 return; 2802 } 2803 gen_set_access_type(ctx, ACCESS_INT); 2804 EA = tcg_temp_new(); 2805 gen_addr_imm_index(ctx, EA, 0x03); 2806 2807 /* Note that the low part is always in RS+1, even in LE mode. */ 2808 lo = cpu_gpr[rs + 1]; 2809 hi = cpu_gpr[rs]; 2810 2811 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 2812 if (HAVE_ATOMIC128) { 2813 TCGv_i32 oi = tcg_temp_new_i32(); 2814 if (ctx->le_mode) { 2815 tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ, ctx->mem_idx)); 2816 gen_helper_stq_le_parallel(cpu_env, EA, lo, hi, oi); 2817 } else { 2818 tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ, ctx->mem_idx)); 2819 gen_helper_stq_be_parallel(cpu_env, EA, lo, hi, oi); 2820 } 2821 tcg_temp_free_i32(oi); 2822 } else { 2823 /* Restart with exclusive lock. 
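                 *
                 * Without a 128-bit atomic operation on the host, the
                 * helper raises EXCP_ATOMIC and the outer execution
                 * loop replays just this instruction inside
                 * start_exclusive(), so other vCPUs cannot observe the
                 * two 64-bit stores half-done.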
 */
                gen_helper_exit_atomic(cpu_env);
                ctx->base.is_jmp = DISAS_NORETURN;
            }
        } else if (ctx->le_mode) {
            tcg_gen_qemu_st_i64(lo, EA, ctx->mem_idx, MO_LEQ);
            gen_addr_add(ctx, EA, EA, 8);
            tcg_gen_qemu_st_i64(hi, EA, ctx->mem_idx, MO_LEQ);
        } else {
            tcg_gen_qemu_st_i64(hi, EA, ctx->mem_idx, MO_BEQ);
            gen_addr_add(ctx, EA, EA, 8);
            tcg_gen_qemu_st_i64(lo, EA, ctx->mem_idx, MO_BEQ);
        }
        tcg_temp_free(EA);
    } else {
        /* std / stdu */
        if (Rc(ctx->opcode)) {
            if (unlikely(rA(ctx->opcode) == 0)) {
                gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
                return;
            }
        }
        gen_set_access_type(ctx, ACCESS_INT);
        EA = tcg_temp_new();
        gen_addr_imm_index(ctx, EA, 0x03);
        gen_qemu_st64_i64(ctx, cpu_gpr[rs], EA);
        if (Rc(ctx->opcode))
            tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
        tcg_temp_free(EA);
    }
}
#endif
/*** Integer load and store with byte reverse ***/

/* lhbrx */
GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER);

/* lwbrx */
GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER);

#if defined(TARGET_PPC64)
/* ldbrx */
GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE);
/* stdbrx */
GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE);
#endif /* TARGET_PPC64 */

/* sthbrx */
GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER);
/* stwbrx */
GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER);

/*** Integer load and store multiple ***/

/* lmw */
static void gen_lmw(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1;

    if (ctx->le_mode) {
        gen_align_no_le(ctx);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    t0 = tcg_temp_new();
    t1 = tcg_const_i32(rD(ctx->opcode));
    gen_addr_imm_index(ctx, t0, 0);
    gen_helper_lmw(cpu_env, t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}

/* stmw */
static void gen_stmw(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1;

    if (ctx->le_mode) {
        gen_align_no_le(ctx);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    t0 = tcg_temp_new();
    t1 = tcg_const_i32(rS(ctx->opcode));
    gen_addr_imm_index(ctx, t0, 0);
    gen_helper_stmw(cpu_env, t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}

/*** Integer load and store strings ***/

/* lswi */
/* PowerPC32 specification says we must generate an exception if
 * rA is in the range of registers to be loaded.
 * On the other hand, IBM says this is valid, but rA won't be loaded.
 * For now, I'll follow the spec...
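 *
 * For reference: lswi moves NB bytes (NB = 0 meaning 32) starting at
 * EA into successive GPRs beginning with RD, four bytes per register,
 * wrapping from r31 back to r0; that wrap-around is the overlap that
 * lsw_reg_in_range() checks rA against below.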
2922 */ 2923 static void gen_lswi(DisasContext *ctx) 2924 { 2925 TCGv t0; 2926 TCGv_i32 t1, t2; 2927 int nb = NB(ctx->opcode); 2928 int start = rD(ctx->opcode); 2929 int ra = rA(ctx->opcode); 2930 int nr; 2931 2932 if (ctx->le_mode) { 2933 gen_align_no_le(ctx); 2934 return; 2935 } 2936 if (nb == 0) 2937 nb = 32; 2938 nr = DIV_ROUND_UP(nb, 4); 2939 if (unlikely(lsw_reg_in_range(start, nr, ra))) { 2940 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX); 2941 return; 2942 } 2943 gen_set_access_type(ctx, ACCESS_INT); 2944 t0 = tcg_temp_new(); 2945 gen_addr_register(ctx, t0); 2946 t1 = tcg_const_i32(nb); 2947 t2 = tcg_const_i32(start); 2948 gen_helper_lsw(cpu_env, t0, t1, t2); 2949 tcg_temp_free(t0); 2950 tcg_temp_free_i32(t1); 2951 tcg_temp_free_i32(t2); 2952 } 2953 2954 /* lswx */ 2955 static void gen_lswx(DisasContext *ctx) 2956 { 2957 TCGv t0; 2958 TCGv_i32 t1, t2, t3; 2959 2960 if (ctx->le_mode) { 2961 gen_align_no_le(ctx); 2962 return; 2963 } 2964 gen_set_access_type(ctx, ACCESS_INT); 2965 t0 = tcg_temp_new(); 2966 gen_addr_reg_index(ctx, t0); 2967 t1 = tcg_const_i32(rD(ctx->opcode)); 2968 t2 = tcg_const_i32(rA(ctx->opcode)); 2969 t3 = tcg_const_i32(rB(ctx->opcode)); 2970 gen_helper_lswx(cpu_env, t0, t1, t2, t3); 2971 tcg_temp_free(t0); 2972 tcg_temp_free_i32(t1); 2973 tcg_temp_free_i32(t2); 2974 tcg_temp_free_i32(t3); 2975 } 2976 2977 /* stswi */ 2978 static void gen_stswi(DisasContext *ctx) 2979 { 2980 TCGv t0; 2981 TCGv_i32 t1, t2; 2982 int nb = NB(ctx->opcode); 2983 2984 if (ctx->le_mode) { 2985 gen_align_no_le(ctx); 2986 return; 2987 } 2988 gen_set_access_type(ctx, ACCESS_INT); 2989 t0 = tcg_temp_new(); 2990 gen_addr_register(ctx, t0); 2991 if (nb == 0) 2992 nb = 32; 2993 t1 = tcg_const_i32(nb); 2994 t2 = tcg_const_i32(rS(ctx->opcode)); 2995 gen_helper_stsw(cpu_env, t0, t1, t2); 2996 tcg_temp_free(t0); 2997 tcg_temp_free_i32(t1); 2998 tcg_temp_free_i32(t2); 2999 } 3000 3001 /* stswx */ 3002 static void gen_stswx(DisasContext *ctx) 3003 { 3004 TCGv t0; 3005 TCGv_i32 t1, t2; 3006 3007 if (ctx->le_mode) { 3008 gen_align_no_le(ctx); 3009 return; 3010 } 3011 gen_set_access_type(ctx, ACCESS_INT); 3012 t0 = tcg_temp_new(); 3013 gen_addr_reg_index(ctx, t0); 3014 t1 = tcg_temp_new_i32(); 3015 tcg_gen_trunc_tl_i32(t1, cpu_xer); 3016 tcg_gen_andi_i32(t1, t1, 0x7F); 3017 t2 = tcg_const_i32(rS(ctx->opcode)); 3018 gen_helper_stsw(cpu_env, t0, t1, t2); 3019 tcg_temp_free(t0); 3020 tcg_temp_free_i32(t1); 3021 tcg_temp_free_i32(t2); 3022 } 3023 3024 /*** Memory synchronisation ***/ 3025 /* eieio */ 3026 static void gen_eieio(DisasContext *ctx) 3027 { 3028 TCGBar bar = TCG_MO_LD_ST; 3029 3030 /* 3031 * POWER9 has a eieio instruction variant using bit 6 as a hint to 3032 * tell the CPU it is a store-forwarding barrier. 3033 */ 3034 if (ctx->opcode & 0x2000000) { 3035 /* 3036 * ISA says that "Reserved fields in instructions are ignored 3037 * by the processor". So ignore the bit 6 on non-POWER9 CPU but 3038 * as this is not an instruction software should be using, 3039 * complain to the user. 
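         *
         * When the bit is accepted (PPC2_ISA300), the translation
         * simply uses a store-vs-load barrier for the hinted form
         * instead of the default load-vs-store one; TCG has no store
         * forwarding to hint about.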
3040 */ 3041 if (!(ctx->insns_flags2 & PPC2_ISA300)) { 3042 qemu_log_mask(LOG_GUEST_ERROR, "invalid eieio using bit 6 at @" 3043 TARGET_FMT_lx "\n", ctx->base.pc_next - 4); 3044 } else { 3045 bar = TCG_MO_ST_LD; 3046 } 3047 } 3048 3049 tcg_gen_mb(bar | TCG_BAR_SC); 3050 } 3051 3052 #if !defined(CONFIG_USER_ONLY) 3053 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) 3054 { 3055 TCGv_i32 t; 3056 TCGLabel *l; 3057 3058 if (!ctx->lazy_tlb_flush) { 3059 return; 3060 } 3061 l = gen_new_label(); 3062 t = tcg_temp_new_i32(); 3063 tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, tlb_need_flush)); 3064 tcg_gen_brcondi_i32(TCG_COND_EQ, t, 0, l); 3065 if (global) { 3066 gen_helper_check_tlb_flush_global(cpu_env); 3067 } else { 3068 gen_helper_check_tlb_flush_local(cpu_env); 3069 } 3070 gen_set_label(l); 3071 tcg_temp_free_i32(t); 3072 } 3073 #else 3074 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) { } 3075 #endif 3076 3077 /* isync */ 3078 static void gen_isync(DisasContext *ctx) 3079 { 3080 /* 3081 * We need to check for a pending TLB flush. This can only happen in 3082 * kernel mode however so check MSR_PR 3083 */ 3084 if (!ctx->pr) { 3085 gen_check_tlb_flush(ctx, false); 3086 } 3087 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC); 3088 gen_stop_exception(ctx); 3089 } 3090 3091 #define MEMOP_GET_SIZE(x) (1 << ((x) & MO_SIZE)) 3092 3093 static void gen_load_locked(DisasContext *ctx, TCGMemOp memop) 3094 { 3095 TCGv gpr = cpu_gpr[rD(ctx->opcode)]; 3096 TCGv t0 = tcg_temp_new(); 3097 3098 gen_set_access_type(ctx, ACCESS_RES); 3099 gen_addr_reg_index(ctx, t0); 3100 tcg_gen_qemu_ld_tl(gpr, t0, ctx->mem_idx, memop | MO_ALIGN); 3101 tcg_gen_mov_tl(cpu_reserve, t0); 3102 tcg_gen_mov_tl(cpu_reserve_val, gpr); 3103 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_LDAQ); 3104 tcg_temp_free(t0); 3105 } 3106 3107 #define LARX(name, memop) \ 3108 static void gen_##name(DisasContext *ctx) \ 3109 { \ 3110 gen_load_locked(ctx, memop); \ 3111 } 3112 3113 /* lwarx */ 3114 LARX(lbarx, DEF_MEMOP(MO_UB)) 3115 LARX(lharx, DEF_MEMOP(MO_UW)) 3116 LARX(lwarx, DEF_MEMOP(MO_UL)) 3117 3118 static void gen_fetch_inc_conditional(DisasContext *ctx, TCGMemOp memop, 3119 TCGv EA, TCGCond cond, int addend) 3120 { 3121 TCGv t = tcg_temp_new(); 3122 TCGv t2 = tcg_temp_new(); 3123 TCGv u = tcg_temp_new(); 3124 3125 tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop); 3126 tcg_gen_addi_tl(t2, EA, MEMOP_GET_SIZE(memop)); 3127 tcg_gen_qemu_ld_tl(t2, t2, ctx->mem_idx, memop); 3128 tcg_gen_addi_tl(u, t, addend); 3129 3130 /* E.g. for fetch and increment bounded... */ 3131 /* mem(EA,s) = (t != t2 ? u = t + 1 : t) */ 3132 tcg_gen_movcond_tl(cond, u, t, t2, u, t); 3133 tcg_gen_qemu_st_tl(u, EA, ctx->mem_idx, memop); 3134 3135 /* RT = (t != t2 ? 
t : u = 1<<(s*8-1)) */ 3136 tcg_gen_movi_tl(u, 1 << (MEMOP_GET_SIZE(memop) * 8 - 1)); 3137 tcg_gen_movcond_tl(cond, cpu_gpr[rD(ctx->opcode)], t, t2, t, u); 3138 3139 tcg_temp_free(t); 3140 tcg_temp_free(t2); 3141 tcg_temp_free(u); 3142 } 3143 3144 static void gen_ld_atomic(DisasContext *ctx, TCGMemOp memop) 3145 { 3146 uint32_t gpr_FC = FC(ctx->opcode); 3147 TCGv EA = tcg_temp_new(); 3148 int rt = rD(ctx->opcode); 3149 bool need_serial; 3150 TCGv src, dst; 3151 3152 gen_addr_register(ctx, EA); 3153 dst = cpu_gpr[rt]; 3154 src = cpu_gpr[(rt + 1) & 31]; 3155 3156 need_serial = false; 3157 memop |= MO_ALIGN; 3158 switch (gpr_FC) { 3159 case 0: /* Fetch and add */ 3160 tcg_gen_atomic_fetch_add_tl(dst, EA, src, ctx->mem_idx, memop); 3161 break; 3162 case 1: /* Fetch and xor */ 3163 tcg_gen_atomic_fetch_xor_tl(dst, EA, src, ctx->mem_idx, memop); 3164 break; 3165 case 2: /* Fetch and or */ 3166 tcg_gen_atomic_fetch_or_tl(dst, EA, src, ctx->mem_idx, memop); 3167 break; 3168 case 3: /* Fetch and 'and' */ 3169 tcg_gen_atomic_fetch_and_tl(dst, EA, src, ctx->mem_idx, memop); 3170 break; 3171 case 4: /* Fetch and max unsigned */ 3172 tcg_gen_atomic_fetch_umax_tl(dst, EA, src, ctx->mem_idx, memop); 3173 break; 3174 case 5: /* Fetch and max signed */ 3175 tcg_gen_atomic_fetch_smax_tl(dst, EA, src, ctx->mem_idx, memop); 3176 break; 3177 case 6: /* Fetch and min unsigned */ 3178 tcg_gen_atomic_fetch_umin_tl(dst, EA, src, ctx->mem_idx, memop); 3179 break; 3180 case 7: /* Fetch and min signed */ 3181 tcg_gen_atomic_fetch_smin_tl(dst, EA, src, ctx->mem_idx, memop); 3182 break; 3183 case 8: /* Swap */ 3184 tcg_gen_atomic_xchg_tl(dst, EA, src, ctx->mem_idx, memop); 3185 break; 3186 3187 case 16: /* Compare and swap not equal */ 3188 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3189 need_serial = true; 3190 } else { 3191 TCGv t0 = tcg_temp_new(); 3192 TCGv t1 = tcg_temp_new(); 3193 3194 tcg_gen_qemu_ld_tl(t0, EA, ctx->mem_idx, memop); 3195 if ((memop & MO_SIZE) == MO_64 || TARGET_LONG_BITS == 32) { 3196 tcg_gen_mov_tl(t1, src); 3197 } else { 3198 tcg_gen_ext32u_tl(t1, src); 3199 } 3200 tcg_gen_movcond_tl(TCG_COND_NE, t1, t0, t1, 3201 cpu_gpr[(rt + 2) & 31], t0); 3202 tcg_gen_qemu_st_tl(t1, EA, ctx->mem_idx, memop); 3203 tcg_gen_mov_tl(dst, t0); 3204 3205 tcg_temp_free(t0); 3206 tcg_temp_free(t1); 3207 } 3208 break; 3209 3210 case 24: /* Fetch and increment bounded */ 3211 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3212 need_serial = true; 3213 } else { 3214 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, 1); 3215 } 3216 break; 3217 case 25: /* Fetch and increment equal */ 3218 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3219 need_serial = true; 3220 } else { 3221 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_EQ, 1); 3222 } 3223 break; 3224 case 28: /* Fetch and decrement bounded */ 3225 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3226 need_serial = true; 3227 } else { 3228 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, -1); 3229 } 3230 break; 3231 3232 default: 3233 /* invoke data storage error handler */ 3234 gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL); 3235 } 3236 tcg_temp_free(EA); 3237 3238 if (need_serial) { 3239 /* Restart with exclusive lock. 
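         *
         * The serialised cases (compare-and-swap-not-equal and the
         * bounded/equal fetch-and-increment/decrement forms) either
         * touch two memory words or need semantics with no single host
         * atomic equivalent, so under MTTCG the instruction is
         * replayed inside an exclusive section instead.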
*/ 3240 gen_helper_exit_atomic(cpu_env); 3241 ctx->base.is_jmp = DISAS_NORETURN; 3242 } 3243 } 3244 3245 static void gen_lwat(DisasContext *ctx) 3246 { 3247 gen_ld_atomic(ctx, DEF_MEMOP(MO_UL)); 3248 } 3249 3250 #ifdef TARGET_PPC64 3251 static void gen_ldat(DisasContext *ctx) 3252 { 3253 gen_ld_atomic(ctx, DEF_MEMOP(MO_Q)); 3254 } 3255 #endif 3256 3257 static void gen_st_atomic(DisasContext *ctx, TCGMemOp memop) 3258 { 3259 uint32_t gpr_FC = FC(ctx->opcode); 3260 TCGv EA = tcg_temp_new(); 3261 TCGv src, discard; 3262 3263 gen_addr_register(ctx, EA); 3264 src = cpu_gpr[rD(ctx->opcode)]; 3265 discard = tcg_temp_new(); 3266 3267 memop |= MO_ALIGN; 3268 switch (gpr_FC) { 3269 case 0: /* add and Store */ 3270 tcg_gen_atomic_add_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3271 break; 3272 case 1: /* xor and Store */ 3273 tcg_gen_atomic_xor_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3274 break; 3275 case 2: /* Or and Store */ 3276 tcg_gen_atomic_or_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3277 break; 3278 case 3: /* 'and' and Store */ 3279 tcg_gen_atomic_and_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3280 break; 3281 case 4: /* Store max unsigned */ 3282 tcg_gen_atomic_umax_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3283 break; 3284 case 5: /* Store max signed */ 3285 tcg_gen_atomic_smax_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3286 break; 3287 case 6: /* Store min unsigned */ 3288 tcg_gen_atomic_umin_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3289 break; 3290 case 7: /* Store min signed */ 3291 tcg_gen_atomic_smin_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3292 break; 3293 case 24: /* Store twin */ 3294 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3295 /* Restart with exclusive lock. */ 3296 gen_helper_exit_atomic(cpu_env); 3297 ctx->base.is_jmp = DISAS_NORETURN; 3298 } else { 3299 TCGv t = tcg_temp_new(); 3300 TCGv t2 = tcg_temp_new(); 3301 TCGv s = tcg_temp_new(); 3302 TCGv s2 = tcg_temp_new(); 3303 TCGv ea_plus_s = tcg_temp_new(); 3304 3305 tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop); 3306 tcg_gen_addi_tl(ea_plus_s, EA, MEMOP_GET_SIZE(memop)); 3307 tcg_gen_qemu_ld_tl(t2, ea_plus_s, ctx->mem_idx, memop); 3308 tcg_gen_movcond_tl(TCG_COND_EQ, s, t, t2, src, t); 3309 tcg_gen_movcond_tl(TCG_COND_EQ, s2, t, t2, src, t2); 3310 tcg_gen_qemu_st_tl(s, EA, ctx->mem_idx, memop); 3311 tcg_gen_qemu_st_tl(s2, ea_plus_s, ctx->mem_idx, memop); 3312 3313 tcg_temp_free(ea_plus_s); 3314 tcg_temp_free(s2); 3315 tcg_temp_free(s); 3316 tcg_temp_free(t2); 3317 tcg_temp_free(t); 3318 } 3319 break; 3320 default: 3321 /* invoke data storage error handler */ 3322 gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL); 3323 } 3324 tcg_temp_free(discard); 3325 tcg_temp_free(EA); 3326 } 3327 3328 static void gen_stwat(DisasContext *ctx) 3329 { 3330 gen_st_atomic(ctx, DEF_MEMOP(MO_UL)); 3331 } 3332 3333 #ifdef TARGET_PPC64 3334 static void gen_stdat(DisasContext *ctx) 3335 { 3336 gen_st_atomic(ctx, DEF_MEMOP(MO_Q)); 3337 } 3338 #endif 3339 3340 static void gen_conditional_store(DisasContext *ctx, TCGMemOp memop) 3341 { 3342 TCGLabel *l1 = gen_new_label(); 3343 TCGLabel *l2 = gen_new_label(); 3344 TCGv t0 = tcg_temp_new(); 3345 int reg = rS(ctx->opcode); 3346 3347 gen_set_access_type(ctx, ACCESS_RES); 3348 gen_addr_reg_index(ctx, t0); 3349 tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1); 3350 tcg_temp_free(t0); 3351 3352 t0 = tcg_temp_new(); 3353 tcg_gen_atomic_cmpxchg_tl(t0, cpu_reserve, cpu_reserve_val, 3354 cpu_gpr[reg], ctx->mem_idx, 3355 DEF_MEMOP(memop) | 
MO_ALIGN); 3356 tcg_gen_setcond_tl(TCG_COND_EQ, t0, t0, cpu_reserve_val); 3357 tcg_gen_shli_tl(t0, t0, CRF_EQ_BIT); 3358 tcg_gen_or_tl(t0, t0, cpu_so); 3359 tcg_gen_trunc_tl_i32(cpu_crf[0], t0); 3360 tcg_temp_free(t0); 3361 tcg_gen_br(l2); 3362 3363 gen_set_label(l1); 3364 3365 /* Address mismatch implies failure. But we still need to provide the 3366 memory barrier semantics of the instruction. */ 3367 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_STRL); 3368 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 3369 3370 gen_set_label(l2); 3371 tcg_gen_movi_tl(cpu_reserve, -1); 3372 } 3373 3374 #define STCX(name, memop) \ 3375 static void gen_##name(DisasContext *ctx) \ 3376 { \ 3377 gen_conditional_store(ctx, memop); \ 3378 } 3379 3380 STCX(stbcx_, DEF_MEMOP(MO_UB)) 3381 STCX(sthcx_, DEF_MEMOP(MO_UW)) 3382 STCX(stwcx_, DEF_MEMOP(MO_UL)) 3383 3384 #if defined(TARGET_PPC64) 3385 /* ldarx */ 3386 LARX(ldarx, DEF_MEMOP(MO_Q)) 3387 /* stdcx. */ 3388 STCX(stdcx_, DEF_MEMOP(MO_Q)) 3389 3390 /* lqarx */ 3391 static void gen_lqarx(DisasContext *ctx) 3392 { 3393 int rd = rD(ctx->opcode); 3394 TCGv EA, hi, lo; 3395 3396 if (unlikely((rd & 1) || (rd == rA(ctx->opcode)) || 3397 (rd == rB(ctx->opcode)))) { 3398 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3399 return; 3400 } 3401 3402 gen_set_access_type(ctx, ACCESS_RES); 3403 EA = tcg_temp_new(); 3404 gen_addr_reg_index(ctx, EA); 3405 3406 /* Note that the low part is always in RD+1, even in LE mode. */ 3407 lo = cpu_gpr[rd + 1]; 3408 hi = cpu_gpr[rd]; 3409 3410 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3411 if (HAVE_ATOMIC128) { 3412 TCGv_i32 oi = tcg_temp_new_i32(); 3413 if (ctx->le_mode) { 3414 tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ | MO_ALIGN_16, 3415 ctx->mem_idx)); 3416 gen_helper_lq_le_parallel(lo, cpu_env, EA, oi); 3417 } else { 3418 tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ | MO_ALIGN_16, 3419 ctx->mem_idx)); 3420 gen_helper_lq_be_parallel(lo, cpu_env, EA, oi); 3421 } 3422 tcg_temp_free_i32(oi); 3423 tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh)); 3424 } else { 3425 /* Restart with exclusive lock. */ 3426 gen_helper_exit_atomic(cpu_env); 3427 ctx->base.is_jmp = DISAS_NORETURN; 3428 tcg_temp_free(EA); 3429 return; 3430 } 3431 } else if (ctx->le_mode) { 3432 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEQ | MO_ALIGN_16); 3433 tcg_gen_mov_tl(cpu_reserve, EA); 3434 gen_addr_add(ctx, EA, EA, 8); 3435 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEQ); 3436 } else { 3437 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEQ | MO_ALIGN_16); 3438 tcg_gen_mov_tl(cpu_reserve, EA); 3439 gen_addr_add(ctx, EA, EA, 8); 3440 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEQ); 3441 } 3442 tcg_temp_free(EA); 3443 3444 tcg_gen_st_tl(hi, cpu_env, offsetof(CPUPPCState, reserve_val)); 3445 tcg_gen_st_tl(lo, cpu_env, offsetof(CPUPPCState, reserve_val2)); 3446 } 3447 3448 /* stqcx. */ 3449 static void gen_stqcx_(DisasContext *ctx) 3450 { 3451 int rs = rS(ctx->opcode); 3452 TCGv EA, hi, lo; 3453 3454 if (unlikely(rs & 1)) { 3455 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3456 return; 3457 } 3458 3459 gen_set_access_type(ctx, ACCESS_RES); 3460 EA = tcg_temp_new(); 3461 gen_addr_reg_index(ctx, EA); 3462 3463 /* Note that the low part is always in RS+1, even in LE mode. 
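     *
     * The non-parallel path below only approximates the reservation:
     * the store succeeds if EA still equals cpu_reserve and the 16
     * bytes in memory still contain the values remembered by the
     * matching lqarx.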
 */
    lo = cpu_gpr[rs + 1];
    hi = cpu_gpr[rs];

    if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
        if (HAVE_CMPXCHG128) {
            TCGv_i32 oi = tcg_const_i32(DEF_MEMOP(MO_Q) | MO_ALIGN_16);
            if (ctx->le_mode) {
                gen_helper_stqcx_le_parallel(cpu_crf[0], cpu_env,
                                             EA, lo, hi, oi);
            } else {
                gen_helper_stqcx_be_parallel(cpu_crf[0], cpu_env,
                                             EA, lo, hi, oi);
            }
            tcg_temp_free_i32(oi);
        } else {
            /* Restart with exclusive lock. */
            gen_helper_exit_atomic(cpu_env);
            ctx->base.is_jmp = DISAS_NORETURN;
        }
        tcg_temp_free(EA);
    } else {
        TCGLabel *lab_fail = gen_new_label();
        TCGLabel *lab_over = gen_new_label();
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, lab_fail);
        tcg_temp_free(EA);

        gen_qemu_ld64_i64(ctx, t0, cpu_reserve);
        tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode
                                     ? offsetof(CPUPPCState, reserve_val2)
                                     : offsetof(CPUPPCState, reserve_val)));
        tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail);

        tcg_gen_addi_i64(t0, cpu_reserve, 8);
        gen_qemu_ld64_i64(ctx, t0, t0);
        tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode
                                     ? offsetof(CPUPPCState, reserve_val)
                                     : offsetof(CPUPPCState, reserve_val2)));
        tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail);

        /* Success */
        gen_qemu_st64_i64(ctx, ctx->le_mode ? lo : hi, cpu_reserve);
        tcg_gen_addi_i64(t0, cpu_reserve, 8);
        gen_qemu_st64_i64(ctx, ctx->le_mode ? hi : lo, t0);

        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
        tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ);
        tcg_gen_br(lab_over);

        gen_set_label(lab_fail);
        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);

        gen_set_label(lab_over);
        tcg_gen_movi_tl(cpu_reserve, -1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
#endif /* defined(TARGET_PPC64) */

/* sync */
static void gen_sync(DisasContext *ctx)
{
    uint32_t l = (ctx->opcode >> 21) & 3;

    /*
     * We may need to check for a pending TLB flush.
     *
     * We do this on ptesync (l == 2) on ppc64 and any sync on ppc32.
     *
     * Additionally, this can only happen in kernel mode, so check
     * MSR_PR as well.
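     *
     * (l is the instruction's L field: 0 = sync/hwsync, 1 = lwsync,
     * 2 = ptesync.)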
3538 */ 3539 if (((l == 2) || !(ctx->insns_flags & PPC_64B)) && !ctx->pr) { 3540 gen_check_tlb_flush(ctx, true); 3541 } 3542 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC); 3543 } 3544 3545 /* wait */ 3546 static void gen_wait(DisasContext *ctx) 3547 { 3548 TCGv_i32 t0 = tcg_const_i32(1); 3549 tcg_gen_st_i32(t0, cpu_env, 3550 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted)); 3551 tcg_temp_free_i32(t0); 3552 /* Stop translation, as the CPU is supposed to sleep from now */ 3553 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 3554 } 3555 3556 #if defined(TARGET_PPC64) 3557 static void gen_doze(DisasContext *ctx) 3558 { 3559 #if defined(CONFIG_USER_ONLY) 3560 GEN_PRIV; 3561 #else 3562 TCGv_i32 t; 3563 3564 CHK_HV; 3565 t = tcg_const_i32(PPC_PM_DOZE); 3566 gen_helper_pminsn(cpu_env, t); 3567 tcg_temp_free_i32(t); 3568 gen_stop_exception(ctx); 3569 #endif /* defined(CONFIG_USER_ONLY) */ 3570 } 3571 3572 static void gen_nap(DisasContext *ctx) 3573 { 3574 #if defined(CONFIG_USER_ONLY) 3575 GEN_PRIV; 3576 #else 3577 TCGv_i32 t; 3578 3579 CHK_HV; 3580 t = tcg_const_i32(PPC_PM_NAP); 3581 gen_helper_pminsn(cpu_env, t); 3582 tcg_temp_free_i32(t); 3583 gen_stop_exception(ctx); 3584 #endif /* defined(CONFIG_USER_ONLY) */ 3585 } 3586 3587 static void gen_stop(DisasContext *ctx) 3588 { 3589 gen_nap(ctx); 3590 } 3591 3592 static void gen_sleep(DisasContext *ctx) 3593 { 3594 #if defined(CONFIG_USER_ONLY) 3595 GEN_PRIV; 3596 #else 3597 TCGv_i32 t; 3598 3599 CHK_HV; 3600 t = tcg_const_i32(PPC_PM_SLEEP); 3601 gen_helper_pminsn(cpu_env, t); 3602 tcg_temp_free_i32(t); 3603 gen_stop_exception(ctx); 3604 #endif /* defined(CONFIG_USER_ONLY) */ 3605 } 3606 3607 static void gen_rvwinkle(DisasContext *ctx) 3608 { 3609 #if defined(CONFIG_USER_ONLY) 3610 GEN_PRIV; 3611 #else 3612 TCGv_i32 t; 3613 3614 CHK_HV; 3615 t = tcg_const_i32(PPC_PM_RVWINKLE); 3616 gen_helper_pminsn(cpu_env, t); 3617 tcg_temp_free_i32(t); 3618 gen_stop_exception(ctx); 3619 #endif /* defined(CONFIG_USER_ONLY) */ 3620 } 3621 #endif /* #if defined(TARGET_PPC64) */ 3622 3623 static inline void gen_update_cfar(DisasContext *ctx, target_ulong nip) 3624 { 3625 #if defined(TARGET_PPC64) 3626 if (ctx->has_cfar) 3627 tcg_gen_movi_tl(cpu_cfar, nip); 3628 #endif 3629 } 3630 3631 static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest) 3632 { 3633 if (unlikely(ctx->singlestep_enabled)) { 3634 return false; 3635 } 3636 3637 #ifndef CONFIG_USER_ONLY 3638 return (ctx->base.tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK); 3639 #else 3640 return true; 3641 #endif 3642 } 3643 3644 static void gen_lookup_and_goto_ptr(DisasContext *ctx) 3645 { 3646 int sse = ctx->singlestep_enabled; 3647 if (unlikely(sse)) { 3648 if (sse & GDBSTUB_SINGLE_STEP) { 3649 gen_debug_exception(ctx); 3650 } else if (sse & (CPU_SINGLE_STEP | CPU_BRANCH_STEP)) { 3651 uint32_t excp = gen_prep_dbgex(ctx); 3652 gen_exception(ctx, excp); 3653 } 3654 tcg_gen_exit_tb(NULL, 0); 3655 } else { 3656 tcg_gen_lookup_and_goto_ptr(); 3657 } 3658 } 3659 3660 /*** Branch ***/ 3661 static void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest) 3662 { 3663 if (NARROW_MODE(ctx)) { 3664 dest = (uint32_t) dest; 3665 } 3666 if (use_goto_tb(ctx, dest)) { 3667 tcg_gen_goto_tb(n); 3668 tcg_gen_movi_tl(cpu_nip, dest & ~3); 3669 tcg_gen_exit_tb(ctx->base.tb, n); 3670 } else { 3671 tcg_gen_movi_tl(cpu_nip, dest & ~3); 3672 gen_lookup_and_goto_ptr(ctx); 3673 } 3674 } 3675 3676 static inline void gen_setlr(DisasContext *ctx, target_ulong nip) 3677 { 3678 if (NARROW_MODE(ctx)) { 3679 nip = 
(uint32_t)nip; 3680 } 3681 tcg_gen_movi_tl(cpu_lr, nip); 3682 } 3683 3684 /* b ba bl bla */ 3685 static void gen_b(DisasContext *ctx) 3686 { 3687 target_ulong li, target; 3688 3689 ctx->exception = POWERPC_EXCP_BRANCH; 3690 /* sign extend LI */ 3691 li = LI(ctx->opcode); 3692 li = (li ^ 0x02000000) - 0x02000000; 3693 if (likely(AA(ctx->opcode) == 0)) { 3694 target = ctx->base.pc_next + li - 4; 3695 } else { 3696 target = li; 3697 } 3698 if (LK(ctx->opcode)) { 3699 gen_setlr(ctx, ctx->base.pc_next); 3700 } 3701 gen_update_cfar(ctx, ctx->base.pc_next - 4); 3702 gen_goto_tb(ctx, 0, target); 3703 } 3704 3705 #define BCOND_IM 0 3706 #define BCOND_LR 1 3707 #define BCOND_CTR 2 3708 #define BCOND_TAR 3 3709 3710 static void gen_bcond(DisasContext *ctx, int type) 3711 { 3712 uint32_t bo = BO(ctx->opcode); 3713 TCGLabel *l1; 3714 TCGv target; 3715 ctx->exception = POWERPC_EXCP_BRANCH; 3716 3717 if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) { 3718 target = tcg_temp_local_new(); 3719 if (type == BCOND_CTR) 3720 tcg_gen_mov_tl(target, cpu_ctr); 3721 else if (type == BCOND_TAR) 3722 gen_load_spr(target, SPR_TAR); 3723 else 3724 tcg_gen_mov_tl(target, cpu_lr); 3725 } else { 3726 target = NULL; 3727 } 3728 if (LK(ctx->opcode)) 3729 gen_setlr(ctx, ctx->base.pc_next); 3730 l1 = gen_new_label(); 3731 if ((bo & 0x4) == 0) { 3732 /* Decrement and test CTR */ 3733 TCGv temp = tcg_temp_new(); 3734 if (unlikely(type == BCOND_CTR)) { 3735 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3736 return; 3737 } 3738 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1); 3739 if (NARROW_MODE(ctx)) { 3740 tcg_gen_ext32u_tl(temp, cpu_ctr); 3741 } else { 3742 tcg_gen_mov_tl(temp, cpu_ctr); 3743 } 3744 if (bo & 0x2) { 3745 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1); 3746 } else { 3747 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1); 3748 } 3749 tcg_temp_free(temp); 3750 } 3751 if ((bo & 0x10) == 0) { 3752 /* Test CR */ 3753 uint32_t bi = BI(ctx->opcode); 3754 uint32_t mask = 0x08 >> (bi & 0x03); 3755 TCGv_i32 temp = tcg_temp_new_i32(); 3756 3757 if (bo & 0x8) { 3758 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask); 3759 tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1); 3760 } else { 3761 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask); 3762 tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1); 3763 } 3764 tcg_temp_free_i32(temp); 3765 } 3766 gen_update_cfar(ctx, ctx->base.pc_next - 4); 3767 if (type == BCOND_IM) { 3768 target_ulong li = (target_long)((int16_t)(BD(ctx->opcode))); 3769 if (likely(AA(ctx->opcode) == 0)) { 3770 gen_goto_tb(ctx, 0, ctx->base.pc_next + li - 4); 3771 } else { 3772 gen_goto_tb(ctx, 0, li); 3773 } 3774 } else { 3775 if (NARROW_MODE(ctx)) { 3776 tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3); 3777 } else { 3778 tcg_gen_andi_tl(cpu_nip, target, ~3); 3779 } 3780 gen_lookup_and_goto_ptr(ctx); 3781 tcg_temp_free(target); 3782 } 3783 if ((bo & 0x14) != 0x14) { 3784 /* fallthrough case */ 3785 gen_set_label(l1); 3786 gen_goto_tb(ctx, 1, ctx->base.pc_next); 3787 } 3788 } 3789 3790 static void gen_bc(DisasContext *ctx) 3791 { 3792 gen_bcond(ctx, BCOND_IM); 3793 } 3794 3795 static void gen_bcctr(DisasContext *ctx) 3796 { 3797 gen_bcond(ctx, BCOND_CTR); 3798 } 3799 3800 static void gen_bclr(DisasContext *ctx) 3801 { 3802 gen_bcond(ctx, BCOND_LR); 3803 } 3804 3805 static void gen_bctar(DisasContext *ctx) 3806 { 3807 gen_bcond(ctx, BCOND_TAR); 3808 } 3809 3810 /*** Condition register logical ***/ 3811 #define GEN_CRLOGIC(name, tcg_op, opc) \ 3812 static void glue(gen_, name)(DisasContext *ctx) \ 3813 { \ 3814 uint8_t 
bitmask; \ 3815 int sh; \ 3816 TCGv_i32 t0, t1; \ 3817 sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03); \ 3818 t0 = tcg_temp_new_i32(); \ 3819 if (sh > 0) \ 3820 tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh); \ 3821 else if (sh < 0) \ 3822 tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh); \ 3823 else \ 3824 tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]); \ 3825 t1 = tcg_temp_new_i32(); \ 3826 sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03); \ 3827 if (sh > 0) \ 3828 tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh); \ 3829 else if (sh < 0) \ 3830 tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh); \ 3831 else \ 3832 tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]); \ 3833 tcg_op(t0, t0, t1); \ 3834 bitmask = 0x08 >> (crbD(ctx->opcode) & 0x03); \ 3835 tcg_gen_andi_i32(t0, t0, bitmask); \ 3836 tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask); \ 3837 tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1); \ 3838 tcg_temp_free_i32(t0); \ 3839 tcg_temp_free_i32(t1); \ 3840 } 3841 3842 /* crand */ 3843 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08); 3844 /* crandc */ 3845 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04); 3846 /* creqv */ 3847 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09); 3848 /* crnand */ 3849 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07); 3850 /* crnor */ 3851 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01); 3852 /* cror */ 3853 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E); 3854 /* crorc */ 3855 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D); 3856 /* crxor */ 3857 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06); 3858 3859 /* mcrf */ 3860 static void gen_mcrf(DisasContext *ctx) 3861 { 3862 tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]); 3863 } 3864 3865 /*** System linkage ***/ 3866 3867 /* rfi (supervisor only) */ 3868 static void gen_rfi(DisasContext *ctx) 3869 { 3870 #if defined(CONFIG_USER_ONLY) 3871 GEN_PRIV; 3872 #else 3873 /* This instruction doesn't exist anymore on 64-bit server 3874 * processors compliant with arch 2.x 3875 */ 3876 if (ctx->insns_flags & PPC_SEGMENT_64B) { 3877 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3878 return; 3879 } 3880 /* Restore CPU state */ 3881 CHK_SV; 3882 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) { 3883 gen_io_start(); 3884 } 3885 gen_update_cfar(ctx, ctx->base.pc_next - 4); 3886 gen_helper_rfi(cpu_env); 3887 gen_sync_exception(ctx); 3888 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) { 3889 gen_io_end(); 3890 } 3891 #endif 3892 } 3893 3894 #if defined(TARGET_PPC64) 3895 static void gen_rfid(DisasContext *ctx) 3896 { 3897 #if defined(CONFIG_USER_ONLY) 3898 GEN_PRIV; 3899 #else 3900 /* Restore CPU state */ 3901 CHK_SV; 3902 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) { 3903 gen_io_start(); 3904 } 3905 gen_update_cfar(ctx, ctx->base.pc_next - 4); 3906 gen_helper_rfid(cpu_env); 3907 gen_sync_exception(ctx); 3908 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) { 3909 gen_io_end(); 3910 } 3911 #endif 3912 } 3913 3914 static void gen_hrfid(DisasContext *ctx) 3915 { 3916 #if defined(CONFIG_USER_ONLY) 3917 GEN_PRIV; 3918 #else 3919 /* Restore CPU state */ 3920 CHK_HV; 3921 gen_helper_hrfid(cpu_env); 3922 gen_sync_exception(ctx); 3923 #endif 3924 } 3925 #endif 3926 3927 /* sc */ 3928 #if defined(CONFIG_USER_ONLY) 3929 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER 3930 #else 3931 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL 3932 #endif 3933 static void gen_sc(DisasContext *ctx) 3934 { 3935 uint32_t lev; 3936 3937 lev = (ctx->opcode >> 5) & 0x7F; 3938 
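    /*
     * LEV (a 7-bit field) is forwarded as the exception error code; on
     * server CPUs "sc 1" (LEV = 1) is the hypervisor-call form of the
     * instruction.
     */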
gen_exception_err(ctx, POWERPC_SYSCALL, lev); 3939 } 3940 3941 /*** Trap ***/ 3942 3943 /* Check for unconditional traps (always or never) */ 3944 static bool check_unconditional_trap(DisasContext *ctx) 3945 { 3946 /* Trap never */ 3947 if (TO(ctx->opcode) == 0) { 3948 return true; 3949 } 3950 /* Trap always */ 3951 if (TO(ctx->opcode) == 31) { 3952 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_TRAP); 3953 return true; 3954 } 3955 return false; 3956 } 3957 3958 /* tw */ 3959 static void gen_tw(DisasContext *ctx) 3960 { 3961 TCGv_i32 t0; 3962 3963 if (check_unconditional_trap(ctx)) { 3964 return; 3965 } 3966 t0 = tcg_const_i32(TO(ctx->opcode)); 3967 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 3968 t0); 3969 tcg_temp_free_i32(t0); 3970 } 3971 3972 /* twi */ 3973 static void gen_twi(DisasContext *ctx) 3974 { 3975 TCGv t0; 3976 TCGv_i32 t1; 3977 3978 if (check_unconditional_trap(ctx)) { 3979 return; 3980 } 3981 t0 = tcg_const_tl(SIMM(ctx->opcode)); 3982 t1 = tcg_const_i32(TO(ctx->opcode)); 3983 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1); 3984 tcg_temp_free(t0); 3985 tcg_temp_free_i32(t1); 3986 } 3987 3988 #if defined(TARGET_PPC64) 3989 /* td */ 3990 static void gen_td(DisasContext *ctx) 3991 { 3992 TCGv_i32 t0; 3993 3994 if (check_unconditional_trap(ctx)) { 3995 return; 3996 } 3997 t0 = tcg_const_i32(TO(ctx->opcode)); 3998 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 3999 t0); 4000 tcg_temp_free_i32(t0); 4001 } 4002 4003 /* tdi */ 4004 static void gen_tdi(DisasContext *ctx) 4005 { 4006 TCGv t0; 4007 TCGv_i32 t1; 4008 4009 if (check_unconditional_trap(ctx)) { 4010 return; 4011 } 4012 t0 = tcg_const_tl(SIMM(ctx->opcode)); 4013 t1 = tcg_const_i32(TO(ctx->opcode)); 4014 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1); 4015 tcg_temp_free(t0); 4016 tcg_temp_free_i32(t1); 4017 } 4018 #endif 4019 4020 /*** Processor control ***/ 4021 4022 static void gen_read_xer(DisasContext *ctx, TCGv dst) 4023 { 4024 TCGv t0 = tcg_temp_new(); 4025 TCGv t1 = tcg_temp_new(); 4026 TCGv t2 = tcg_temp_new(); 4027 tcg_gen_mov_tl(dst, cpu_xer); 4028 tcg_gen_shli_tl(t0, cpu_so, XER_SO); 4029 tcg_gen_shli_tl(t1, cpu_ov, XER_OV); 4030 tcg_gen_shli_tl(t2, cpu_ca, XER_CA); 4031 tcg_gen_or_tl(t0, t0, t1); 4032 tcg_gen_or_tl(dst, dst, t2); 4033 tcg_gen_or_tl(dst, dst, t0); 4034 if (is_isa300(ctx)) { 4035 tcg_gen_shli_tl(t0, cpu_ov32, XER_OV32); 4036 tcg_gen_or_tl(dst, dst, t0); 4037 tcg_gen_shli_tl(t0, cpu_ca32, XER_CA32); 4038 tcg_gen_or_tl(dst, dst, t0); 4039 } 4040 tcg_temp_free(t0); 4041 tcg_temp_free(t1); 4042 tcg_temp_free(t2); 4043 } 4044 4045 static void gen_write_xer(TCGv src) 4046 { 4047 /* Write all flags, while reading back check for isa300 */ 4048 tcg_gen_andi_tl(cpu_xer, src, 4049 ~((1u << XER_SO) | 4050 (1u << XER_OV) | (1u << XER_OV32) | 4051 (1u << XER_CA) | (1u << XER_CA32))); 4052 tcg_gen_extract_tl(cpu_ov32, src, XER_OV32, 1); 4053 tcg_gen_extract_tl(cpu_ca32, src, XER_CA32, 1); 4054 tcg_gen_extract_tl(cpu_so, src, XER_SO, 1); 4055 tcg_gen_extract_tl(cpu_ov, src, XER_OV, 1); 4056 tcg_gen_extract_tl(cpu_ca, src, XER_CA, 1); 4057 } 4058 4059 /* mcrxr */ 4060 static void gen_mcrxr(DisasContext *ctx) 4061 { 4062 TCGv_i32 t0 = tcg_temp_new_i32(); 4063 TCGv_i32 t1 = tcg_temp_new_i32(); 4064 TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)]; 4065 4066 tcg_gen_trunc_tl_i32(t0, cpu_so); 4067 tcg_gen_trunc_tl_i32(t1, cpu_ov); 4068 tcg_gen_trunc_tl_i32(dst, cpu_ca); 4069 tcg_gen_shli_i32(t0, t0, 3); 4070 tcg_gen_shli_i32(t1, t1, 2); 
4071 tcg_gen_shli_i32(dst, dst, 1); 4072 tcg_gen_or_i32(dst, dst, t0); 4073 tcg_gen_or_i32(dst, dst, t1); 4074 tcg_temp_free_i32(t0); 4075 tcg_temp_free_i32(t1); 4076 4077 tcg_gen_movi_tl(cpu_so, 0); 4078 tcg_gen_movi_tl(cpu_ov, 0); 4079 tcg_gen_movi_tl(cpu_ca, 0); 4080 } 4081 4082 #ifdef TARGET_PPC64 4083 /* mcrxrx */ 4084 static void gen_mcrxrx(DisasContext *ctx) 4085 { 4086 TCGv t0 = tcg_temp_new(); 4087 TCGv t1 = tcg_temp_new(); 4088 TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)]; 4089 4090 /* copy OV and OV32 */ 4091 tcg_gen_shli_tl(t0, cpu_ov, 1); 4092 tcg_gen_or_tl(t0, t0, cpu_ov32); 4093 tcg_gen_shli_tl(t0, t0, 2); 4094 /* copy CA and CA32 */ 4095 tcg_gen_shli_tl(t1, cpu_ca, 1); 4096 tcg_gen_or_tl(t1, t1, cpu_ca32); 4097 tcg_gen_or_tl(t0, t0, t1); 4098 tcg_gen_trunc_tl_i32(dst, t0); 4099 tcg_temp_free(t0); 4100 tcg_temp_free(t1); 4101 } 4102 #endif 4103 4104 /* mfcr mfocrf */ 4105 static void gen_mfcr(DisasContext *ctx) 4106 { 4107 uint32_t crm, crn; 4108 4109 if (likely(ctx->opcode & 0x00100000)) { 4110 crm = CRM(ctx->opcode); 4111 if (likely(crm && ((crm & (crm - 1)) == 0))) { 4112 crn = ctz32 (crm); 4113 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]); 4114 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], 4115 cpu_gpr[rD(ctx->opcode)], crn * 4); 4116 } 4117 } else { 4118 TCGv_i32 t0 = tcg_temp_new_i32(); 4119 tcg_gen_mov_i32(t0, cpu_crf[0]); 4120 tcg_gen_shli_i32(t0, t0, 4); 4121 tcg_gen_or_i32(t0, t0, cpu_crf[1]); 4122 tcg_gen_shli_i32(t0, t0, 4); 4123 tcg_gen_or_i32(t0, t0, cpu_crf[2]); 4124 tcg_gen_shli_i32(t0, t0, 4); 4125 tcg_gen_or_i32(t0, t0, cpu_crf[3]); 4126 tcg_gen_shli_i32(t0, t0, 4); 4127 tcg_gen_or_i32(t0, t0, cpu_crf[4]); 4128 tcg_gen_shli_i32(t0, t0, 4); 4129 tcg_gen_or_i32(t0, t0, cpu_crf[5]); 4130 tcg_gen_shli_i32(t0, t0, 4); 4131 tcg_gen_or_i32(t0, t0, cpu_crf[6]); 4132 tcg_gen_shli_i32(t0, t0, 4); 4133 tcg_gen_or_i32(t0, t0, cpu_crf[7]); 4134 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0); 4135 tcg_temp_free_i32(t0); 4136 } 4137 } 4138 4139 /* mfmsr */ 4140 static void gen_mfmsr(DisasContext *ctx) 4141 { 4142 CHK_SV; 4143 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr); 4144 } 4145 4146 static void spr_noaccess(DisasContext *ctx, int gprn, int sprn) 4147 { 4148 #if 0 4149 sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5); 4150 printf("ERROR: try to access SPR %d !\n", sprn); 4151 #endif 4152 } 4153 #define SPR_NOACCESS (&spr_noaccess) 4154 4155 /* mfspr */ 4156 static inline void gen_op_mfspr(DisasContext *ctx) 4157 { 4158 void (*read_cb)(DisasContext *ctx, int gprn, int sprn); 4159 uint32_t sprn = SPR(ctx->opcode); 4160 4161 #if defined(CONFIG_USER_ONLY) 4162 read_cb = ctx->spr_cb[sprn].uea_read; 4163 #else 4164 if (ctx->pr) { 4165 read_cb = ctx->spr_cb[sprn].uea_read; 4166 } else if (ctx->hv) { 4167 read_cb = ctx->spr_cb[sprn].hea_read; 4168 } else { 4169 read_cb = ctx->spr_cb[sprn].oea_read; 4170 } 4171 #endif 4172 if (likely(read_cb != NULL)) { 4173 if (likely(read_cb != SPR_NOACCESS)) { 4174 (*read_cb)(ctx, rD(ctx->opcode), sprn); 4175 } else { 4176 /* Privilege exception */ 4177 /* This is a hack to avoid warnings when running Linux: 4178 * this OS breaks the PowerPC virtualisation model, 4179 * allowing userland application to read the PVR 4180 */ 4181 if (sprn != SPR_PVR) { 4182 qemu_log_mask(LOG_GUEST_ERROR, "Trying to read privileged spr " 4183 "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn, 4184 ctx->base.pc_next - 4); 4185 } 4186 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG); 4187 } 4188 } else { 4189 /* ISA 2.07 defines these as 
no-ops */ 4190 if ((ctx->insns_flags2 & PPC2_ISA207S) && 4191 (sprn >= 808 && sprn <= 811)) { 4192 /* This is a nop */ 4193 return; 4194 } 4195 /* Not defined */ 4196 qemu_log_mask(LOG_GUEST_ERROR, 4197 "Trying to read invalid spr %d (0x%03x) at " 4198 TARGET_FMT_lx "\n", sprn, sprn, ctx->base.pc_next - 4); 4199 4200 /* The behaviour depends on MSR:PR and SPR# bit 0x10, 4201 * it can generate a priv, a hv emu or a no-op 4202 */ 4203 if (sprn & 0x10) { 4204 if (ctx->pr) { 4205 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 4206 } 4207 } else { 4208 if (ctx->pr || sprn == 0 || sprn == 4 || sprn == 5 || sprn == 6) { 4209 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 4210 } 4211 } 4212 } 4213 } 4214 4215 static void gen_mfspr(DisasContext *ctx) 4216 { 4217 gen_op_mfspr(ctx); 4218 } 4219 4220 /* mftb */ 4221 static void gen_mftb(DisasContext *ctx) 4222 { 4223 gen_op_mfspr(ctx); 4224 } 4225 4226 /* mtcrf mtocrf*/ 4227 static void gen_mtcrf(DisasContext *ctx) 4228 { 4229 uint32_t crm, crn; 4230 4231 crm = CRM(ctx->opcode); 4232 if (likely((ctx->opcode & 0x00100000))) { 4233 if (crm && ((crm & (crm - 1)) == 0)) { 4234 TCGv_i32 temp = tcg_temp_new_i32(); 4235 crn = ctz32 (crm); 4236 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]); 4237 tcg_gen_shri_i32(temp, temp, crn * 4); 4238 tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf); 4239 tcg_temp_free_i32(temp); 4240 } 4241 } else { 4242 TCGv_i32 temp = tcg_temp_new_i32(); 4243 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]); 4244 for (crn = 0 ; crn < 8 ; crn++) { 4245 if (crm & (1 << crn)) { 4246 tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4); 4247 tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf); 4248 } 4249 } 4250 tcg_temp_free_i32(temp); 4251 } 4252 } 4253 4254 /* mtmsr */ 4255 #if defined(TARGET_PPC64) 4256 static void gen_mtmsrd(DisasContext *ctx) 4257 { 4258 CHK_SV; 4259 4260 #if !defined(CONFIG_USER_ONLY) 4261 if (ctx->opcode & 0x00010000) { 4262 /* Special form that does not need any synchronisation */ 4263 TCGv t0 = tcg_temp_new(); 4264 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE)); 4265 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE))); 4266 tcg_gen_or_tl(cpu_msr, cpu_msr, t0); 4267 tcg_temp_free(t0); 4268 } else { 4269 /* XXX: we need to update nip before the store 4270 * if we enter power saving mode, we will exit the loop 4271 * directly from ppc_store_msr 4272 */ 4273 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) { 4274 gen_io_start(); 4275 } 4276 gen_update_nip(ctx, ctx->base.pc_next); 4277 gen_helper_store_msr(cpu_env, cpu_gpr[rS(ctx->opcode)]); 4278 /* Must stop the translation as machine state (may have) changed */ 4279 /* Note that mtmsr is not always defined as context-synchronizing */ 4280 gen_stop_exception(ctx); 4281 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) { 4282 gen_io_end(); 4283 } 4284 } 4285 #endif /* !defined(CONFIG_USER_ONLY) */ 4286 } 4287 #endif /* defined(TARGET_PPC64) */ 4288 4289 static void gen_mtmsr(DisasContext *ctx) 4290 { 4291 CHK_SV; 4292 4293 #if !defined(CONFIG_USER_ONLY) 4294 if (ctx->opcode & 0x00010000) { 4295 /* Special form that does not need any synchronisation */ 4296 TCGv t0 = tcg_temp_new(); 4297 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE)); 4298 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE))); 4299 tcg_gen_or_tl(cpu_msr, cpu_msr, t0); 4300 tcg_temp_free(t0); 4301 } else { 4302 TCGv msr = tcg_temp_new(); 4303 4304 /* XXX: we need to update nip 
before the store 4305 * if we enter power saving mode, we will exit the loop 4306 * directly from ppc_store_msr 4307 */ 4308 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) { 4309 gen_io_start(); 4310 } 4311 gen_update_nip(ctx, ctx->base.pc_next); 4312 #if defined(TARGET_PPC64) 4313 tcg_gen_deposit_tl(msr, cpu_msr, cpu_gpr[rS(ctx->opcode)], 0, 32); 4314 #else 4315 tcg_gen_mov_tl(msr, cpu_gpr[rS(ctx->opcode)]); 4316 #endif 4317 gen_helper_store_msr(cpu_env, msr); 4318 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) { 4319 gen_io_end(); 4320 } 4321 tcg_temp_free(msr); 4322 /* Must stop the translation as machine state (may have) changed */ 4323 /* Note that mtmsr is not always defined as context-synchronizing */ 4324 gen_stop_exception(ctx); 4325 } 4326 #endif 4327 } 4328 4329 /* mtspr */ 4330 static void gen_mtspr(DisasContext *ctx) 4331 { 4332 void (*write_cb)(DisasContext *ctx, int sprn, int gprn); 4333 uint32_t sprn = SPR(ctx->opcode); 4334 4335 #if defined(CONFIG_USER_ONLY) 4336 write_cb = ctx->spr_cb[sprn].uea_write; 4337 #else 4338 if (ctx->pr) { 4339 write_cb = ctx->spr_cb[sprn].uea_write; 4340 } else if (ctx->hv) { 4341 write_cb = ctx->spr_cb[sprn].hea_write; 4342 } else { 4343 write_cb = ctx->spr_cb[sprn].oea_write; 4344 } 4345 #endif 4346 if (likely(write_cb != NULL)) { 4347 if (likely(write_cb != SPR_NOACCESS)) { 4348 (*write_cb)(ctx, sprn, rS(ctx->opcode)); 4349 } else { 4350 /* Privilege exception */ 4351 qemu_log_mask(LOG_GUEST_ERROR, "Trying to write privileged spr " 4352 "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn, 4353 ctx->base.pc_next - 4); 4354 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG); 4355 } 4356 } else { 4357 /* ISA 2.07 defines these as no-ops */ 4358 if ((ctx->insns_flags2 & PPC2_ISA207S) && 4359 (sprn >= 808 && sprn <= 811)) { 4360 /* This is a nop */ 4361 return; 4362 } 4363 4364 /* Not defined */ 4365 qemu_log_mask(LOG_GUEST_ERROR, 4366 "Trying to write invalid spr %d (0x%03x) at " 4367 TARGET_FMT_lx "\n", sprn, sprn, ctx->base.pc_next - 4); 4368 4369 4370 /* The behaviour depends on MSR:PR and SPR# bit 0x10, 4371 * it can generate a priv, a hv emu or a no-op 4372 */ 4373 if (sprn & 0x10) { 4374 if (ctx->pr) { 4375 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 4376 } 4377 } else { 4378 if (ctx->pr || sprn == 0) { 4379 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 4380 } 4381 } 4382 } 4383 } 4384 4385 #if defined(TARGET_PPC64) 4386 /* setb */ 4387 static void gen_setb(DisasContext *ctx) 4388 { 4389 TCGv_i32 t0 = tcg_temp_new_i32(); 4390 TCGv_i32 t8 = tcg_temp_new_i32(); 4391 TCGv_i32 tm1 = tcg_temp_new_i32(); 4392 int crf = crfS(ctx->opcode); 4393 4394 tcg_gen_setcondi_i32(TCG_COND_GEU, t0, cpu_crf[crf], 4); 4395 tcg_gen_movi_i32(t8, 8); 4396 tcg_gen_movi_i32(tm1, -1); 4397 tcg_gen_movcond_i32(TCG_COND_GEU, t0, cpu_crf[crf], t8, tm1, t0); 4398 tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0); 4399 4400 tcg_temp_free_i32(t0); 4401 tcg_temp_free_i32(t8); 4402 tcg_temp_free_i32(tm1); 4403 } 4404 #endif 4405 4406 /*** Cache management ***/ 4407 4408 /* dcbf */ 4409 static void gen_dcbf(DisasContext *ctx) 4410 { 4411 /* XXX: specification says this is treated as a load by the MMU */ 4412 TCGv t0; 4413 gen_set_access_type(ctx, ACCESS_CACHE); 4414 t0 = tcg_temp_new(); 4415 gen_addr_reg_index(ctx, t0); 4416 gen_qemu_ld8u(ctx, t0, t0); 4417 tcg_temp_free(t0); 4418 } 4419 4420 /* dcbfep (external PID dcbf) */ 4421 static void gen_dcbfep(DisasContext *ctx) 4422 { 4423 /* XXX: specification says this is treated as a load by the MMU */ 4424 TCGv t0; 4425 CHK_SV; 
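/* Like dcbf, but the dummy byte load goes through the external PID address space (PPC_TLB_EPID_LOAD). */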
4426 gen_set_access_type(ctx, ACCESS_CACHE); 4427 t0 = tcg_temp_new(); 4428 gen_addr_reg_index(ctx, t0); 4429 tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB)); 4430 tcg_temp_free(t0); 4431 } 4432 4433 /* dcbi (Supervisor only) */ 4434 static void gen_dcbi(DisasContext *ctx) 4435 { 4436 #if defined(CONFIG_USER_ONLY) 4437 GEN_PRIV; 4438 #else 4439 TCGv EA, val; 4440 4441 CHK_SV; 4442 EA = tcg_temp_new(); 4443 gen_set_access_type(ctx, ACCESS_CACHE); 4444 gen_addr_reg_index(ctx, EA); 4445 val = tcg_temp_new(); 4446 /* XXX: specification says this should be treated as a store by the MMU */ 4447 gen_qemu_ld8u(ctx, val, EA); 4448 gen_qemu_st8(ctx, val, EA); 4449 tcg_temp_free(val); 4450 tcg_temp_free(EA); 4451 #endif /* defined(CONFIG_USER_ONLY) */ 4452 } 4453 4454 /* dcbst */ 4455 static void gen_dcbst(DisasContext *ctx) 4456 { 4457 /* XXX: specification says this is treated as a load by the MMU */ 4458 TCGv t0; 4459 gen_set_access_type(ctx, ACCESS_CACHE); 4460 t0 = tcg_temp_new(); 4461 gen_addr_reg_index(ctx, t0); 4462 gen_qemu_ld8u(ctx, t0, t0); 4463 tcg_temp_free(t0); 4464 } 4465 4466 /* dcbstep (external PID version of dcbst) */ 4467 static void gen_dcbstep(DisasContext *ctx) 4468 { 4469 /* XXX: specification says this is treated as a load by the MMU */ 4470 TCGv t0; 4471 gen_set_access_type(ctx, ACCESS_CACHE); 4472 t0 = tcg_temp_new(); 4473 gen_addr_reg_index(ctx, t0); 4474 tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB)); 4475 tcg_temp_free(t0); 4476 } 4477 4478 /* dcbt */ 4479 static void gen_dcbt(DisasContext *ctx) 4480 { 4481 /* interpreted as no-op */ 4482 /* XXX: specification says this is treated as a load by the MMU 4483 * but does not generate any exception 4484 */ 4485 } 4486 4487 /* dcbtep */ 4488 static void gen_dcbtep(DisasContext *ctx) 4489 { 4490 /* interpreted as no-op */ 4491 /* XXX: specification says this is treated as a load by the MMU 4492 * but does not generate any exception 4493 */ 4494 } 4495 4496 /* dcbtst */ 4497 static void gen_dcbtst(DisasContext *ctx) 4498 { 4499 /* interpreted as no-op */ 4500 /* XXX: specification says this is treated as a load by the MMU 4501 * but does not generate any exception 4502 */ 4503 } 4504 4505 /* dcbtstep */ 4506 static void gen_dcbtstep(DisasContext *ctx) 4507 { 4508 /* interpreted as no-op */ 4509 /* XXX: specification says this is treated as a load by the MMU 4510 * but does not generate any exception 4511 */ 4512 } 4513 4514 /* dcbtls */ 4515 static void gen_dcbtls(DisasContext *ctx) 4516 { 4517 /* Always fails locking the cache */ 4518 TCGv t0 = tcg_temp_new(); 4519 gen_load_spr(t0, SPR_Exxx_L1CSR0); 4520 tcg_gen_ori_tl(t0, t0, L1CSR0_CUL); 4521 gen_store_spr(SPR_Exxx_L1CSR0, t0); 4522 tcg_temp_free(t0); 4523 } 4524 4525 /* dcbz */ 4526 static void gen_dcbz(DisasContext *ctx) 4527 { 4528 TCGv tcgv_addr; 4529 TCGv_i32 tcgv_op; 4530 4531 gen_set_access_type(ctx, ACCESS_CACHE); 4532 tcgv_addr = tcg_temp_new(); 4533 tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000); 4534 gen_addr_reg_index(ctx, tcgv_addr); 4535 gen_helper_dcbz(cpu_env, tcgv_addr, tcgv_op); 4536 tcg_temp_free(tcgv_addr); 4537 tcg_temp_free_i32(tcgv_op); 4538 } 4539 4540 /* dcbzep */ 4541 static void gen_dcbzep(DisasContext *ctx) 4542 { 4543 TCGv tcgv_addr; 4544 TCGv_i32 tcgv_op; 4545 4546 gen_set_access_type(ctx, ACCESS_CACHE); 4547 tcgv_addr = tcg_temp_new(); 4548 tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000); 4549 gen_addr_reg_index(ctx, tcgv_addr); 4550 gen_helper_dcbzep(cpu_env, tcgv_addr, tcgv_op); 4551 tcg_temp_free(tcgv_addr); 4552
tcg_temp_free_i32(tcgv_op); 4553 } 4554 4555 /* dst / dstt */ 4556 static void gen_dst(DisasContext *ctx) 4557 { 4558 if (rA(ctx->opcode) == 0) { 4559 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 4560 } else { 4561 /* interpreted as no-op */ 4562 } 4563 } 4564 4565 /* dstst /dststt */ 4566 static void gen_dstst(DisasContext *ctx) 4567 { 4568 if (rA(ctx->opcode) == 0) { 4569 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 4570 } else { 4571 /* interpreted as no-op */ 4572 } 4573 4574 } 4575 4576 /* dss / dssall */ 4577 static void gen_dss(DisasContext *ctx) 4578 { 4579 /* interpreted as no-op */ 4580 } 4581 4582 /* icbi */ 4583 static void gen_icbi(DisasContext *ctx) 4584 { 4585 TCGv t0; 4586 gen_set_access_type(ctx, ACCESS_CACHE); 4587 t0 = tcg_temp_new(); 4588 gen_addr_reg_index(ctx, t0); 4589 gen_helper_icbi(cpu_env, t0); 4590 tcg_temp_free(t0); 4591 } 4592 4593 /* icbiep */ 4594 static void gen_icbiep(DisasContext *ctx) 4595 { 4596 TCGv t0; 4597 gen_set_access_type(ctx, ACCESS_CACHE); 4598 t0 = tcg_temp_new(); 4599 gen_addr_reg_index(ctx, t0); 4600 gen_helper_icbiep(cpu_env, t0); 4601 tcg_temp_free(t0); 4602 } 4603 4604 /* Optional: */ 4605 /* dcba */ 4606 static void gen_dcba(DisasContext *ctx) 4607 { 4608 /* interpreted as no-op */ 4609 /* XXX: specification say this is treated as a store by the MMU 4610 * but does not generate any exception 4611 */ 4612 } 4613 4614 /*** Segment register manipulation ***/ 4615 /* Supervisor only: */ 4616 4617 /* mfsr */ 4618 static void gen_mfsr(DisasContext *ctx) 4619 { 4620 #if defined(CONFIG_USER_ONLY) 4621 GEN_PRIV; 4622 #else 4623 TCGv t0; 4624 4625 CHK_SV; 4626 t0 = tcg_const_tl(SR(ctx->opcode)); 4627 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 4628 tcg_temp_free(t0); 4629 #endif /* defined(CONFIG_USER_ONLY) */ 4630 } 4631 4632 /* mfsrin */ 4633 static void gen_mfsrin(DisasContext *ctx) 4634 { 4635 #if defined(CONFIG_USER_ONLY) 4636 GEN_PRIV; 4637 #else 4638 TCGv t0; 4639 4640 CHK_SV; 4641 t0 = tcg_temp_new(); 4642 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 4643 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 4644 tcg_temp_free(t0); 4645 #endif /* defined(CONFIG_USER_ONLY) */ 4646 } 4647 4648 /* mtsr */ 4649 static void gen_mtsr(DisasContext *ctx) 4650 { 4651 #if defined(CONFIG_USER_ONLY) 4652 GEN_PRIV; 4653 #else 4654 TCGv t0; 4655 4656 CHK_SV; 4657 t0 = tcg_const_tl(SR(ctx->opcode)); 4658 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 4659 tcg_temp_free(t0); 4660 #endif /* defined(CONFIG_USER_ONLY) */ 4661 } 4662 4663 /* mtsrin */ 4664 static void gen_mtsrin(DisasContext *ctx) 4665 { 4666 #if defined(CONFIG_USER_ONLY) 4667 GEN_PRIV; 4668 #else 4669 TCGv t0; 4670 CHK_SV; 4671 4672 t0 = tcg_temp_new(); 4673 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 4674 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rD(ctx->opcode)]); 4675 tcg_temp_free(t0); 4676 #endif /* defined(CONFIG_USER_ONLY) */ 4677 } 4678 4679 #if defined(TARGET_PPC64) 4680 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */ 4681 4682 /* mfsr */ 4683 static void gen_mfsr_64b(DisasContext *ctx) 4684 { 4685 #if defined(CONFIG_USER_ONLY) 4686 GEN_PRIV; 4687 #else 4688 TCGv t0; 4689 4690 CHK_SV; 4691 t0 = tcg_const_tl(SR(ctx->opcode)); 4692 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 4693 tcg_temp_free(t0); 4694 #endif /* defined(CONFIG_USER_ONLY) */ 4695 } 4696 4697 /* mfsrin */ 4698 static void gen_mfsrin_64b(DisasContext *ctx) 4699 { 4700 #if defined(CONFIG_USER_ONLY) 4701 
GEN_PRIV; 4702 #else 4703 TCGv t0; 4704 4705 CHK_SV; 4706 t0 = tcg_temp_new(); 4707 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 4708 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 4709 tcg_temp_free(t0); 4710 #endif /* defined(CONFIG_USER_ONLY) */ 4711 } 4712 4713 /* mtsr */ 4714 static void gen_mtsr_64b(DisasContext *ctx) 4715 { 4716 #if defined(CONFIG_USER_ONLY) 4717 GEN_PRIV; 4718 #else 4719 TCGv t0; 4720 4721 CHK_SV; 4722 t0 = tcg_const_tl(SR(ctx->opcode)); 4723 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 4724 tcg_temp_free(t0); 4725 #endif /* defined(CONFIG_USER_ONLY) */ 4726 } 4727 4728 /* mtsrin */ 4729 static void gen_mtsrin_64b(DisasContext *ctx) 4730 { 4731 #if defined(CONFIG_USER_ONLY) 4732 GEN_PRIV; 4733 #else 4734 TCGv t0; 4735 4736 CHK_SV; 4737 t0 = tcg_temp_new(); 4738 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 4739 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 4740 tcg_temp_free(t0); 4741 #endif /* defined(CONFIG_USER_ONLY) */ 4742 } 4743 4744 /* slbmte */ 4745 static void gen_slbmte(DisasContext *ctx) 4746 { 4747 #if defined(CONFIG_USER_ONLY) 4748 GEN_PRIV; 4749 #else 4750 CHK_SV; 4751 4752 gen_helper_store_slb(cpu_env, cpu_gpr[rB(ctx->opcode)], 4753 cpu_gpr[rS(ctx->opcode)]); 4754 #endif /* defined(CONFIG_USER_ONLY) */ 4755 } 4756 4757 static void gen_slbmfee(DisasContext *ctx) 4758 { 4759 #if defined(CONFIG_USER_ONLY) 4760 GEN_PRIV; 4761 #else 4762 CHK_SV; 4763 4764 gen_helper_load_slb_esid(cpu_gpr[rS(ctx->opcode)], cpu_env, 4765 cpu_gpr[rB(ctx->opcode)]); 4766 #endif /* defined(CONFIG_USER_ONLY) */ 4767 } 4768 4769 static void gen_slbmfev(DisasContext *ctx) 4770 { 4771 #if defined(CONFIG_USER_ONLY) 4772 GEN_PRIV; 4773 #else 4774 CHK_SV; 4775 4776 gen_helper_load_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env, 4777 cpu_gpr[rB(ctx->opcode)]); 4778 #endif /* defined(CONFIG_USER_ONLY) */ 4779 } 4780 4781 static void gen_slbfee_(DisasContext *ctx) 4782 { 4783 #if defined(CONFIG_USER_ONLY) 4784 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); 4785 #else 4786 TCGLabel *l1, *l2; 4787 4788 if (unlikely(ctx->pr)) { 4789 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); 4790 return; 4791 } 4792 gen_helper_find_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env, 4793 cpu_gpr[rB(ctx->opcode)]); 4794 l1 = gen_new_label(); 4795 l2 = gen_new_label(); 4796 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 4797 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rS(ctx->opcode)], -1, l1); 4798 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ); 4799 tcg_gen_br(l2); 4800 gen_set_label(l1); 4801 tcg_gen_movi_tl(cpu_gpr[rS(ctx->opcode)], 0); 4802 gen_set_label(l2); 4803 #endif 4804 } 4805 #endif /* defined(TARGET_PPC64) */ 4806 4807 /*** Lookaside buffer management ***/ 4808 /* Optional & supervisor only: */ 4809 4810 /* tlbia */ 4811 static void gen_tlbia(DisasContext *ctx) 4812 { 4813 #if defined(CONFIG_USER_ONLY) 4814 GEN_PRIV; 4815 #else 4816 CHK_HV; 4817 4818 gen_helper_tlbia(cpu_env); 4819 #endif /* defined(CONFIG_USER_ONLY) */ 4820 } 4821 4822 /* tlbiel */ 4823 static void gen_tlbiel(DisasContext *ctx) 4824 { 4825 #if defined(CONFIG_USER_ONLY) 4826 GEN_PRIV; 4827 #else 4828 CHK_SV; 4829 4830 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]); 4831 #endif /* defined(CONFIG_USER_ONLY) */ 4832 } 4833 4834 /* tlbie */ 4835 static void gen_tlbie(DisasContext *ctx) 4836 { 4837 #if defined(CONFIG_USER_ONLY) 4838 GEN_PRIV; 4839 #else 4840 TCGv_i32 t1; 4841 4842 if (ctx->gtse) { 4843 CHK_SV; /* If gtse is set then tlbie is supervisor privileged */ 4844 } else 
{ 4845 CHK_HV; /* Else hypervisor privileged */ 4846 } 4847 4848 if (NARROW_MODE(ctx)) { 4849 TCGv t0 = tcg_temp_new(); 4850 tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]); 4851 gen_helper_tlbie(cpu_env, t0); 4852 tcg_temp_free(t0); 4853 } else { 4854 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]); 4855 } 4856 t1 = tcg_temp_new_i32(); 4857 tcg_gen_ld_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush)); 4858 tcg_gen_ori_i32(t1, t1, TLB_NEED_GLOBAL_FLUSH); 4859 tcg_gen_st_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush)); 4860 tcg_temp_free_i32(t1); 4861 #endif /* defined(CONFIG_USER_ONLY) */ 4862 } 4863 4864 /* tlbsync */ 4865 static void gen_tlbsync(DisasContext *ctx) 4866 { 4867 #if defined(CONFIG_USER_ONLY) 4868 GEN_PRIV; 4869 #else 4870 4871 if (ctx->gtse) { 4872 CHK_SV; /* If gtse is set then tlbsync is supervisor privileged */ 4873 } else { 4874 CHK_HV; /* Else hypervisor privileged */ 4875 } 4876 4877 /* BookS does the flush check at ptesync as well, so tlbsync is a no-op for server; only BookE checks here */ 4878 if (ctx->insns_flags & PPC_BOOKE) { 4879 gen_check_tlb_flush(ctx, true); 4880 } 4881 #endif /* defined(CONFIG_USER_ONLY) */ 4882 } 4883 4884 #if defined(TARGET_PPC64) 4885 /* slbia */ 4886 static void gen_slbia(DisasContext *ctx) 4887 { 4888 #if defined(CONFIG_USER_ONLY) 4889 GEN_PRIV; 4890 #else 4891 CHK_SV; 4892 4893 gen_helper_slbia(cpu_env); 4894 #endif /* defined(CONFIG_USER_ONLY) */ 4895 } 4896 4897 /* slbie */ 4898 static void gen_slbie(DisasContext *ctx) 4899 { 4900 #if defined(CONFIG_USER_ONLY) 4901 GEN_PRIV; 4902 #else 4903 CHK_SV; 4904 4905 gen_helper_slbie(cpu_env, cpu_gpr[rB(ctx->opcode)]); 4906 #endif /* defined(CONFIG_USER_ONLY) */ 4907 } 4908 4909 /* slbieg */ 4910 static void gen_slbieg(DisasContext *ctx) 4911 { 4912 #if defined(CONFIG_USER_ONLY) 4913 GEN_PRIV; 4914 #else 4915 CHK_SV; 4916 4917 gen_helper_slbieg(cpu_env, cpu_gpr[rB(ctx->opcode)]); 4918 #endif /* defined(CONFIG_USER_ONLY) */ 4919 } 4920 4921 /* slbsync */ 4922 static void gen_slbsync(DisasContext *ctx) 4923 { 4924 #if defined(CONFIG_USER_ONLY) 4925 GEN_PRIV; 4926 #else 4927 CHK_SV; 4928 gen_check_tlb_flush(ctx, true); 4929 #endif /* defined(CONFIG_USER_ONLY) */ 4930 } 4931 4932 #endif /* defined(TARGET_PPC64) */ 4933 4934 /*** External control ***/ 4935 /* Optional: */ 4936 4937 /* eciwx */ 4938 static void gen_eciwx(DisasContext *ctx) 4939 { 4940 TCGv t0; 4941 /* Should check EAR[E] ! */ 4942 gen_set_access_type(ctx, ACCESS_EXT); 4943 t0 = tcg_temp_new(); 4944 gen_addr_reg_index(ctx, t0); 4945 tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx, 4946 DEF_MEMOP(MO_UL | MO_ALIGN)); 4947 tcg_temp_free(t0); 4948 } 4949 4950 /* ecowx */ 4951 static void gen_ecowx(DisasContext *ctx) 4952 { 4953 TCGv t0; 4954 /* Should check EAR[E] ! */ 4955 gen_set_access_type(ctx, ACCESS_EXT); 4956 t0 = tcg_temp_new(); 4957 gen_addr_reg_index(ctx, t0); 4958 tcg_gen_qemu_st_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx, 4959 DEF_MEMOP(MO_UL | MO_ALIGN)); 4960 tcg_temp_free(t0); 4961 } 4962 4963 /* PowerPC 601 specific instructions */ 4964 4965 /* abs - abs.
*/ 4966 static void gen_abs(DisasContext *ctx) 4967 { 4968 TCGLabel *l1 = gen_new_label(); 4969 TCGLabel *l2 = gen_new_label(); 4970 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l1); 4971 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4972 tcg_gen_br(l2); 4973 gen_set_label(l1); 4974 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4975 gen_set_label(l2); 4976 if (unlikely(Rc(ctx->opcode) != 0)) 4977 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4978 } 4979 4980 /* abso - abso. */ 4981 static void gen_abso(DisasContext *ctx) 4982 { 4983 TCGLabel *l1 = gen_new_label(); 4984 TCGLabel *l2 = gen_new_label(); 4985 TCGLabel *l3 = gen_new_label(); 4986 /* Start with XER OV disabled, the most likely case */ 4987 tcg_gen_movi_tl(cpu_ov, 0); 4988 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l2); 4989 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[rA(ctx->opcode)], 0x80000000, l1); 4990 tcg_gen_movi_tl(cpu_ov, 1); 4991 tcg_gen_movi_tl(cpu_so, 1); 4992 tcg_gen_br(l2); 4993 gen_set_label(l1); 4994 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4995 tcg_gen_br(l3); 4996 gen_set_label(l2); 4997 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4998 gen_set_label(l3); 4999 if (unlikely(Rc(ctx->opcode) != 0)) 5000 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5001 } 5002 5003 /* clcs */ 5004 static void gen_clcs(DisasContext *ctx) 5005 { 5006 TCGv_i32 t0 = tcg_const_i32(rA(ctx->opcode)); 5007 gen_helper_clcs(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5008 tcg_temp_free_i32(t0); 5009 /* Rc=1 sets CR0 to an undefined state */ 5010 } 5011 5012 /* div - div. */ 5013 static void gen_div(DisasContext *ctx) 5014 { 5015 gen_helper_div(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 5016 cpu_gpr[rB(ctx->opcode)]); 5017 if (unlikely(Rc(ctx->opcode) != 0)) 5018 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5019 } 5020 5021 /* divo - divo. */ 5022 static void gen_divo(DisasContext *ctx) 5023 { 5024 gen_helper_divo(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 5025 cpu_gpr[rB(ctx->opcode)]); 5026 if (unlikely(Rc(ctx->opcode) != 0)) 5027 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5028 } 5029 5030 /* divs - divs. */ 5031 static void gen_divs(DisasContext *ctx) 5032 { 5033 gen_helper_divs(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 5034 cpu_gpr[rB(ctx->opcode)]); 5035 if (unlikely(Rc(ctx->opcode) != 0)) 5036 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5037 } 5038 5039 /* divso - divso. */ 5040 static void gen_divso(DisasContext *ctx) 5041 { 5042 gen_helper_divso(cpu_gpr[rD(ctx->opcode)], cpu_env, 5043 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 5044 if (unlikely(Rc(ctx->opcode) != 0)) 5045 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5046 } 5047 5048 /* doz - doz. */ 5049 static void gen_doz(DisasContext *ctx) 5050 { 5051 TCGLabel *l1 = gen_new_label(); 5052 TCGLabel *l2 = gen_new_label(); 5053 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1); 5054 tcg_gen_sub_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5055 tcg_gen_br(l2); 5056 gen_set_label(l1); 5057 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 5058 gen_set_label(l2); 5059 if (unlikely(Rc(ctx->opcode) != 0)) 5060 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5061 } 5062 5063 /* dozo - dozo. 
*/ 5064 static void gen_dozo(DisasContext *ctx) 5065 { 5066 TCGLabel *l1 = gen_new_label(); 5067 TCGLabel *l2 = gen_new_label(); 5068 TCGv t0 = tcg_temp_new(); 5069 TCGv t1 = tcg_temp_new(); 5070 TCGv t2 = tcg_temp_new(); 5071 /* Start with XER OV disabled, the most likely case */ 5072 tcg_gen_movi_tl(cpu_ov, 0); 5073 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1); 5074 tcg_gen_sub_tl(t0, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5075 tcg_gen_xor_tl(t1, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5076 tcg_gen_xor_tl(t2, cpu_gpr[rA(ctx->opcode)], t0); 5077 tcg_gen_andc_tl(t1, t1, t2); 5078 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0); 5079 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2); 5080 tcg_gen_movi_tl(cpu_ov, 1); 5081 tcg_gen_movi_tl(cpu_so, 1); 5082 tcg_gen_br(l2); 5083 gen_set_label(l1); 5084 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 5085 gen_set_label(l2); 5086 tcg_temp_free(t0); 5087 tcg_temp_free(t1); 5088 tcg_temp_free(t2); 5089 if (unlikely(Rc(ctx->opcode) != 0)) 5090 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5091 } 5092 5093 /* dozi */ 5094 static void gen_dozi(DisasContext *ctx) 5095 { 5096 target_long simm = SIMM(ctx->opcode); 5097 TCGLabel *l1 = gen_new_label(); 5098 TCGLabel *l2 = gen_new_label(); 5099 tcg_gen_brcondi_tl(TCG_COND_LT, cpu_gpr[rA(ctx->opcode)], simm, l1); 5100 tcg_gen_subfi_tl(cpu_gpr[rD(ctx->opcode)], simm, cpu_gpr[rA(ctx->opcode)]); 5101 tcg_gen_br(l2); 5102 gen_set_label(l1); 5103 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 5104 gen_set_label(l2); 5105 if (unlikely(Rc(ctx->opcode) != 0)) 5106 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5107 } 5108 5109 /* lscbx - lscbx. */ 5110 static void gen_lscbx(DisasContext *ctx) 5111 { 5112 TCGv t0 = tcg_temp_new(); 5113 TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode)); 5114 TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode)); 5115 TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode)); 5116 5117 gen_addr_reg_index(ctx, t0); 5118 gen_helper_lscbx(t0, cpu_env, t0, t1, t2, t3); 5119 tcg_temp_free_i32(t1); 5120 tcg_temp_free_i32(t2); 5121 tcg_temp_free_i32(t3); 5122 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F); 5123 tcg_gen_or_tl(cpu_xer, cpu_xer, t0); 5124 if (unlikely(Rc(ctx->opcode) != 0)) 5125 gen_set_Rc0(ctx, t0); 5126 tcg_temp_free(t0); 5127 } 5128 5129 /* maskg - maskg. */ 5130 static void gen_maskg(DisasContext *ctx) 5131 { 5132 TCGLabel *l1 = gen_new_label(); 5133 TCGv t0 = tcg_temp_new(); 5134 TCGv t1 = tcg_temp_new(); 5135 TCGv t2 = tcg_temp_new(); 5136 TCGv t3 = tcg_temp_new(); 5137 tcg_gen_movi_tl(t3, 0xFFFFFFFF); 5138 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5139 tcg_gen_andi_tl(t1, cpu_gpr[rS(ctx->opcode)], 0x1F); 5140 tcg_gen_addi_tl(t2, t0, 1); 5141 tcg_gen_shr_tl(t2, t3, t2); 5142 tcg_gen_shr_tl(t3, t3, t1); 5143 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], t2, t3); 5144 tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1); 5145 tcg_gen_neg_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5146 gen_set_label(l1); 5147 tcg_temp_free(t0); 5148 tcg_temp_free(t1); 5149 tcg_temp_free(t2); 5150 tcg_temp_free(t3); 5151 if (unlikely(Rc(ctx->opcode) != 0)) 5152 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5153 } 5154 5155 /* maskir - maskir. 
*/ 5156 static void gen_maskir(DisasContext *ctx) 5157 { 5158 TCGv t0 = tcg_temp_new(); 5159 TCGv t1 = tcg_temp_new(); 5160 tcg_gen_and_tl(t0, cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 5161 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 5162 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5163 tcg_temp_free(t0); 5164 tcg_temp_free(t1); 5165 if (unlikely(Rc(ctx->opcode) != 0)) 5166 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5167 } 5168 5169 /* mul - mul. */ 5170 static void gen_mul(DisasContext *ctx) 5171 { 5172 TCGv_i64 t0 = tcg_temp_new_i64(); 5173 TCGv_i64 t1 = tcg_temp_new_i64(); 5174 TCGv t2 = tcg_temp_new(); 5175 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]); 5176 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]); 5177 tcg_gen_mul_i64(t0, t0, t1); 5178 tcg_gen_trunc_i64_tl(t2, t0); 5179 gen_store_spr(SPR_MQ, t2); 5180 tcg_gen_shri_i64(t1, t0, 32); 5181 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1); 5182 tcg_temp_free_i64(t0); 5183 tcg_temp_free_i64(t1); 5184 tcg_temp_free(t2); 5185 if (unlikely(Rc(ctx->opcode) != 0)) 5186 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5187 } 5188 5189 /* mulo - mulo. */ 5190 static void gen_mulo(DisasContext *ctx) 5191 { 5192 TCGLabel *l1 = gen_new_label(); 5193 TCGv_i64 t0 = tcg_temp_new_i64(); 5194 TCGv_i64 t1 = tcg_temp_new_i64(); 5195 TCGv t2 = tcg_temp_new(); 5196 /* Start with XER OV disabled, the most likely case */ 5197 tcg_gen_movi_tl(cpu_ov, 0); 5198 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]); 5199 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]); 5200 tcg_gen_mul_i64(t0, t0, t1); 5201 tcg_gen_trunc_i64_tl(t2, t0); 5202 gen_store_spr(SPR_MQ, t2); 5203 tcg_gen_shri_i64(t1, t0, 32); 5204 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1); 5205 tcg_gen_ext32s_i64(t1, t0); 5206 tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1); 5207 tcg_gen_movi_tl(cpu_ov, 1); 5208 tcg_gen_movi_tl(cpu_so, 1); 5209 gen_set_label(l1); 5210 tcg_temp_free_i64(t0); 5211 tcg_temp_free_i64(t1); 5212 tcg_temp_free(t2); 5213 if (unlikely(Rc(ctx->opcode) != 0)) 5214 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5215 } 5216 5217 /* nabs - nabs. */ 5218 static void gen_nabs(DisasContext *ctx) 5219 { 5220 TCGLabel *l1 = gen_new_label(); 5221 TCGLabel *l2 = gen_new_label(); 5222 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1); 5223 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5224 tcg_gen_br(l2); 5225 gen_set_label(l1); 5226 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5227 gen_set_label(l2); 5228 if (unlikely(Rc(ctx->opcode) != 0)) 5229 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5230 } 5231 5232 /* nabso - nabso. */ 5233 static void gen_nabso(DisasContext *ctx) 5234 { 5235 TCGLabel *l1 = gen_new_label(); 5236 TCGLabel *l2 = gen_new_label(); 5237 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1); 5238 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5239 tcg_gen_br(l2); 5240 gen_set_label(l1); 5241 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5242 gen_set_label(l2); 5243 /* nabs never overflows */ 5244 tcg_gen_movi_tl(cpu_ov, 0); 5245 if (unlikely(Rc(ctx->opcode) != 0)) 5246 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5247 } 5248 5249 /* rlmi - rlmi. 
*/ 5250 static void gen_rlmi(DisasContext *ctx) 5251 { 5252 uint32_t mb = MB(ctx->opcode); 5253 uint32_t me = ME(ctx->opcode); 5254 TCGv t0 = tcg_temp_new(); 5255 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5256 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 5257 tcg_gen_andi_tl(t0, t0, MASK(mb, me)); 5258 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~MASK(mb, me)); 5259 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], t0); 5260 tcg_temp_free(t0); 5261 if (unlikely(Rc(ctx->opcode) != 0)) 5262 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5263 } 5264 5265 /* rrib - rrib. */ 5266 static void gen_rrib(DisasContext *ctx) 5267 { 5268 TCGv t0 = tcg_temp_new(); 5269 TCGv t1 = tcg_temp_new(); 5270 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5271 tcg_gen_movi_tl(t1, 0x80000000); 5272 tcg_gen_shr_tl(t1, t1, t0); 5273 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 5274 tcg_gen_and_tl(t0, t0, t1); 5275 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], t1); 5276 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5277 tcg_temp_free(t0); 5278 tcg_temp_free(t1); 5279 if (unlikely(Rc(ctx->opcode) != 0)) 5280 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5281 } 5282 5283 /* sle - sle. */ 5284 static void gen_sle(DisasContext *ctx) 5285 { 5286 TCGv t0 = tcg_temp_new(); 5287 TCGv t1 = tcg_temp_new(); 5288 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5289 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5290 tcg_gen_subfi_tl(t1, 32, t1); 5291 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 5292 tcg_gen_or_tl(t1, t0, t1); 5293 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5294 gen_store_spr(SPR_MQ, t1); 5295 tcg_temp_free(t0); 5296 tcg_temp_free(t1); 5297 if (unlikely(Rc(ctx->opcode) != 0)) 5298 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5299 } 5300 5301 /* sleq - sleq. */ 5302 static void gen_sleq(DisasContext *ctx) 5303 { 5304 TCGv t0 = tcg_temp_new(); 5305 TCGv t1 = tcg_temp_new(); 5306 TCGv t2 = tcg_temp_new(); 5307 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5308 tcg_gen_movi_tl(t2, 0xFFFFFFFF); 5309 tcg_gen_shl_tl(t2, t2, t0); 5310 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 5311 gen_load_spr(t1, SPR_MQ); 5312 gen_store_spr(SPR_MQ, t0); 5313 tcg_gen_and_tl(t0, t0, t2); 5314 tcg_gen_andc_tl(t1, t1, t2); 5315 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5316 tcg_temp_free(t0); 5317 tcg_temp_free(t1); 5318 tcg_temp_free(t2); 5319 if (unlikely(Rc(ctx->opcode) != 0)) 5320 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5321 } 5322 5323 /* sliq - sliq. */ 5324 static void gen_sliq(DisasContext *ctx) 5325 { 5326 int sh = SH(ctx->opcode); 5327 TCGv t0 = tcg_temp_new(); 5328 TCGv t1 = tcg_temp_new(); 5329 tcg_gen_shli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5330 tcg_gen_shri_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 5331 tcg_gen_or_tl(t1, t0, t1); 5332 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5333 gen_store_spr(SPR_MQ, t1); 5334 tcg_temp_free(t0); 5335 tcg_temp_free(t1); 5336 if (unlikely(Rc(ctx->opcode) != 0)) 5337 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5338 } 5339 5340 /* slliq - slliq. 
*/ 5341 static void gen_slliq(DisasContext *ctx) 5342 { 5343 int sh = SH(ctx->opcode); 5344 TCGv t0 = tcg_temp_new(); 5345 TCGv t1 = tcg_temp_new(); 5346 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5347 gen_load_spr(t1, SPR_MQ); 5348 gen_store_spr(SPR_MQ, t0); 5349 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU << sh)); 5350 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU << sh)); 5351 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5352 tcg_temp_free(t0); 5353 tcg_temp_free(t1); 5354 if (unlikely(Rc(ctx->opcode) != 0)) 5355 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5356 } 5357 5358 /* sllq - sllq. */ 5359 static void gen_sllq(DisasContext *ctx) 5360 { 5361 TCGLabel *l1 = gen_new_label(); 5362 TCGLabel *l2 = gen_new_label(); 5363 TCGv t0 = tcg_temp_local_new(); 5364 TCGv t1 = tcg_temp_local_new(); 5365 TCGv t2 = tcg_temp_local_new(); 5366 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 5367 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 5368 tcg_gen_shl_tl(t1, t1, t2); 5369 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 5370 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 5371 gen_load_spr(t0, SPR_MQ); 5372 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5373 tcg_gen_br(l2); 5374 gen_set_label(l1); 5375 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 5376 gen_load_spr(t2, SPR_MQ); 5377 tcg_gen_andc_tl(t1, t2, t1); 5378 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5379 gen_set_label(l2); 5380 tcg_temp_free(t0); 5381 tcg_temp_free(t1); 5382 tcg_temp_free(t2); 5383 if (unlikely(Rc(ctx->opcode) != 0)) 5384 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5385 } 5386 5387 /* slq - slq. */ 5388 static void gen_slq(DisasContext *ctx) 5389 { 5390 TCGLabel *l1 = gen_new_label(); 5391 TCGv t0 = tcg_temp_new(); 5392 TCGv t1 = tcg_temp_new(); 5393 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5394 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5395 tcg_gen_subfi_tl(t1, 32, t1); 5396 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 5397 tcg_gen_or_tl(t1, t0, t1); 5398 gen_store_spr(SPR_MQ, t1); 5399 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20); 5400 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5401 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1); 5402 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 5403 gen_set_label(l1); 5404 tcg_temp_free(t0); 5405 tcg_temp_free(t1); 5406 if (unlikely(Rc(ctx->opcode) != 0)) 5407 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5408 } 5409 5410 /* sraiq - sraiq. */ 5411 static void gen_sraiq(DisasContext *ctx) 5412 { 5413 int sh = SH(ctx->opcode); 5414 TCGLabel *l1 = gen_new_label(); 5415 TCGv t0 = tcg_temp_new(); 5416 TCGv t1 = tcg_temp_new(); 5417 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5418 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 5419 tcg_gen_or_tl(t0, t0, t1); 5420 gen_store_spr(SPR_MQ, t0); 5421 tcg_gen_movi_tl(cpu_ca, 0); 5422 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1); 5423 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1); 5424 tcg_gen_movi_tl(cpu_ca, 1); 5425 gen_set_label(l1); 5426 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh); 5427 tcg_temp_free(t0); 5428 tcg_temp_free(t1); 5429 if (unlikely(Rc(ctx->opcode) != 0)) 5430 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5431 } 5432 5433 /* sraq - sraq. 
*/ 5434 static void gen_sraq(DisasContext *ctx) 5435 { 5436 TCGLabel *l1 = gen_new_label(); 5437 TCGLabel *l2 = gen_new_label(); 5438 TCGv t0 = tcg_temp_new(); 5439 TCGv t1 = tcg_temp_local_new(); 5440 TCGv t2 = tcg_temp_local_new(); 5441 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 5442 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 5443 tcg_gen_sar_tl(t1, cpu_gpr[rS(ctx->opcode)], t2); 5444 tcg_gen_subfi_tl(t2, 32, t2); 5445 tcg_gen_shl_tl(t2, cpu_gpr[rS(ctx->opcode)], t2); 5446 tcg_gen_or_tl(t0, t0, t2); 5447 gen_store_spr(SPR_MQ, t0); 5448 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 5449 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l1); 5450 tcg_gen_mov_tl(t2, cpu_gpr[rS(ctx->opcode)]); 5451 tcg_gen_sari_tl(t1, cpu_gpr[rS(ctx->opcode)], 31); 5452 gen_set_label(l1); 5453 tcg_temp_free(t0); 5454 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t1); 5455 tcg_gen_movi_tl(cpu_ca, 0); 5456 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2); 5457 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l2); 5458 tcg_gen_movi_tl(cpu_ca, 1); 5459 gen_set_label(l2); 5460 tcg_temp_free(t1); 5461 tcg_temp_free(t2); 5462 if (unlikely(Rc(ctx->opcode) != 0)) 5463 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5464 } 5465 5466 /* sre - sre. */ 5467 static void gen_sre(DisasContext *ctx) 5468 { 5469 TCGv t0 = tcg_temp_new(); 5470 TCGv t1 = tcg_temp_new(); 5471 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5472 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5473 tcg_gen_subfi_tl(t1, 32, t1); 5474 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 5475 tcg_gen_or_tl(t1, t0, t1); 5476 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5477 gen_store_spr(SPR_MQ, t1); 5478 tcg_temp_free(t0); 5479 tcg_temp_free(t1); 5480 if (unlikely(Rc(ctx->opcode) != 0)) 5481 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5482 } 5483 5484 /* srea - srea. 
*/ 5485 static void gen_srea(DisasContext *ctx) 5486 { 5487 TCGv t0 = tcg_temp_new(); 5488 TCGv t1 = tcg_temp_new(); 5489 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5490 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5491 gen_store_spr(SPR_MQ, t0); 5492 tcg_gen_sar_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t1); 5493 tcg_temp_free(t0); 5494 tcg_temp_free(t1); 5495 if (unlikely(Rc(ctx->opcode) != 0)) 5496 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5497 } 5498 5499 /* sreq */ 5500 static void gen_sreq(DisasContext *ctx) 5501 { 5502 TCGv t0 = tcg_temp_new(); 5503 TCGv t1 = tcg_temp_new(); 5504 TCGv t2 = tcg_temp_new(); 5505 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5506 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 5507 tcg_gen_shr_tl(t1, t1, t0); 5508 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 5509 gen_load_spr(t2, SPR_MQ); 5510 gen_store_spr(SPR_MQ, t0); 5511 tcg_gen_and_tl(t0, t0, t1); 5512 tcg_gen_andc_tl(t2, t2, t1); 5513 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t2); 5514 tcg_temp_free(t0); 5515 tcg_temp_free(t1); 5516 tcg_temp_free(t2); 5517 if (unlikely(Rc(ctx->opcode) != 0)) 5518 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5519 } 5520 5521 /* sriq */ 5522 static void gen_sriq(DisasContext *ctx) 5523 { 5524 int sh = SH(ctx->opcode); 5525 TCGv t0 = tcg_temp_new(); 5526 TCGv t1 = tcg_temp_new(); 5527 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5528 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 5529 tcg_gen_or_tl(t1, t0, t1); 5530 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5531 gen_store_spr(SPR_MQ, t1); 5532 tcg_temp_free(t0); 5533 tcg_temp_free(t1); 5534 if (unlikely(Rc(ctx->opcode) != 0)) 5535 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5536 } 5537 5538 /* srliq */ 5539 static void gen_srliq(DisasContext *ctx) 5540 { 5541 int sh = SH(ctx->opcode); 5542 TCGv t0 = tcg_temp_new(); 5543 TCGv t1 = tcg_temp_new(); 5544 tcg_gen_rotri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5545 gen_load_spr(t1, SPR_MQ); 5546 gen_store_spr(SPR_MQ, t0); 5547 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU >> sh)); 5548 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU >> sh)); 5549 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5550 tcg_temp_free(t0); 5551 tcg_temp_free(t1); 5552 if (unlikely(Rc(ctx->opcode) != 0)) 5553 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5554 } 5555 5556 /* srlq */ 5557 static void gen_srlq(DisasContext *ctx) 5558 { 5559 TCGLabel *l1 = gen_new_label(); 5560 TCGLabel *l2 = gen_new_label(); 5561 TCGv t0 = tcg_temp_local_new(); 5562 TCGv t1 = tcg_temp_local_new(); 5563 TCGv t2 = tcg_temp_local_new(); 5564 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 5565 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 5566 tcg_gen_shr_tl(t2, t1, t2); 5567 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 5568 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 5569 gen_load_spr(t0, SPR_MQ); 5570 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t2); 5571 tcg_gen_br(l2); 5572 gen_set_label(l1); 5573 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 5574 tcg_gen_and_tl(t0, t0, t2); 5575 gen_load_spr(t1, SPR_MQ); 5576 tcg_gen_andc_tl(t1, t1, t2); 5577 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5578 gen_set_label(l2); 5579 tcg_temp_free(t0); 5580 tcg_temp_free(t1); 5581 tcg_temp_free(t2); 5582 if (unlikely(Rc(ctx->opcode) != 0)) 5583 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5584 } 5585 5586 /* srq */ 5587 static void gen_srq(DisasContext *ctx) 5588 { 5589 TCGLabel *l1 = gen_new_label(); 5590 TCGv t0 = tcg_temp_new(); 5591 TCGv t1 = tcg_temp_new(); 5592 tcg_gen_andi_tl(t1, 
cpu_gpr[rB(ctx->opcode)], 0x1F); 5593 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5594 tcg_gen_subfi_tl(t1, 32, t1); 5595 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 5596 tcg_gen_or_tl(t1, t0, t1); 5597 gen_store_spr(SPR_MQ, t1); 5598 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20); 5599 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5600 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 5601 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 5602 gen_set_label(l1); 5603 tcg_temp_free(t0); 5604 tcg_temp_free(t1); 5605 if (unlikely(Rc(ctx->opcode) != 0)) 5606 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5607 } 5608 5609 /* PowerPC 602 specific instructions */ 5610 5611 /* dsa */ 5612 static void gen_dsa(DisasContext *ctx) 5613 { 5614 /* XXX: TODO */ 5615 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5616 } 5617 5618 /* esa */ 5619 static void gen_esa(DisasContext *ctx) 5620 { 5621 /* XXX: TODO */ 5622 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5623 } 5624 5625 /* mfrom */ 5626 static void gen_mfrom(DisasContext *ctx) 5627 { 5628 #if defined(CONFIG_USER_ONLY) 5629 GEN_PRIV; 5630 #else 5631 CHK_SV; 5632 gen_helper_602_mfrom(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5633 #endif /* defined(CONFIG_USER_ONLY) */ 5634 } 5635 5636 /* 602 - 603 - G2 TLB management */ 5637 5638 /* tlbld */ 5639 static void gen_tlbld_6xx(DisasContext *ctx) 5640 { 5641 #if defined(CONFIG_USER_ONLY) 5642 GEN_PRIV; 5643 #else 5644 CHK_SV; 5645 gen_helper_6xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5646 #endif /* defined(CONFIG_USER_ONLY) */ 5647 } 5648 5649 /* tlbli */ 5650 static void gen_tlbli_6xx(DisasContext *ctx) 5651 { 5652 #if defined(CONFIG_USER_ONLY) 5653 GEN_PRIV; 5654 #else 5655 CHK_SV; 5656 gen_helper_6xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5657 #endif /* defined(CONFIG_USER_ONLY) */ 5658 } 5659 5660 /* 74xx TLB management */ 5661 5662 /* tlbld */ 5663 static void gen_tlbld_74xx(DisasContext *ctx) 5664 { 5665 #if defined(CONFIG_USER_ONLY) 5666 GEN_PRIV; 5667 #else 5668 CHK_SV; 5669 gen_helper_74xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5670 #endif /* defined(CONFIG_USER_ONLY) */ 5671 } 5672 5673 /* tlbli */ 5674 static void gen_tlbli_74xx(DisasContext *ctx) 5675 { 5676 #if defined(CONFIG_USER_ONLY) 5677 GEN_PRIV; 5678 #else 5679 CHK_SV; 5680 gen_helper_74xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5681 #endif /* defined(CONFIG_USER_ONLY) */ 5682 } 5683 5684 /* POWER instructions not in PowerPC 601 */ 5685 5686 /* clf */ 5687 static void gen_clf(DisasContext *ctx) 5688 { 5689 /* Cache line flush: implemented as no-op */ 5690 } 5691 5692 /* cli */ 5693 static void gen_cli(DisasContext *ctx) 5694 { 5695 #if defined(CONFIG_USER_ONLY) 5696 GEN_PRIV; 5697 #else 5698 /* Cache line invalidate: privileged and treated as no-op */ 5699 CHK_SV; 5700 #endif /* defined(CONFIG_USER_ONLY) */ 5701 } 5702 5703 /* dclst */ 5704 static void gen_dclst(DisasContext *ctx) 5705 { 5706 /* Data cache line store: treated as no-op */ 5707 } 5708 5709 static void gen_mfsri(DisasContext *ctx) 5710 { 5711 #if defined(CONFIG_USER_ONLY) 5712 GEN_PRIV; 5713 #else 5714 int ra = rA(ctx->opcode); 5715 int rd = rD(ctx->opcode); 5716 TCGv t0; 5717 5718 CHK_SV; 5719 t0 = tcg_temp_new(); 5720 gen_addr_reg_index(ctx, t0); 5721 tcg_gen_extract_tl(t0, t0, 28, 4); 5722 gen_helper_load_sr(cpu_gpr[rd], cpu_env, t0); 5723 tcg_temp_free(t0); 5724 if (ra != 0 && ra != rd) 5725 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rd]); 5726 #endif /* defined(CONFIG_USER_ONLY) */ 5727 } 5728 5729 static void gen_rac(DisasContext *ctx) 5730 
{ 5731 #if defined(CONFIG_USER_ONLY) 5732 GEN_PRIV; 5733 #else 5734 TCGv t0; 5735 5736 CHK_SV; 5737 t0 = tcg_temp_new(); 5738 gen_addr_reg_index(ctx, t0); 5739 gen_helper_rac(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5740 tcg_temp_free(t0); 5741 #endif /* defined(CONFIG_USER_ONLY) */ 5742 } 5743 5744 static void gen_rfsvc(DisasContext *ctx) 5745 { 5746 #if defined(CONFIG_USER_ONLY) 5747 GEN_PRIV; 5748 #else 5749 CHK_SV; 5750 5751 gen_helper_rfsvc(cpu_env); 5752 gen_sync_exception(ctx); 5753 #endif /* defined(CONFIG_USER_ONLY) */ 5754 } 5755 5756 /* svc is not implemented for now */ 5757 5758 /* BookE specific instructions */ 5759 5760 /* XXX: not implemented on 440 ? */ 5761 static void gen_mfapidi(DisasContext *ctx) 5762 { 5763 /* XXX: TODO */ 5764 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5765 } 5766 5767 /* XXX: not implemented on 440 ? */ 5768 static void gen_tlbiva(DisasContext *ctx) 5769 { 5770 #if defined(CONFIG_USER_ONLY) 5771 GEN_PRIV; 5772 #else 5773 TCGv t0; 5774 5775 CHK_SV; 5776 t0 = tcg_temp_new(); 5777 gen_addr_reg_index(ctx, t0); 5778 gen_helper_tlbiva(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5779 tcg_temp_free(t0); 5780 #endif /* defined(CONFIG_USER_ONLY) */ 5781 } 5782 5783 /* All 405 MAC instructions are translated here */ 5784 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3, 5785 int ra, int rb, int rt, int Rc) 5786 { 5787 TCGv t0, t1; 5788 5789 t0 = tcg_temp_local_new(); 5790 t1 = tcg_temp_local_new(); 5791 5792 switch (opc3 & 0x0D) { 5793 case 0x05: 5794 /* macchw - macchw. - macchwo - macchwo. */ 5795 /* macchws - macchws. - macchwso - macchwso. */ 5796 /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */ 5797 /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */ 5798 /* mulchw - mulchw. */ 5799 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]); 5800 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16); 5801 tcg_gen_ext16s_tl(t1, t1); 5802 break; 5803 case 0x04: 5804 /* macchwu - macchwu. - macchwuo - macchwuo. */ 5805 /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */ 5806 /* mulchwu - mulchwu. */ 5807 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]); 5808 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16); 5809 tcg_gen_ext16u_tl(t1, t1); 5810 break; 5811 case 0x01: 5812 /* machhw - machhw. - machhwo - machhwo. */ 5813 /* machhws - machhws. - machhwso - machhwso. */ 5814 /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */ 5815 /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */ 5816 /* mulhhw - mulhhw. */ 5817 tcg_gen_sari_tl(t0, cpu_gpr[ra], 16); 5818 tcg_gen_ext16s_tl(t0, t0); 5819 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16); 5820 tcg_gen_ext16s_tl(t1, t1); 5821 break; 5822 case 0x00: 5823 /* machhwu - machhwu. - machhwuo - machhwuo. */ 5824 /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */ 5825 /* mulhhwu - mulhhwu. */ 5826 tcg_gen_shri_tl(t0, cpu_gpr[ra], 16); 5827 tcg_gen_ext16u_tl(t0, t0); 5828 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16); 5829 tcg_gen_ext16u_tl(t1, t1); 5830 break; 5831 case 0x0D: 5832 /* maclhw - maclhw. - maclhwo - maclhwo. */ 5833 /* maclhws - maclhws. - maclhwso - maclhwso. */ 5834 /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */ 5835 /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */ 5836 /* mullhw - mullhw. */ 5837 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]); 5838 tcg_gen_ext16s_tl(t1, cpu_gpr[rb]); 5839 break; 5840 case 0x0C: 5841 /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */ 5842 /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */ 5843 /* mullhwu - mullhwu. 
*/ 5844 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]); 5845 tcg_gen_ext16u_tl(t1, cpu_gpr[rb]); 5846 break; 5847 } 5848 if (opc2 & 0x04) { 5849 /* (n)multiply-and-accumulate (0x0C / 0x0E) */ 5850 tcg_gen_mul_tl(t1, t0, t1); 5851 if (opc2 & 0x02) { 5852 /* nmultiply-and-accumulate (0x0E) */ 5853 tcg_gen_sub_tl(t0, cpu_gpr[rt], t1); 5854 } else { 5855 /* multiply-and-accumulate (0x0C) */ 5856 tcg_gen_add_tl(t0, cpu_gpr[rt], t1); 5857 } 5858 5859 if (opc3 & 0x12) { 5860 /* Check overflow and/or saturate */ 5861 TCGLabel *l1 = gen_new_label(); 5862 5863 if (opc3 & 0x10) { 5864 /* Start with XER OV disabled, the most likely case */ 5865 tcg_gen_movi_tl(cpu_ov, 0); 5866 } 5867 if (opc3 & 0x01) { 5868 /* Signed */ 5869 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1); 5870 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1); 5871 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0); 5872 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1); 5873 if (opc3 & 0x02) { 5874 /* Saturate */ 5875 tcg_gen_sari_tl(t0, cpu_gpr[rt], 31); 5876 tcg_gen_xori_tl(t0, t0, 0x7fffffff); 5877 } 5878 } else { 5879 /* Unsigned */ 5880 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1); 5881 if (opc3 & 0x02) { 5882 /* Saturate */ 5883 tcg_gen_movi_tl(t0, UINT32_MAX); 5884 } 5885 } 5886 if (opc3 & 0x10) { 5887 /* Check overflow */ 5888 tcg_gen_movi_tl(cpu_ov, 1); 5889 tcg_gen_movi_tl(cpu_so, 1); 5890 } 5891 gen_set_label(l1); 5892 tcg_gen_mov_tl(cpu_gpr[rt], t0); 5893 } 5894 } else { 5895 tcg_gen_mul_tl(cpu_gpr[rt], t0, t1); 5896 } 5897 tcg_temp_free(t0); 5898 tcg_temp_free(t1); 5899 if (unlikely(Rc) != 0) { 5900 /* Update Rc0 */ 5901 gen_set_Rc0(ctx, cpu_gpr[rt]); 5902 } 5903 } 5904 5905 #define GEN_MAC_HANDLER(name, opc2, opc3) \ 5906 static void glue(gen_, name)(DisasContext *ctx) \ 5907 { \ 5908 gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode), \ 5909 rD(ctx->opcode), Rc(ctx->opcode)); \ 5910 } 5911 5912 /* macchw - macchw. */ 5913 GEN_MAC_HANDLER(macchw, 0x0C, 0x05); 5914 /* macchwo - macchwo. */ 5915 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15); 5916 /* macchws - macchws. */ 5917 GEN_MAC_HANDLER(macchws, 0x0C, 0x07); 5918 /* macchwso - macchwso. */ 5919 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17); 5920 /* macchwsu - macchwsu. */ 5921 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06); 5922 /* macchwsuo - macchwsuo. */ 5923 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16); 5924 /* macchwu - macchwu. */ 5925 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04); 5926 /* macchwuo - macchwuo. */ 5927 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14); 5928 /* machhw - machhw. */ 5929 GEN_MAC_HANDLER(machhw, 0x0C, 0x01); 5930 /* machhwo - machhwo. */ 5931 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11); 5932 /* machhws - machhws. */ 5933 GEN_MAC_HANDLER(machhws, 0x0C, 0x03); 5934 /* machhwso - machhwso. */ 5935 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13); 5936 /* machhwsu - machhwsu. */ 5937 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02); 5938 /* machhwsuo - machhwsuo. */ 5939 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12); 5940 /* machhwu - machhwu. */ 5941 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00); 5942 /* machhwuo - machhwuo. */ 5943 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10); 5944 /* maclhw - maclhw. */ 5945 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D); 5946 /* maclhwo - maclhwo. */ 5947 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D); 5948 /* maclhws - maclhws. */ 5949 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F); 5950 /* maclhwso - maclhwso. */ 5951 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F); 5952 /* maclhwu - maclhwu. */ 5953 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C); 5954 /* maclhwuo - maclhwuo. */ 5955 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C); 5956 /* maclhwsu - maclhwsu. 
*/ 5957 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E); 5958 /* maclhwsuo - maclhwsuo. */ 5959 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E); 5960 /* nmacchw - nmacchw. */ 5961 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05); 5962 /* nmacchwo - nmacchwo. */ 5963 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15); 5964 /* nmacchws - nmacchws. */ 5965 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07); 5966 /* nmacchwso - nmacchwso. */ 5967 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17); 5968 /* nmachhw - nmachhw. */ 5969 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01); 5970 /* nmachhwo - nmachhwo. */ 5971 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11); 5972 /* nmachhws - nmachhws. */ 5973 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03); 5974 /* nmachhwso - nmachhwso. */ 5975 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13); 5976 /* nmaclhw - nmaclhw. */ 5977 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D); 5978 /* nmaclhwo - nmaclhwo. */ 5979 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D); 5980 /* nmaclhws - nmaclhws. */ 5981 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F); 5982 /* nmaclhwso - nmaclhwso. */ 5983 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F); 5984 5985 /* mulchw - mulchw. */ 5986 GEN_MAC_HANDLER(mulchw, 0x08, 0x05); 5987 /* mulchwu - mulchwu. */ 5988 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04); 5989 /* mulhhw - mulhhw. */ 5990 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01); 5991 /* mulhhwu - mulhhwu. */ 5992 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00); 5993 /* mullhw - mullhw. */ 5994 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D); 5995 /* mullhwu - mullhwu. */ 5996 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C); 5997 5998 /* mfdcr */ 5999 static void gen_mfdcr(DisasContext *ctx) 6000 { 6001 #if defined(CONFIG_USER_ONLY) 6002 GEN_PRIV; 6003 #else 6004 TCGv dcrn; 6005 6006 CHK_SV; 6007 dcrn = tcg_const_tl(SPR(ctx->opcode)); 6008 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, dcrn); 6009 tcg_temp_free(dcrn); 6010 #endif /* defined(CONFIG_USER_ONLY) */ 6011 } 6012 6013 /* mtdcr */ 6014 static void gen_mtdcr(DisasContext *ctx) 6015 { 6016 #if defined(CONFIG_USER_ONLY) 6017 GEN_PRIV; 6018 #else 6019 TCGv dcrn; 6020 6021 CHK_SV; 6022 dcrn = tcg_const_tl(SPR(ctx->opcode)); 6023 gen_helper_store_dcr(cpu_env, dcrn, cpu_gpr[rS(ctx->opcode)]); 6024 tcg_temp_free(dcrn); 6025 #endif /* defined(CONFIG_USER_ONLY) */ 6026 } 6027 6028 /* mfdcrx */ 6029 /* XXX: not implemented on 440 ? */ 6030 static void gen_mfdcrx(DisasContext *ctx) 6031 { 6032 #if defined(CONFIG_USER_ONLY) 6033 GEN_PRIV; 6034 #else 6035 CHK_SV; 6036 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, 6037 cpu_gpr[rA(ctx->opcode)]); 6038 /* Note: Rc update flag set leads to undefined state of Rc0 */ 6039 #endif /* defined(CONFIG_USER_ONLY) */ 6040 } 6041 6042 /* mtdcrx */ 6043 /* XXX: not implemented on 440 ? 
*/ 6044 static void gen_mtdcrx(DisasContext *ctx) 6045 { 6046 #if defined(CONFIG_USER_ONLY) 6047 GEN_PRIV; 6048 #else 6049 CHK_SV; 6050 gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)], 6051 cpu_gpr[rS(ctx->opcode)]); 6052 /* Note: Rc update flag set leads to undefined state of Rc0 */ 6053 #endif /* defined(CONFIG_USER_ONLY) */ 6054 } 6055 6056 /* mfdcrux (PPC 460) : user-mode access to DCR */ 6057 static void gen_mfdcrux(DisasContext *ctx) 6058 { 6059 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, 6060 cpu_gpr[rA(ctx->opcode)]); 6061 /* Note: Rc update flag set leads to undefined state of Rc0 */ 6062 } 6063 6064 /* mtdcrux (PPC 460) : user-mode access to DCR */ 6065 static void gen_mtdcrux(DisasContext *ctx) 6066 { 6067 gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)], 6068 cpu_gpr[rS(ctx->opcode)]); 6069 /* Note: Rc update flag set leads to undefined state of Rc0 */ 6070 } 6071 6072 /* dccci */ 6073 static void gen_dccci(DisasContext *ctx) 6074 { 6075 CHK_SV; 6076 /* interpreted as no-op */ 6077 } 6078 6079 /* dcread */ 6080 static void gen_dcread(DisasContext *ctx) 6081 { 6082 #if defined(CONFIG_USER_ONLY) 6083 GEN_PRIV; 6084 #else 6085 TCGv EA, val; 6086 6087 CHK_SV; 6088 gen_set_access_type(ctx, ACCESS_CACHE); 6089 EA = tcg_temp_new(); 6090 gen_addr_reg_index(ctx, EA); 6091 val = tcg_temp_new(); 6092 gen_qemu_ld32u(ctx, val, EA); 6093 tcg_temp_free(val); 6094 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA); 6095 tcg_temp_free(EA); 6096 #endif /* defined(CONFIG_USER_ONLY) */ 6097 } 6098 6099 /* icbt */ 6100 static void gen_icbt_40x(DisasContext *ctx) 6101 { 6102 /* interpreted as no-op */ 6103 /* XXX: specification say this is treated as a load by the MMU 6104 * but does not generate any exception 6105 */ 6106 } 6107 6108 /* iccci */ 6109 static void gen_iccci(DisasContext *ctx) 6110 { 6111 CHK_SV; 6112 /* interpreted as no-op */ 6113 } 6114 6115 /* icread */ 6116 static void gen_icread(DisasContext *ctx) 6117 { 6118 CHK_SV; 6119 /* interpreted as no-op */ 6120 } 6121 6122 /* rfci (supervisor only) */ 6123 static void gen_rfci_40x(DisasContext *ctx) 6124 { 6125 #if defined(CONFIG_USER_ONLY) 6126 GEN_PRIV; 6127 #else 6128 CHK_SV; 6129 /* Restore CPU state */ 6130 gen_helper_40x_rfci(cpu_env); 6131 gen_sync_exception(ctx); 6132 #endif /* defined(CONFIG_USER_ONLY) */ 6133 } 6134 6135 static void gen_rfci(DisasContext *ctx) 6136 { 6137 #if defined(CONFIG_USER_ONLY) 6138 GEN_PRIV; 6139 #else 6140 CHK_SV; 6141 /* Restore CPU state */ 6142 gen_helper_rfci(cpu_env); 6143 gen_sync_exception(ctx); 6144 #endif /* defined(CONFIG_USER_ONLY) */ 6145 } 6146 6147 /* BookE specific */ 6148 6149 /* XXX: not implemented on 440 ? */ 6150 static void gen_rfdi(DisasContext *ctx) 6151 { 6152 #if defined(CONFIG_USER_ONLY) 6153 GEN_PRIV; 6154 #else 6155 CHK_SV; 6156 /* Restore CPU state */ 6157 gen_helper_rfdi(cpu_env); 6158 gen_sync_exception(ctx); 6159 #endif /* defined(CONFIG_USER_ONLY) */ 6160 } 6161 6162 /* XXX: not implemented on 440 ? 
*/ 6163 static void gen_rfmci(DisasContext *ctx) 6164 { 6165 #if defined(CONFIG_USER_ONLY) 6166 GEN_PRIV; 6167 #else 6168 CHK_SV; 6169 /* Restore CPU state */ 6170 gen_helper_rfmci(cpu_env); 6171 gen_sync_exception(ctx); 6172 #endif /* defined(CONFIG_USER_ONLY) */ 6173 } 6174 6175 /* TLB management - PowerPC 405 implementation */ 6176 6177 /* tlbre */ 6178 static void gen_tlbre_40x(DisasContext *ctx) 6179 { 6180 #if defined(CONFIG_USER_ONLY) 6181 GEN_PRIV; 6182 #else 6183 CHK_SV; 6184 switch (rB(ctx->opcode)) { 6185 case 0: 6186 gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_env, 6187 cpu_gpr[rA(ctx->opcode)]); 6188 break; 6189 case 1: 6190 gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_env, 6191 cpu_gpr[rA(ctx->opcode)]); 6192 break; 6193 default: 6194 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6195 break; 6196 } 6197 #endif /* defined(CONFIG_USER_ONLY) */ 6198 } 6199 6200 /* tlbsx - tlbsx. */ 6201 static void gen_tlbsx_40x(DisasContext *ctx) 6202 { 6203 #if defined(CONFIG_USER_ONLY) 6204 GEN_PRIV; 6205 #else 6206 TCGv t0; 6207 6208 CHK_SV; 6209 t0 = tcg_temp_new(); 6210 gen_addr_reg_index(ctx, t0); 6211 gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 6212 tcg_temp_free(t0); 6213 if (Rc(ctx->opcode)) { 6214 TCGLabel *l1 = gen_new_label(); 6215 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 6216 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1); 6217 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02); 6218 gen_set_label(l1); 6219 } 6220 #endif /* defined(CONFIG_USER_ONLY) */ 6221 } 6222 6223 /* tlbwe */ 6224 static void gen_tlbwe_40x(DisasContext *ctx) 6225 { 6226 #if defined(CONFIG_USER_ONLY) 6227 GEN_PRIV; 6228 #else 6229 CHK_SV; 6230 6231 switch (rB(ctx->opcode)) { 6232 case 0: 6233 gen_helper_4xx_tlbwe_hi(cpu_env, cpu_gpr[rA(ctx->opcode)], 6234 cpu_gpr[rS(ctx->opcode)]); 6235 break; 6236 case 1: 6237 gen_helper_4xx_tlbwe_lo(cpu_env, cpu_gpr[rA(ctx->opcode)], 6238 cpu_gpr[rS(ctx->opcode)]); 6239 break; 6240 default: 6241 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6242 break; 6243 } 6244 #endif /* defined(CONFIG_USER_ONLY) */ 6245 } 6246 6247 /* TLB management - PowerPC 440 implementation */ 6248 6249 /* tlbre */ 6250 static void gen_tlbre_440(DisasContext *ctx) 6251 { 6252 #if defined(CONFIG_USER_ONLY) 6253 GEN_PRIV; 6254 #else 6255 CHK_SV; 6256 6257 switch (rB(ctx->opcode)) { 6258 case 0: 6259 case 1: 6260 case 2: 6261 { 6262 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode)); 6263 gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], cpu_env, 6264 t0, cpu_gpr[rA(ctx->opcode)]); 6265 tcg_temp_free_i32(t0); 6266 } 6267 break; 6268 default: 6269 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6270 break; 6271 } 6272 #endif /* defined(CONFIG_USER_ONLY) */ 6273 } 6274 6275 /* tlbsx - tlbsx. 
*/ 6276 static void gen_tlbsx_440(DisasContext *ctx) 6277 { 6278 #if defined(CONFIG_USER_ONLY) 6279 GEN_PRIV; 6280 #else 6281 TCGv t0; 6282 6283 CHK_SV; 6284 t0 = tcg_temp_new(); 6285 gen_addr_reg_index(ctx, t0); 6286 gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 6287 tcg_temp_free(t0); 6288 if (Rc(ctx->opcode)) { 6289 TCGLabel *l1 = gen_new_label(); 6290 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 6291 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1); 6292 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02); 6293 gen_set_label(l1); 6294 } 6295 #endif /* defined(CONFIG_USER_ONLY) */ 6296 } 6297 6298 /* tlbwe */ 6299 static void gen_tlbwe_440(DisasContext *ctx) 6300 { 6301 #if defined(CONFIG_USER_ONLY) 6302 GEN_PRIV; 6303 #else 6304 CHK_SV; 6305 switch (rB(ctx->opcode)) { 6306 case 0: 6307 case 1: 6308 case 2: 6309 { 6310 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode)); 6311 gen_helper_440_tlbwe(cpu_env, t0, cpu_gpr[rA(ctx->opcode)], 6312 cpu_gpr[rS(ctx->opcode)]); 6313 tcg_temp_free_i32(t0); 6314 } 6315 break; 6316 default: 6317 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6318 break; 6319 } 6320 #endif /* defined(CONFIG_USER_ONLY) */ 6321 } 6322 6323 /* TLB management - PowerPC BookE 2.06 implementation */ 6324 6325 /* tlbre */ 6326 static void gen_tlbre_booke206(DisasContext *ctx) 6327 { 6328 #if defined(CONFIG_USER_ONLY) 6329 GEN_PRIV; 6330 #else 6331 CHK_SV; 6332 gen_helper_booke206_tlbre(cpu_env); 6333 #endif /* defined(CONFIG_USER_ONLY) */ 6334 } 6335 6336 /* tlbsx - tlbsx. */ 6337 static void gen_tlbsx_booke206(DisasContext *ctx) 6338 { 6339 #if defined(CONFIG_USER_ONLY) 6340 GEN_PRIV; 6341 #else 6342 TCGv t0; 6343 6344 CHK_SV; 6345 if (rA(ctx->opcode)) { 6346 t0 = tcg_temp_new(); 6347 tcg_gen_mov_tl(t0, cpu_gpr[rD(ctx->opcode)]); 6348 } else { 6349 t0 = tcg_const_tl(0); 6350 } 6351 6352 tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]); 6353 gen_helper_booke206_tlbsx(cpu_env, t0); 6354 tcg_temp_free(t0); 6355 #endif /* defined(CONFIG_USER_ONLY) */ 6356 } 6357 6358 /* tlbwe */ 6359 static void gen_tlbwe_booke206(DisasContext *ctx) 6360 { 6361 #if defined(CONFIG_USER_ONLY) 6362 GEN_PRIV; 6363 #else 6364 CHK_SV; 6365 gen_helper_booke206_tlbwe(cpu_env); 6366 #endif /* defined(CONFIG_USER_ONLY) */ 6367 } 6368 6369 static void gen_tlbivax_booke206(DisasContext *ctx) 6370 { 6371 #if defined(CONFIG_USER_ONLY) 6372 GEN_PRIV; 6373 #else 6374 TCGv t0; 6375 6376 CHK_SV; 6377 t0 = tcg_temp_new(); 6378 gen_addr_reg_index(ctx, t0); 6379 gen_helper_booke206_tlbivax(cpu_env, t0); 6380 tcg_temp_free(t0); 6381 #endif /* defined(CONFIG_USER_ONLY) */ 6382 } 6383 6384 static void gen_tlbilx_booke206(DisasContext *ctx) 6385 { 6386 #if defined(CONFIG_USER_ONLY) 6387 GEN_PRIV; 6388 #else 6389 TCGv t0; 6390 6391 CHK_SV; 6392 t0 = tcg_temp_new(); 6393 gen_addr_reg_index(ctx, t0); 6394 6395 switch((ctx->opcode >> 21) & 0x3) { 6396 case 0: 6397 gen_helper_booke206_tlbilx0(cpu_env, t0); 6398 break; 6399 case 1: 6400 gen_helper_booke206_tlbilx1(cpu_env, t0); 6401 break; 6402 case 3: 6403 gen_helper_booke206_tlbilx3(cpu_env, t0); 6404 break; 6405 default: 6406 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6407 break; 6408 } 6409 6410 tcg_temp_free(t0); 6411 #endif /* defined(CONFIG_USER_ONLY) */ 6412 } 6413 6414 6415 /* wrtee */ 6416 static void gen_wrtee(DisasContext *ctx) 6417 { 6418 #if defined(CONFIG_USER_ONLY) 6419 GEN_PRIV; 6420 #else 6421 TCGv t0; 6422 6423 CHK_SV; 6424 t0 = tcg_temp_new(); 6425 tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE)); 6426 
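/*
 * t0 now holds just the EE bit taken from the source GPR; the next two
 * ops clear MSR[EE] and then OR that bit back in, i.e. MSR[EE] = (rS)[EE]
 * with every other MSR bit left untouched.
 */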
tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE)); 6427 tcg_gen_or_tl(cpu_msr, cpu_msr, t0); 6428 tcg_temp_free(t0); 6429 /* Stop translation to have a chance to raise an exception 6430 * if we just set msr_ee to 1 6431 */ 6432 gen_stop_exception(ctx); 6433 #endif /* defined(CONFIG_USER_ONLY) */ 6434 } 6435 6436 /* wrteei */ 6437 static void gen_wrteei(DisasContext *ctx) 6438 { 6439 #if defined(CONFIG_USER_ONLY) 6440 GEN_PRIV; 6441 #else 6442 CHK_SV; 6443 if (ctx->opcode & 0x00008000) { 6444 tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE)); 6445 /* Stop translation to have a chance to raise an exception */ 6446 gen_stop_exception(ctx); 6447 } else { 6448 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE)); 6449 } 6450 #endif /* defined(CONFIG_USER_ONLY) */ 6451 } 6452 6453 /* PowerPC 440 specific instructions */ 6454 6455 /* dlmzb */ 6456 static void gen_dlmzb(DisasContext *ctx) 6457 { 6458 TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode)); 6459 gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_env, 6460 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); 6461 tcg_temp_free_i32(t0); 6462 } 6463 6464 /* mbar replaces eieio on 440 */ 6465 static void gen_mbar(DisasContext *ctx) 6466 { 6467 /* interpreted as no-op */ 6468 } 6469 6470 /* msync replaces sync on 440 */ 6471 static void gen_msync_4xx(DisasContext *ctx) 6472 { 6473 /* Only e500 seems to treat reserved bits as invalid */ 6474 if ((ctx->insns_flags2 & PPC2_BOOKE206) && 6475 (ctx->opcode & 0x03FFF801)) { 6476 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6477 } 6478 /* otherwise interpreted as no-op */ 6479 } 6480 6481 /* icbt */ 6482 static void gen_icbt_440(DisasContext *ctx) 6483 { 6484 /* interpreted as no-op */ 6485 /* XXX: specification say this is treated as a load by the MMU 6486 * but does not generate any exception 6487 */ 6488 } 6489 6490 /* Embedded.Processor Control */ 6491 6492 static void gen_msgclr(DisasContext *ctx) 6493 { 6494 #if defined(CONFIG_USER_ONLY) 6495 GEN_PRIV; 6496 #else 6497 CHK_HV; 6498 /* 64-bit server processors compliant with arch 2.x */ 6499 if (ctx->insns_flags & PPC_SEGMENT_64B) { 6500 gen_helper_book3s_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]); 6501 } else { 6502 gen_helper_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]); 6503 } 6504 #endif /* defined(CONFIG_USER_ONLY) */ 6505 } 6506 6507 static void gen_msgsnd(DisasContext *ctx) 6508 { 6509 #if defined(CONFIG_USER_ONLY) 6510 GEN_PRIV; 6511 #else 6512 CHK_HV; 6513 /* 64-bit server processors compliant with arch 2.x */ 6514 if (ctx->insns_flags & PPC_SEGMENT_64B) { 6515 gen_helper_book3s_msgsnd(cpu_gpr[rB(ctx->opcode)]); 6516 } else { 6517 gen_helper_msgsnd(cpu_gpr[rB(ctx->opcode)]); 6518 } 6519 #endif /* defined(CONFIG_USER_ONLY) */ 6520 } 6521 6522 static void gen_msgsync(DisasContext *ctx) 6523 { 6524 #if defined(CONFIG_USER_ONLY) 6525 GEN_PRIV; 6526 #else 6527 CHK_HV; 6528 #endif /* defined(CONFIG_USER_ONLY) */ 6529 /* interpreted as no-op */ 6530 } 6531 6532 #if defined(TARGET_PPC64) 6533 static void gen_maddld(DisasContext *ctx) 6534 { 6535 TCGv_i64 t1 = tcg_temp_new_i64(); 6536 6537 tcg_gen_mul_i64(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 6538 tcg_gen_add_i64(cpu_gpr[rD(ctx->opcode)], t1, cpu_gpr[rC(ctx->opcode)]); 6539 tcg_temp_free_i64(t1); 6540 } 6541 6542 /* maddhd maddhdu */ 6543 static void gen_maddhd_maddhdu(DisasContext *ctx) 6544 { 6545 TCGv_i64 lo = tcg_temp_new_i64(); 6546 TCGv_i64 hi = tcg_temp_new_i64(); 6547 TCGv_i64 t1 = tcg_temp_new_i64(); 6548 6549 if (Rc(ctx->opcode)) { 6550 tcg_gen_mulu2_i64(lo, hi, 
cpu_gpr[rA(ctx->opcode)], 6551 cpu_gpr[rB(ctx->opcode)]); 6552 tcg_gen_movi_i64(t1, 0); 6553 } else { 6554 tcg_gen_muls2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)], 6555 cpu_gpr[rB(ctx->opcode)]); 6556 tcg_gen_sari_i64(t1, cpu_gpr[rC(ctx->opcode)], 63); 6557 } 6558 tcg_gen_add2_i64(t1, cpu_gpr[rD(ctx->opcode)], lo, hi, 6559 cpu_gpr[rC(ctx->opcode)], t1); 6560 tcg_temp_free_i64(lo); 6561 tcg_temp_free_i64(hi); 6562 tcg_temp_free_i64(t1); 6563 } 6564 #endif /* defined(TARGET_PPC64) */ 6565 6566 static void gen_tbegin(DisasContext *ctx) 6567 { 6568 if (unlikely(!ctx->tm_enabled)) { 6569 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); 6570 return; 6571 } 6572 gen_helper_tbegin(cpu_env); 6573 } 6574 6575 #define GEN_TM_NOOP(name) \ 6576 static inline void gen_##name(DisasContext *ctx) \ 6577 { \ 6578 if (unlikely(!ctx->tm_enabled)) { \ 6579 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \ 6580 return; \ 6581 } \ 6582 /* Because tbegin always fails in QEMU, these user \ 6583 * space instructions all have a simple implementation: \ 6584 * \ 6585 * CR[0] = 0b0 || MSR[TS] || 0b0 \ 6586 * = 0b0 || 0b00 || 0b0 \ 6587 */ \ 6588 tcg_gen_movi_i32(cpu_crf[0], 0); \ 6589 } 6590 6591 GEN_TM_NOOP(tend); 6592 GEN_TM_NOOP(tabort); 6593 GEN_TM_NOOP(tabortwc); 6594 GEN_TM_NOOP(tabortwci); 6595 GEN_TM_NOOP(tabortdc); 6596 GEN_TM_NOOP(tabortdci); 6597 GEN_TM_NOOP(tsr); 6598 static inline void gen_cp_abort(DisasContext *ctx) 6599 { 6600 // Do Nothing 6601 } 6602 6603 #define GEN_CP_PASTE_NOOP(name) \ 6604 static inline void gen_##name(DisasContext *ctx) \ 6605 { \ 6606 /* Generate invalid exception until \ 6607 * we have an implementation of the copy \ 6608 * paste facility \ 6609 */ \ 6610 gen_invalid(ctx); \ 6611 } 6612 6613 GEN_CP_PASTE_NOOP(copy) 6614 GEN_CP_PASTE_NOOP(paste) 6615 6616 static void gen_tcheck(DisasContext *ctx) 6617 { 6618 if (unlikely(!ctx->tm_enabled)) { 6619 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); 6620 return; 6621 } 6622 /* Because tbegin always fails, the tcheck implementation 6623 * is simple: 6624 * 6625 * CR[CRF] = TDOOMED || MSR[TS] || 0b0 6626 * = 0b1 || 0b00 || 0b0 6627 */ 6628 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0x8); 6629 } 6630 6631 #if defined(CONFIG_USER_ONLY) 6632 #define GEN_TM_PRIV_NOOP(name) \ 6633 static inline void gen_##name(DisasContext *ctx) \ 6634 { \ 6635 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); \ 6636 } 6637 6638 #else 6639 6640 #define GEN_TM_PRIV_NOOP(name) \ 6641 static inline void gen_##name(DisasContext *ctx) \ 6642 { \ 6643 CHK_SV; \ 6644 if (unlikely(!ctx->tm_enabled)) { \ 6645 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \ 6646 return; \ 6647 } \ 6648 /* Because tbegin always fails, the implementation is \ 6649 * simple: \ 6650 * \ 6651 * CR[0] = 0b0 || MSR[TS] || 0b0 \ 6652 * = 0b0 || 0b00 | 0b0 \ 6653 */ \ 6654 tcg_gen_movi_i32(cpu_crf[0], 0); \ 6655 } 6656 6657 #endif 6658 6659 GEN_TM_PRIV_NOOP(treclaim); 6660 GEN_TM_PRIV_NOOP(trechkpt); 6661 6662 static inline void get_fpr(TCGv_i64 dst, int regno) 6663 { 6664 tcg_gen_ld_i64(dst, cpu_env, offsetof(CPUPPCState, vsr[regno].u64[0])); 6665 } 6666 6667 static inline void set_fpr(int regno, TCGv_i64 src) 6668 { 6669 tcg_gen_st_i64(src, cpu_env, offsetof(CPUPPCState, vsr[regno].u64[0])); 6670 } 6671 6672 static inline void get_avr64(TCGv_i64 dst, int regno, bool high) 6673 { 6674 #ifdef HOST_WORDS_BIGENDIAN 6675 tcg_gen_ld_i64(dst, cpu_env, offsetof(CPUPPCState, 6676 vsr[32 + regno].u64[(high ? 
0 : 1)])); 6677 #else 6678 tcg_gen_ld_i64(dst, cpu_env, offsetof(CPUPPCState, 6679 vsr[32 + regno].u64[(high ? 1 : 0)])); 6680 #endif 6681 } 6682 6683 static inline void set_avr64(int regno, TCGv_i64 src, bool high) 6684 { 6685 #ifdef HOST_WORDS_BIGENDIAN 6686 tcg_gen_st_i64(src, cpu_env, offsetof(CPUPPCState, 6687 vsr[32 + regno].u64[(high ? 0 : 1)])); 6688 #else 6689 tcg_gen_st_i64(src, cpu_env, offsetof(CPUPPCState, 6690 vsr[32 + regno].u64[(high ? 1 : 0)])); 6691 #endif 6692 } 6693 6694 #include "translate/fp-impl.inc.c" 6695 6696 #include "translate/vmx-impl.inc.c" 6697 6698 #include "translate/vsx-impl.inc.c" 6699 6700 #include "translate/dfp-impl.inc.c" 6701 6702 #include "translate/spe-impl.inc.c" 6703 6704 /* Handles lfdp, lxsd, lxssp */ 6705 static void gen_dform39(DisasContext *ctx) 6706 { 6707 switch (ctx->opcode & 0x3) { 6708 case 0: /* lfdp */ 6709 if (ctx->insns_flags2 & PPC2_ISA205) { 6710 return gen_lfdp(ctx); 6711 } 6712 break; 6713 case 2: /* lxsd */ 6714 if (ctx->insns_flags2 & PPC2_ISA300) { 6715 return gen_lxsd(ctx); 6716 } 6717 break; 6718 case 3: /* lxssp */ 6719 if (ctx->insns_flags2 & PPC2_ISA300) { 6720 return gen_lxssp(ctx); 6721 } 6722 break; 6723 } 6724 return gen_invalid(ctx); 6725 } 6726 6727 /* handles stfdp, lxv, stxsd, stxssp lxvx */ 6728 static void gen_dform3D(DisasContext *ctx) 6729 { 6730 if ((ctx->opcode & 3) == 1) { /* DQ-FORM */ 6731 switch (ctx->opcode & 0x7) { 6732 case 1: /* lxv */ 6733 if (ctx->insns_flags2 & PPC2_ISA300) { 6734 return gen_lxv(ctx); 6735 } 6736 break; 6737 case 5: /* stxv */ 6738 if (ctx->insns_flags2 & PPC2_ISA300) { 6739 return gen_stxv(ctx); 6740 } 6741 break; 6742 } 6743 } else { /* DS-FORM */ 6744 switch (ctx->opcode & 0x3) { 6745 case 0: /* stfdp */ 6746 if (ctx->insns_flags2 & PPC2_ISA205) { 6747 return gen_stfdp(ctx); 6748 } 6749 break; 6750 case 2: /* stxsd */ 6751 if (ctx->insns_flags2 & PPC2_ISA300) { 6752 return gen_stxsd(ctx); 6753 } 6754 break; 6755 case 3: /* stxssp */ 6756 if (ctx->insns_flags2 & PPC2_ISA300) { 6757 return gen_stxssp(ctx); 6758 } 6759 break; 6760 } 6761 } 6762 return gen_invalid(ctx); 6763 } 6764 6765 static opcode_t opcodes[] = { 6766 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE), 6767 GEN_HANDLER(cmp, 0x1F, 0x00, 0x00, 0x00400000, PPC_INTEGER), 6768 GEN_HANDLER(cmpi, 0x0B, 0xFF, 0xFF, 0x00400000, PPC_INTEGER), 6769 GEN_HANDLER(cmpl, 0x1F, 0x00, 0x01, 0x00400001, PPC_INTEGER), 6770 GEN_HANDLER(cmpli, 0x0A, 0xFF, 0xFF, 0x00400000, PPC_INTEGER), 6771 #if defined(TARGET_PPC64) 6772 GEN_HANDLER_E(cmpeqb, 0x1F, 0x00, 0x07, 0x00600000, PPC_NONE, PPC2_ISA300), 6773 #endif 6774 GEN_HANDLER_E(cmpb, 0x1F, 0x1C, 0x0F, 0x00000001, PPC_NONE, PPC2_ISA205), 6775 GEN_HANDLER_E(cmprb, 0x1F, 0x00, 0x06, 0x00400001, PPC_NONE, PPC2_ISA300), 6776 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL), 6777 GEN_HANDLER(addi, 0x0E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6778 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6779 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6780 GEN_HANDLER(addis, 0x0F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6781 GEN_HANDLER_E(addpcis, 0x13, 0x2, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300), 6782 GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER), 6783 GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER), 6784 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER), 6785 GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER), 6786 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 
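/*
 * Each GEN_HANDLER(name, opc1, opc2, opc3, inval, type) entry binds the
 * gen_<name>() routine defined above to a decode slot: opc1 is the primary
 * opcode, opc2/opc3 index the indirect sub-tables (0xFF marks a directly
 * decoded opcode), inval is the mask of opcode bits that must be zero for
 * the encoding to be valid, and type is the PPC_* instruction-class flag
 * matched against the CPU's insns_flags.  The *_E variants add a PPC2_*
 * flag (insns_flags2) and the *2 variants let the C identifier differ from
 * the mnemonic (e.g. "addic.").  As a rough sketch only -- the field names
 * below are illustrative, see the opcode_t/GEN_OPCODE definitions earlier
 * in this file for the real layout -- the mulli entry above expands to
 * something like:
 *
 *   { .opc1 = 0x07, .opc2 = 0xFF, .opc3 = 0xFF,
 *     .handler = { .inval1 = 0x00000000, .type = PPC_INTEGER,
 *                  .handler = &gen_mulli, .oname = "mulli" } }
 */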
6787 #if defined(TARGET_PPC64) 6788 GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B), 6789 #endif 6790 GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER), 6791 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER), 6792 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6793 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6794 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6795 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER), 6796 GEN_HANDLER_E(cnttzw, 0x1F, 0x1A, 0x10, 0x00000000, PPC_NONE, PPC2_ISA300), 6797 GEN_HANDLER_E(copy, 0x1F, 0x06, 0x18, 0x03C00001, PPC_NONE, PPC2_ISA300), 6798 GEN_HANDLER_E(cp_abort, 0x1F, 0x06, 0x1A, 0x03FFF801, PPC_NONE, PPC2_ISA300), 6799 GEN_HANDLER_E(paste, 0x1F, 0x06, 0x1C, 0x03C00000, PPC_NONE, PPC2_ISA300), 6800 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER), 6801 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER), 6802 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6803 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6804 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6805 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6806 GEN_HANDLER(popcntb, 0x1F, 0x1A, 0x03, 0x0000F801, PPC_POPCNTB), 6807 GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD), 6808 GEN_HANDLER_E(prtyw, 0x1F, 0x1A, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA205), 6809 #if defined(TARGET_PPC64) 6810 GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD), 6811 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B), 6812 GEN_HANDLER_E(cnttzd, 0x1F, 0x1A, 0x11, 0x00000000, PPC_NONE, PPC2_ISA300), 6813 GEN_HANDLER_E(darn, 0x1F, 0x13, 0x17, 0x001CF801, PPC_NONE, PPC2_ISA300), 6814 GEN_HANDLER_E(prtyd, 0x1F, 0x1A, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA205), 6815 GEN_HANDLER_E(bpermd, 0x1F, 0x1C, 0x07, 0x00000001, PPC_NONE, PPC2_PERM_ISA206), 6816 #endif 6817 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6818 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6819 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6820 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER), 6821 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER), 6822 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER), 6823 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER), 6824 #if defined(TARGET_PPC64) 6825 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B), 6826 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B), 6827 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B), 6828 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B), 6829 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B), 6830 GEN_HANDLER2_E(extswsli0, "extswsli", 0x1F, 0x1A, 0x1B, 0x00000000, 6831 PPC_NONE, PPC2_ISA300), 6832 GEN_HANDLER2_E(extswsli1, "extswsli", 0x1F, 0x1B, 0x1B, 0x00000000, 6833 PPC_NONE, PPC2_ISA300), 6834 #endif 6835 #if defined(TARGET_PPC64) 6836 GEN_HANDLER(ld, 0x3A, 0xFF, 0xFF, 0x00000000, PPC_64B), 6837 GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX), 6838 GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B), 6839 #endif 6840 /* handles lfdp, lxsd, lxssp */ 6841 GEN_HANDLER_E(dform39, 0x39, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205), 6842 /* handles stfdp, lxv, stxsd, stxssp, stxv */ 6843 GEN_HANDLER_E(dform3D, 0x3D, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205), 6844 GEN_HANDLER(lmw, 0x2E, 0xFF, 
0xFF, 0x00000000, PPC_INTEGER), 6845 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6846 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING), 6847 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING), 6848 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING), 6849 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING), 6850 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x01FFF801, PPC_MEM_EIEIO), 6851 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM), 6852 GEN_HANDLER_E(lbarx, 0x1F, 0x14, 0x01, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6853 GEN_HANDLER_E(lharx, 0x1F, 0x14, 0x03, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6854 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES), 6855 GEN_HANDLER_E(lwat, 0x1F, 0x06, 0x12, 0x00000001, PPC_NONE, PPC2_ISA300), 6856 GEN_HANDLER_E(stwat, 0x1F, 0x06, 0x16, 0x00000001, PPC_NONE, PPC2_ISA300), 6857 GEN_HANDLER_E(stbcx_, 0x1F, 0x16, 0x15, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6858 GEN_HANDLER_E(sthcx_, 0x1F, 0x16, 0x16, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6859 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES), 6860 #if defined(TARGET_PPC64) 6861 GEN_HANDLER_E(ldat, 0x1F, 0x06, 0x13, 0x00000001, PPC_NONE, PPC2_ISA300), 6862 GEN_HANDLER_E(stdat, 0x1F, 0x06, 0x17, 0x00000001, PPC_NONE, PPC2_ISA300), 6863 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B), 6864 GEN_HANDLER_E(lqarx, 0x1F, 0x14, 0x08, 0, PPC_NONE, PPC2_LSQ_ISA207), 6865 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B), 6866 GEN_HANDLER_E(stqcx_, 0x1F, 0x16, 0x05, 0, PPC_NONE, PPC2_LSQ_ISA207), 6867 #endif 6868 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC), 6869 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT), 6870 GEN_HANDLER_E(wait, 0x1F, 0x1E, 0x00, 0x039FF801, PPC_NONE, PPC2_ISA300), 6871 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 6872 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 6873 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW), 6874 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW), 6875 GEN_HANDLER_E(bctar, 0x13, 0x10, 0x11, 0x0000E000, PPC_NONE, PPC2_BCTAR_ISA207), 6876 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER), 6877 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW), 6878 #if defined(TARGET_PPC64) 6879 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B), 6880 GEN_HANDLER_E(stop, 0x13, 0x12, 0x0b, 0x03FFF801, PPC_NONE, PPC2_ISA300), 6881 GEN_HANDLER_E(doze, 0x13, 0x12, 0x0c, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6882 GEN_HANDLER_E(nap, 0x13, 0x12, 0x0d, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6883 GEN_HANDLER_E(sleep, 0x13, 0x12, 0x0e, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6884 GEN_HANDLER_E(rvwinkle, 0x13, 0x12, 0x0f, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6885 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H), 6886 #endif 6887 GEN_HANDLER(sc, 0x11, 0xFF, 0xFF, 0x03FFF01D, PPC_FLOW), 6888 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW), 6889 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 6890 #if defined(TARGET_PPC64) 6891 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B), 6892 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B), 6893 #endif 6894 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC), 6895 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC), 6896 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC), 6897 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC), 6898 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB), 6899 
GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC), 6900 #if defined(TARGET_PPC64) 6901 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B), 6902 GEN_HANDLER_E(setb, 0x1F, 0x00, 0x04, 0x0003F801, PPC_NONE, PPC2_ISA300), 6903 GEN_HANDLER_E(mcrxrx, 0x1F, 0x00, 0x12, 0x007FF801, PPC_NONE, PPC2_ISA300), 6904 #endif 6905 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001EF801, PPC_MISC), 6906 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000000, PPC_MISC), 6907 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE), 6908 GEN_HANDLER_E(dcbfep, 0x1F, 0x1F, 0x03, 0x03C00001, PPC_NONE, PPC2_BOOKE206), 6909 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE), 6910 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE), 6911 GEN_HANDLER_E(dcbstep, 0x1F, 0x1F, 0x01, 0x03E00001, PPC_NONE, PPC2_BOOKE206), 6912 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x00000001, PPC_CACHE), 6913 GEN_HANDLER_E(dcbtep, 0x1F, 0x1F, 0x09, 0x00000001, PPC_NONE, PPC2_BOOKE206), 6914 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x00000001, PPC_CACHE), 6915 GEN_HANDLER_E(dcbtstep, 0x1F, 0x1F, 0x07, 0x00000001, PPC_NONE, PPC2_BOOKE206), 6916 GEN_HANDLER_E(dcbtls, 0x1F, 0x06, 0x05, 0x02000001, PPC_BOOKE, PPC2_BOOKE206), 6917 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ), 6918 GEN_HANDLER_E(dcbzep, 0x1F, 0x1F, 0x1F, 0x03C00001, PPC_NONE, PPC2_BOOKE206), 6919 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC), 6920 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x01800001, PPC_ALTIVEC), 6921 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC), 6922 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI), 6923 GEN_HANDLER_E(icbiep, 0x1F, 0x1F, 0x1E, 0x03E00001, PPC_NONE, PPC2_BOOKE206), 6924 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA), 6925 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT), 6926 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT), 6927 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT), 6928 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT), 6929 #if defined(TARGET_PPC64) 6930 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B), 6931 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001, 6932 PPC_SEGMENT_64B), 6933 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B), 6934 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001, 6935 PPC_SEGMENT_64B), 6936 GEN_HANDLER2(slbmte, "slbmte", 0x1F, 0x12, 0x0C, 0x001F0001, PPC_SEGMENT_64B), 6937 GEN_HANDLER2(slbmfee, "slbmfee", 0x1F, 0x13, 0x1C, 0x001F0001, PPC_SEGMENT_64B), 6938 GEN_HANDLER2(slbmfev, "slbmfev", 0x1F, 0x13, 0x1A, 0x001F0001, PPC_SEGMENT_64B), 6939 GEN_HANDLER2(slbfee_, "slbfee.", 0x1F, 0x13, 0x1E, 0x001F0000, PPC_SEGMENT_64B), 6940 #endif 6941 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA), 6942 /* XXX Those instructions will need to be handled differently for 6943 * different ISA versions */ 6944 GEN_HANDLER(tlbiel, 0x1F, 0x12, 0x08, 0x001F0001, PPC_MEM_TLBIE), 6945 GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x001F0001, PPC_MEM_TLBIE), 6946 GEN_HANDLER_E(tlbiel, 0x1F, 0x12, 0x08, 0x00100001, PPC_NONE, PPC2_ISA300), 6947 GEN_HANDLER_E(tlbie, 0x1F, 0x12, 0x09, 0x00100001, PPC_NONE, PPC2_ISA300), 6948 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC), 6949 #if defined(TARGET_PPC64) 6950 GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x031FFC01, PPC_SLBI), 6951 GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI), 6952 GEN_HANDLER_E(slbieg, 0x1F, 0x12, 0x0E, 
0x001F0001, PPC_NONE, PPC2_ISA300), 6953 GEN_HANDLER_E(slbsync, 0x1F, 0x12, 0x0A, 0x03FFF801, PPC_NONE, PPC2_ISA300), 6954 #endif 6955 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN), 6956 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN), 6957 GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR), 6958 GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR), 6959 GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR), 6960 GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR), 6961 GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR), 6962 GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR), 6963 GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR), 6964 GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR), 6965 GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR), 6966 GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR), 6967 GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR), 6968 GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR), 6969 GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR), 6970 GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR), 6971 GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR), 6972 GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR), 6973 GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR), 6974 GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR), 6975 GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR), 6976 GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR), 6977 GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR), 6978 GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR), 6979 GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR), 6980 GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR), 6981 GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR), 6982 GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR), 6983 GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR), 6984 GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR), 6985 GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR), 6986 GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR), 6987 GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR), 6988 GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR), 6989 GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR), 6990 GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR), 6991 GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC), 6992 GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC), 6993 GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC), 6994 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB), 6995 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB), 6996 GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB), 6997 GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB), 6998 GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER), 6999 GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER), 7000 GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER), 7001 GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER), 7002 GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER), 7003 GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER), 7004 GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, 
PPC_POWER2), 7005 GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 7006 GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2), 7007 GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2), 7008 GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 7009 GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 7010 GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2), 7011 GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2), 7012 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI), 7013 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA), 7014 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR), 7015 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR), 7016 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX), 7017 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX), 7018 GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX), 7019 GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX), 7020 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON), 7021 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON), 7022 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT), 7023 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON), 7024 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON), 7025 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP), 7026 GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206), 7027 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI), 7028 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI), 7029 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB), 7030 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB), 7031 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB), 7032 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE), 7033 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE), 7034 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE), 7035 GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, 7036 PPC_NONE, PPC2_BOOKE206), 7037 GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, 7038 PPC_NONE, PPC2_BOOKE206), 7039 GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, 7040 PPC_NONE, PPC2_BOOKE206), 7041 GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001, 7042 PPC_NONE, PPC2_BOOKE206), 7043 GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001, 7044 PPC_NONE, PPC2_BOOKE206), 7045 GEN_HANDLER2_E(msgsnd, "msgsnd", 0x1F, 0x0E, 0x06, 0x03ff0001, 7046 PPC_NONE, PPC2_PRCNTL), 7047 GEN_HANDLER2_E(msgclr, "msgclr", 0x1F, 0x0E, 0x07, 0x03ff0001, 7048 PPC_NONE, PPC2_PRCNTL), 7049 GEN_HANDLER2_E(msgsync, "msgsync", 0x1F, 0x16, 0x1B, 0x00000000, 7050 PPC_NONE, PPC2_PRCNTL), 7051 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE), 7052 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE), 7053 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC), 7054 GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801, 7055 PPC_BOOKE, PPC2_BOOKE206), 7056 GEN_HANDLER(msync_4xx, 0x1F, 0x16, 0x12, 0x039FF801, PPC_BOOKE), 7057 GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001, 7058 PPC_BOOKE, PPC2_BOOKE206), 7059 GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, 7060 PPC_440_SPEC), 7061 
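/*
 * Note: the two icbt_440 entries above intentionally register the same
 * no-op handler under both the BookE 2.06 encoding (0x1F/0x16/0x00) and
 * the 440-specific encoding (0x1F/0x06/0x08).
 */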
GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC), 7062 GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC), 7063 GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC), 7064 GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC), 7065 GEN_HANDLER(vmladduhm, 0x04, 0x11, 0xFF, 0x00000000, PPC_ALTIVEC), 7066 #if defined(TARGET_PPC64) 7067 GEN_HANDLER_E(maddhd_maddhdu, 0x04, 0x18, 0xFF, 0x00000000, PPC_NONE, 7068 PPC2_ISA300), 7069 GEN_HANDLER_E(maddld, 0x04, 0x19, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300), 7070 #endif 7071 7072 #undef GEN_INT_ARITH_ADD 7073 #undef GEN_INT_ARITH_ADD_CONST 7074 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \ 7075 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER), 7076 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \ 7077 add_ca, compute_ca, compute_ov) \ 7078 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER), 7079 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0) 7080 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1) 7081 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0) 7082 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1) 7083 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0) 7084 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1) 7085 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0) 7086 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1) 7087 GEN_HANDLER_E(addex, 0x1F, 0x0A, 0x05, 0x00000000, PPC_NONE, PPC2_ISA300), 7088 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0) 7089 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1) 7090 7091 #undef GEN_INT_ARITH_DIVW 7092 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \ 7093 GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER) 7094 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0), 7095 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1), 7096 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0), 7097 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1), 7098 GEN_HANDLER_E(divwe, 0x1F, 0x0B, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206), 7099 GEN_HANDLER_E(divweo, 0x1F, 0x0B, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206), 7100 GEN_HANDLER_E(divweu, 0x1F, 0x0B, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206), 7101 GEN_HANDLER_E(divweuo, 0x1F, 0x0B, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206), 7102 GEN_HANDLER_E(modsw, 0x1F, 0x0B, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300), 7103 GEN_HANDLER_E(moduw, 0x1F, 0x0B, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300), 7104 7105 #if defined(TARGET_PPC64) 7106 #undef GEN_INT_ARITH_DIVD 7107 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \ 7108 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B) 7109 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0), 7110 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1), 7111 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0), 7112 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1), 7113 7114 GEN_HANDLER_E(divdeu, 0x1F, 0x09, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206), 7115 GEN_HANDLER_E(divdeuo, 0x1F, 0x09, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206), 7116 GEN_HANDLER_E(divde, 0x1F, 0x09, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206), 7117 GEN_HANDLER_E(divdeo, 0x1F, 0x09, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206), 7118 GEN_HANDLER_E(modsd, 0x1F, 0x09, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300), 7119 GEN_HANDLER_E(modud, 0x1F, 0x09, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300), 7120 7121 #undef GEN_INT_ARITH_MUL_HELPER 7122 #define GEN_INT_ARITH_MUL_HELPER(name, opc3) \ 7123 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B) 7124 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00), 7125 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02), 7126 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17), 7127 #endif 7128 7129 #undef GEN_INT_ARITH_SUBF 7130 #undef GEN_INT_ARITH_SUBF_CONST 7131 #define 
GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \ 7132 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER), 7133 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \ 7134 add_ca, compute_ca, compute_ov) \ 7135 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER), 7136 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0) 7137 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1) 7138 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0) 7139 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1) 7140 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0) 7141 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1) 7142 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0) 7143 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1) 7144 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0) 7145 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1) 7146 7147 #undef GEN_LOGICAL1 7148 #undef GEN_LOGICAL2 7149 #define GEN_LOGICAL2(name, tcg_op, opc, type) \ 7150 GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type) 7151 #define GEN_LOGICAL1(name, tcg_op, opc, type) \ 7152 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type) 7153 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER), 7154 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER), 7155 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER), 7156 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER), 7157 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER), 7158 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER), 7159 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER), 7160 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER), 7161 #if defined(TARGET_PPC64) 7162 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B), 7163 #endif 7164 7165 #if defined(TARGET_PPC64) 7166 #undef GEN_PPC64_R2 7167 #undef GEN_PPC64_R4 7168 #define GEN_PPC64_R2(name, opc1, opc2) \ 7169 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\ 7170 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \ 7171 PPC_64B) 7172 #define GEN_PPC64_R4(name, opc1, opc2) \ 7173 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\ 7174 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \ 7175 PPC_64B), \ 7176 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \ 7177 PPC_64B), \ 7178 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \ 7179 PPC_64B) 7180 GEN_PPC64_R4(rldicl, 0x1E, 0x00), 7181 GEN_PPC64_R4(rldicr, 0x1E, 0x02), 7182 GEN_PPC64_R4(rldic, 0x1E, 0x04), 7183 GEN_PPC64_R2(rldcl, 0x1E, 0x08), 7184 GEN_PPC64_R2(rldcr, 0x1E, 0x09), 7185 GEN_PPC64_R4(rldimi, 0x1E, 0x06), 7186 #endif 7187 7188 #undef GEN_LD 7189 #undef GEN_LDU 7190 #undef GEN_LDUX 7191 #undef GEN_LDX_E 7192 #undef GEN_LDS 7193 #define GEN_LD(name, ldop, opc, type) \ 7194 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type), 7195 #define GEN_LDU(name, ldop, opc, type) \ 7196 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type), 7197 #define GEN_LDUX(name, ldop, opc2, opc3, type) \ 7198 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type), 7199 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \ 7200 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2), 7201 #define GEN_LDS(name, ldop, op, type) \ 7202 GEN_LD(name, ldop, op | 0x20, type) \ 7203 GEN_LDU(name, ldop, op | 0x21, type) \ 7204 GEN_LDUX(name, ldop, 0x17, op | 0x01, type) \ 7205 GEN_LDX(name, ldop, 0x17, op | 0x00, type) 7206 7207 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER) 7208 GEN_LDS(lha, ld16s, 0x0A, 
PPC_INTEGER) 7209 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER) 7210 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER) 7211 #if defined(TARGET_PPC64) 7212 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B) 7213 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B) 7214 GEN_LDUX(ld, ld64_i64, 0x15, 0x01, PPC_64B) 7215 GEN_LDX(ld, ld64_i64, 0x15, 0x00, PPC_64B) 7216 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE) 7217 7218 /* HV/P7 and later only */ 7219 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST) 7220 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x18, PPC_CILDST) 7221 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST) 7222 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST) 7223 #endif 7224 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER) 7225 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER) 7226 7227 /* External PID based load */ 7228 #undef GEN_LDEPX 7229 #define GEN_LDEPX(name, ldop, opc2, opc3) \ 7230 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3, \ 7231 0x00000001, PPC_NONE, PPC2_BOOKE206), 7232 7233 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02) 7234 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08) 7235 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00) 7236 #if defined(TARGET_PPC64) 7237 GEN_LDEPX(ld, DEF_MEMOP(MO_Q), 0x1D, 0x00) 7238 #endif 7239 7240 #undef GEN_ST 7241 #undef GEN_STU 7242 #undef GEN_STUX 7243 #undef GEN_STX_E 7244 #undef GEN_STS 7245 #define GEN_ST(name, stop, opc, type) \ 7246 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type), 7247 #define GEN_STU(name, stop, opc, type) \ 7248 GEN_HANDLER(stop##u, opc, 0xFF, 0xFF, 0x00000000, type), 7249 #define GEN_STUX(name, stop, opc2, opc3, type) \ 7250 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type), 7251 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \ 7252 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000000, type, type2), 7253 #define GEN_STS(name, stop, op, type) \ 7254 GEN_ST(name, stop, op | 0x20, type) \ 7255 GEN_STU(name, stop, op | 0x21, type) \ 7256 GEN_STUX(name, stop, 0x17, op | 0x01, type) \ 7257 GEN_STX(name, stop, 0x17, op | 0x00, type) 7258 7259 GEN_STS(stb, st8, 0x06, PPC_INTEGER) 7260 GEN_STS(sth, st16, 0x0C, PPC_INTEGER) 7261 GEN_STS(stw, st32, 0x04, PPC_INTEGER) 7262 #if defined(TARGET_PPC64) 7263 GEN_STUX(std, st64_i64, 0x15, 0x05, PPC_64B) 7264 GEN_STX(std, st64_i64, 0x15, 0x04, PPC_64B) 7265 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE) 7266 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST) 7267 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST) 7268 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST) 7269 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST) 7270 #endif 7271 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER) 7272 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER) 7273 7274 #undef GEN_STEPX 7275 #define GEN_STEPX(name, ldop, opc2, opc3) \ 7276 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3, \ 7277 0x00000001, PPC_NONE, PPC2_BOOKE206), 7278 7279 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06) 7280 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C) 7281 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04) 7282 #if defined(TARGET_PPC64) 7283 GEN_STEPX(std, DEF_MEMOP(MO_Q), 0x1D, 0x04) 7284 #endif 7285 7286 #undef GEN_CRLOGIC 7287 #define GEN_CRLOGIC(name, tcg_op, opc) \ 7288 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER) 7289 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08), 7290 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04), 7291 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09), 7292 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07), 7293 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01), 7294 GEN_CRLOGIC(cror, tcg_gen_or_i32, 
0x0E), 7295 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D), 7296 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06), 7297 7298 #undef GEN_MAC_HANDLER 7299 #define GEN_MAC_HANDLER(name, opc2, opc3) \ 7300 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC) 7301 GEN_MAC_HANDLER(macchw, 0x0C, 0x05), 7302 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15), 7303 GEN_MAC_HANDLER(macchws, 0x0C, 0x07), 7304 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17), 7305 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06), 7306 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16), 7307 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04), 7308 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14), 7309 GEN_MAC_HANDLER(machhw, 0x0C, 0x01), 7310 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11), 7311 GEN_MAC_HANDLER(machhws, 0x0C, 0x03), 7312 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13), 7313 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02), 7314 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12), 7315 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00), 7316 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10), 7317 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D), 7318 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D), 7319 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F), 7320 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F), 7321 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C), 7322 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C), 7323 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E), 7324 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E), 7325 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05), 7326 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15), 7327 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07), 7328 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17), 7329 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01), 7330 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11), 7331 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03), 7332 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13), 7333 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D), 7334 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D), 7335 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F), 7336 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F), 7337 GEN_MAC_HANDLER(mulchw, 0x08, 0x05), 7338 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04), 7339 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01), 7340 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00), 7341 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D), 7342 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C), 7343 7344 GEN_HANDLER2_E(tbegin, "tbegin", 0x1F, 0x0E, 0x14, 0x01DFF800, \ 7345 PPC_NONE, PPC2_TM), 7346 GEN_HANDLER2_E(tend, "tend", 0x1F, 0x0E, 0x15, 0x01FFF800, \ 7347 PPC_NONE, PPC2_TM), 7348 GEN_HANDLER2_E(tabort, "tabort", 0x1F, 0x0E, 0x1C, 0x03E0F800, \ 7349 PPC_NONE, PPC2_TM), 7350 GEN_HANDLER2_E(tabortwc, "tabortwc", 0x1F, 0x0E, 0x18, 0x00000000, \ 7351 PPC_NONE, PPC2_TM), 7352 GEN_HANDLER2_E(tabortwci, "tabortwci", 0x1F, 0x0E, 0x1A, 0x00000000, \ 7353 PPC_NONE, PPC2_TM), 7354 GEN_HANDLER2_E(tabortdc, "tabortdc", 0x1F, 0x0E, 0x19, 0x00000000, \ 7355 PPC_NONE, PPC2_TM), 7356 GEN_HANDLER2_E(tabortdci, "tabortdci", 0x1F, 0x0E, 0x1B, 0x00000000, \ 7357 PPC_NONE, PPC2_TM), 7358 GEN_HANDLER2_E(tsr, "tsr", 0x1F, 0x0E, 0x17, 0x03DFF800, \ 7359 PPC_NONE, PPC2_TM), 7360 GEN_HANDLER2_E(tcheck, "tcheck", 0x1F, 0x0E, 0x16, 0x007FF800, \ 7361 PPC_NONE, PPC2_TM), 7362 GEN_HANDLER2_E(treclaim, "treclaim", 0x1F, 0x0E, 0x1D, 0x03E0F800, \ 7363 PPC_NONE, PPC2_TM), 7364 GEN_HANDLER2_E(trechkpt, "trechkpt", 0x1F, 0x0E, 0x1F, 0x03FFF800, \ 7365 PPC_NONE, PPC2_TM), 7366 7367 #include "translate/fp-ops.inc.c" 7368 7369 #include "translate/vmx-ops.inc.c" 7370 7371 #include "translate/vsx-ops.inc.c" 7372 7373 #include "translate/dfp-ops.inc.c" 7374 7375 #include "translate/spe-ops.inc.c" 7376 }; 7377 7378 #include "helper_regs.h" 7379 #include "translate_init.inc.c" 7380 7381 /*****************************************************************************/ 7382 /* Misc 
PowerPC helpers */ 7383 void ppc_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf, 7384 int flags) 7385 { 7386 #define RGPL 4 7387 #define RFPL 4 7388 7389 PowerPCCPU *cpu = POWERPC_CPU(cs); 7390 CPUPPCState *env = &cpu->env; 7391 int i; 7392 7393 cpu_fprintf(f, "NIP " TARGET_FMT_lx " LR " TARGET_FMT_lx " CTR " 7394 TARGET_FMT_lx " XER " TARGET_FMT_lx " CPU#%d\n", 7395 env->nip, env->lr, env->ctr, cpu_read_xer(env), 7396 cs->cpu_index); 7397 cpu_fprintf(f, "MSR " TARGET_FMT_lx " HID0 " TARGET_FMT_lx " HF " 7398 TARGET_FMT_lx " iidx %d didx %d\n", 7399 env->msr, env->spr[SPR_HID0], 7400 env->hflags, env->immu_idx, env->dmmu_idx); 7401 #if !defined(NO_TIMER_DUMP) 7402 cpu_fprintf(f, "TB %08" PRIu32 " %08" PRIu64 7403 #if !defined(CONFIG_USER_ONLY) 7404 " DECR %08" PRIu32 7405 #endif 7406 "\n", 7407 cpu_ppc_load_tbu(env), cpu_ppc_load_tbl(env) 7408 #if !defined(CONFIG_USER_ONLY) 7409 , cpu_ppc_load_decr(env) 7410 #endif 7411 ); 7412 #endif 7413 for (i = 0; i < 32; i++) { 7414 if ((i & (RGPL - 1)) == 0) 7415 cpu_fprintf(f, "GPR%02d", i); 7416 cpu_fprintf(f, " %016" PRIx64, ppc_dump_gpr(env, i)); 7417 if ((i & (RGPL - 1)) == (RGPL - 1)) 7418 cpu_fprintf(f, "\n"); 7419 } 7420 cpu_fprintf(f, "CR "); 7421 for (i = 0; i < 8; i++) 7422 cpu_fprintf(f, "%01x", env->crf[i]); 7423 cpu_fprintf(f, " ["); 7424 for (i = 0; i < 8; i++) { 7425 char a = '-'; 7426 if (env->crf[i] & 0x08) 7427 a = 'L'; 7428 else if (env->crf[i] & 0x04) 7429 a = 'G'; 7430 else if (env->crf[i] & 0x02) 7431 a = 'E'; 7432 cpu_fprintf(f, " %c%c", a, env->crf[i] & 0x01 ? 'O' : ' '); 7433 } 7434 cpu_fprintf(f, " ] RES " TARGET_FMT_lx "\n", 7435 env->reserve_addr); 7436 7437 if (flags & CPU_DUMP_FPU) { 7438 for (i = 0; i < 32; i++) { 7439 if ((i & (RFPL - 1)) == 0) { 7440 cpu_fprintf(f, "FPR%02d", i); 7441 } 7442 cpu_fprintf(f, " %016" PRIx64, *cpu_fpr_ptr(env, i)); 7443 if ((i & (RFPL - 1)) == (RFPL - 1)) { 7444 cpu_fprintf(f, "\n"); 7445 } 7446 } 7447 cpu_fprintf(f, "FPSCR " TARGET_FMT_lx "\n", env->fpscr); 7448 } 7449 7450 #if !defined(CONFIG_USER_ONLY) 7451 cpu_fprintf(f, " SRR0 " TARGET_FMT_lx " SRR1 " TARGET_FMT_lx 7452 " PVR " TARGET_FMT_lx " VRSAVE " TARGET_FMT_lx "\n", 7453 env->spr[SPR_SRR0], env->spr[SPR_SRR1], 7454 env->spr[SPR_PVR], env->spr[SPR_VRSAVE]); 7455 7456 cpu_fprintf(f, "SPRG0 " TARGET_FMT_lx " SPRG1 " TARGET_FMT_lx 7457 " SPRG2 " TARGET_FMT_lx " SPRG3 " TARGET_FMT_lx "\n", 7458 env->spr[SPR_SPRG0], env->spr[SPR_SPRG1], 7459 env->spr[SPR_SPRG2], env->spr[SPR_SPRG3]); 7460 7461 cpu_fprintf(f, "SPRG4 " TARGET_FMT_lx " SPRG5 " TARGET_FMT_lx 7462 " SPRG6 " TARGET_FMT_lx " SPRG7 " TARGET_FMT_lx "\n", 7463 env->spr[SPR_SPRG4], env->spr[SPR_SPRG5], 7464 env->spr[SPR_SPRG6], env->spr[SPR_SPRG7]); 7465 7466 #if defined(TARGET_PPC64) 7467 if (env->excp_model == POWERPC_EXCP_POWER7 || 7468 env->excp_model == POWERPC_EXCP_POWER8) { 7469 cpu_fprintf(f, "HSRR0 " TARGET_FMT_lx " HSRR1 " TARGET_FMT_lx "\n", 7470 env->spr[SPR_HSRR0], env->spr[SPR_HSRR1]); 7471 } 7472 #endif 7473 if (env->excp_model == POWERPC_EXCP_BOOKE) { 7474 cpu_fprintf(f, "CSRR0 " TARGET_FMT_lx " CSRR1 " TARGET_FMT_lx 7475 " MCSRR0 " TARGET_FMT_lx " MCSRR1 " TARGET_FMT_lx "\n", 7476 env->spr[SPR_BOOKE_CSRR0], env->spr[SPR_BOOKE_CSRR1], 7477 env->spr[SPR_BOOKE_MCSRR0], env->spr[SPR_BOOKE_MCSRR1]); 7478 7479 cpu_fprintf(f, " TCR " TARGET_FMT_lx " TSR " TARGET_FMT_lx 7480 " ESR " TARGET_FMT_lx " DEAR " TARGET_FMT_lx "\n", 7481 env->spr[SPR_BOOKE_TCR], env->spr[SPR_BOOKE_TSR], 7482 env->spr[SPR_BOOKE_ESR], env->spr[SPR_BOOKE_DEAR]); 7483 7484 
cpu_fprintf(f, " PIR " TARGET_FMT_lx " DECAR " TARGET_FMT_lx 7485 " IVPR " TARGET_FMT_lx " EPCR " TARGET_FMT_lx "\n", 7486 env->spr[SPR_BOOKE_PIR], env->spr[SPR_BOOKE_DECAR], 7487 env->spr[SPR_BOOKE_IVPR], env->spr[SPR_BOOKE_EPCR]); 7488 7489 cpu_fprintf(f, " MCSR " TARGET_FMT_lx " SPRG8 " TARGET_FMT_lx 7490 " EPR " TARGET_FMT_lx "\n", 7491 env->spr[SPR_BOOKE_MCSR], env->spr[SPR_BOOKE_SPRG8], 7492 env->spr[SPR_BOOKE_EPR]); 7493 7494 /* FSL-specific */ 7495 cpu_fprintf(f, " MCAR " TARGET_FMT_lx " PID1 " TARGET_FMT_lx 7496 " PID2 " TARGET_FMT_lx " SVR " TARGET_FMT_lx "\n", 7497 env->spr[SPR_Exxx_MCAR], env->spr[SPR_BOOKE_PID1], 7498 env->spr[SPR_BOOKE_PID2], env->spr[SPR_E500_SVR]); 7499 7500 /* 7501 * IVORs are left out as they are large and do not change often -- 7502 * they can be read with "p $ivor0", "p $ivor1", etc. 7503 */ 7504 } 7505 7506 #if defined(TARGET_PPC64) 7507 if (env->flags & POWERPC_FLAG_CFAR) { 7508 cpu_fprintf(f, " CFAR " TARGET_FMT_lx"\n", env->cfar); 7509 } 7510 #endif 7511 7512 if (env->spr_cb[SPR_LPCR].name) 7513 cpu_fprintf(f, " LPCR " TARGET_FMT_lx "\n", env->spr[SPR_LPCR]); 7514 7515 switch (env->mmu_model) { 7516 case POWERPC_MMU_32B: 7517 case POWERPC_MMU_601: 7518 case POWERPC_MMU_SOFT_6xx: 7519 case POWERPC_MMU_SOFT_74xx: 7520 #if defined(TARGET_PPC64) 7521 case POWERPC_MMU_64B: 7522 case POWERPC_MMU_2_03: 7523 case POWERPC_MMU_2_06: 7524 case POWERPC_MMU_2_07: 7525 case POWERPC_MMU_3_00: 7526 #endif 7527 if (env->spr_cb[SPR_SDR1].name) { /* SDR1 Exists */ 7528 cpu_fprintf(f, " SDR1 " TARGET_FMT_lx " ", env->spr[SPR_SDR1]); 7529 } 7530 if (env->spr_cb[SPR_PTCR].name) { /* PTCR Exists */ 7531 cpu_fprintf(f, " PTCR " TARGET_FMT_lx " ", env->spr[SPR_PTCR]); 7532 } 7533 cpu_fprintf(f, " DAR " TARGET_FMT_lx " DSISR " TARGET_FMT_lx "\n", 7534 env->spr[SPR_DAR], env->spr[SPR_DSISR]); 7535 break; 7536 case POWERPC_MMU_BOOKE206: 7537 cpu_fprintf(f, " MAS0 " TARGET_FMT_lx " MAS1 " TARGET_FMT_lx 7538 " MAS2 " TARGET_FMT_lx " MAS3 " TARGET_FMT_lx "\n", 7539 env->spr[SPR_BOOKE_MAS0], env->spr[SPR_BOOKE_MAS1], 7540 env->spr[SPR_BOOKE_MAS2], env->spr[SPR_BOOKE_MAS3]); 7541 7542 cpu_fprintf(f, " MAS4 " TARGET_FMT_lx " MAS6 " TARGET_FMT_lx 7543 " MAS7 " TARGET_FMT_lx " PID " TARGET_FMT_lx "\n", 7544 env->spr[SPR_BOOKE_MAS4], env->spr[SPR_BOOKE_MAS6], 7545 env->spr[SPR_BOOKE_MAS7], env->spr[SPR_BOOKE_PID]); 7546 7547 cpu_fprintf(f, "MMUCFG " TARGET_FMT_lx " TLB0CFG " TARGET_FMT_lx 7548 " TLB1CFG " TARGET_FMT_lx "\n", 7549 env->spr[SPR_MMUCFG], env->spr[SPR_BOOKE_TLB0CFG], 7550 env->spr[SPR_BOOKE_TLB1CFG]); 7551 break; 7552 default: 7553 break; 7554 } 7555 #endif 7556 7557 #undef RGPL 7558 #undef RFPL 7559 } 7560 7561 void ppc_cpu_dump_statistics(CPUState *cs, FILE*f, 7562 fprintf_function cpu_fprintf, int flags) 7563 { 7564 #if defined(DO_PPC_STATISTICS) 7565 PowerPCCPU *cpu = POWERPC_CPU(cs); 7566 opc_handler_t **t1, **t2, **t3, *handler; 7567 int op1, op2, op3; 7568 7569 t1 = cpu->env.opcodes; 7570 for (op1 = 0; op1 < 64; op1++) { 7571 handler = t1[op1]; 7572 if (is_indirect_opcode(handler)) { 7573 t2 = ind_table(handler); 7574 for (op2 = 0; op2 < 32; op2++) { 7575 handler = t2[op2]; 7576 if (is_indirect_opcode(handler)) { 7577 t3 = ind_table(handler); 7578 for (op3 = 0; op3 < 32; op3++) { 7579 handler = t3[op3]; 7580 if (handler->count == 0) 7581 continue; 7582 cpu_fprintf(f, "%02x %02x %02x (%02x %04d) %16s: " 7583 "%016" PRIx64 " %" PRId64 "\n", 7584 op1, op2, op3, op1, (op3 << 5) | op2, 7585 handler->oname, 7586 handler->count, handler->count); 7587 } 7588 } else { 7589 
                    if (handler->count == 0)
                        continue;
                    cpu_fprintf(f, "%02x %02x (%02x %04d) %16s: "
                                "%016" PRIx64 " %" PRId64 "\n",
                                op1, op2, op1, op2, handler->oname,
                                handler->count, handler->count);
                }
            }
        } else {
            if (handler->count == 0)
                continue;
            cpu_fprintf(f, "%02x (%02x ) %16s: %016" PRIx64
                        " %" PRId64 "\n",
                        op1, op1, handler->oname,
                        handler->count, handler->count);
        }
    }
#endif
}

static void ppc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    CPUPPCState *env = cs->env_ptr;
    int bound;

    ctx->exception = POWERPC_EXCP_NONE;
    ctx->spr_cb = env->spr_cb;
    ctx->pr = msr_pr;
    ctx->mem_idx = env->dmmu_idx;
    ctx->dr = msr_dr;
#if !defined(CONFIG_USER_ONLY)
    ctx->hv = msr_hv || !env->has_hv_mode;
#endif
    ctx->insns_flags = env->insns_flags;
    ctx->insns_flags2 = env->insns_flags2;
    ctx->access_type = -1;
    ctx->need_access_type = !(env->mmu_model & POWERPC_MMU_64B);
    ctx->le_mode = !!(env->hflags & (1 << MSR_LE));
    ctx->default_tcg_memop_mask = ctx->le_mode ? MO_LE : MO_BE;
    ctx->flags = env->flags;
#if defined(TARGET_PPC64)
    ctx->sf_mode = msr_is_64bit(env, env->msr);
    ctx->has_cfar = !!(env->flags & POWERPC_FLAG_CFAR);
#endif
    ctx->lazy_tlb_flush = env->mmu_model == POWERPC_MMU_32B
        || env->mmu_model == POWERPC_MMU_601
        || (env->mmu_model & POWERPC_MMU_64B);

    ctx->fpu_enabled = !!msr_fp;
    if ((env->flags & POWERPC_FLAG_SPE) && msr_spe)
        ctx->spe_enabled = !!msr_spe;
    else
        ctx->spe_enabled = false;
    if ((env->flags & POWERPC_FLAG_VRE) && msr_vr)
        ctx->altivec_enabled = !!msr_vr;
    else
        ctx->altivec_enabled = false;
    if ((env->flags & POWERPC_FLAG_VSX) && msr_vsx) {
        ctx->vsx_enabled = !!msr_vsx;
    } else {
        ctx->vsx_enabled = false;
    }
#if defined(TARGET_PPC64)
    if ((env->flags & POWERPC_FLAG_TM) && msr_tm) {
        ctx->tm_enabled = !!msr_tm;
    } else {
        ctx->tm_enabled = false;
    }
#endif
    ctx->gtse = !!(env->spr[SPR_LPCR] & LPCR_GTSE);
    if ((env->flags & POWERPC_FLAG_SE) && msr_se)
        ctx->singlestep_enabled = CPU_SINGLE_STEP;
    else
        ctx->singlestep_enabled = 0;
    if ((env->flags & POWERPC_FLAG_BE) && msr_be)
        ctx->singlestep_enabled |= CPU_BRANCH_STEP;
    if ((env->flags & POWERPC_FLAG_DE) && msr_de) {
        ctx->singlestep_enabled = 0;
        target_ulong dbcr0 = env->spr[SPR_BOOKE_DBCR0];
        if (dbcr0 & DBCR0_ICMP) {
            ctx->singlestep_enabled |= CPU_SINGLE_STEP;
        }
        if (dbcr0 & DBCR0_BRT) {
            ctx->singlestep_enabled |= CPU_BRANCH_STEP;
        }
    }
    if (unlikely(ctx->base.singlestep_enabled)) {
        ctx->singlestep_enabled |= GDBSTUB_SINGLE_STEP;
    }
#if defined(DO_SINGLE_STEP) && 0
    /* Single step trace mode */
    msr_se = 1;
#endif

    bound = -(ctx->base.pc_first | TARGET_PAGE_MASK) / 4;
    ctx->base.max_insns = MIN(ctx->base.max_insns, bound);
}

static void ppc_tr_tb_start(DisasContextBase *db, CPUState *cs)
{
}

static void ppc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
{
    tcg_gen_insn_start(dcbase->pc_next);
}

static bool ppc_tr_breakpoint_check(DisasContextBase *dcbase, CPUState *cs,
                                    const CPUBreakpoint *bp)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
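
    /*
     * A breakpoint registered for this guest PC was hit during
     * translation: raise QEMU's debug exception so the TB ends here
     * and control can return to the debugger (gdbstub).
     */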
    gen_debug_exception(ctx);
    dcbase->is_jmp = DISAS_NORETURN;
    /* The address covered by the breakpoint must be included in
       [tb->pc, tb->pc + tb->size) in order for it to be
       properly cleared -- thus we increment the PC here so that
       the logic setting tb->size below does the right thing. */
    ctx->base.pc_next += 4;
    return true;
}

static void ppc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    CPUPPCState *env = cs->env_ptr;
    opc_handler_t **table, *handler;

    LOG_DISAS("----------------\n");
    LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
              ctx->base.pc_next, ctx->mem_idx, (int)msr_ir);

    if (unlikely(need_byteswap(ctx))) {
        ctx->opcode = bswap32(cpu_ldl_code(env, ctx->base.pc_next));
    } else {
        ctx->opcode = cpu_ldl_code(env, ctx->base.pc_next);
    }
    LOG_DISAS("translate opcode %08x (%02x %02x %02x %02x) (%s)\n",
              ctx->opcode, opc1(ctx->opcode), opc2(ctx->opcode),
              opc3(ctx->opcode), opc4(ctx->opcode),
              ctx->le_mode ? "little" : "big");
    ctx->base.pc_next += 4;
    table = env->opcodes;
    handler = table[opc1(ctx->opcode)];
    if (is_indirect_opcode(handler)) {
        table = ind_table(handler);
        handler = table[opc2(ctx->opcode)];
        if (is_indirect_opcode(handler)) {
            table = ind_table(handler);
            handler = table[opc3(ctx->opcode)];
            if (is_indirect_opcode(handler)) {
                table = ind_table(handler);
                handler = table[opc4(ctx->opcode)];
            }
        }
    }
    /* Is opcode *REALLY* valid ? */
    if (unlikely(handler->handler == &gen_invalid)) {
        qemu_log_mask(LOG_GUEST_ERROR, "invalid/unsupported opcode: "
                      "%02x - %02x - %02x - %02x (%08x) "
                      TARGET_FMT_lx " %d\n",
                      opc1(ctx->opcode), opc2(ctx->opcode),
                      opc3(ctx->opcode), opc4(ctx->opcode),
                      ctx->opcode, ctx->base.pc_next - 4, (int)msr_ir);
    } else {
        uint32_t inval;

        if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE)
                     && Rc(ctx->opcode))) {
            inval = handler->inval2;
        } else {
            inval = handler->inval1;
        }

        if (unlikely((ctx->opcode & inval) != 0)) {
            qemu_log_mask(LOG_GUEST_ERROR, "invalid bits: %08x for opcode: "
                          "%02x - %02x - %02x - %02x (%08x) "
                          TARGET_FMT_lx "\n", ctx->opcode & inval,
                          opc1(ctx->opcode), opc2(ctx->opcode),
                          opc3(ctx->opcode), opc4(ctx->opcode),
                          ctx->opcode, ctx->base.pc_next - 4);
            gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
            ctx->base.is_jmp = DISAS_NORETURN;
            return;
        }
    }
    (*(handler->handler))(ctx);
#if defined(DO_PPC_STATISTICS)
    handler->count++;
#endif
    /* Check trace mode exceptions */
    if (unlikely(ctx->singlestep_enabled & CPU_SINGLE_STEP &&
                 (ctx->base.pc_next <= 0x100 || ctx->base.pc_next > 0xF00) &&
                 ctx->exception != POWERPC_SYSCALL &&
                 ctx->exception != POWERPC_EXCP_TRAP &&
                 ctx->exception != POWERPC_EXCP_BRANCH)) {
        uint32_t excp = gen_prep_dbgex(ctx);
        gen_exception_nip(ctx, excp, ctx->base.pc_next);
    }

    if (tcg_check_temp_count()) {
        qemu_log("Opcode %02x %02x %02x %02x (%08x) leaked "
                 "temporaries\n", opc1(ctx->opcode), opc2(ctx->opcode),
                 opc3(ctx->opcode), opc4(ctx->opcode), ctx->opcode);
    }

    ctx->base.is_jmp = ctx->exception == POWERPC_EXCP_NONE ?
        DISAS_NEXT : DISAS_NORETURN;
}

static void ppc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    if (ctx->exception == POWERPC_EXCP_NONE) {
        gen_goto_tb(ctx, 0, ctx->base.pc_next);
    } else if (ctx->exception != POWERPC_EXCP_BRANCH) {
        if (unlikely(ctx->base.singlestep_enabled)) {
            gen_debug_exception(ctx);
        }
        /* Generate the return instruction */
        tcg_gen_exit_tb(NULL, 0);
    }
}

static void ppc_tr_disas_log(const DisasContextBase *dcbase, CPUState *cs)
{
    qemu_log("IN: %s\n", lookup_symbol(dcbase->pc_first));
    log_target_disas(cs, dcbase->pc_first, dcbase->tb->size);
}

static const TranslatorOps ppc_tr_ops = {
    .init_disas_context = ppc_tr_init_disas_context,
    .tb_start           = ppc_tr_tb_start,
    .insn_start         = ppc_tr_insn_start,
    .breakpoint_check   = ppc_tr_breakpoint_check,
    .translate_insn     = ppc_tr_translate_insn,
    .tb_stop            = ppc_tr_tb_stop,
    .disas_log          = ppc_tr_disas_log,
};

void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb)
{
    DisasContext ctx;

    translator_loop(&ppc_tr_ops, &ctx.base, cs, tb);
}

void restore_state_to_opc(CPUPPCState *env, TranslationBlock *tb,
                          target_ulong *data)
{
    env->nip = data[0];
}
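
/*
 * Note: data[0] consumed above is the pc_next value recorded by
 * tcg_gen_insn_start() in ppc_tr_insn_start(), which is why restoring
 * that single word into env->nip is enough to resynchronise the guest
 * PC after an exception unwinds a partially executed TB.
 */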