/*
 * PowerPC emulation for qemu: main translation routines.
 *
 * Copyright (c) 2003-2007 Jocelyn Mayer
 * Copyright (C) 2011 Freescale Semiconductor, Inc.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */

#include "qemu/osdep.h"
#include "cpu.h"
#include "internal.h"
#include "disas/disas.h"
#include "exec/exec-all.h"
#include "tcg-op.h"
#include "tcg-op-gvec.h"
#include "qemu/host-utils.h"
#include "exec/cpu_ldst.h"

#include "exec/helper-proto.h"
#include "exec/helper-gen.h"

#include "trace-tcg.h"
#include "exec/translator.h"
#include "exec/log.h"
#include "qemu/atomic128.h"


#define CPU_SINGLE_STEP 0x1
#define CPU_BRANCH_STEP 0x2
#define GDBSTUB_SINGLE_STEP 0x4

/* Include definitions for instructions classes and implementations flags */
//#define PPC_DEBUG_DISAS
//#define DO_PPC_STATISTICS

#ifdef PPC_DEBUG_DISAS
# define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
#else
# define LOG_DISAS(...) do { } while (0)
#endif
/*****************************************************************************/
/* Code translation helpers */

/* global register indexes */
static char cpu_reg_names[10 * 3 + 22 * 4   /* GPR */
                          + 10 * 4 + 22 * 5 /* SPE GPRh */
                          + 8 * 5           /* CRF */];
static TCGv cpu_gpr[32];
static TCGv cpu_gprh[32];
static TCGv_i32 cpu_crf[8];
static TCGv cpu_nip;
static TCGv cpu_msr;
static TCGv cpu_ctr;
static TCGv cpu_lr;
#if defined(TARGET_PPC64)
static TCGv cpu_cfar;
#endif
static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca, cpu_ov32, cpu_ca32;
static TCGv cpu_reserve;
static TCGv cpu_reserve_val;
static TCGv cpu_fpscr;
static TCGv_i32 cpu_access_type;

#include "exec/gen-icount.h"

void ppc_translate_init(void)
{
    int i;
    char *p;
    size_t cpu_reg_names_size;

    p = cpu_reg_names;
    cpu_reg_names_size = sizeof(cpu_reg_names);

    for (i = 0; i < 8; i++) {
        snprintf(p, cpu_reg_names_size, "crf%d", i);
        cpu_crf[i] = tcg_global_mem_new_i32(cpu_env,
                                            offsetof(CPUPPCState, crf[i]), p);
        p += 5;
        cpu_reg_names_size -= 5;
    }

    for (i = 0; i < 32; i++) {
        snprintf(p, cpu_reg_names_size, "r%d", i);
        cpu_gpr[i] = tcg_global_mem_new(cpu_env,
                                        offsetof(CPUPPCState, gpr[i]), p);
        p += (i < 10) ? 3 : 4;
        cpu_reg_names_size -= (i < 10) ? 3 : 4;
        snprintf(p, cpu_reg_names_size, "r%dH", i);
        cpu_gprh[i] = tcg_global_mem_new(cpu_env,
                                         offsetof(CPUPPCState, gprh[i]), p);
        p += (i < 10) ? 4 : 5;
        cpu_reg_names_size -= (i < 10) ? 4 : 5;
    }

    cpu_nip = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, nip), "nip");

    cpu_msr = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, msr), "msr");

    cpu_ctr = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, ctr), "ctr");

    cpu_lr = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, lr), "lr");

#if defined(TARGET_PPC64)
    cpu_cfar = tcg_global_mem_new(cpu_env,
                                  offsetof(CPUPPCState, cfar), "cfar");
#endif

    cpu_xer = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, xer), "xer");
    cpu_so = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, so), "SO");
    cpu_ov = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, ov), "OV");
    cpu_ca = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, ca), "CA");
    cpu_ov32 = tcg_global_mem_new(cpu_env,
                                  offsetof(CPUPPCState, ov32), "OV32");
    cpu_ca32 = tcg_global_mem_new(cpu_env,
                                  offsetof(CPUPPCState, ca32), "CA32");

    cpu_reserve = tcg_global_mem_new(cpu_env,
                                     offsetof(CPUPPCState, reserve_addr),
                                     "reserve_addr");
    cpu_reserve_val = tcg_global_mem_new(cpu_env,
                                         offsetof(CPUPPCState, reserve_val),
                                         "reserve_val");

    cpu_fpscr = tcg_global_mem_new(cpu_env,
                                   offsetof(CPUPPCState, fpscr), "fpscr");

    cpu_access_type = tcg_global_mem_new_i32(cpu_env,
                                             offsetof(CPUPPCState, access_type),
                                             "access_type");
}

/* internal defines */
struct DisasContext {
    DisasContextBase base;
    uint32_t opcode;
    uint32_t exception;
    /* Routine used to access memory */
    bool pr, hv, dr, le_mode;
    bool lazy_tlb_flush;
    bool need_access_type;
    int mem_idx;
    int access_type;
    /* Translation flags */
    TCGMemOp default_tcg_memop_mask;
#if defined(TARGET_PPC64)
    bool sf_mode;
    bool has_cfar;
#endif
    bool fpu_enabled;
    bool altivec_enabled;
    bool vsx_enabled;
    bool spe_enabled;
    bool tm_enabled;
    bool gtse;
    ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
    int singlestep_enabled;
    uint32_t flags;
    uint64_t insns_flags;
    uint64_t insns_flags2;
};

/* Return true iff byteswap is needed in a scalar memop */
static inline bool need_byteswap(const DisasContext *ctx)
{
#if defined(TARGET_WORDS_BIGENDIAN)
    return ctx->le_mode;
#else
    return !ctx->le_mode;
#endif
}

/* True when active word size < size of target_long. */
#ifdef TARGET_PPC64
# define NARROW_MODE(C) (!(C)->sf_mode)
#else
# define NARROW_MODE(C) 0
#endif

struct opc_handler_t {
    /* invalid bits for instruction 1 (Rc(opcode) == 0) */
    uint32_t inval1;
    /* invalid bits for instruction 2 (Rc(opcode) == 1) */
    uint32_t inval2;
    /* instruction type */
    uint64_t type;
    /* extended instruction type */
    uint64_t type2;
    /* handler */
    void (*handler)(DisasContext *ctx);
#if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU)
    const char *oname;
#endif
#if defined(DO_PPC_STATISTICS)
    uint64_t count;
#endif
};

/* SPR load/store helpers */
static inline void gen_load_spr(TCGv t, int reg)
{
    tcg_gen_ld_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
}

static inline void gen_store_spr(int reg, TCGv t)
{
    tcg_gen_st_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
}

static inline void gen_set_access_type(DisasContext *ctx, int access_type)
{
    if (ctx->need_access_type && ctx->access_type != access_type) {
        tcg_gen_movi_i32(cpu_access_type, access_type);
        ctx->access_type = access_type;
    }
}

static inline void gen_update_nip(DisasContext *ctx, target_ulong nip)
{
    if (NARROW_MODE(ctx)) {
        nip = (uint32_t)nip;
    }
    tcg_gen_movi_tl(cpu_nip, nip);
}

static void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error)
{
    TCGv_i32 t0, t1;

    /* These are all synchronous exceptions, we set the PC back to
     * the faulting instruction
     */
    if (ctx->exception == POWERPC_EXCP_NONE) {
        gen_update_nip(ctx, ctx->base.pc_next - 4);
    }
    t0 = tcg_const_i32(excp);
    t1 = tcg_const_i32(error);
    gen_helper_raise_exception_err(cpu_env, t0, t1);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    ctx->exception = (excp);
}

static void gen_exception(DisasContext *ctx, uint32_t excp)
{
    TCGv_i32 t0;

    /* These are all synchronous exceptions, we set the PC back to
     * the faulting instruction
     */
    if (ctx->exception == POWERPC_EXCP_NONE) {
        gen_update_nip(ctx, ctx->base.pc_next - 4);
    }
    t0 = tcg_const_i32(excp);
    gen_helper_raise_exception(cpu_env, t0);
    tcg_temp_free_i32(t0);
    ctx->exception = (excp);
}

static void gen_exception_nip(DisasContext *ctx, uint32_t excp,
                              target_ulong nip)
{
    TCGv_i32 t0;

    gen_update_nip(ctx, nip);
    t0 = tcg_const_i32(excp);
    gen_helper_raise_exception(cpu_env, t0);
    tcg_temp_free_i32(t0);
    ctx->exception = (excp);
}

/*
 * Tells the caller what is the appropriate exception to generate and prepares
 * SPR registers for this exception.
 *
 * The exception can be either POWERPC_EXCP_TRACE (on most PowerPCs) or
 * POWERPC_EXCP_DEBUG (on BookE).
 */
static uint32_t gen_prep_dbgex(DisasContext *ctx)
{
    if (ctx->flags & POWERPC_FLAG_DE) {
        target_ulong dbsr = 0;
        if (ctx->singlestep_enabled & CPU_SINGLE_STEP) {
            dbsr = DBCR0_ICMP;
        } else {
            /* Must have been branch */
            dbsr = DBCR0_BRT;
        }
        TCGv t0 = tcg_temp_new();
        gen_load_spr(t0, SPR_BOOKE_DBSR);
        tcg_gen_ori_tl(t0, t0, dbsr);
        gen_store_spr(SPR_BOOKE_DBSR, t0);
        tcg_temp_free(t0);
        return POWERPC_EXCP_DEBUG;
    } else {
        return POWERPC_EXCP_TRACE;
    }
}

static void gen_debug_exception(DisasContext *ctx)
{
    TCGv_i32 t0;

    /* These are all synchronous exceptions, we set the PC back to
     * the faulting instruction
     */
    if ((ctx->exception != POWERPC_EXCP_BRANCH) &&
        (ctx->exception != POWERPC_EXCP_SYNC)) {
        gen_update_nip(ctx, ctx->base.pc_next);
    }
    t0 = tcg_const_i32(EXCP_DEBUG);
    gen_helper_raise_exception(cpu_env, t0);
    tcg_temp_free_i32(t0);
}

static inline void gen_inval_exception(DisasContext *ctx, uint32_t error)
{
    /* Will be converted to program check if needed */
    gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_INVAL | error);
}

static inline void gen_priv_exception(DisasContext *ctx, uint32_t error)
{
    gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_PRIV | error);
}

static inline void gen_hvpriv_exception(DisasContext *ctx, uint32_t error)
{
    /* Will be converted to program check if needed */
    gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_PRIV | error);
}

/* Stop translation */
static inline void gen_stop_exception(DisasContext *ctx)
{
    gen_update_nip(ctx, ctx->base.pc_next);
    ctx->exception = POWERPC_EXCP_STOP;
}

#ifndef CONFIG_USER_ONLY
/* No need to update nip here, as execution flow will change */
static inline void gen_sync_exception(DisasContext *ctx)
{
    ctx->exception = POWERPC_EXCP_SYNC;
}
#endif

#define GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \
GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE)

#define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2) \
GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2)

#define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type) \
GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE)

#define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2) \
GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2)

#define GEN_HANDLER_E_2(name, opc1, opc2, opc3, opc4, inval, type, type2) \
GEN_OPCODE3(name, opc1, opc2, opc3, opc4, inval, type, type2)

#define GEN_HANDLER2_E_2(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) \
GEN_OPCODE4(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2)

typedef struct opcode_t {
    unsigned char opc1, opc2, opc3, opc4;
#if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
    unsigned char pad[4];
#endif
    opc_handler_t handler;
    const char *oname;
} opcode_t;

/* Helpers for priv. check */
#define GEN_PRIV \
    do { \
        gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); return; \
    } while (0)

#if defined(CONFIG_USER_ONLY)
#define CHK_HV GEN_PRIV
#define CHK_SV GEN_PRIV
#define CHK_HVRM GEN_PRIV
#else
#define CHK_HV \
    do { \
        if (unlikely(ctx->pr || !ctx->hv)) { \
            GEN_PRIV; \
        } \
    } while (0)
#define CHK_SV \
    do { \
        if (unlikely(ctx->pr)) { \
            GEN_PRIV; \
        } \
    } while (0)
#define CHK_HVRM \
    do { \
        if (unlikely(ctx->pr || !ctx->hv || ctx->dr)) { \
            GEN_PRIV; \
        } \
    } while (0)
#endif

#define CHK_NONE

/*****************************************************************************/
/* PowerPC instructions table */

#if defined(DO_PPC_STATISTICS)
#define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \
{ \
    .opc1 = op1, \
    .opc2 = op2, \
    .opc3 = op3, \
    .opc4 = 0xff, \
    .handler = { \
        .inval1 = invl, \
        .type = _typ, \
        .type2 = _typ2, \
        .handler = &gen_##name, \
        .oname = stringify(name), \
    }, \
    .oname = stringify(name), \
}
#define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \
{ \
    .opc1 = op1, \
    .opc2 = op2, \
    .opc3 = op3, \
    .opc4 = 0xff, \
    .handler = { \
        .inval1 = invl1, \
        .inval2 = invl2, \
        .type = _typ, \
        .type2 = _typ2, \
        .handler = &gen_##name, \
        .oname = stringify(name), \
    }, \
    .oname = stringify(name), \
}
#define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \
{ \
    .opc1 = op1, \
    .opc2 = op2, \
    .opc3 = op3, \
    .opc4 = 0xff, \
    .handler = { \
        .inval1 = invl, \
        .type = _typ, \
        .type2 = _typ2, \
        .handler = &gen_##name, \
        .oname = onam, \
    }, \
    .oname = onam, \
}
#define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2) \
{ \
    .opc1 = op1, \
    .opc2 = op2, \
    .opc3 = op3, \
    .opc4 = op4, \
    .handler = { \
        .inval1 = invl, \
        .type = _typ, \
        .type2 = _typ2, \
        .handler = &gen_##name, \
        .oname = stringify(name), \
    }, \
    .oname = stringify(name), \
}
#define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2) \
{ \
    .opc1 = op1, \
    .opc2 = op2, \
    .opc3 = op3, \
    .opc4 = op4, \
    .handler = { \
        .inval1 = invl, \
        .type = _typ, \
        .type2 = _typ2, \
        .handler = &gen_##name, \
        .oname = onam, \
    }, \
    .oname = onam, \
}
#else
#define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \
{ \
    .opc1 = op1, \
    .opc2 = op2, \
    .opc3 = op3, \
    .opc4 = 0xff, \
    .handler = { \
        .inval1 = invl, \
        .type = _typ, \
        .type2 = _typ2, \
        .handler = &gen_##name, \
    }, \
    .oname = stringify(name), \
}
#define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \
{ \
    .opc1 = op1, \
    .opc2 = op2, \
    .opc3 = op3, \
    .opc4 = 0xff, \
    .handler = { \
        .inval1 = invl1, \
        .inval2 = invl2, \
        .type = _typ, \
        .type2 = _typ2, \
        .handler = &gen_##name, \
    }, \
    .oname = stringify(name), \
}
#define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \
{ \
    .opc1 = op1, \
    .opc2 = op2, \
    .opc3 = op3, \
    .opc4 = 0xff, \
    .handler = { \
        .inval1 = invl, \
        .type = _typ, \
        .type2 = _typ2, \
        .handler = &gen_##name, \
    }, \
    .oname = onam, \
}
#define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2) \
{ \
    .opc1 = op1, \
    .opc2 = op2, \
    .opc3 = op3, \
    .opc4 = op4, \
    .handler = { \
        .inval1 = invl, \
        .type = _typ, \
        .type2 = _typ2, \
        .handler = &gen_##name, \
    }, \
    .oname = stringify(name), \
}
#define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2) \
{ \
    .opc1 = op1, \
    .opc2 = op2, \
    .opc3 = op3, \
    .opc4 = op4, \
    .handler = { \
        .inval1 = invl, \
        .type = _typ, \
        .type2 = _typ2, \
        .handler = &gen_##name, \
    }, \
    .oname = onam, \
}
#endif

/* Invalid instruction */
static void gen_invalid(DisasContext *ctx)
{
    gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
}

static opc_handler_t invalid_handler = {
    .inval1 = 0xFFFFFFFF,
    .inval2 = 0xFFFFFFFF,
    .type = PPC_NONE,
    .type2 = PPC_NONE,
    .handler = gen_invalid,
};

/*** Integer comparison ***/

static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv_i32 t = tcg_temp_new_i32();

    tcg_gen_movi_tl(t0, CRF_EQ);
    tcg_gen_movi_tl(t1, CRF_LT);
    tcg_gen_movcond_tl((s ? TCG_COND_LT : TCG_COND_LTU), t0, arg0, arg1, t1, t0);
    tcg_gen_movi_tl(t1, CRF_GT);
    tcg_gen_movcond_tl((s ? TCG_COND_GT : TCG_COND_GTU), t0, arg0, arg1, t1, t0);

    tcg_gen_trunc_tl_i32(t, t0);
    tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so);
    tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free_i32(t);
}

static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
{
    TCGv t0 = tcg_const_tl(arg1);
    gen_op_cmp(arg0, t0, s, crf);
    tcg_temp_free(t0);
}

static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
{
    TCGv t0, t1;
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    if (s) {
        tcg_gen_ext32s_tl(t0, arg0);
        tcg_gen_ext32s_tl(t1, arg1);
    } else {
        tcg_gen_ext32u_tl(t0, arg0);
        tcg_gen_ext32u_tl(t1, arg1);
    }
    gen_op_cmp(t0, t1, s, crf);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
}

static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
{
    TCGv t0 = tcg_const_tl(arg1);
    gen_op_cmp32(arg0, t0, s, crf);
    tcg_temp_free(t0);
}

static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
{
    if (NARROW_MODE(ctx)) {
        gen_op_cmpi32(reg, 0, 1, 0);
    } else {
        gen_op_cmpi(reg, 0, 1, 0);
    }
}
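
/*
 * Illustrative note on the comparison helpers above (restates the code,
 * not part of the original file): gen_op_cmp() starts from CRF_EQ,
 * overrides it with CRF_LT or CRF_GT via movcond depending on the
 * (signed or unsigned) relation, and then ORs in the current 0/1 value
 * of cpu_so so the SO bit of the CR field tracks XER[SO].  For example,
 * a signed compare of 5 against 7 leaves CRF_LT OR-ed with cpu_so in
 * the target CR field.  gen_set_Rc0() reuses the same path with a
 * second operand of 0 to fill CR0, which is how the Rc=1 ("dot") forms
 * below record their result.
 */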

/* cmp */
static void gen_cmp(DisasContext *ctx)
{
    if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
        gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                   1, crfD(ctx->opcode));
    } else {
        gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                     1, crfD(ctx->opcode));
    }
}

/* cmpi */
static void gen_cmpi(DisasContext *ctx)
{
    if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
        gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
                    1, crfD(ctx->opcode));
    } else {
        gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
                      1, crfD(ctx->opcode));
    }
}

/* cmpl */
static void gen_cmpl(DisasContext *ctx)
{
    if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
        gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                   0, crfD(ctx->opcode));
    } else {
        gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                     0, crfD(ctx->opcode));
    }
}

/* cmpli */
static void gen_cmpli(DisasContext *ctx)
{
    if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
        gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
                    0, crfD(ctx->opcode));
    } else {
        gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
                      0, crfD(ctx->opcode));
    }
}

/* cmprb - range comparison: isupper, isalpha, islower */
static void gen_cmprb(DisasContext *ctx)
{
    TCGv_i32 src1 = tcg_temp_new_i32();
    TCGv_i32 src2 = tcg_temp_new_i32();
    TCGv_i32 src2lo = tcg_temp_new_i32();
    TCGv_i32 src2hi = tcg_temp_new_i32();
    TCGv_i32 crf = cpu_crf[crfD(ctx->opcode)];

    tcg_gen_trunc_tl_i32(src1, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_trunc_tl_i32(src2, cpu_gpr[rB(ctx->opcode)]);

    tcg_gen_andi_i32(src1, src1, 0xFF);
    tcg_gen_ext8u_i32(src2lo, src2);
    tcg_gen_shri_i32(src2, src2, 8);
    tcg_gen_ext8u_i32(src2hi, src2);

    tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
    tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
    tcg_gen_and_i32(crf, src2lo, src2hi);

    if (ctx->opcode & 0x00200000) {
        tcg_gen_shri_i32(src2, src2, 8);
        tcg_gen_ext8u_i32(src2lo, src2);
        tcg_gen_shri_i32(src2, src2, 8);
        tcg_gen_ext8u_i32(src2hi, src2);
        tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
        tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
        tcg_gen_and_i32(src2lo, src2lo, src2hi);
        tcg_gen_or_i32(crf, crf, src2lo);
    }
    tcg_gen_shli_i32(crf, crf, CRF_GT_BIT);
    tcg_temp_free_i32(src1);
    tcg_temp_free_i32(src2);
    tcg_temp_free_i32(src2lo);
    tcg_temp_free_i32(src2hi);
}

#if defined(TARGET_PPC64)
/* cmpeqb */
static void gen_cmpeqb(DisasContext *ctx)
{
    gen_helper_cmpeqb(cpu_crf[crfD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                      cpu_gpr[rB(ctx->opcode)]);
}
#endif

/* isel (PowerPC 2.03 specification) */
static void gen_isel(DisasContext *ctx)
{
    uint32_t bi = rC(ctx->opcode);
    uint32_t mask = 0x08 >> (bi & 0x03);
    TCGv t0 = tcg_temp_new();
    TCGv zr;

    tcg_gen_extu_i32_tl(t0, cpu_crf[bi >> 2]);
    tcg_gen_andi_tl(t0, t0, mask);

    zr = tcg_const_tl(0);
    tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rD(ctx->opcode)], t0, zr,
                       rA(ctx->opcode) ? cpu_gpr[rA(ctx->opcode)] : zr,
                       cpu_gpr[rB(ctx->opcode)]);
    tcg_temp_free(zr);
    tcg_temp_free(t0);
}

/* cmpb: PowerPC 2.05 specification */
static void gen_cmpb(DisasContext *ctx)
{
    gen_helper_cmpb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                    cpu_gpr[rB(ctx->opcode)]);
}

/*** Integer arithmetic ***/

static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0,
                                           TCGv arg1, TCGv arg2, int sub)
{
    TCGv t0 = tcg_temp_new();

    tcg_gen_xor_tl(cpu_ov, arg0, arg2);
    tcg_gen_xor_tl(t0, arg1, arg2);
    if (sub) {
        tcg_gen_and_tl(cpu_ov, cpu_ov, t0);
    } else {
        tcg_gen_andc_tl(cpu_ov, cpu_ov, t0);
    }
    tcg_temp_free(t0);
    if (NARROW_MODE(ctx)) {
        tcg_gen_extract_tl(cpu_ov, cpu_ov, 31, 1);
        if (is_isa300(ctx)) {
            tcg_gen_mov_tl(cpu_ov32, cpu_ov);
        }
    } else {
        if (is_isa300(ctx)) {
            tcg_gen_extract_tl(cpu_ov32, cpu_ov, 31, 1);
        }
        tcg_gen_extract_tl(cpu_ov, cpu_ov, TARGET_LONG_BITS - 1, 1);
    }
    tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
}

static inline void gen_op_arith_compute_ca32(DisasContext *ctx,
                                             TCGv res, TCGv arg0, TCGv arg1,
                                             TCGv ca32, int sub)
{
    TCGv t0;

    if (!is_isa300(ctx)) {
        return;
    }

    t0 = tcg_temp_new();
    if (sub) {
        tcg_gen_eqv_tl(t0, arg0, arg1);
    } else {
        tcg_gen_xor_tl(t0, arg0, arg1);
    }
    tcg_gen_xor_tl(t0, t0, res);
    tcg_gen_extract_tl(ca32, t0, 32, 1);
    tcg_temp_free(t0);
}
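
/*
 * Worked example for gen_op_arith_compute_ca32() above (illustrative
 * only, not part of the original file): for an addition, the carry out
 * of bit 31 (architected CA32) is bit 32 of (arg0 ^ arg1) ^ res, e.g.
 *     arg0 = 0xFFFFFFFF, arg1 = 0x1
 *     res                = 0x1_0000_0000
 *     arg0 ^ arg1        = 0xFFFF_FFFE
 *     (arg0 ^ arg1) ^ res = 0x1_FFFF_FFFE  ->  bit 32 is 1, so CA32 = 1.
 * The subtraction case feeds eqv(arg0, arg1) = ~(arg0 ^ arg1) into the
 * same extraction, matching the PowerPC "carry, not borrow" convention.
 */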

/* Common add function */
static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1,
                                    TCGv arg2, TCGv ca, TCGv ca32,
                                    bool add_ca, bool compute_ca,
                                    bool compute_ov, bool compute_rc0)
{
    TCGv t0 = ret;

    if (compute_ca || compute_ov) {
        t0 = tcg_temp_new();
    }

    if (compute_ca) {
        if (NARROW_MODE(ctx)) {
            /* Caution: a non-obvious corner case of the spec is that we
               must produce the *entire* 64-bit addition, but produce the
               carry into bit 32. */
            TCGv t1 = tcg_temp_new();
            tcg_gen_xor_tl(t1, arg1, arg2);        /* add without carry */
            tcg_gen_add_tl(t0, arg1, arg2);
            if (add_ca) {
                tcg_gen_add_tl(t0, t0, ca);
            }
            tcg_gen_xor_tl(ca, t0, t1);            /* bits changed w/ carry */
            tcg_temp_free(t1);
            tcg_gen_extract_tl(ca, ca, 32, 1);
            if (is_isa300(ctx)) {
                tcg_gen_mov_tl(ca32, ca);
            }
        } else {
            TCGv zero = tcg_const_tl(0);
            if (add_ca) {
                tcg_gen_add2_tl(t0, ca, arg1, zero, ca, zero);
                tcg_gen_add2_tl(t0, ca, t0, ca, arg2, zero);
            } else {
                tcg_gen_add2_tl(t0, ca, arg1, zero, arg2, zero);
            }
            gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, ca32, 0);
            tcg_temp_free(zero);
        }
    } else {
        tcg_gen_add_tl(t0, arg1, arg2);
        if (add_ca) {
            tcg_gen_add_tl(t0, t0, ca);
        }
    }

    if (compute_ov) {
        gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
    }
    if (unlikely(compute_rc0)) {
        gen_set_Rc0(ctx, t0);
    }

    if (t0 != ret) {
        tcg_gen_mov_tl(ret, t0);
        tcg_temp_free(t0);
    }
}
/* Add functions with two operands */
#define GEN_INT_ARITH_ADD(name, opc3, ca, add_ca, compute_ca, compute_ov) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
                     ca, glue(ca, 32), \
                     add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
}
/* Add functions with one operand and one immediate */
#define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, ca, \
                                add_ca, compute_ca, compute_ov) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv t0 = tcg_const_tl(const_val); \
    gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
                     cpu_gpr[rA(ctx->opcode)], t0, \
                     ca, glue(ca, 32), \
                     add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
    tcg_temp_free(t0); \
}

/* add add. addo addo. */
GEN_INT_ARITH_ADD(add, 0x08, cpu_ca, 0, 0, 0)
GEN_INT_ARITH_ADD(addo, 0x18, cpu_ca, 0, 0, 1)
/* addc addc. addco addco. */
GEN_INT_ARITH_ADD(addc, 0x00, cpu_ca, 0, 1, 0)
GEN_INT_ARITH_ADD(addco, 0x10, cpu_ca, 0, 1, 1)
/* adde adde. addeo addeo. */
GEN_INT_ARITH_ADD(adde, 0x04, cpu_ca, 1, 1, 0)
GEN_INT_ARITH_ADD(addeo, 0x14, cpu_ca, 1, 1, 1)
/* addme addme. addmeo addmeo. */
GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, cpu_ca, 1, 1, 0)
GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, cpu_ca, 1, 1, 1)
/* addex */
GEN_INT_ARITH_ADD(addex, 0x05, cpu_ov, 1, 1, 0);
/* addze addze. addzeo addzeo. */
GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, cpu_ca, 1, 1, 0)
GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, cpu_ca, 1, 1, 1)
/* addi */
static void gen_addi(DisasContext *ctx)
{
    target_long simm = SIMM(ctx->opcode);

    if (rA(ctx->opcode) == 0) {
        /* li case */
        tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm);
    } else {
        tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)],
                        cpu_gpr[rA(ctx->opcode)], simm);
    }
}
/* addic addic. */
static inline void gen_op_addic(DisasContext *ctx, bool compute_rc0)
{
    TCGv c = tcg_const_tl(SIMM(ctx->opcode));
    gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                     c, cpu_ca, cpu_ca32, 0, 1, 0, compute_rc0);
    tcg_temp_free(c);
}

static void gen_addic(DisasContext *ctx)
{
    gen_op_addic(ctx, 0);
}

static void gen_addic_(DisasContext *ctx)
{
    gen_op_addic(ctx, 1);
}

/* addis */
static void gen_addis(DisasContext *ctx)
{
    target_long simm = SIMM(ctx->opcode);

    if (rA(ctx->opcode) == 0) {
        /* lis case */
        tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm << 16);
    } else {
        tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)],
                        cpu_gpr[rA(ctx->opcode)], simm << 16);
    }
}

/* addpcis */
static void gen_addpcis(DisasContext *ctx)
{
    target_long d = DX(ctx->opcode);

    tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], ctx->base.pc_next + (d << 16));
}

static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign, int compute_ov)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();
    TCGv_i32 t2 = tcg_temp_new_i32();
    TCGv_i32 t3 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg1);
    tcg_gen_trunc_tl_i32(t1, arg2);
    if (sign) {
        tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i32(t2, t2, t3);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i32(t2, t2, t3);
        tcg_gen_movi_i32(t3, 0);
        tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_div_i32(t3, t0, t1);
        tcg_gen_extu_i32_tl(ret, t3);
    } else {
        tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t1, 0);
        tcg_gen_movi_i32(t3, 0);
        tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_divu_i32(t3, t0, t1);
        tcg_gen_extu_i32_tl(ret, t3);
    }
    if (compute_ov) {
        tcg_gen_extu_i32_tl(cpu_ov, t2);
        if (is_isa300(ctx)) {
            tcg_gen_extu_i32_tl(cpu_ov32, t2);
        }
        tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
    }
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
    tcg_temp_free_i32(t3);

    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, ret);
}
/* Div functions */
#define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)], \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
                      sign, compute_ov); \
}
/* divwu divwu. divwuo divwuo. */
GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0);
GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1);
/* divw divw. divwo divwo. */
GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0);
GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1);
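
/*
 * Illustrative note on the divw helpers above (restates the code, not
 * part of the original file): gen_op_arith_divw() folds the ISA's two
 * undefined cases, division by zero and, for the signed form,
 * INT_MIN / -1, into t2.  When t2 is set, the divisor is replaced by a
 * harmless value (1) before the TCG division so the host never traps;
 * the quotient written back in those cases is boundedly undefined, and
 * the "o" forms only promise to set OV (and OV32 on ISA v3.00) from t2.
 */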

/* div[wd]eu[o][.] */
#define GEN_DIVE(name, hlpr, compute_ov) \
static void gen_##name(DisasContext *ctx) \
{ \
    TCGv_i32 t0 = tcg_const_i32(compute_ov); \
    gen_helper_##hlpr(cpu_gpr[rD(ctx->opcode)], cpu_env, \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); \
    tcg_temp_free_i32(t0); \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); \
    } \
}

GEN_DIVE(divweu, divweu, 0);
GEN_DIVE(divweuo, divweu, 1);
GEN_DIVE(divwe, divwe, 0);
GEN_DIVE(divweo, divwe, 1);

#if defined(TARGET_PPC64)
static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign, int compute_ov)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv_i64 t2 = tcg_temp_new_i64();
    TCGv_i64 t3 = tcg_temp_new_i64();

    tcg_gen_mov_i64(t0, arg1);
    tcg_gen_mov_i64(t1, arg2);
    if (sign) {
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i64(t2, t2, t3);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i64(t2, t2, t3);
        tcg_gen_movi_i64(t3, 0);
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_div_i64(ret, t0, t1);
    } else {
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t1, 0);
        tcg_gen_movi_i64(t3, 0);
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_divu_i64(ret, t0, t1);
    }
    if (compute_ov) {
        tcg_gen_mov_tl(cpu_ov, t2);
        if (is_isa300(ctx)) {
            tcg_gen_mov_tl(cpu_ov32, t2);
        }
        tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
    tcg_temp_free_i64(t3);

    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, ret);
}

#define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)], \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
                      sign, compute_ov); \
}
/* divdu divdu. divduo divduo. */
GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
/* divd divd. divdo divdo. */
GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0);
GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1);

GEN_DIVE(divdeu, divdeu, 0);
GEN_DIVE(divdeuo, divdeu, 1);
GEN_DIVE(divde, divde, 0);
GEN_DIVE(divdeo, divde, 1);
#endif

static inline void gen_op_arith_modw(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg1);
    tcg_gen_trunc_tl_i32(t1, arg2);
    if (sign) {
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_i32 t3 = tcg_temp_new_i32();
        tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i32(t2, t2, t3);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i32(t2, t2, t3);
        tcg_gen_movi_i32(t3, 0);
        tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_rem_i32(t3, t0, t1);
        tcg_gen_ext_i32_tl(ret, t3);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        TCGv_i32 t2 = tcg_const_i32(1);
        TCGv_i32 t3 = tcg_const_i32(0);
        tcg_gen_movcond_i32(TCG_COND_EQ, t1, t1, t3, t2, t1);
        tcg_gen_remu_i32(t3, t0, t1);
        tcg_gen_extu_i32_tl(ret, t3);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    }
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
}

#define GEN_INT_ARITH_MODW(name, opc3, sign) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    gen_op_arith_modw(ctx, cpu_gpr[rD(ctx->opcode)], \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
                      sign); \
}

GEN_INT_ARITH_MODW(moduw, 0x08, 0);
GEN_INT_ARITH_MODW(modsw, 0x18, 1);

#if defined(TARGET_PPC64)
static inline void gen_op_arith_modd(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_mov_i64(t0, arg1);
    tcg_gen_mov_i64(t1, arg2);
    if (sign) {
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i64 t3 = tcg_temp_new_i64();
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i64(t2, t2, t3);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i64(t2, t2, t3);
        tcg_gen_movi_i64(t3, 0);
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_rem_i64(ret, t0, t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        TCGv_i64 t2 = tcg_const_i64(1);
        TCGv_i64 t3 = tcg_const_i64(0);
        tcg_gen_movcond_i64(TCG_COND_EQ, t1, t1, t3, t2, t1);
        tcg_gen_remu_i64(ret, t0, t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

#define GEN_INT_ARITH_MODD(name, opc3, sign) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    gen_op_arith_modd(ctx, cpu_gpr[rD(ctx->opcode)], \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
                      sign); \
}

GEN_INT_ARITH_MODD(modud, 0x08, 0);
GEN_INT_ARITH_MODD(modsd, 0x18, 1);
#endif

/* mulhw mulhw. */
static void gen_mulhw(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_muls2_i32(t0, t1, t0, t1);
    tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* mulhwu mulhwu. */
static void gen_mulhwu(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mulu2_i32(t0, t1, t0, t1);
    tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* mullw mullw. */
static void gen_mullw(DisasContext *ctx)
{
#if defined(TARGET_PPC64)
    TCGv_i64 t0, t1;
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mul_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
#else
    tcg_gen_mul_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                    cpu_gpr[rB(ctx->opcode)]);
#endif
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* mullwo mullwo. */
static void gen_mullwo(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_muls2_i32(t0, t1, t0, t1);
#if defined(TARGET_PPC64)
    tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
#else
    tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], t0);
#endif

    tcg_gen_sari_i32(t0, t0, 31);
    tcg_gen_setcond_i32(TCG_COND_NE, t0, t0, t1);
    tcg_gen_extu_i32_tl(cpu_ov, t0);
    if (is_isa300(ctx)) {
        tcg_gen_mov_tl(cpu_ov32, cpu_ov);
    }
    tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);

    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* mulli */
static void gen_mulli(DisasContext *ctx)
{
    tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                    SIMM(ctx->opcode));
}

#if defined(TARGET_PPC64)
/* mulhd mulhd. */
static void gen_mulhd(DisasContext *ctx)
{
    TCGv lo = tcg_temp_new();
    tcg_gen_muls2_tl(lo, cpu_gpr[rD(ctx->opcode)],
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    tcg_temp_free(lo);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}

/* mulhdu mulhdu. */
static void gen_mulhdu(DisasContext *ctx)
{
    TCGv lo = tcg_temp_new();
    tcg_gen_mulu2_tl(lo, cpu_gpr[rD(ctx->opcode)],
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    tcg_temp_free(lo);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}

/* mulld mulld. */
static void gen_mulld(DisasContext *ctx)
{
    tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                   cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* mulldo mulldo. */
static void gen_mulldo(DisasContext *ctx)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_muls2_i64(t0, t1, cpu_gpr[rA(ctx->opcode)],
                      cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], t0);

    tcg_gen_sari_i64(t0, t0, 63);
    tcg_gen_setcond_i64(TCG_COND_NE, cpu_ov, t0, t1);
    if (is_isa300(ctx)) {
        tcg_gen_mov_tl(cpu_ov32, cpu_ov);
    }
    tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}
#endif

/* Common subf function */
static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, bool add_ca, bool compute_ca,
                                     bool compute_ov, bool compute_rc0)
{
    TCGv t0 = ret;

    if (compute_ca || compute_ov) {
        t0 = tcg_temp_new();
    }

    if (compute_ca) {
        /* dest = ~arg1 + arg2 [+ ca]. */
        if (NARROW_MODE(ctx)) {
            /* Caution: a non-obvious corner case of the spec is that we
               must produce the *entire* 64-bit addition, but produce the
               carry into bit 32. */
            TCGv inv1 = tcg_temp_new();
            TCGv t1 = tcg_temp_new();
            tcg_gen_not_tl(inv1, arg1);
            if (add_ca) {
                tcg_gen_add_tl(t0, arg2, cpu_ca);
            } else {
                tcg_gen_addi_tl(t0, arg2, 1);
            }
            tcg_gen_xor_tl(t1, arg2, inv1);        /* add without carry */
            tcg_gen_add_tl(t0, t0, inv1);
            tcg_temp_free(inv1);
            tcg_gen_xor_tl(cpu_ca, t0, t1);        /* bits changed w/ carry */
            tcg_temp_free(t1);
            tcg_gen_extract_tl(cpu_ca, cpu_ca, 32, 1);
            if (is_isa300(ctx)) {
                tcg_gen_mov_tl(cpu_ca32, cpu_ca);
            }
        } else if (add_ca) {
            TCGv zero, inv1 = tcg_temp_new();
            tcg_gen_not_tl(inv1, arg1);
            zero = tcg_const_tl(0);
            tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero);
            tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero);
            gen_op_arith_compute_ca32(ctx, t0, inv1, arg2, cpu_ca32, 0);
            tcg_temp_free(zero);
            tcg_temp_free(inv1);
        } else {
            tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1);
            tcg_gen_sub_tl(t0, arg2, arg1);
            gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, cpu_ca32, 1);
        }
    } else if (add_ca) {
        /* Since we're ignoring carry-out, we can simplify the
           standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1.  */
        tcg_gen_sub_tl(t0, arg2, arg1);
        tcg_gen_add_tl(t0, t0, cpu_ca);
        tcg_gen_subi_tl(t0, t0, 1);
    } else {
        tcg_gen_sub_tl(t0, arg2, arg1);
    }

    if (compute_ov) {
        gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
    }
    if (unlikely(compute_rc0)) {
        gen_set_Rc0(ctx, t0);
    }

    if (t0 != ret) {
        tcg_gen_mov_tl(ret, t0);
        tcg_temp_free(t0);
    }
}
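
/*
 * Illustrative note (not from the original file): PowerPC records
 * subtract carries as "not borrow", i.e. for subfc CA = 1 exactly when
 * arg2 >= arg1 unsigned, which is why the compute_ca path above can use
 * a single setcond(GEU, arg2, arg1) rather than a borrow chain.  For
 * example, subfc with rA = 1 and rB = 0 produces 0xFFFF...FFFF and
 * leaves CA = 0.
 */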
/* Sub functions with two operands */
#define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
                      add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
}
/* Sub functions with one operand and one immediate */
#define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
                                 add_ca, compute_ca, compute_ov) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv t0 = tcg_const_tl(const_val); \
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
                      cpu_gpr[rA(ctx->opcode)], t0, \
                      add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
    tcg_temp_free(t0); \
}
/* subf subf. subfo subfo. */
GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
/* subfc subfc. subfco subfco. */
GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
/* subfe subfe. subfeo subfeo. */
GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
/* subfme subfme. subfmeo subfmeo. */
GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
/* subfze subfze. subfzeo subfzeo. */
GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)

/* subfic */
static void gen_subfic(DisasContext *ctx)
{
    TCGv c = tcg_const_tl(SIMM(ctx->opcode));
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                      c, 0, 1, 0, 0);
    tcg_temp_free(c);
}

/* neg neg. nego nego. */
static inline void gen_op_arith_neg(DisasContext *ctx, bool compute_ov)
{
    TCGv zero = tcg_const_tl(0);
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                      zero, 0, 0, compute_ov, Rc(ctx->opcode));
    tcg_temp_free(zero);
}

static void gen_neg(DisasContext *ctx)
{
    tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}

static void gen_nego(DisasContext *ctx)
{
    gen_op_arith_neg(ctx, 1);
}

/*** Integer logical ***/
#define GEN_LOGICAL2(name, tcg_op, opc, type) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], \
           cpu_gpr[rB(ctx->opcode)]); \
    if (unlikely(Rc(ctx->opcode) != 0)) \
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \
}

#define GEN_LOGICAL1(name, tcg_op, opc, type) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); \
    if (unlikely(Rc(ctx->opcode) != 0)) \
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \
}

/* and & and. */
GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER);
/* andc & andc. */
GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER);

/* andi. */
static void gen_andi_(DisasContext *ctx)
{
    tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                    UIMM(ctx->opcode));
    gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* andis. */
static void gen_andis_(DisasContext *ctx)
{
    tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                    UIMM(ctx->opcode) << 16);
    gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* cntlzw */
static void gen_cntlzw(DisasContext *ctx)
{
    TCGv_i32 t = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_clzi_i32(t, t, 32);
    tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t);
    tcg_temp_free_i32(t);

    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* cnttzw */
static void gen_cnttzw(DisasContext *ctx)
{
    TCGv_i32 t = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_ctzi_i32(t, t, 32);
    tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t);
    tcg_temp_free_i32(t);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* eqv & eqv. */
GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER);
/* extsb & extsb. */
GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER);
/* extsh & extsh. */
GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER);
/* nand & nand. */
GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER);
/* nor & nor. */
GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER);

#if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
static void gen_pause(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_const_i32(0);
    tcg_gen_st_i32(t0, cpu_env,
                   -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
    tcg_temp_free_i32(t0);

    /* Stop translation, this gives other CPUs a chance to run */
    gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
}
#endif /* defined(TARGET_PPC64) */

/* or & or. */
static void gen_or(DisasContext *ctx)
{
    int rs, ra, rb;

    rs = rS(ctx->opcode);
    ra = rA(ctx->opcode);
    rb = rB(ctx->opcode);
    /* Optimisation for mr. ri case */
    if (rs != ra || rs != rb) {
        if (rs != rb)
            tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
        else
            tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);
        if (unlikely(Rc(ctx->opcode) != 0))
            gen_set_Rc0(ctx, cpu_gpr[ra]);
    } else if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rs]);
#if defined(TARGET_PPC64)
    } else if (rs != 0) { /* 0 is nop */
        int prio = 0;

        switch (rs) {
        case 1:
            /* Set process priority to low */
            prio = 2;
            break;
        case 6:
            /* Set process priority to medium-low */
            prio = 3;
            break;
        case 2:
            /* Set process priority to normal */
            prio = 4;
            break;
#if !defined(CONFIG_USER_ONLY)
        case 31:
            if (!ctx->pr) {
                /* Set process priority to very low */
                prio = 1;
            }
            break;
        case 5:
            if (!ctx->pr) {
                /* Set process priority to medium-high */
                prio = 5;
            }
            break;
        case 3:
            if (!ctx->pr) {
                /* Set process priority to high */
                prio = 6;
            }
            break;
        case 7:
            if (ctx->hv && !ctx->pr) {
                /* Set process priority to very high */
                prio = 7;
            }
            break;
#endif
        default:
            break;
        }
        if (prio) {
            TCGv t0 = tcg_temp_new();
            gen_load_spr(t0, SPR_PPR);
            tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
            tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
            gen_store_spr(SPR_PPR, t0);
            tcg_temp_free(t0);
        }
#if !defined(CONFIG_USER_ONLY)
        /* Pause out of TCG otherwise spin loops with smt_low eat too much
         * CPU and the kernel hangs. This applies to all encodings other
         * than no-op, e.g., miso(rs=26), yield(27), mdoio(29), mdoom(30),
         * and all currently undefined.
         */
        gen_pause(ctx);
#endif
#endif
    }
}
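
/*
 * Illustrative summary of the special "or rx,rx,rx" forms handled above
 * (restates the code, not part of the original file): when rS == rA ==
 * rB and Rc = 0 the instruction is architecturally a no-op that doubles
 * as a thread-priority hint.  The cases above map rS = 31, 1, 6, 2, 5,
 * 3, 7 to PPR priorities 1 (very low) through 7 (very high), ignore the
 * privileged encodings in problem state, and then pause out of the TB
 * so a guest spinning on such hints does not burn host CPU.
 */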
/* orc & orc. */
GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);

/* xor & xor. */
static void gen_xor(DisasContext *ctx)
{
    /* Optimisation for "set to zero" case */
    if (rS(ctx->opcode) != rB(ctx->opcode))
        tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                       cpu_gpr[rB(ctx->opcode)]);
    else
        tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* ori */
static void gen_ori(DisasContext *ctx)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        return;
    }
    tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
}

/* oris */
static void gen_oris(DisasContext *ctx)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        /* NOP */
        return;
    }
    tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                   uimm << 16);
}

/* xori */
static void gen_xori(DisasContext *ctx)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        /* NOP */
        return;
    }
    tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
}

/* xoris */
static void gen_xoris(DisasContext *ctx)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        /* NOP */
        return;
    }
    tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                    uimm << 16);
}

/* popcntb : PowerPC 2.03 specification */
static void gen_popcntb(DisasContext *ctx)
{
    gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
}

static void gen_popcntw(DisasContext *ctx)
{
#if defined(TARGET_PPC64)
    gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
#else
    tcg_gen_ctpop_i32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
#endif
}

#if defined(TARGET_PPC64)
/* popcntd: PowerPC 2.06 specification */
static void gen_popcntd(DisasContext *ctx)
{
    tcg_gen_ctpop_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
}
#endif

/* prtyw: PowerPC 2.05 specification */
static void gen_prtyw(DisasContext *ctx)
{
    TCGv ra = cpu_gpr[rA(ctx->opcode)];
    TCGv rs = cpu_gpr[rS(ctx->opcode)];
    TCGv t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, rs, 16);
    tcg_gen_xor_tl(ra, rs, t0);
    tcg_gen_shri_tl(t0, ra, 8);
    tcg_gen_xor_tl(ra, ra, t0);
    tcg_gen_andi_tl(ra, ra, (target_ulong)0x100000001ULL);
    tcg_temp_free(t0);
}

#if defined(TARGET_PPC64)
/* prtyd: PowerPC 2.05 specification */
static void gen_prtyd(DisasContext *ctx)
{
    TCGv ra = cpu_gpr[rA(ctx->opcode)];
    TCGv rs = cpu_gpr[rS(ctx->opcode)];
    TCGv t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, rs, 32);
    tcg_gen_xor_tl(ra, rs, t0);
    tcg_gen_shri_tl(t0, ra, 16);
    tcg_gen_xor_tl(ra, ra, t0);
    tcg_gen_shri_tl(t0, ra, 8);
    tcg_gen_xor_tl(ra, ra, t0);
    tcg_gen_andi_tl(ra, ra, 1);
    tcg_temp_free(t0);
}
#endif

#if defined(TARGET_PPC64)
/* bpermd */
static void gen_bpermd(DisasContext *ctx)
{
    gen_helper_bpermd(cpu_gpr[rA(ctx->opcode)],
                      cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
}
#endif

#if defined(TARGET_PPC64)
/* extsw & extsw. */
GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B);

/* cntlzd */
static void gen_cntlzd(DisasContext *ctx)
{
    tcg_gen_clzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* cnttzd */
static void gen_cnttzd(DisasContext *ctx)
{
    tcg_gen_ctzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* darn */
static void gen_darn(DisasContext *ctx)
{
    int l = L(ctx->opcode);

    if (l == 0) {
        gen_helper_darn32(cpu_gpr[rD(ctx->opcode)]);
    } else if (l <= 2) {
        /* Return 64-bit random for both CRN and RRN */
        gen_helper_darn64(cpu_gpr[rD(ctx->opcode)]);
    } else {
        tcg_gen_movi_i64(cpu_gpr[rD(ctx->opcode)], -1);
    }
}
#endif

/*** Integer rotate ***/

/* rlwimi & rlwimi. */
static void gen_rlwimi(DisasContext *ctx)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    uint32_t sh = SH(ctx->opcode);
    uint32_t mb = MB(ctx->opcode);
    uint32_t me = ME(ctx->opcode);

    if (sh == (31 - me) && mb <= me) {
        tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
    } else {
        target_ulong mask;
        TCGv t1;

#if defined(TARGET_PPC64)
        mb += 32;
        me += 32;
#endif
        mask = MASK(mb, me);

        t1 = tcg_temp_new();
        if (mask <= 0xffffffffu) {
            TCGv_i32 t0 = tcg_temp_new_i32();
            tcg_gen_trunc_tl_i32(t0, t_rs);
            tcg_gen_rotli_i32(t0, t0, sh);
            tcg_gen_extu_i32_tl(t1, t0);
            tcg_temp_free_i32(t0);
        } else {
#if defined(TARGET_PPC64)
            tcg_gen_deposit_i64(t1, t_rs, t_rs, 32, 32);
            tcg_gen_rotli_i64(t1, t1, sh);
#else
            g_assert_not_reached();
#endif
        }

        tcg_gen_andi_tl(t1, t1, mask);
        tcg_gen_andi_tl(t_ra, t_ra, ~mask);
        tcg_gen_or_tl(t_ra, t_ra, t1);
        tcg_temp_free(t1);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}
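
/*
 * Worked example for the rotate-and-mask helpers (illustrative, not
 * part of the original file): rlwimi rA,rS,SH,MB,ME rotates the low 32
 * bits of rS left by SH and inserts them into rA under MASK(MB,ME).
 * With SH=0, MB=24, ME=31 the fast path above reduces to a single
 * 8-bit deposit at offset 0, i.e. "replace the low byte of rA with the
 * low byte of rS".  gen_rlwinm() below performs the same rotation but
 * simply replaces rA with the masked result instead of merging it.
 */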

/* rlwinm & rlwinm. */
static void gen_rlwinm(DisasContext *ctx)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    int sh = SH(ctx->opcode);
    int mb = MB(ctx->opcode);
    int me = ME(ctx->opcode);
    int len = me - mb + 1;
    int rsh = (32 - sh) & 31;

    if (sh != 0 && len > 0 && me == (31 - sh)) {
        tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
    } else if (me == 31 && rsh + len <= 32) {
        tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
    } else {
        target_ulong mask;
#if defined(TARGET_PPC64)
        mb += 32;
        me += 32;
#endif
        mask = MASK(mb, me);
        if (sh == 0) {
            tcg_gen_andi_tl(t_ra, t_rs, mask);
        } else if (mask <= 0xffffffffu) {
            TCGv_i32 t0 = tcg_temp_new_i32();
            tcg_gen_trunc_tl_i32(t0, t_rs);
            tcg_gen_rotli_i32(t0, t0, sh);
            tcg_gen_andi_i32(t0, t0, mask);
            tcg_gen_extu_i32_tl(t_ra, t0);
            tcg_temp_free_i32(t0);
        } else {
#if defined(TARGET_PPC64)
            tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
            tcg_gen_rotli_i64(t_ra, t_ra, sh);
            tcg_gen_andi_i64(t_ra, t_ra, mask);
#else
            g_assert_not_reached();
#endif
        }
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}

/* rlwnm & rlwnm. */
static void gen_rlwnm(DisasContext *ctx)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
    uint32_t mb = MB(ctx->opcode);
    uint32_t me = ME(ctx->opcode);
    target_ulong mask;

#if defined(TARGET_PPC64)
    mb += 32;
    me += 32;
#endif
    mask = MASK(mb, me);

    if (mask <= 0xffffffffu) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(t0, t_rb);
        tcg_gen_trunc_tl_i32(t1, t_rs);
        tcg_gen_andi_i32(t0, t0, 0x1f);
        tcg_gen_rotl_i32(t1, t1, t0);
        tcg_gen_extu_i32_tl(t_ra, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else {
#if defined(TARGET_PPC64)
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_andi_i64(t0, t_rb, 0x1f);
        tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
        tcg_gen_rotl_i64(t_ra, t_ra, t0);
        tcg_temp_free_i64(t0);
#else
        g_assert_not_reached();
#endif
    }

    tcg_gen_andi_tl(t_ra, t_ra, mask);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}

#if defined(TARGET_PPC64)
#define GEN_PPC64_R2(name, opc1, opc2) \
static void glue(gen_, name##0)(DisasContext *ctx) \
{ \
    gen_##name(ctx, 0); \
} \
 \
static void glue(gen_, name##1)(DisasContext *ctx) \
{ \
    gen_##name(ctx, 1); \
}
#define GEN_PPC64_R4(name, opc1, opc2) \
static void glue(gen_, name##0)(DisasContext *ctx) \
{ \
    gen_##name(ctx, 0, 0); \
} \
 \
static void glue(gen_, name##1)(DisasContext *ctx) \
{ \
    gen_##name(ctx, 0, 1); \
} \
 \
static void glue(gen_, name##2)(DisasContext *ctx) \
{ \
    gen_##name(ctx, 1, 0); \
} \
 \
static void glue(gen_, name##3)(DisasContext *ctx) \
{ \
    gen_##name(ctx, 1, 1); \
}

static void gen_rldinm(DisasContext *ctx, int mb, int me, int sh)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    int len = me - mb + 1;
    int rsh = (64 - sh) & 63;

    if (sh != 0 && len > 0 && me == (63 - sh)) {
tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len); 2011 } else if (me == 63 && rsh + len <= 64) { 2012 tcg_gen_extract_tl(t_ra, t_rs, rsh, len); 2013 } else { 2014 tcg_gen_rotli_tl(t_ra, t_rs, sh); 2015 tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me)); 2016 } 2017 if (unlikely(Rc(ctx->opcode) != 0)) { 2018 gen_set_Rc0(ctx, t_ra); 2019 } 2020 } 2021 2022 /* rldicl - rldicl. */ 2023 static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn) 2024 { 2025 uint32_t sh, mb; 2026 2027 sh = SH(ctx->opcode) | (shn << 5); 2028 mb = MB(ctx->opcode) | (mbn << 5); 2029 gen_rldinm(ctx, mb, 63, sh); 2030 } 2031 GEN_PPC64_R4(rldicl, 0x1E, 0x00); 2032 2033 /* rldicr - rldicr. */ 2034 static inline void gen_rldicr(DisasContext *ctx, int men, int shn) 2035 { 2036 uint32_t sh, me; 2037 2038 sh = SH(ctx->opcode) | (shn << 5); 2039 me = MB(ctx->opcode) | (men << 5); 2040 gen_rldinm(ctx, 0, me, sh); 2041 } 2042 GEN_PPC64_R4(rldicr, 0x1E, 0x02); 2043 2044 /* rldic - rldic. */ 2045 static inline void gen_rldic(DisasContext *ctx, int mbn, int shn) 2046 { 2047 uint32_t sh, mb; 2048 2049 sh = SH(ctx->opcode) | (shn << 5); 2050 mb = MB(ctx->opcode) | (mbn << 5); 2051 gen_rldinm(ctx, mb, 63 - sh, sh); 2052 } 2053 GEN_PPC64_R4(rldic, 0x1E, 0x04); 2054 2055 static void gen_rldnm(DisasContext *ctx, int mb, int me) 2056 { 2057 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2058 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2059 TCGv t_rb = cpu_gpr[rB(ctx->opcode)]; 2060 TCGv t0; 2061 2062 t0 = tcg_temp_new(); 2063 tcg_gen_andi_tl(t0, t_rb, 0x3f); 2064 tcg_gen_rotl_tl(t_ra, t_rs, t0); 2065 tcg_temp_free(t0); 2066 2067 tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me)); 2068 if (unlikely(Rc(ctx->opcode) != 0)) { 2069 gen_set_Rc0(ctx, t_ra); 2070 } 2071 } 2072 2073 /* rldcl - rldcl. */ 2074 static inline void gen_rldcl(DisasContext *ctx, int mbn) 2075 { 2076 uint32_t mb; 2077 2078 mb = MB(ctx->opcode) | (mbn << 5); 2079 gen_rldnm(ctx, mb, 63); 2080 } 2081 GEN_PPC64_R2(rldcl, 0x1E, 0x08); 2082 2083 /* rldcr - rldcr. */ 2084 static inline void gen_rldcr(DisasContext *ctx, int men) 2085 { 2086 uint32_t me; 2087 2088 me = MB(ctx->opcode) | (men << 5); 2089 gen_rldnm(ctx, 0, me); 2090 } 2091 GEN_PPC64_R2(rldcr, 0x1E, 0x09); 2092 2093 /* rldimi - rldimi. */ 2094 static void gen_rldimi(DisasContext *ctx, int mbn, int shn) 2095 { 2096 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2097 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2098 uint32_t sh = SH(ctx->opcode) | (shn << 5); 2099 uint32_t mb = MB(ctx->opcode) | (mbn << 5); 2100 uint32_t me = 63 - sh; 2101 2102 if (mb <= me) { 2103 tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1); 2104 } else { 2105 target_ulong mask = MASK(mb, me); 2106 TCGv t1 = tcg_temp_new(); 2107 2108 tcg_gen_rotli_tl(t1, t_rs, sh); 2109 tcg_gen_andi_tl(t1, t1, mask); 2110 tcg_gen_andi_tl(t_ra, t_ra, ~mask); 2111 tcg_gen_or_tl(t_ra, t_ra, t1); 2112 tcg_temp_free(t1); 2113 } 2114 if (unlikely(Rc(ctx->opcode) != 0)) { 2115 gen_set_Rc0(ctx, t_ra); 2116 } 2117 } 2118 GEN_PPC64_R4(rldimi, 0x1E, 0x06); 2119 #endif 2120 2121 /*** Integer shift ***/ 2122 2123 /* slw & slw. 
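 *
 * The shift amount is the low six bits of rB: if bit 5 is set, the
 * architectural result is zero.  The code below models this by moving that
 * bit into the sign position and arithmetically shifting it back across
 * the register, producing an all-ones or all-zeroes mask that clears rS
 * before the remaining 5-bit shift is applied.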
*/ 2124 static void gen_slw(DisasContext *ctx) 2125 { 2126 TCGv t0, t1; 2127 2128 t0 = tcg_temp_new(); 2129 /* AND rS with a mask that is 0 when rB >= 0x20 */ 2130 #if defined(TARGET_PPC64) 2131 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a); 2132 tcg_gen_sari_tl(t0, t0, 0x3f); 2133 #else 2134 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a); 2135 tcg_gen_sari_tl(t0, t0, 0x1f); 2136 #endif 2137 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 2138 t1 = tcg_temp_new(); 2139 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f); 2140 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 2141 tcg_temp_free(t1); 2142 tcg_temp_free(t0); 2143 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 2144 if (unlikely(Rc(ctx->opcode) != 0)) 2145 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2146 } 2147 2148 /* sraw & sraw. */ 2149 static void gen_sraw(DisasContext *ctx) 2150 { 2151 gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], cpu_env, 2152 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2153 if (unlikely(Rc(ctx->opcode) != 0)) 2154 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2155 } 2156 2157 /* srawi & srawi. */ 2158 static void gen_srawi(DisasContext *ctx) 2159 { 2160 int sh = SH(ctx->opcode); 2161 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 2162 TCGv src = cpu_gpr[rS(ctx->opcode)]; 2163 if (sh == 0) { 2164 tcg_gen_ext32s_tl(dst, src); 2165 tcg_gen_movi_tl(cpu_ca, 0); 2166 if (is_isa300(ctx)) { 2167 tcg_gen_movi_tl(cpu_ca32, 0); 2168 } 2169 } else { 2170 TCGv t0; 2171 tcg_gen_ext32s_tl(dst, src); 2172 tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1); 2173 t0 = tcg_temp_new(); 2174 tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1); 2175 tcg_gen_and_tl(cpu_ca, cpu_ca, t0); 2176 tcg_temp_free(t0); 2177 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0); 2178 if (is_isa300(ctx)) { 2179 tcg_gen_mov_tl(cpu_ca32, cpu_ca); 2180 } 2181 tcg_gen_sari_tl(dst, dst, sh); 2182 } 2183 if (unlikely(Rc(ctx->opcode) != 0)) { 2184 gen_set_Rc0(ctx, dst); 2185 } 2186 } 2187 2188 /* srw & srw. */ 2189 static void gen_srw(DisasContext *ctx) 2190 { 2191 TCGv t0, t1; 2192 2193 t0 = tcg_temp_new(); 2194 /* AND rS with a mask that is 0 when rB >= 0x20 */ 2195 #if defined(TARGET_PPC64) 2196 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a); 2197 tcg_gen_sari_tl(t0, t0, 0x3f); 2198 #else 2199 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a); 2200 tcg_gen_sari_tl(t0, t0, 0x1f); 2201 #endif 2202 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 2203 tcg_gen_ext32u_tl(t0, t0); 2204 t1 = tcg_temp_new(); 2205 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f); 2206 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 2207 tcg_temp_free(t1); 2208 tcg_temp_free(t0); 2209 if (unlikely(Rc(ctx->opcode) != 0)) 2210 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2211 } 2212 2213 #if defined(TARGET_PPC64) 2214 /* sld & sld. */ 2215 static void gen_sld(DisasContext *ctx) 2216 { 2217 TCGv t0, t1; 2218 2219 t0 = tcg_temp_new(); 2220 /* AND rS with a mask that is 0 when rB >= 0x40 */ 2221 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39); 2222 tcg_gen_sari_tl(t0, t0, 0x3f); 2223 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 2224 t1 = tcg_temp_new(); 2225 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f); 2226 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 2227 tcg_temp_free(t1); 2228 tcg_temp_free(t0); 2229 if (unlikely(Rc(ctx->opcode) != 0)) 2230 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2231 } 2232 2233 /* srad & srad. 
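 *
 * srad shifts rS right algebraically by the low seven bits of rB: amounts
 * of 64 and above replicate the sign bit across the whole result, and CA
 * (plus CA32 on ISA 3.0) must record whether a negative source lost any
 * 1 bits, so the operation is left to a runtime helper rather than being
 * open-coded here.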
*/ 2234 static void gen_srad(DisasContext *ctx) 2235 { 2236 gen_helper_srad(cpu_gpr[rA(ctx->opcode)], cpu_env, 2237 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2238 if (unlikely(Rc(ctx->opcode) != 0)) 2239 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2240 } 2241 /* sradi & sradi. */ 2242 static inline void gen_sradi(DisasContext *ctx, int n) 2243 { 2244 int sh = SH(ctx->opcode) + (n << 5); 2245 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 2246 TCGv src = cpu_gpr[rS(ctx->opcode)]; 2247 if (sh == 0) { 2248 tcg_gen_mov_tl(dst, src); 2249 tcg_gen_movi_tl(cpu_ca, 0); 2250 if (is_isa300(ctx)) { 2251 tcg_gen_movi_tl(cpu_ca32, 0); 2252 } 2253 } else { 2254 TCGv t0; 2255 tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1); 2256 t0 = tcg_temp_new(); 2257 tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1); 2258 tcg_gen_and_tl(cpu_ca, cpu_ca, t0); 2259 tcg_temp_free(t0); 2260 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0); 2261 if (is_isa300(ctx)) { 2262 tcg_gen_mov_tl(cpu_ca32, cpu_ca); 2263 } 2264 tcg_gen_sari_tl(dst, src, sh); 2265 } 2266 if (unlikely(Rc(ctx->opcode) != 0)) { 2267 gen_set_Rc0(ctx, dst); 2268 } 2269 } 2270 2271 static void gen_sradi0(DisasContext *ctx) 2272 { 2273 gen_sradi(ctx, 0); 2274 } 2275 2276 static void gen_sradi1(DisasContext *ctx) 2277 { 2278 gen_sradi(ctx, 1); 2279 } 2280 2281 /* extswsli & extswsli. */ 2282 static inline void gen_extswsli(DisasContext *ctx, int n) 2283 { 2284 int sh = SH(ctx->opcode) + (n << 5); 2285 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 2286 TCGv src = cpu_gpr[rS(ctx->opcode)]; 2287 2288 tcg_gen_ext32s_tl(dst, src); 2289 tcg_gen_shli_tl(dst, dst, sh); 2290 if (unlikely(Rc(ctx->opcode) != 0)) { 2291 gen_set_Rc0(ctx, dst); 2292 } 2293 } 2294 2295 static void gen_extswsli0(DisasContext *ctx) 2296 { 2297 gen_extswsli(ctx, 0); 2298 } 2299 2300 static void gen_extswsli1(DisasContext *ctx) 2301 { 2302 gen_extswsli(ctx, 1); 2303 } 2304 2305 /* srd & srd. 
*/ 2306 static void gen_srd(DisasContext *ctx) 2307 { 2308 TCGv t0, t1; 2309 2310 t0 = tcg_temp_new(); 2311 /* AND rS with a mask that is 0 when rB >= 0x40 */ 2312 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39); 2313 tcg_gen_sari_tl(t0, t0, 0x3f); 2314 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 2315 t1 = tcg_temp_new(); 2316 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f); 2317 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 2318 tcg_temp_free(t1); 2319 tcg_temp_free(t0); 2320 if (unlikely(Rc(ctx->opcode) != 0)) 2321 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2322 } 2323 #endif 2324 2325 /*** Addressing modes ***/ 2326 /* Register indirect with immediate index : EA = (rA|0) + SIMM */ 2327 static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA, 2328 target_long maskl) 2329 { 2330 target_long simm = SIMM(ctx->opcode); 2331 2332 simm &= ~maskl; 2333 if (rA(ctx->opcode) == 0) { 2334 if (NARROW_MODE(ctx)) { 2335 simm = (uint32_t)simm; 2336 } 2337 tcg_gen_movi_tl(EA, simm); 2338 } else if (likely(simm != 0)) { 2339 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm); 2340 if (NARROW_MODE(ctx)) { 2341 tcg_gen_ext32u_tl(EA, EA); 2342 } 2343 } else { 2344 if (NARROW_MODE(ctx)) { 2345 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]); 2346 } else { 2347 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]); 2348 } 2349 } 2350 } 2351 2352 static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA) 2353 { 2354 if (rA(ctx->opcode) == 0) { 2355 if (NARROW_MODE(ctx)) { 2356 tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]); 2357 } else { 2358 tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]); 2359 } 2360 } else { 2361 tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2362 if (NARROW_MODE(ctx)) { 2363 tcg_gen_ext32u_tl(EA, EA); 2364 } 2365 } 2366 } 2367 2368 static inline void gen_addr_register(DisasContext *ctx, TCGv EA) 2369 { 2370 if (rA(ctx->opcode) == 0) { 2371 tcg_gen_movi_tl(EA, 0); 2372 } else if (NARROW_MODE(ctx)) { 2373 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]); 2374 } else { 2375 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]); 2376 } 2377 } 2378 2379 static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1, 2380 target_long val) 2381 { 2382 tcg_gen_addi_tl(ret, arg1, val); 2383 if (NARROW_MODE(ctx)) { 2384 tcg_gen_ext32u_tl(ret, ret); 2385 } 2386 } 2387 2388 static inline void gen_align_no_le(DisasContext *ctx) 2389 { 2390 gen_exception_err(ctx, POWERPC_EXCP_ALIGN, 2391 (ctx->opcode & 0x03FF0000) | POWERPC_EXCP_ALIGN_LE); 2392 } 2393 2394 /*** Integer load ***/ 2395 #define DEF_MEMOP(op) ((op) | ctx->default_tcg_memop_mask) 2396 #define BSWAP_MEMOP(op) ((op) | (ctx->default_tcg_memop_mask ^ MO_BSWAP)) 2397 2398 #define GEN_QEMU_LOAD_TL(ldop, op) \ 2399 static void glue(gen_qemu_, ldop)(DisasContext *ctx, \ 2400 TCGv val, \ 2401 TCGv addr) \ 2402 { \ 2403 tcg_gen_qemu_ld_tl(val, addr, ctx->mem_idx, op); \ 2404 } 2405 2406 GEN_QEMU_LOAD_TL(ld8u, DEF_MEMOP(MO_UB)) 2407 GEN_QEMU_LOAD_TL(ld16u, DEF_MEMOP(MO_UW)) 2408 GEN_QEMU_LOAD_TL(ld16s, DEF_MEMOP(MO_SW)) 2409 GEN_QEMU_LOAD_TL(ld32u, DEF_MEMOP(MO_UL)) 2410 GEN_QEMU_LOAD_TL(ld32s, DEF_MEMOP(MO_SL)) 2411 2412 GEN_QEMU_LOAD_TL(ld16ur, BSWAP_MEMOP(MO_UW)) 2413 GEN_QEMU_LOAD_TL(ld32ur, BSWAP_MEMOP(MO_UL)) 2414 2415 #define GEN_QEMU_LOAD_64(ldop, op) \ 2416 static void glue(gen_qemu_, glue(ldop, _i64))(DisasContext *ctx, \ 2417 TCGv_i64 val, \ 2418 TCGv addr) \ 2419 { \ 2420 tcg_gen_qemu_ld_i64(val, addr, ctx->mem_idx, op); \ 2421 } 2422 2423 GEN_QEMU_LOAD_64(ld8u, DEF_MEMOP(MO_UB)) 2424 
GEN_QEMU_LOAD_64(ld16u, DEF_MEMOP(MO_UW)) 2425 GEN_QEMU_LOAD_64(ld32u, DEF_MEMOP(MO_UL)) 2426 GEN_QEMU_LOAD_64(ld32s, DEF_MEMOP(MO_SL)) 2427 GEN_QEMU_LOAD_64(ld64, DEF_MEMOP(MO_Q)) 2428 2429 #if defined(TARGET_PPC64) 2430 GEN_QEMU_LOAD_64(ld64ur, BSWAP_MEMOP(MO_Q)) 2431 #endif 2432 2433 #define GEN_QEMU_STORE_TL(stop, op) \ 2434 static void glue(gen_qemu_, stop)(DisasContext *ctx, \ 2435 TCGv val, \ 2436 TCGv addr) \ 2437 { \ 2438 tcg_gen_qemu_st_tl(val, addr, ctx->mem_idx, op); \ 2439 } 2440 2441 GEN_QEMU_STORE_TL(st8, DEF_MEMOP(MO_UB)) 2442 GEN_QEMU_STORE_TL(st16, DEF_MEMOP(MO_UW)) 2443 GEN_QEMU_STORE_TL(st32, DEF_MEMOP(MO_UL)) 2444 2445 GEN_QEMU_STORE_TL(st16r, BSWAP_MEMOP(MO_UW)) 2446 GEN_QEMU_STORE_TL(st32r, BSWAP_MEMOP(MO_UL)) 2447 2448 #define GEN_QEMU_STORE_64(stop, op) \ 2449 static void glue(gen_qemu_, glue(stop, _i64))(DisasContext *ctx, \ 2450 TCGv_i64 val, \ 2451 TCGv addr) \ 2452 { \ 2453 tcg_gen_qemu_st_i64(val, addr, ctx->mem_idx, op); \ 2454 } 2455 2456 GEN_QEMU_STORE_64(st8, DEF_MEMOP(MO_UB)) 2457 GEN_QEMU_STORE_64(st16, DEF_MEMOP(MO_UW)) 2458 GEN_QEMU_STORE_64(st32, DEF_MEMOP(MO_UL)) 2459 GEN_QEMU_STORE_64(st64, DEF_MEMOP(MO_Q)) 2460 2461 #if defined(TARGET_PPC64) 2462 GEN_QEMU_STORE_64(st64r, BSWAP_MEMOP(MO_Q)) 2463 #endif 2464 2465 #define GEN_LD(name, ldop, opc, type) \ 2466 static void glue(gen_, name)(DisasContext *ctx) \ 2467 { \ 2468 TCGv EA; \ 2469 gen_set_access_type(ctx, ACCESS_INT); \ 2470 EA = tcg_temp_new(); \ 2471 gen_addr_imm_index(ctx, EA, 0); \ 2472 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 2473 tcg_temp_free(EA); \ 2474 } 2475 2476 #define GEN_LDU(name, ldop, opc, type) \ 2477 static void glue(gen_, name##u)(DisasContext *ctx) \ 2478 { \ 2479 TCGv EA; \ 2480 if (unlikely(rA(ctx->opcode) == 0 || \ 2481 rA(ctx->opcode) == rD(ctx->opcode))) { \ 2482 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 2483 return; \ 2484 } \ 2485 gen_set_access_type(ctx, ACCESS_INT); \ 2486 EA = tcg_temp_new(); \ 2487 if (type == PPC_64B) \ 2488 gen_addr_imm_index(ctx, EA, 0x03); \ 2489 else \ 2490 gen_addr_imm_index(ctx, EA, 0); \ 2491 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 2492 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 2493 tcg_temp_free(EA); \ 2494 } 2495 2496 #define GEN_LDUX(name, ldop, opc2, opc3, type) \ 2497 static void glue(gen_, name##ux)(DisasContext *ctx) \ 2498 { \ 2499 TCGv EA; \ 2500 if (unlikely(rA(ctx->opcode) == 0 || \ 2501 rA(ctx->opcode) == rD(ctx->opcode))) { \ 2502 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 2503 return; \ 2504 } \ 2505 gen_set_access_type(ctx, ACCESS_INT); \ 2506 EA = tcg_temp_new(); \ 2507 gen_addr_reg_index(ctx, EA); \ 2508 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 2509 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 2510 tcg_temp_free(EA); \ 2511 } 2512 2513 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \ 2514 static void glue(gen_, name##x)(DisasContext *ctx) \ 2515 { \ 2516 TCGv EA; \ 2517 chk; \ 2518 gen_set_access_type(ctx, ACCESS_INT); \ 2519 EA = tcg_temp_new(); \ 2520 gen_addr_reg_index(ctx, EA); \ 2521 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 2522 tcg_temp_free(EA); \ 2523 } 2524 2525 #define GEN_LDX(name, ldop, opc2, opc3, type) \ 2526 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_NONE) 2527 2528 #define GEN_LDX_HVRM(name, ldop, opc2, opc3, type) \ 2529 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_HVRM) 2530 2531 #define GEN_LDS(name, ldop, op, type) \ 2532 GEN_LD(name, ldop, op | 0x20, type); \ 2533 GEN_LDU(name, ldop, op 
| 0x21, type); \ 2534 GEN_LDUX(name, ldop, 0x17, op | 0x01, type); \ 2535 GEN_LDX(name, ldop, 0x17, op | 0x00, type) 2536 2537 /* lbz lbzu lbzux lbzx */ 2538 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER); 2539 /* lha lhau lhaux lhax */ 2540 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER); 2541 /* lhz lhzu lhzux lhzx */ 2542 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER); 2543 /* lwz lwzu lwzux lwzx */ 2544 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER); 2545 2546 #define GEN_LDEPX(name, ldop, opc2, opc3) \ 2547 static void glue(gen_, name##epx)(DisasContext *ctx) \ 2548 { \ 2549 TCGv EA; \ 2550 CHK_SV; \ 2551 gen_set_access_type(ctx, ACCESS_INT); \ 2552 EA = tcg_temp_new(); \ 2553 gen_addr_reg_index(ctx, EA); \ 2554 tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_LOAD, ldop);\ 2555 tcg_temp_free(EA); \ 2556 } 2557 2558 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02) 2559 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08) 2560 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00) 2561 #if defined(TARGET_PPC64) 2562 GEN_LDEPX(ld, DEF_MEMOP(MO_Q), 0x1D, 0x00) 2563 #endif 2564 2565 #if defined(TARGET_PPC64) 2566 /* lwaux */ 2567 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B); 2568 /* lwax */ 2569 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B); 2570 /* ldux */ 2571 GEN_LDUX(ld, ld64_i64, 0x15, 0x01, PPC_64B); 2572 /* ldx */ 2573 GEN_LDX(ld, ld64_i64, 0x15, 0x00, PPC_64B); 2574 2575 /* CI load/store variants */ 2576 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST) 2577 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x15, PPC_CILDST) 2578 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST) 2579 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST) 2580 2581 static void gen_ld(DisasContext *ctx) 2582 { 2583 TCGv EA; 2584 if (Rc(ctx->opcode)) { 2585 if (unlikely(rA(ctx->opcode) == 0 || 2586 rA(ctx->opcode) == rD(ctx->opcode))) { 2587 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2588 return; 2589 } 2590 } 2591 gen_set_access_type(ctx, ACCESS_INT); 2592 EA = tcg_temp_new(); 2593 gen_addr_imm_index(ctx, EA, 0x03); 2594 if (ctx->opcode & 0x02) { 2595 /* lwa (lwau is undefined) */ 2596 gen_qemu_ld32s(ctx, cpu_gpr[rD(ctx->opcode)], EA); 2597 } else { 2598 /* ld - ldu */ 2599 gen_qemu_ld64_i64(ctx, cpu_gpr[rD(ctx->opcode)], EA); 2600 } 2601 if (Rc(ctx->opcode)) 2602 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); 2603 tcg_temp_free(EA); 2604 } 2605 2606 /* lq */ 2607 static void gen_lq(DisasContext *ctx) 2608 { 2609 int ra, rd; 2610 TCGv EA, hi, lo; 2611 2612 /* lq is a legal user mode instruction starting in ISA 2.07 */ 2613 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2614 bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2615 2616 if (!legal_in_user_mode && ctx->pr) { 2617 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); 2618 return; 2619 } 2620 2621 if (!le_is_supported && ctx->le_mode) { 2622 gen_align_no_le(ctx); 2623 return; 2624 } 2625 ra = rA(ctx->opcode); 2626 rd = rD(ctx->opcode); 2627 if (unlikely((rd & 1) || rd == ra)) { 2628 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2629 return; 2630 } 2631 2632 gen_set_access_type(ctx, ACCESS_INT); 2633 EA = tcg_temp_new(); 2634 gen_addr_imm_index(ctx, EA, 0x0F); 2635 2636 /* Note that the low part is always in RD+1, even in LE mode. 
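 * Only the order of the two 8-byte accesses below depends on the byte
 * order.  When other vCPUs may be running (CF_PARALLEL), the quadword is
 * instead fetched through a single 16-byte atomic helper, or the
 * instruction is restarted under the exclusive lock when the host lacks
 * 128-bit atomics.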
*/ 2637 lo = cpu_gpr[rd + 1]; 2638 hi = cpu_gpr[rd]; 2639 2640 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 2641 if (HAVE_ATOMIC128) { 2642 TCGv_i32 oi = tcg_temp_new_i32(); 2643 if (ctx->le_mode) { 2644 tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ, ctx->mem_idx)); 2645 gen_helper_lq_le_parallel(lo, cpu_env, EA, oi); 2646 } else { 2647 tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ, ctx->mem_idx)); 2648 gen_helper_lq_be_parallel(lo, cpu_env, EA, oi); 2649 } 2650 tcg_temp_free_i32(oi); 2651 tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh)); 2652 } else { 2653 /* Restart with exclusive lock. */ 2654 gen_helper_exit_atomic(cpu_env); 2655 ctx->base.is_jmp = DISAS_NORETURN; 2656 } 2657 } else if (ctx->le_mode) { 2658 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEQ); 2659 gen_addr_add(ctx, EA, EA, 8); 2660 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEQ); 2661 } else { 2662 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEQ); 2663 gen_addr_add(ctx, EA, EA, 8); 2664 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEQ); 2665 } 2666 tcg_temp_free(EA); 2667 } 2668 #endif 2669 2670 /*** Integer store ***/ 2671 #define GEN_ST(name, stop, opc, type) \ 2672 static void glue(gen_, name)(DisasContext *ctx) \ 2673 { \ 2674 TCGv EA; \ 2675 gen_set_access_type(ctx, ACCESS_INT); \ 2676 EA = tcg_temp_new(); \ 2677 gen_addr_imm_index(ctx, EA, 0); \ 2678 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2679 tcg_temp_free(EA); \ 2680 } 2681 2682 #define GEN_STU(name, stop, opc, type) \ 2683 static void glue(gen_, stop##u)(DisasContext *ctx) \ 2684 { \ 2685 TCGv EA; \ 2686 if (unlikely(rA(ctx->opcode) == 0)) { \ 2687 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 2688 return; \ 2689 } \ 2690 gen_set_access_type(ctx, ACCESS_INT); \ 2691 EA = tcg_temp_new(); \ 2692 if (type == PPC_64B) \ 2693 gen_addr_imm_index(ctx, EA, 0x03); \ 2694 else \ 2695 gen_addr_imm_index(ctx, EA, 0); \ 2696 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2697 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 2698 tcg_temp_free(EA); \ 2699 } 2700 2701 #define GEN_STUX(name, stop, opc2, opc3, type) \ 2702 static void glue(gen_, name##ux)(DisasContext *ctx) \ 2703 { \ 2704 TCGv EA; \ 2705 if (unlikely(rA(ctx->opcode) == 0)) { \ 2706 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 2707 return; \ 2708 } \ 2709 gen_set_access_type(ctx, ACCESS_INT); \ 2710 EA = tcg_temp_new(); \ 2711 gen_addr_reg_index(ctx, EA); \ 2712 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2713 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 2714 tcg_temp_free(EA); \ 2715 } 2716 2717 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \ 2718 static void glue(gen_, name##x)(DisasContext *ctx) \ 2719 { \ 2720 TCGv EA; \ 2721 chk; \ 2722 gen_set_access_type(ctx, ACCESS_INT); \ 2723 EA = tcg_temp_new(); \ 2724 gen_addr_reg_index(ctx, EA); \ 2725 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2726 tcg_temp_free(EA); \ 2727 } 2728 #define GEN_STX(name, stop, opc2, opc3, type) \ 2729 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_NONE) 2730 2731 #define GEN_STX_HVRM(name, stop, opc2, opc3, type) \ 2732 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_HVRM) 2733 2734 #define GEN_STS(name, stop, op, type) \ 2735 GEN_ST(name, stop, op | 0x20, type); \ 2736 GEN_STU(name, stop, op | 0x21, type); \ 2737 GEN_STUX(name, stop, 0x17, op | 0x01, type); \ 2738 GEN_STX(name, stop, 0x17, op | 0x00, type) 2739 2740 /* stb stbu stbux stbx */ 2741 GEN_STS(stb, st8, 0x06, PPC_INTEGER); 2742 /* sth sthu sthux sthx */ 2743 GEN_STS(sth, st16, 
0x0C, PPC_INTEGER); 2744 /* stw stwu stwux stwx */ 2745 GEN_STS(stw, st32, 0x04, PPC_INTEGER); 2746 2747 #define GEN_STEPX(name, stop, opc2, opc3) \ 2748 static void glue(gen_, name##epx)(DisasContext *ctx) \ 2749 { \ 2750 TCGv EA; \ 2751 CHK_SV; \ 2752 gen_set_access_type(ctx, ACCESS_INT); \ 2753 EA = tcg_temp_new(); \ 2754 gen_addr_reg_index(ctx, EA); \ 2755 tcg_gen_qemu_st_tl( \ 2756 cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_STORE, stop); \ 2757 tcg_temp_free(EA); \ 2758 } 2759 2760 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06) 2761 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C) 2762 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04) 2763 #if defined(TARGET_PPC64) 2764 GEN_STEPX(std, DEF_MEMOP(MO_Q), 0x1d, 0x04) 2765 #endif 2766 2767 #if defined(TARGET_PPC64) 2768 GEN_STUX(std, st64_i64, 0x15, 0x05, PPC_64B); 2769 GEN_STX(std, st64_i64, 0x15, 0x04, PPC_64B); 2770 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST) 2771 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST) 2772 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST) 2773 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST) 2774 2775 static void gen_std(DisasContext *ctx) 2776 { 2777 int rs; 2778 TCGv EA; 2779 2780 rs = rS(ctx->opcode); 2781 if ((ctx->opcode & 0x3) == 0x2) { /* stq */ 2782 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2783 bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2784 TCGv hi, lo; 2785 2786 if (!(ctx->insns_flags & PPC_64BX)) { 2787 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2788 } 2789 2790 if (!legal_in_user_mode && ctx->pr) { 2791 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); 2792 return; 2793 } 2794 2795 if (!le_is_supported && ctx->le_mode) { 2796 gen_align_no_le(ctx); 2797 return; 2798 } 2799 2800 if (unlikely(rs & 1)) { 2801 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2802 return; 2803 } 2804 gen_set_access_type(ctx, ACCESS_INT); 2805 EA = tcg_temp_new(); 2806 gen_addr_imm_index(ctx, EA, 0x03); 2807 2808 /* Note that the low part is always in RS+1, even in LE mode. */ 2809 lo = cpu_gpr[rs + 1]; 2810 hi = cpu_gpr[rs]; 2811 2812 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 2813 if (HAVE_ATOMIC128) { 2814 TCGv_i32 oi = tcg_temp_new_i32(); 2815 if (ctx->le_mode) { 2816 tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ, ctx->mem_idx)); 2817 gen_helper_stq_le_parallel(cpu_env, EA, lo, hi, oi); 2818 } else { 2819 tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ, ctx->mem_idx)); 2820 gen_helper_stq_be_parallel(cpu_env, EA, lo, hi, oi); 2821 } 2822 tcg_temp_free_i32(oi); 2823 } else { 2824 /* Restart with exclusive lock. 
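 * gen_helper_exit_atomic raises EXCP_ATOMIC, which makes the outer
 * execution loop replay just this instruction with all other vCPUs
 * stopped, so the 16-byte store still behaves atomically on hosts
 * without 128-bit atomic support.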
*/ 2825 gen_helper_exit_atomic(cpu_env); 2826 ctx->base.is_jmp = DISAS_NORETURN; 2827 } 2828 } else if (ctx->le_mode) { 2829 tcg_gen_qemu_st_i64(lo, EA, ctx->mem_idx, MO_LEQ); 2830 gen_addr_add(ctx, EA, EA, 8); 2831 tcg_gen_qemu_st_i64(hi, EA, ctx->mem_idx, MO_LEQ); 2832 } else { 2833 tcg_gen_qemu_st_i64(hi, EA, ctx->mem_idx, MO_BEQ); 2834 gen_addr_add(ctx, EA, EA, 8); 2835 tcg_gen_qemu_st_i64(lo, EA, ctx->mem_idx, MO_BEQ); 2836 } 2837 tcg_temp_free(EA); 2838 } else { 2839 /* std / stdu */ 2840 if (Rc(ctx->opcode)) { 2841 if (unlikely(rA(ctx->opcode) == 0)) { 2842 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2843 return; 2844 } 2845 } 2846 gen_set_access_type(ctx, ACCESS_INT); 2847 EA = tcg_temp_new(); 2848 gen_addr_imm_index(ctx, EA, 0x03); 2849 gen_qemu_st64_i64(ctx, cpu_gpr[rs], EA); 2850 if (Rc(ctx->opcode)) 2851 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); 2852 tcg_temp_free(EA); 2853 } 2854 } 2855 #endif 2856 /*** Integer load and store with byte reverse ***/ 2857 2858 /* lhbrx */ 2859 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER); 2860 2861 /* lwbrx */ 2862 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER); 2863 2864 #if defined(TARGET_PPC64) 2865 /* ldbrx */ 2866 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE); 2867 /* stdbrx */ 2868 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE); 2869 #endif /* TARGET_PPC64 */ 2870 2871 /* sthbrx */ 2872 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER); 2873 /* stwbrx */ 2874 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER); 2875 2876 /*** Integer load and store multiple ***/ 2877 2878 /* lmw */ 2879 static void gen_lmw(DisasContext *ctx) 2880 { 2881 TCGv t0; 2882 TCGv_i32 t1; 2883 2884 if (ctx->le_mode) { 2885 gen_align_no_le(ctx); 2886 return; 2887 } 2888 gen_set_access_type(ctx, ACCESS_INT); 2889 t0 = tcg_temp_new(); 2890 t1 = tcg_const_i32(rD(ctx->opcode)); 2891 gen_addr_imm_index(ctx, t0, 0); 2892 gen_helper_lmw(cpu_env, t0, t1); 2893 tcg_temp_free(t0); 2894 tcg_temp_free_i32(t1); 2895 } 2896 2897 /* stmw */ 2898 static void gen_stmw(DisasContext *ctx) 2899 { 2900 TCGv t0; 2901 TCGv_i32 t1; 2902 2903 if (ctx->le_mode) { 2904 gen_align_no_le(ctx); 2905 return; 2906 } 2907 gen_set_access_type(ctx, ACCESS_INT); 2908 t0 = tcg_temp_new(); 2909 t1 = tcg_const_i32(rS(ctx->opcode)); 2910 gen_addr_imm_index(ctx, t0, 0); 2911 gen_helper_stmw(cpu_env, t0, t1); 2912 tcg_temp_free(t0); 2913 tcg_temp_free_i32(t1); 2914 } 2915 2916 /*** Integer load and store strings ***/ 2917 2918 /* lswi */ 2919 /* PowerPC32 specification says we must generate an exception if 2920 * rA is in the range of registers to be loaded. 2921 * In an other hand, IBM says this is valid, but rA won't be loaded. 2922 * For now, I'll follow the spec... 
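 *
 * For reference, lswi moves NB bytes (NB = 0 means 32) from (rA|0) into
 * successive registers four bytes at a time, wrapping from r31 back to
 * r0; e.g. lswi r5,r4,7 loads four bytes into r5 and three into r6,
 * left-aligned within the word, with the unfilled bytes cleared.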
2923 */ 2924 static void gen_lswi(DisasContext *ctx) 2925 { 2926 TCGv t0; 2927 TCGv_i32 t1, t2; 2928 int nb = NB(ctx->opcode); 2929 int start = rD(ctx->opcode); 2930 int ra = rA(ctx->opcode); 2931 int nr; 2932 2933 if (ctx->le_mode) { 2934 gen_align_no_le(ctx); 2935 return; 2936 } 2937 if (nb == 0) 2938 nb = 32; 2939 nr = DIV_ROUND_UP(nb, 4); 2940 if (unlikely(lsw_reg_in_range(start, nr, ra))) { 2941 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX); 2942 return; 2943 } 2944 gen_set_access_type(ctx, ACCESS_INT); 2945 t0 = tcg_temp_new(); 2946 gen_addr_register(ctx, t0); 2947 t1 = tcg_const_i32(nb); 2948 t2 = tcg_const_i32(start); 2949 gen_helper_lsw(cpu_env, t0, t1, t2); 2950 tcg_temp_free(t0); 2951 tcg_temp_free_i32(t1); 2952 tcg_temp_free_i32(t2); 2953 } 2954 2955 /* lswx */ 2956 static void gen_lswx(DisasContext *ctx) 2957 { 2958 TCGv t0; 2959 TCGv_i32 t1, t2, t3; 2960 2961 if (ctx->le_mode) { 2962 gen_align_no_le(ctx); 2963 return; 2964 } 2965 gen_set_access_type(ctx, ACCESS_INT); 2966 t0 = tcg_temp_new(); 2967 gen_addr_reg_index(ctx, t0); 2968 t1 = tcg_const_i32(rD(ctx->opcode)); 2969 t2 = tcg_const_i32(rA(ctx->opcode)); 2970 t3 = tcg_const_i32(rB(ctx->opcode)); 2971 gen_helper_lswx(cpu_env, t0, t1, t2, t3); 2972 tcg_temp_free(t0); 2973 tcg_temp_free_i32(t1); 2974 tcg_temp_free_i32(t2); 2975 tcg_temp_free_i32(t3); 2976 } 2977 2978 /* stswi */ 2979 static void gen_stswi(DisasContext *ctx) 2980 { 2981 TCGv t0; 2982 TCGv_i32 t1, t2; 2983 int nb = NB(ctx->opcode); 2984 2985 if (ctx->le_mode) { 2986 gen_align_no_le(ctx); 2987 return; 2988 } 2989 gen_set_access_type(ctx, ACCESS_INT); 2990 t0 = tcg_temp_new(); 2991 gen_addr_register(ctx, t0); 2992 if (nb == 0) 2993 nb = 32; 2994 t1 = tcg_const_i32(nb); 2995 t2 = tcg_const_i32(rS(ctx->opcode)); 2996 gen_helper_stsw(cpu_env, t0, t1, t2); 2997 tcg_temp_free(t0); 2998 tcg_temp_free_i32(t1); 2999 tcg_temp_free_i32(t2); 3000 } 3001 3002 /* stswx */ 3003 static void gen_stswx(DisasContext *ctx) 3004 { 3005 TCGv t0; 3006 TCGv_i32 t1, t2; 3007 3008 if (ctx->le_mode) { 3009 gen_align_no_le(ctx); 3010 return; 3011 } 3012 gen_set_access_type(ctx, ACCESS_INT); 3013 t0 = tcg_temp_new(); 3014 gen_addr_reg_index(ctx, t0); 3015 t1 = tcg_temp_new_i32(); 3016 tcg_gen_trunc_tl_i32(t1, cpu_xer); 3017 tcg_gen_andi_i32(t1, t1, 0x7F); 3018 t2 = tcg_const_i32(rS(ctx->opcode)); 3019 gen_helper_stsw(cpu_env, t0, t1, t2); 3020 tcg_temp_free(t0); 3021 tcg_temp_free_i32(t1); 3022 tcg_temp_free_i32(t2); 3023 } 3024 3025 /*** Memory synchronisation ***/ 3026 /* eieio */ 3027 static void gen_eieio(DisasContext *ctx) 3028 { 3029 TCGBar bar = TCG_MO_LD_ST; 3030 3031 /* 3032 * POWER9 has a eieio instruction variant using bit 6 as a hint to 3033 * tell the CPU it is a store-forwarding barrier. 3034 */ 3035 if (ctx->opcode & 0x2000000) { 3036 /* 3037 * ISA says that "Reserved fields in instructions are ignored 3038 * by the processor". So ignore the bit 6 on non-POWER9 CPU but 3039 * as this is not an instruction software should be using, 3040 * complain to the user. 
3041 */ 3042 if (!(ctx->insns_flags2 & PPC2_ISA300)) { 3043 qemu_log_mask(LOG_GUEST_ERROR, "invalid eieio using bit 6 at @" 3044 TARGET_FMT_lx "\n", ctx->base.pc_next - 4); 3045 } else { 3046 bar = TCG_MO_ST_LD; 3047 } 3048 } 3049 3050 tcg_gen_mb(bar | TCG_BAR_SC); 3051 } 3052 3053 #if !defined(CONFIG_USER_ONLY) 3054 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) 3055 { 3056 TCGv_i32 t; 3057 TCGLabel *l; 3058 3059 if (!ctx->lazy_tlb_flush) { 3060 return; 3061 } 3062 l = gen_new_label(); 3063 t = tcg_temp_new_i32(); 3064 tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, tlb_need_flush)); 3065 tcg_gen_brcondi_i32(TCG_COND_EQ, t, 0, l); 3066 if (global) { 3067 gen_helper_check_tlb_flush_global(cpu_env); 3068 } else { 3069 gen_helper_check_tlb_flush_local(cpu_env); 3070 } 3071 gen_set_label(l); 3072 tcg_temp_free_i32(t); 3073 } 3074 #else 3075 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) { } 3076 #endif 3077 3078 /* isync */ 3079 static void gen_isync(DisasContext *ctx) 3080 { 3081 /* 3082 * We need to check for a pending TLB flush. This can only happen in 3083 * kernel mode however so check MSR_PR 3084 */ 3085 if (!ctx->pr) { 3086 gen_check_tlb_flush(ctx, false); 3087 } 3088 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC); 3089 gen_stop_exception(ctx); 3090 } 3091 3092 #define MEMOP_GET_SIZE(x) (1 << ((x) & MO_SIZE)) 3093 3094 static void gen_load_locked(DisasContext *ctx, TCGMemOp memop) 3095 { 3096 TCGv gpr = cpu_gpr[rD(ctx->opcode)]; 3097 TCGv t0 = tcg_temp_new(); 3098 3099 gen_set_access_type(ctx, ACCESS_RES); 3100 gen_addr_reg_index(ctx, t0); 3101 tcg_gen_qemu_ld_tl(gpr, t0, ctx->mem_idx, memop | MO_ALIGN); 3102 tcg_gen_mov_tl(cpu_reserve, t0); 3103 tcg_gen_mov_tl(cpu_reserve_val, gpr); 3104 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_LDAQ); 3105 tcg_temp_free(t0); 3106 } 3107 3108 #define LARX(name, memop) \ 3109 static void gen_##name(DisasContext *ctx) \ 3110 { \ 3111 gen_load_locked(ctx, memop); \ 3112 } 3113 3114 /* lwarx */ 3115 LARX(lbarx, DEF_MEMOP(MO_UB)) 3116 LARX(lharx, DEF_MEMOP(MO_UW)) 3117 LARX(lwarx, DEF_MEMOP(MO_UL)) 3118 3119 static void gen_fetch_inc_conditional(DisasContext *ctx, TCGMemOp memop, 3120 TCGv EA, TCGCond cond, int addend) 3121 { 3122 TCGv t = tcg_temp_new(); 3123 TCGv t2 = tcg_temp_new(); 3124 TCGv u = tcg_temp_new(); 3125 3126 tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop); 3127 tcg_gen_addi_tl(t2, EA, MEMOP_GET_SIZE(memop)); 3128 tcg_gen_qemu_ld_tl(t2, t2, ctx->mem_idx, memop); 3129 tcg_gen_addi_tl(u, t, addend); 3130 3131 /* E.g. for fetch and increment bounded... */ 3132 /* mem(EA,s) = (t != t2 ? u = t + 1 : t) */ 3133 tcg_gen_movcond_tl(cond, u, t, t2, u, t); 3134 tcg_gen_qemu_st_tl(u, EA, ctx->mem_idx, memop); 3135 3136 /* RT = (t != t2 ? 
t : u = 1<<(s*8-1)) */ 3137 tcg_gen_movi_tl(u, 1 << (MEMOP_GET_SIZE(memop) * 8 - 1)); 3138 tcg_gen_movcond_tl(cond, cpu_gpr[rD(ctx->opcode)], t, t2, t, u); 3139 3140 tcg_temp_free(t); 3141 tcg_temp_free(t2); 3142 tcg_temp_free(u); 3143 } 3144 3145 static void gen_ld_atomic(DisasContext *ctx, TCGMemOp memop) 3146 { 3147 uint32_t gpr_FC = FC(ctx->opcode); 3148 TCGv EA = tcg_temp_new(); 3149 int rt = rD(ctx->opcode); 3150 bool need_serial; 3151 TCGv src, dst; 3152 3153 gen_addr_register(ctx, EA); 3154 dst = cpu_gpr[rt]; 3155 src = cpu_gpr[(rt + 1) & 31]; 3156 3157 need_serial = false; 3158 memop |= MO_ALIGN; 3159 switch (gpr_FC) { 3160 case 0: /* Fetch and add */ 3161 tcg_gen_atomic_fetch_add_tl(dst, EA, src, ctx->mem_idx, memop); 3162 break; 3163 case 1: /* Fetch and xor */ 3164 tcg_gen_atomic_fetch_xor_tl(dst, EA, src, ctx->mem_idx, memop); 3165 break; 3166 case 2: /* Fetch and or */ 3167 tcg_gen_atomic_fetch_or_tl(dst, EA, src, ctx->mem_idx, memop); 3168 break; 3169 case 3: /* Fetch and 'and' */ 3170 tcg_gen_atomic_fetch_and_tl(dst, EA, src, ctx->mem_idx, memop); 3171 break; 3172 case 4: /* Fetch and max unsigned */ 3173 tcg_gen_atomic_fetch_umax_tl(dst, EA, src, ctx->mem_idx, memop); 3174 break; 3175 case 5: /* Fetch and max signed */ 3176 tcg_gen_atomic_fetch_smax_tl(dst, EA, src, ctx->mem_idx, memop); 3177 break; 3178 case 6: /* Fetch and min unsigned */ 3179 tcg_gen_atomic_fetch_umin_tl(dst, EA, src, ctx->mem_idx, memop); 3180 break; 3181 case 7: /* Fetch and min signed */ 3182 tcg_gen_atomic_fetch_smin_tl(dst, EA, src, ctx->mem_idx, memop); 3183 break; 3184 case 8: /* Swap */ 3185 tcg_gen_atomic_xchg_tl(dst, EA, src, ctx->mem_idx, memop); 3186 break; 3187 3188 case 16: /* Compare and swap not equal */ 3189 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3190 need_serial = true; 3191 } else { 3192 TCGv t0 = tcg_temp_new(); 3193 TCGv t1 = tcg_temp_new(); 3194 3195 tcg_gen_qemu_ld_tl(t0, EA, ctx->mem_idx, memop); 3196 if ((memop & MO_SIZE) == MO_64 || TARGET_LONG_BITS == 32) { 3197 tcg_gen_mov_tl(t1, src); 3198 } else { 3199 tcg_gen_ext32u_tl(t1, src); 3200 } 3201 tcg_gen_movcond_tl(TCG_COND_NE, t1, t0, t1, 3202 cpu_gpr[(rt + 2) & 31], t0); 3203 tcg_gen_qemu_st_tl(t1, EA, ctx->mem_idx, memop); 3204 tcg_gen_mov_tl(dst, t0); 3205 3206 tcg_temp_free(t0); 3207 tcg_temp_free(t1); 3208 } 3209 break; 3210 3211 case 24: /* Fetch and increment bounded */ 3212 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3213 need_serial = true; 3214 } else { 3215 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, 1); 3216 } 3217 break; 3218 case 25: /* Fetch and increment equal */ 3219 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3220 need_serial = true; 3221 } else { 3222 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_EQ, 1); 3223 } 3224 break; 3225 case 28: /* Fetch and decrement bounded */ 3226 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3227 need_serial = true; 3228 } else { 3229 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, -1); 3230 } 3231 break; 3232 3233 default: 3234 /* invoke data storage error handler */ 3235 gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL); 3236 } 3237 tcg_temp_free(EA); 3238 3239 if (need_serial) { 3240 /* Restart with exclusive lock. 
*/ 3241 gen_helper_exit_atomic(cpu_env); 3242 ctx->base.is_jmp = DISAS_NORETURN; 3243 } 3244 } 3245 3246 static void gen_lwat(DisasContext *ctx) 3247 { 3248 gen_ld_atomic(ctx, DEF_MEMOP(MO_UL)); 3249 } 3250 3251 #ifdef TARGET_PPC64 3252 static void gen_ldat(DisasContext *ctx) 3253 { 3254 gen_ld_atomic(ctx, DEF_MEMOP(MO_Q)); 3255 } 3256 #endif 3257 3258 static void gen_st_atomic(DisasContext *ctx, TCGMemOp memop) 3259 { 3260 uint32_t gpr_FC = FC(ctx->opcode); 3261 TCGv EA = tcg_temp_new(); 3262 TCGv src, discard; 3263 3264 gen_addr_register(ctx, EA); 3265 src = cpu_gpr[rD(ctx->opcode)]; 3266 discard = tcg_temp_new(); 3267 3268 memop |= MO_ALIGN; 3269 switch (gpr_FC) { 3270 case 0: /* add and Store */ 3271 tcg_gen_atomic_add_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3272 break; 3273 case 1: /* xor and Store */ 3274 tcg_gen_atomic_xor_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3275 break; 3276 case 2: /* Or and Store */ 3277 tcg_gen_atomic_or_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3278 break; 3279 case 3: /* 'and' and Store */ 3280 tcg_gen_atomic_and_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3281 break; 3282 case 4: /* Store max unsigned */ 3283 tcg_gen_atomic_umax_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3284 break; 3285 case 5: /* Store max signed */ 3286 tcg_gen_atomic_smax_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3287 break; 3288 case 6: /* Store min unsigned */ 3289 tcg_gen_atomic_umin_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3290 break; 3291 case 7: /* Store min signed */ 3292 tcg_gen_atomic_smin_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3293 break; 3294 case 24: /* Store twin */ 3295 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3296 /* Restart with exclusive lock. */ 3297 gen_helper_exit_atomic(cpu_env); 3298 ctx->base.is_jmp = DISAS_NORETURN; 3299 } else { 3300 TCGv t = tcg_temp_new(); 3301 TCGv t2 = tcg_temp_new(); 3302 TCGv s = tcg_temp_new(); 3303 TCGv s2 = tcg_temp_new(); 3304 TCGv ea_plus_s = tcg_temp_new(); 3305 3306 tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop); 3307 tcg_gen_addi_tl(ea_plus_s, EA, MEMOP_GET_SIZE(memop)); 3308 tcg_gen_qemu_ld_tl(t2, ea_plus_s, ctx->mem_idx, memop); 3309 tcg_gen_movcond_tl(TCG_COND_EQ, s, t, t2, src, t); 3310 tcg_gen_movcond_tl(TCG_COND_EQ, s2, t, t2, src, t2); 3311 tcg_gen_qemu_st_tl(s, EA, ctx->mem_idx, memop); 3312 tcg_gen_qemu_st_tl(s2, ea_plus_s, ctx->mem_idx, memop); 3313 3314 tcg_temp_free(ea_plus_s); 3315 tcg_temp_free(s2); 3316 tcg_temp_free(s); 3317 tcg_temp_free(t2); 3318 tcg_temp_free(t); 3319 } 3320 break; 3321 default: 3322 /* invoke data storage error handler */ 3323 gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL); 3324 } 3325 tcg_temp_free(discard); 3326 tcg_temp_free(EA); 3327 } 3328 3329 static void gen_stwat(DisasContext *ctx) 3330 { 3331 gen_st_atomic(ctx, DEF_MEMOP(MO_UL)); 3332 } 3333 3334 #ifdef TARGET_PPC64 3335 static void gen_stdat(DisasContext *ctx) 3336 { 3337 gen_st_atomic(ctx, DEF_MEMOP(MO_Q)); 3338 } 3339 #endif 3340 3341 static void gen_conditional_store(DisasContext *ctx, TCGMemOp memop) 3342 { 3343 TCGLabel *l1 = gen_new_label(); 3344 TCGLabel *l2 = gen_new_label(); 3345 TCGv t0 = tcg_temp_new(); 3346 int reg = rS(ctx->opcode); 3347 3348 gen_set_access_type(ctx, ACCESS_RES); 3349 gen_addr_reg_index(ctx, t0); 3350 tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1); 3351 tcg_temp_free(t0); 3352 3353 t0 = tcg_temp_new(); 3354 tcg_gen_atomic_cmpxchg_tl(t0, cpu_reserve, cpu_reserve_val, 3355 cpu_gpr[reg], ctx->mem_idx, 3356 DEF_MEMOP(memop) | 
MO_ALIGN); 3357 tcg_gen_setcond_tl(TCG_COND_EQ, t0, t0, cpu_reserve_val); 3358 tcg_gen_shli_tl(t0, t0, CRF_EQ_BIT); 3359 tcg_gen_or_tl(t0, t0, cpu_so); 3360 tcg_gen_trunc_tl_i32(cpu_crf[0], t0); 3361 tcg_temp_free(t0); 3362 tcg_gen_br(l2); 3363 3364 gen_set_label(l1); 3365 3366 /* Address mismatch implies failure. But we still need to provide the 3367 memory barrier semantics of the instruction. */ 3368 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_STRL); 3369 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 3370 3371 gen_set_label(l2); 3372 tcg_gen_movi_tl(cpu_reserve, -1); 3373 } 3374 3375 #define STCX(name, memop) \ 3376 static void gen_##name(DisasContext *ctx) \ 3377 { \ 3378 gen_conditional_store(ctx, memop); \ 3379 } 3380 3381 STCX(stbcx_, DEF_MEMOP(MO_UB)) 3382 STCX(sthcx_, DEF_MEMOP(MO_UW)) 3383 STCX(stwcx_, DEF_MEMOP(MO_UL)) 3384 3385 #if defined(TARGET_PPC64) 3386 /* ldarx */ 3387 LARX(ldarx, DEF_MEMOP(MO_Q)) 3388 /* stdcx. */ 3389 STCX(stdcx_, DEF_MEMOP(MO_Q)) 3390 3391 /* lqarx */ 3392 static void gen_lqarx(DisasContext *ctx) 3393 { 3394 int rd = rD(ctx->opcode); 3395 TCGv EA, hi, lo; 3396 3397 if (unlikely((rd & 1) || (rd == rA(ctx->opcode)) || 3398 (rd == rB(ctx->opcode)))) { 3399 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3400 return; 3401 } 3402 3403 gen_set_access_type(ctx, ACCESS_RES); 3404 EA = tcg_temp_new(); 3405 gen_addr_reg_index(ctx, EA); 3406 3407 /* Note that the low part is always in RD+1, even in LE mode. */ 3408 lo = cpu_gpr[rd + 1]; 3409 hi = cpu_gpr[rd]; 3410 3411 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3412 if (HAVE_ATOMIC128) { 3413 TCGv_i32 oi = tcg_temp_new_i32(); 3414 if (ctx->le_mode) { 3415 tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ | MO_ALIGN_16, 3416 ctx->mem_idx)); 3417 gen_helper_lq_le_parallel(lo, cpu_env, EA, oi); 3418 } else { 3419 tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ | MO_ALIGN_16, 3420 ctx->mem_idx)); 3421 gen_helper_lq_be_parallel(lo, cpu_env, EA, oi); 3422 } 3423 tcg_temp_free_i32(oi); 3424 tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh)); 3425 } else { 3426 /* Restart with exclusive lock. */ 3427 gen_helper_exit_atomic(cpu_env); 3428 ctx->base.is_jmp = DISAS_NORETURN; 3429 tcg_temp_free(EA); 3430 return; 3431 } 3432 } else if (ctx->le_mode) { 3433 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEQ | MO_ALIGN_16); 3434 tcg_gen_mov_tl(cpu_reserve, EA); 3435 gen_addr_add(ctx, EA, EA, 8); 3436 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEQ); 3437 } else { 3438 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEQ | MO_ALIGN_16); 3439 tcg_gen_mov_tl(cpu_reserve, EA); 3440 gen_addr_add(ctx, EA, EA, 8); 3441 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEQ); 3442 } 3443 tcg_temp_free(EA); 3444 3445 tcg_gen_st_tl(hi, cpu_env, offsetof(CPUPPCState, reserve_val)); 3446 tcg_gen_st_tl(lo, cpu_env, offsetof(CPUPPCState, reserve_val2)); 3447 } 3448 3449 /* stqcx. */ 3450 static void gen_stqcx_(DisasContext *ctx) 3451 { 3452 int rs = rS(ctx->opcode); 3453 TCGv EA, hi, lo; 3454 3455 if (unlikely(rs & 1)) { 3456 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3457 return; 3458 } 3459 3460 gen_set_access_type(ctx, ACCESS_RES); 3461 EA = tcg_temp_new(); 3462 gen_addr_reg_index(ctx, EA); 3463 3464 /* Note that the low part is always in RS+1, even in LE mode. 
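 * The software fallback below succeeds only if EA still equals the
 * reservation address and the quadword in memory matches the value
 * remembered at lqarx time; in that case the new pair is stored and
 * CR0[EQ] is set, otherwise CR0 only reflects SO.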
*/ 3465 lo = cpu_gpr[rs + 1]; 3466 hi = cpu_gpr[rs]; 3467 3468 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3469 if (HAVE_CMPXCHG128) { 3470 TCGv_i32 oi = tcg_const_i32(DEF_MEMOP(MO_Q) | MO_ALIGN_16); 3471 if (ctx->le_mode) { 3472 gen_helper_stqcx_le_parallel(cpu_crf[0], cpu_env, 3473 EA, lo, hi, oi); 3474 } else { 3475 gen_helper_stqcx_be_parallel(cpu_crf[0], cpu_env, 3476 EA, lo, hi, oi); 3477 } 3478 tcg_temp_free_i32(oi); 3479 } else { 3480 /* Restart with exclusive lock. */ 3481 gen_helper_exit_atomic(cpu_env); 3482 ctx->base.is_jmp = DISAS_NORETURN; 3483 } 3484 tcg_temp_free(EA); 3485 } else { 3486 TCGLabel *lab_fail = gen_new_label(); 3487 TCGLabel *lab_over = gen_new_label(); 3488 TCGv_i64 t0 = tcg_temp_new_i64(); 3489 TCGv_i64 t1 = tcg_temp_new_i64(); 3490 3491 tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, lab_fail); 3492 tcg_temp_free(EA); 3493 3494 gen_qemu_ld64_i64(ctx, t0, cpu_reserve); 3495 tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode 3496 ? offsetof(CPUPPCState, reserve_val2) 3497 : offsetof(CPUPPCState, reserve_val))); 3498 tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail); 3499 3500 tcg_gen_addi_i64(t0, cpu_reserve, 8); 3501 gen_qemu_ld64_i64(ctx, t0, t0); 3502 tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode 3503 ? offsetof(CPUPPCState, reserve_val) 3504 : offsetof(CPUPPCState, reserve_val2))); 3505 tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail); 3506 3507 /* Success */ 3508 gen_qemu_st64_i64(ctx, ctx->le_mode ? lo : hi, cpu_reserve); 3509 tcg_gen_addi_i64(t0, cpu_reserve, 8); 3510 gen_qemu_st64_i64(ctx, ctx->le_mode ? hi : lo, t0); 3511 3512 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 3513 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ); 3514 tcg_gen_br(lab_over); 3515 3516 gen_set_label(lab_fail); 3517 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 3518 3519 gen_set_label(lab_over); 3520 tcg_gen_movi_tl(cpu_reserve, -1); 3521 tcg_temp_free_i64(t0); 3522 tcg_temp_free_i64(t1); 3523 } 3524 } 3525 #endif /* defined(TARGET_PPC64) */ 3526 3527 /* sync */ 3528 static void gen_sync(DisasContext *ctx) 3529 { 3530 uint32_t l = (ctx->opcode >> 21) & 3; 3531 3532 /* 3533 * We may need to check for a pending TLB flush. 3534 * 3535 * We do this on ptesync (l == 2) on ppc64 and any sync pn ppc32. 3536 * 3537 * Additionally, this can only happen in kernel mode however so 3538 * check MSR_PR as well. 
3539 */ 3540 if (((l == 2) || !(ctx->insns_flags & PPC_64B)) && !ctx->pr) { 3541 gen_check_tlb_flush(ctx, true); 3542 } 3543 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC); 3544 } 3545 3546 /* wait */ 3547 static void gen_wait(DisasContext *ctx) 3548 { 3549 TCGv_i32 t0 = tcg_const_i32(1); 3550 tcg_gen_st_i32(t0, cpu_env, 3551 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted)); 3552 tcg_temp_free_i32(t0); 3553 /* Stop translation, as the CPU is supposed to sleep from now */ 3554 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 3555 } 3556 3557 #if defined(TARGET_PPC64) 3558 static void gen_doze(DisasContext *ctx) 3559 { 3560 #if defined(CONFIG_USER_ONLY) 3561 GEN_PRIV; 3562 #else 3563 TCGv_i32 t; 3564 3565 CHK_HV; 3566 t = tcg_const_i32(PPC_PM_DOZE); 3567 gen_helper_pminsn(cpu_env, t); 3568 tcg_temp_free_i32(t); 3569 gen_stop_exception(ctx); 3570 #endif /* defined(CONFIG_USER_ONLY) */ 3571 } 3572 3573 static void gen_nap(DisasContext *ctx) 3574 { 3575 #if defined(CONFIG_USER_ONLY) 3576 GEN_PRIV; 3577 #else 3578 TCGv_i32 t; 3579 3580 CHK_HV; 3581 t = tcg_const_i32(PPC_PM_NAP); 3582 gen_helper_pminsn(cpu_env, t); 3583 tcg_temp_free_i32(t); 3584 gen_stop_exception(ctx); 3585 #endif /* defined(CONFIG_USER_ONLY) */ 3586 } 3587 3588 static void gen_stop(DisasContext *ctx) 3589 { 3590 gen_nap(ctx); 3591 } 3592 3593 static void gen_sleep(DisasContext *ctx) 3594 { 3595 #if defined(CONFIG_USER_ONLY) 3596 GEN_PRIV; 3597 #else 3598 TCGv_i32 t; 3599 3600 CHK_HV; 3601 t = tcg_const_i32(PPC_PM_SLEEP); 3602 gen_helper_pminsn(cpu_env, t); 3603 tcg_temp_free_i32(t); 3604 gen_stop_exception(ctx); 3605 #endif /* defined(CONFIG_USER_ONLY) */ 3606 } 3607 3608 static void gen_rvwinkle(DisasContext *ctx) 3609 { 3610 #if defined(CONFIG_USER_ONLY) 3611 GEN_PRIV; 3612 #else 3613 TCGv_i32 t; 3614 3615 CHK_HV; 3616 t = tcg_const_i32(PPC_PM_RVWINKLE); 3617 gen_helper_pminsn(cpu_env, t); 3618 tcg_temp_free_i32(t); 3619 gen_stop_exception(ctx); 3620 #endif /* defined(CONFIG_USER_ONLY) */ 3621 } 3622 #endif /* #if defined(TARGET_PPC64) */ 3623 3624 static inline void gen_update_cfar(DisasContext *ctx, target_ulong nip) 3625 { 3626 #if defined(TARGET_PPC64) 3627 if (ctx->has_cfar) 3628 tcg_gen_movi_tl(cpu_cfar, nip); 3629 #endif 3630 } 3631 3632 static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest) 3633 { 3634 if (unlikely(ctx->singlestep_enabled)) { 3635 return false; 3636 } 3637 3638 #ifndef CONFIG_USER_ONLY 3639 return (ctx->base.tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK); 3640 #else 3641 return true; 3642 #endif 3643 } 3644 3645 static void gen_lookup_and_goto_ptr(DisasContext *ctx) 3646 { 3647 int sse = ctx->singlestep_enabled; 3648 if (unlikely(sse)) { 3649 if (sse & GDBSTUB_SINGLE_STEP) { 3650 gen_debug_exception(ctx); 3651 } else if (sse & (CPU_SINGLE_STEP | CPU_BRANCH_STEP)) { 3652 uint32_t excp = gen_prep_dbgex(ctx); 3653 gen_exception(ctx, excp); 3654 } 3655 tcg_gen_exit_tb(NULL, 0); 3656 } else { 3657 tcg_gen_lookup_and_goto_ptr(); 3658 } 3659 } 3660 3661 /*** Branch ***/ 3662 static void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest) 3663 { 3664 if (NARROW_MODE(ctx)) { 3665 dest = (uint32_t) dest; 3666 } 3667 if (use_goto_tb(ctx, dest)) { 3668 tcg_gen_goto_tb(n); 3669 tcg_gen_movi_tl(cpu_nip, dest & ~3); 3670 tcg_gen_exit_tb(ctx->base.tb, n); 3671 } else { 3672 tcg_gen_movi_tl(cpu_nip, dest & ~3); 3673 gen_lookup_and_goto_ptr(ctx); 3674 } 3675 } 3676 3677 static inline void gen_setlr(DisasContext *ctx, target_ulong nip) 3678 { 3679 if (NARROW_MODE(ctx)) { 3680 nip = 
(uint32_t)nip; 3681 } 3682 tcg_gen_movi_tl(cpu_lr, nip); 3683 } 3684 3685 /* b ba bl bla */ 3686 static void gen_b(DisasContext *ctx) 3687 { 3688 target_ulong li, target; 3689 3690 ctx->exception = POWERPC_EXCP_BRANCH; 3691 /* sign extend LI */ 3692 li = LI(ctx->opcode); 3693 li = (li ^ 0x02000000) - 0x02000000; 3694 if (likely(AA(ctx->opcode) == 0)) { 3695 target = ctx->base.pc_next + li - 4; 3696 } else { 3697 target = li; 3698 } 3699 if (LK(ctx->opcode)) { 3700 gen_setlr(ctx, ctx->base.pc_next); 3701 } 3702 gen_update_cfar(ctx, ctx->base.pc_next - 4); 3703 gen_goto_tb(ctx, 0, target); 3704 } 3705 3706 #define BCOND_IM 0 3707 #define BCOND_LR 1 3708 #define BCOND_CTR 2 3709 #define BCOND_TAR 3 3710 3711 static void gen_bcond(DisasContext *ctx, int type) 3712 { 3713 uint32_t bo = BO(ctx->opcode); 3714 TCGLabel *l1; 3715 TCGv target; 3716 ctx->exception = POWERPC_EXCP_BRANCH; 3717 3718 if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) { 3719 target = tcg_temp_local_new(); 3720 if (type == BCOND_CTR) 3721 tcg_gen_mov_tl(target, cpu_ctr); 3722 else if (type == BCOND_TAR) 3723 gen_load_spr(target, SPR_TAR); 3724 else 3725 tcg_gen_mov_tl(target, cpu_lr); 3726 } else { 3727 target = NULL; 3728 } 3729 if (LK(ctx->opcode)) 3730 gen_setlr(ctx, ctx->base.pc_next); 3731 l1 = gen_new_label(); 3732 if ((bo & 0x4) == 0) { 3733 /* Decrement and test CTR */ 3734 TCGv temp = tcg_temp_new(); 3735 if (unlikely(type == BCOND_CTR)) { 3736 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3737 return; 3738 } 3739 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1); 3740 if (NARROW_MODE(ctx)) { 3741 tcg_gen_ext32u_tl(temp, cpu_ctr); 3742 } else { 3743 tcg_gen_mov_tl(temp, cpu_ctr); 3744 } 3745 if (bo & 0x2) { 3746 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1); 3747 } else { 3748 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1); 3749 } 3750 tcg_temp_free(temp); 3751 } 3752 if ((bo & 0x10) == 0) { 3753 /* Test CR */ 3754 uint32_t bi = BI(ctx->opcode); 3755 uint32_t mask = 0x08 >> (bi & 0x03); 3756 TCGv_i32 temp = tcg_temp_new_i32(); 3757 3758 if (bo & 0x8) { 3759 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask); 3760 tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1); 3761 } else { 3762 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask); 3763 tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1); 3764 } 3765 tcg_temp_free_i32(temp); 3766 } 3767 gen_update_cfar(ctx, ctx->base.pc_next - 4); 3768 if (type == BCOND_IM) { 3769 target_ulong li = (target_long)((int16_t)(BD(ctx->opcode))); 3770 if (likely(AA(ctx->opcode) == 0)) { 3771 gen_goto_tb(ctx, 0, ctx->base.pc_next + li - 4); 3772 } else { 3773 gen_goto_tb(ctx, 0, li); 3774 } 3775 } else { 3776 if (NARROW_MODE(ctx)) { 3777 tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3); 3778 } else { 3779 tcg_gen_andi_tl(cpu_nip, target, ~3); 3780 } 3781 gen_lookup_and_goto_ptr(ctx); 3782 tcg_temp_free(target); 3783 } 3784 if ((bo & 0x14) != 0x14) { 3785 /* fallthrough case */ 3786 gen_set_label(l1); 3787 gen_goto_tb(ctx, 1, ctx->base.pc_next); 3788 } 3789 } 3790 3791 static void gen_bc(DisasContext *ctx) 3792 { 3793 gen_bcond(ctx, BCOND_IM); 3794 } 3795 3796 static void gen_bcctr(DisasContext *ctx) 3797 { 3798 gen_bcond(ctx, BCOND_CTR); 3799 } 3800 3801 static void gen_bclr(DisasContext *ctx) 3802 { 3803 gen_bcond(ctx, BCOND_LR); 3804 } 3805 3806 static void gen_bctar(DisasContext *ctx) 3807 { 3808 gen_bcond(ctx, BCOND_TAR); 3809 } 3810 3811 /*** Condition register logical ***/ 3812 #define GEN_CRLOGIC(name, tcg_op, opc) \ 3813 static void glue(gen_, name)(DisasContext *ctx) \ 3814 { \ 3815 uint8_t 
bitmask; \ 3816 int sh; \ 3817 TCGv_i32 t0, t1; \ 3818 sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03); \ 3819 t0 = tcg_temp_new_i32(); \ 3820 if (sh > 0) \ 3821 tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh); \ 3822 else if (sh < 0) \ 3823 tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh); \ 3824 else \ 3825 tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]); \ 3826 t1 = tcg_temp_new_i32(); \ 3827 sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03); \ 3828 if (sh > 0) \ 3829 tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh); \ 3830 else if (sh < 0) \ 3831 tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh); \ 3832 else \ 3833 tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]); \ 3834 tcg_op(t0, t0, t1); \ 3835 bitmask = 0x08 >> (crbD(ctx->opcode) & 0x03); \ 3836 tcg_gen_andi_i32(t0, t0, bitmask); \ 3837 tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask); \ 3838 tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1); \ 3839 tcg_temp_free_i32(t0); \ 3840 tcg_temp_free_i32(t1); \ 3841 } 3842 3843 /* crand */ 3844 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08); 3845 /* crandc */ 3846 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04); 3847 /* creqv */ 3848 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09); 3849 /* crnand */ 3850 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07); 3851 /* crnor */ 3852 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01); 3853 /* cror */ 3854 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E); 3855 /* crorc */ 3856 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D); 3857 /* crxor */ 3858 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06); 3859 3860 /* mcrf */ 3861 static void gen_mcrf(DisasContext *ctx) 3862 { 3863 tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]); 3864 } 3865 3866 /*** System linkage ***/ 3867 3868 /* rfi (supervisor only) */ 3869 static void gen_rfi(DisasContext *ctx) 3870 { 3871 #if defined(CONFIG_USER_ONLY) 3872 GEN_PRIV; 3873 #else 3874 /* This instruction doesn't exist anymore on 64-bit server 3875 * processors compliant with arch 2.x 3876 */ 3877 if (ctx->insns_flags & PPC_SEGMENT_64B) { 3878 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3879 return; 3880 } 3881 /* Restore CPU state */ 3882 CHK_SV; 3883 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) { 3884 gen_io_start(); 3885 } 3886 gen_update_cfar(ctx, ctx->base.pc_next - 4); 3887 gen_helper_rfi(cpu_env); 3888 gen_sync_exception(ctx); 3889 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) { 3890 gen_io_end(); 3891 } 3892 #endif 3893 } 3894 3895 #if defined(TARGET_PPC64) 3896 static void gen_rfid(DisasContext *ctx) 3897 { 3898 #if defined(CONFIG_USER_ONLY) 3899 GEN_PRIV; 3900 #else 3901 /* Restore CPU state */ 3902 CHK_SV; 3903 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) { 3904 gen_io_start(); 3905 } 3906 gen_update_cfar(ctx, ctx->base.pc_next - 4); 3907 gen_helper_rfid(cpu_env); 3908 gen_sync_exception(ctx); 3909 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) { 3910 gen_io_end(); 3911 } 3912 #endif 3913 } 3914 3915 static void gen_hrfid(DisasContext *ctx) 3916 { 3917 #if defined(CONFIG_USER_ONLY) 3918 GEN_PRIV; 3919 #else 3920 /* Restore CPU state */ 3921 CHK_HV; 3922 gen_helper_hrfid(cpu_env); 3923 gen_sync_exception(ctx); 3924 #endif 3925 } 3926 #endif 3927 3928 /* sc */ 3929 #if defined(CONFIG_USER_ONLY) 3930 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER 3931 #else 3932 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL 3933 #endif 3934 static void gen_sc(DisasContext *ctx) 3935 { 3936 uint32_t lev; 3937 3938 lev = (ctx->opcode >> 5) & 0x7F; 3939 
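    /*
     * The LEV field is forwarded as the exception error code so that the
     * exception handler can distinguish an ordinary system call (LEV=0)
     * from a hypervisor call (LEV=1) on CPUs that implement it.
     */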
gen_exception_err(ctx, POWERPC_SYSCALL, lev); 3940 } 3941 3942 /*** Trap ***/ 3943 3944 /* Check for unconditional traps (always or never) */ 3945 static bool check_unconditional_trap(DisasContext *ctx) 3946 { 3947 /* Trap never */ 3948 if (TO(ctx->opcode) == 0) { 3949 return true; 3950 } 3951 /* Trap always */ 3952 if (TO(ctx->opcode) == 31) { 3953 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_TRAP); 3954 return true; 3955 } 3956 return false; 3957 } 3958 3959 /* tw */ 3960 static void gen_tw(DisasContext *ctx) 3961 { 3962 TCGv_i32 t0; 3963 3964 if (check_unconditional_trap(ctx)) { 3965 return; 3966 } 3967 t0 = tcg_const_i32(TO(ctx->opcode)); 3968 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 3969 t0); 3970 tcg_temp_free_i32(t0); 3971 } 3972 3973 /* twi */ 3974 static void gen_twi(DisasContext *ctx) 3975 { 3976 TCGv t0; 3977 TCGv_i32 t1; 3978 3979 if (check_unconditional_trap(ctx)) { 3980 return; 3981 } 3982 t0 = tcg_const_tl(SIMM(ctx->opcode)); 3983 t1 = tcg_const_i32(TO(ctx->opcode)); 3984 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1); 3985 tcg_temp_free(t0); 3986 tcg_temp_free_i32(t1); 3987 } 3988 3989 #if defined(TARGET_PPC64) 3990 /* td */ 3991 static void gen_td(DisasContext *ctx) 3992 { 3993 TCGv_i32 t0; 3994 3995 if (check_unconditional_trap(ctx)) { 3996 return; 3997 } 3998 t0 = tcg_const_i32(TO(ctx->opcode)); 3999 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 4000 t0); 4001 tcg_temp_free_i32(t0); 4002 } 4003 4004 /* tdi */ 4005 static void gen_tdi(DisasContext *ctx) 4006 { 4007 TCGv t0; 4008 TCGv_i32 t1; 4009 4010 if (check_unconditional_trap(ctx)) { 4011 return; 4012 } 4013 t0 = tcg_const_tl(SIMM(ctx->opcode)); 4014 t1 = tcg_const_i32(TO(ctx->opcode)); 4015 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1); 4016 tcg_temp_free(t0); 4017 tcg_temp_free_i32(t1); 4018 } 4019 #endif 4020 4021 /*** Processor control ***/ 4022 4023 static void gen_read_xer(DisasContext *ctx, TCGv dst) 4024 { 4025 TCGv t0 = tcg_temp_new(); 4026 TCGv t1 = tcg_temp_new(); 4027 TCGv t2 = tcg_temp_new(); 4028 tcg_gen_mov_tl(dst, cpu_xer); 4029 tcg_gen_shli_tl(t0, cpu_so, XER_SO); 4030 tcg_gen_shli_tl(t1, cpu_ov, XER_OV); 4031 tcg_gen_shli_tl(t2, cpu_ca, XER_CA); 4032 tcg_gen_or_tl(t0, t0, t1); 4033 tcg_gen_or_tl(dst, dst, t2); 4034 tcg_gen_or_tl(dst, dst, t0); 4035 if (is_isa300(ctx)) { 4036 tcg_gen_shli_tl(t0, cpu_ov32, XER_OV32); 4037 tcg_gen_or_tl(dst, dst, t0); 4038 tcg_gen_shli_tl(t0, cpu_ca32, XER_CA32); 4039 tcg_gen_or_tl(dst, dst, t0); 4040 } 4041 tcg_temp_free(t0); 4042 tcg_temp_free(t1); 4043 tcg_temp_free(t2); 4044 } 4045 4046 static void gen_write_xer(TCGv src) 4047 { 4048 /* Write all flags, while reading back check for isa300 */ 4049 tcg_gen_andi_tl(cpu_xer, src, 4050 ~((1u << XER_SO) | 4051 (1u << XER_OV) | (1u << XER_OV32) | 4052 (1u << XER_CA) | (1u << XER_CA32))); 4053 tcg_gen_extract_tl(cpu_ov32, src, XER_OV32, 1); 4054 tcg_gen_extract_tl(cpu_ca32, src, XER_CA32, 1); 4055 tcg_gen_extract_tl(cpu_so, src, XER_SO, 1); 4056 tcg_gen_extract_tl(cpu_ov, src, XER_OV, 1); 4057 tcg_gen_extract_tl(cpu_ca, src, XER_CA, 1); 4058 } 4059 4060 /* mcrxr */ 4061 static void gen_mcrxr(DisasContext *ctx) 4062 { 4063 TCGv_i32 t0 = tcg_temp_new_i32(); 4064 TCGv_i32 t1 = tcg_temp_new_i32(); 4065 TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)]; 4066 4067 tcg_gen_trunc_tl_i32(t0, cpu_so); 4068 tcg_gen_trunc_tl_i32(t1, cpu_ov); 4069 tcg_gen_trunc_tl_i32(dst, cpu_ca); 4070 tcg_gen_shli_i32(t0, t0, 3); 4071 tcg_gen_shli_i32(t1, t1, 2); 
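    /*
     * The shifts above and below line SO, OV and CA up in bits 3..1 of
     * the destination CR field (bit 0 reads as zero); the XER copies are
     * then cleared, as mcrxr requires.
     */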
4072 tcg_gen_shli_i32(dst, dst, 1); 4073 tcg_gen_or_i32(dst, dst, t0); 4074 tcg_gen_or_i32(dst, dst, t1); 4075 tcg_temp_free_i32(t0); 4076 tcg_temp_free_i32(t1); 4077 4078 tcg_gen_movi_tl(cpu_so, 0); 4079 tcg_gen_movi_tl(cpu_ov, 0); 4080 tcg_gen_movi_tl(cpu_ca, 0); 4081 } 4082 4083 #ifdef TARGET_PPC64 4084 /* mcrxrx */ 4085 static void gen_mcrxrx(DisasContext *ctx) 4086 { 4087 TCGv t0 = tcg_temp_new(); 4088 TCGv t1 = tcg_temp_new(); 4089 TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)]; 4090 4091 /* copy OV and OV32 */ 4092 tcg_gen_shli_tl(t0, cpu_ov, 1); 4093 tcg_gen_or_tl(t0, t0, cpu_ov32); 4094 tcg_gen_shli_tl(t0, t0, 2); 4095 /* copy CA and CA32 */ 4096 tcg_gen_shli_tl(t1, cpu_ca, 1); 4097 tcg_gen_or_tl(t1, t1, cpu_ca32); 4098 tcg_gen_or_tl(t0, t0, t1); 4099 tcg_gen_trunc_tl_i32(dst, t0); 4100 tcg_temp_free(t0); 4101 tcg_temp_free(t1); 4102 } 4103 #endif 4104 4105 /* mfcr mfocrf */ 4106 static void gen_mfcr(DisasContext *ctx) 4107 { 4108 uint32_t crm, crn; 4109 4110 if (likely(ctx->opcode & 0x00100000)) { 4111 crm = CRM(ctx->opcode); 4112 if (likely(crm && ((crm & (crm - 1)) == 0))) { 4113 crn = ctz32 (crm); 4114 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]); 4115 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], 4116 cpu_gpr[rD(ctx->opcode)], crn * 4); 4117 } 4118 } else { 4119 TCGv_i32 t0 = tcg_temp_new_i32(); 4120 tcg_gen_mov_i32(t0, cpu_crf[0]); 4121 tcg_gen_shli_i32(t0, t0, 4); 4122 tcg_gen_or_i32(t0, t0, cpu_crf[1]); 4123 tcg_gen_shli_i32(t0, t0, 4); 4124 tcg_gen_or_i32(t0, t0, cpu_crf[2]); 4125 tcg_gen_shli_i32(t0, t0, 4); 4126 tcg_gen_or_i32(t0, t0, cpu_crf[3]); 4127 tcg_gen_shli_i32(t0, t0, 4); 4128 tcg_gen_or_i32(t0, t0, cpu_crf[4]); 4129 tcg_gen_shli_i32(t0, t0, 4); 4130 tcg_gen_or_i32(t0, t0, cpu_crf[5]); 4131 tcg_gen_shli_i32(t0, t0, 4); 4132 tcg_gen_or_i32(t0, t0, cpu_crf[6]); 4133 tcg_gen_shli_i32(t0, t0, 4); 4134 tcg_gen_or_i32(t0, t0, cpu_crf[7]); 4135 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0); 4136 tcg_temp_free_i32(t0); 4137 } 4138 } 4139 4140 /* mfmsr */ 4141 static void gen_mfmsr(DisasContext *ctx) 4142 { 4143 CHK_SV; 4144 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr); 4145 } 4146 4147 static void spr_noaccess(DisasContext *ctx, int gprn, int sprn) 4148 { 4149 #if 0 4150 sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5); 4151 printf("ERROR: try to access SPR %d !\n", sprn); 4152 #endif 4153 } 4154 #define SPR_NOACCESS (&spr_noaccess) 4155 4156 /* mfspr */ 4157 static inline void gen_op_mfspr(DisasContext *ctx) 4158 { 4159 void (*read_cb)(DisasContext *ctx, int gprn, int sprn); 4160 uint32_t sprn = SPR(ctx->opcode); 4161 4162 #if defined(CONFIG_USER_ONLY) 4163 read_cb = ctx->spr_cb[sprn].uea_read; 4164 #else 4165 if (ctx->pr) { 4166 read_cb = ctx->spr_cb[sprn].uea_read; 4167 } else if (ctx->hv) { 4168 read_cb = ctx->spr_cb[sprn].hea_read; 4169 } else { 4170 read_cb = ctx->spr_cb[sprn].oea_read; 4171 } 4172 #endif 4173 if (likely(read_cb != NULL)) { 4174 if (likely(read_cb != SPR_NOACCESS)) { 4175 (*read_cb)(ctx, rD(ctx->opcode), sprn); 4176 } else { 4177 /* Privilege exception */ 4178 /* This is a hack to avoid warnings when running Linux: 4179 * this OS breaks the PowerPC virtualisation model, 4180 * allowing userland application to read the PVR 4181 */ 4182 if (sprn != SPR_PVR) { 4183 qemu_log_mask(LOG_GUEST_ERROR, "Trying to read privileged spr " 4184 "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn, 4185 ctx->base.pc_next - 4); 4186 } 4187 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG); 4188 } 4189 } else { 4190 /* ISA 2.07 defines these as 
no-ops */ 4191 if ((ctx->insns_flags2 & PPC2_ISA207S) && 4192 (sprn >= 808 && sprn <= 811)) { 4193 /* This is a nop */ 4194 return; 4195 } 4196 /* Not defined */ 4197 qemu_log_mask(LOG_GUEST_ERROR, 4198 "Trying to read invalid spr %d (0x%03x) at " 4199 TARGET_FMT_lx "\n", sprn, sprn, ctx->base.pc_next - 4); 4200 4201 /* The behaviour depends on MSR:PR and SPR# bit 0x10, 4202 * it can generate a priv, a hv emu or a no-op 4203 */ 4204 if (sprn & 0x10) { 4205 if (ctx->pr) { 4206 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 4207 } 4208 } else { 4209 if (ctx->pr || sprn == 0 || sprn == 4 || sprn == 5 || sprn == 6) { 4210 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 4211 } 4212 } 4213 } 4214 } 4215 4216 static void gen_mfspr(DisasContext *ctx) 4217 { 4218 gen_op_mfspr(ctx); 4219 } 4220 4221 /* mftb */ 4222 static void gen_mftb(DisasContext *ctx) 4223 { 4224 gen_op_mfspr(ctx); 4225 } 4226 4227 /* mtcrf mtocrf*/ 4228 static void gen_mtcrf(DisasContext *ctx) 4229 { 4230 uint32_t crm, crn; 4231 4232 crm = CRM(ctx->opcode); 4233 if (likely((ctx->opcode & 0x00100000))) { 4234 if (crm && ((crm & (crm - 1)) == 0)) { 4235 TCGv_i32 temp = tcg_temp_new_i32(); 4236 crn = ctz32 (crm); 4237 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]); 4238 tcg_gen_shri_i32(temp, temp, crn * 4); 4239 tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf); 4240 tcg_temp_free_i32(temp); 4241 } 4242 } else { 4243 TCGv_i32 temp = tcg_temp_new_i32(); 4244 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]); 4245 for (crn = 0 ; crn < 8 ; crn++) { 4246 if (crm & (1 << crn)) { 4247 tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4); 4248 tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf); 4249 } 4250 } 4251 tcg_temp_free_i32(temp); 4252 } 4253 } 4254 4255 /* mtmsr */ 4256 #if defined(TARGET_PPC64) 4257 static void gen_mtmsrd(DisasContext *ctx) 4258 { 4259 CHK_SV; 4260 4261 #if !defined(CONFIG_USER_ONLY) 4262 if (ctx->opcode & 0x00010000) { 4263 /* Special form that does not need any synchronisation */ 4264 TCGv t0 = tcg_temp_new(); 4265 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE)); 4266 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE))); 4267 tcg_gen_or_tl(cpu_msr, cpu_msr, t0); 4268 tcg_temp_free(t0); 4269 } else { 4270 /* XXX: we need to update nip before the store 4271 * if we enter power saving mode, we will exit the loop 4272 * directly from ppc_store_msr 4273 */ 4274 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) { 4275 gen_io_start(); 4276 } 4277 gen_update_nip(ctx, ctx->base.pc_next); 4278 gen_helper_store_msr(cpu_env, cpu_gpr[rS(ctx->opcode)]); 4279 /* Must stop the translation as machine state (may have) changed */ 4280 /* Note that mtmsr is not always defined as context-synchronizing */ 4281 gen_stop_exception(ctx); 4282 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) { 4283 gen_io_end(); 4284 } 4285 } 4286 #endif /* !defined(CONFIG_USER_ONLY) */ 4287 } 4288 #endif /* defined(TARGET_PPC64) */ 4289 4290 static void gen_mtmsr(DisasContext *ctx) 4291 { 4292 CHK_SV; 4293 4294 #if !defined(CONFIG_USER_ONLY) 4295 if (ctx->opcode & 0x00010000) { 4296 /* Special form that does not need any synchronisation */ 4297 TCGv t0 = tcg_temp_new(); 4298 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE)); 4299 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE))); 4300 tcg_gen_or_tl(cpu_msr, cpu_msr, t0); 4301 tcg_temp_free(t0); 4302 } else { 4303 TCGv msr = tcg_temp_new(); 4304 4305 /* XXX: we need to update nip 
before the store 4306 * if we enter power saving mode, we will exit the loop 4307 * directly from ppc_store_msr 4308 */ 4309 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) { 4310 gen_io_start(); 4311 } 4312 gen_update_nip(ctx, ctx->base.pc_next); 4313 #if defined(TARGET_PPC64) 4314 tcg_gen_deposit_tl(msr, cpu_msr, cpu_gpr[rS(ctx->opcode)], 0, 32); 4315 #else 4316 tcg_gen_mov_tl(msr, cpu_gpr[rS(ctx->opcode)]); 4317 #endif 4318 gen_helper_store_msr(cpu_env, msr); 4319 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) { 4320 gen_io_end(); 4321 } 4322 tcg_temp_free(msr); 4323 /* Must stop the translation as machine state (may have) changed */ 4324 /* Note that mtmsr is not always defined as context-synchronizing */ 4325 gen_stop_exception(ctx); 4326 } 4327 #endif 4328 } 4329 4330 /* mtspr */ 4331 static void gen_mtspr(DisasContext *ctx) 4332 { 4333 void (*write_cb)(DisasContext *ctx, int sprn, int gprn); 4334 uint32_t sprn = SPR(ctx->opcode); 4335 4336 #if defined(CONFIG_USER_ONLY) 4337 write_cb = ctx->spr_cb[sprn].uea_write; 4338 #else 4339 if (ctx->pr) { 4340 write_cb = ctx->spr_cb[sprn].uea_write; 4341 } else if (ctx->hv) { 4342 write_cb = ctx->spr_cb[sprn].hea_write; 4343 } else { 4344 write_cb = ctx->spr_cb[sprn].oea_write; 4345 } 4346 #endif 4347 if (likely(write_cb != NULL)) { 4348 if (likely(write_cb != SPR_NOACCESS)) { 4349 (*write_cb)(ctx, sprn, rS(ctx->opcode)); 4350 } else { 4351 /* Privilege exception */ 4352 qemu_log_mask(LOG_GUEST_ERROR, "Trying to write privileged spr " 4353 "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn, 4354 ctx->base.pc_next - 4); 4355 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG); 4356 } 4357 } else { 4358 /* ISA 2.07 defines these as no-ops */ 4359 if ((ctx->insns_flags2 & PPC2_ISA207S) && 4360 (sprn >= 808 && sprn <= 811)) { 4361 /* This is a nop */ 4362 return; 4363 } 4364 4365 /* Not defined */ 4366 qemu_log_mask(LOG_GUEST_ERROR, 4367 "Trying to write invalid spr %d (0x%03x) at " 4368 TARGET_FMT_lx "\n", sprn, sprn, ctx->base.pc_next - 4); 4369 4370 4371 /* The behaviour depends on MSR:PR and SPR# bit 0x10, 4372 * it can generate a priv, a hv emu or a no-op 4373 */ 4374 if (sprn & 0x10) { 4375 if (ctx->pr) { 4376 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 4377 } 4378 } else { 4379 if (ctx->pr || sprn == 0) { 4380 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 4381 } 4382 } 4383 } 4384 } 4385 4386 #if defined(TARGET_PPC64) 4387 /* setb */ 4388 static void gen_setb(DisasContext *ctx) 4389 { 4390 TCGv_i32 t0 = tcg_temp_new_i32(); 4391 TCGv_i32 t8 = tcg_temp_new_i32(); 4392 TCGv_i32 tm1 = tcg_temp_new_i32(); 4393 int crf = crfS(ctx->opcode); 4394 4395 tcg_gen_setcondi_i32(TCG_COND_GEU, t0, cpu_crf[crf], 4); 4396 tcg_gen_movi_i32(t8, 8); 4397 tcg_gen_movi_i32(tm1, -1); 4398 tcg_gen_movcond_i32(TCG_COND_GEU, t0, cpu_crf[crf], t8, tm1, t0); 4399 tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0); 4400 4401 tcg_temp_free_i32(t0); 4402 tcg_temp_free_i32(t8); 4403 tcg_temp_free_i32(tm1); 4404 } 4405 #endif 4406 4407 /*** Cache management ***/ 4408 4409 /* dcbf */ 4410 static void gen_dcbf(DisasContext *ctx) 4411 { 4412 /* XXX: specification says this is treated as a load by the MMU */ 4413 TCGv t0; 4414 gen_set_access_type(ctx, ACCESS_CACHE); 4415 t0 = tcg_temp_new(); 4416 gen_addr_reg_index(ctx, t0); 4417 gen_qemu_ld8u(ctx, t0, t0); 4418 tcg_temp_free(t0); 4419 } 4420 4421 /* dcbfep (external PID dcbf) */ 4422 static void gen_dcbfep(DisasContext *ctx) 4423 { 4424 /* XXX: specification says this is treated as a load by the MMU */ 4425 TCGv t0; 4426 CHK_SV; 
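/* As with dcbf, the flush is modelled as a dummy byte load from the computed EA; the only difference is that the access uses the external PID translation context (PPC_TLB_EPID_LOAD) instead of ctx->mem_idx. */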
4427 gen_set_access_type(ctx, ACCESS_CACHE); 4428 t0 = tcg_temp_new(); 4429 gen_addr_reg_index(ctx, t0); 4430 tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB)); 4431 tcg_temp_free(t0); 4432 } 4433 4434 /* dcbi (Supervisor only) */ 4435 static void gen_dcbi(DisasContext *ctx) 4436 { 4437 #if defined(CONFIG_USER_ONLY) 4438 GEN_PRIV; 4439 #else 4440 TCGv EA, val; 4441 4442 CHK_SV; 4443 EA = tcg_temp_new(); 4444 gen_set_access_type(ctx, ACCESS_CACHE); 4445 gen_addr_reg_index(ctx, EA); 4446 val = tcg_temp_new(); 4447 /* XXX: specification says this should be treated as a store by the MMU */ 4448 gen_qemu_ld8u(ctx, val, EA); 4449 gen_qemu_st8(ctx, val, EA); 4450 tcg_temp_free(val); 4451 tcg_temp_free(EA); 4452 #endif /* defined(CONFIG_USER_ONLY) */ 4453 } 4454 4455 /* dcbst */ 4456 static void gen_dcbst(DisasContext *ctx) 4457 { 4458 /* XXX: specification says this is treated as a load by the MMU */ 4459 TCGv t0; 4460 gen_set_access_type(ctx, ACCESS_CACHE); 4461 t0 = tcg_temp_new(); 4462 gen_addr_reg_index(ctx, t0); 4463 gen_qemu_ld8u(ctx, t0, t0); 4464 tcg_temp_free(t0); 4465 } 4466 4467 /* dcbstep (external PID dcbst) */ 4468 static void gen_dcbstep(DisasContext *ctx) 4469 { 4470 /* XXX: specification says this is treated as a load by the MMU */ 4471 TCGv t0; 4472 gen_set_access_type(ctx, ACCESS_CACHE); 4473 t0 = tcg_temp_new(); 4474 gen_addr_reg_index(ctx, t0); 4475 tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB)); 4476 tcg_temp_free(t0); 4477 } 4478 4479 /* dcbt */ 4480 static void gen_dcbt(DisasContext *ctx) 4481 { 4482 /* interpreted as no-op */ 4483 /* XXX: specification says this is treated as a load by the MMU 4484 * but does not generate any exception 4485 */ 4486 } 4487 4488 /* dcbtep */ 4489 static void gen_dcbtep(DisasContext *ctx) 4490 { 4491 /* interpreted as no-op */ 4492 /* XXX: specification says this is treated as a load by the MMU 4493 * but does not generate any exception 4494 */ 4495 } 4496 4497 /* dcbtst */ 4498 static void gen_dcbtst(DisasContext *ctx) 4499 { 4500 /* interpreted as no-op */ 4501 /* XXX: specification says this is treated as a load by the MMU 4502 * but does not generate any exception 4503 */ 4504 } 4505 4506 /* dcbtstep */ 4507 static void gen_dcbtstep(DisasContext *ctx) 4508 { 4509 /* interpreted as no-op */ 4510 /* XXX: specification says this is treated as a load by the MMU 4511 * but does not generate any exception 4512 */ 4513 } 4514 4515 /* dcbtls */ 4516 static void gen_dcbtls(DisasContext *ctx) 4517 { 4518 /* Always fails locking the cache */ 4519 TCGv t0 = tcg_temp_new(); 4520 gen_load_spr(t0, SPR_Exxx_L1CSR0); 4521 tcg_gen_ori_tl(t0, t0, L1CSR0_CUL); 4522 gen_store_spr(SPR_Exxx_L1CSR0, t0); 4523 tcg_temp_free(t0); 4524 } 4525 4526 /* dcbz */ 4527 static void gen_dcbz(DisasContext *ctx) 4528 { 4529 TCGv tcgv_addr; 4530 TCGv_i32 tcgv_op; 4531 4532 gen_set_access_type(ctx, ACCESS_CACHE); 4533 tcgv_addr = tcg_temp_new(); 4534 tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000); 4535 gen_addr_reg_index(ctx, tcgv_addr); 4536 gen_helper_dcbz(cpu_env, tcgv_addr, tcgv_op); 4537 tcg_temp_free(tcgv_addr); 4538 tcg_temp_free_i32(tcgv_op); 4539 } 4540 4541 /* dcbzep */ 4542 static void gen_dcbzep(DisasContext *ctx) 4543 { 4544 TCGv tcgv_addr; 4545 TCGv_i32 tcgv_op; 4546 4547 gen_set_access_type(ctx, ACCESS_CACHE); 4548 tcgv_addr = tcg_temp_new(); 4549 tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000); 4550 gen_addr_reg_index(ctx, tcgv_addr); 4551 gen_helper_dcbzep(cpu_env, tcgv_addr, tcgv_op); 4552 tcg_temp_free(tcgv_addr); 4553
tcg_temp_free_i32(tcgv_op); 4554 } 4555 4556 /* dst / dstt */ 4557 static void gen_dst(DisasContext *ctx) 4558 { 4559 if (rA(ctx->opcode) == 0) { 4560 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 4561 } else { 4562 /* interpreted as no-op */ 4563 } 4564 } 4565 4566 /* dstst /dststt */ 4567 static void gen_dstst(DisasContext *ctx) 4568 { 4569 if (rA(ctx->opcode) == 0) { 4570 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 4571 } else { 4572 /* interpreted as no-op */ 4573 } 4574 4575 } 4576 4577 /* dss / dssall */ 4578 static void gen_dss(DisasContext *ctx) 4579 { 4580 /* interpreted as no-op */ 4581 } 4582 4583 /* icbi */ 4584 static void gen_icbi(DisasContext *ctx) 4585 { 4586 TCGv t0; 4587 gen_set_access_type(ctx, ACCESS_CACHE); 4588 t0 = tcg_temp_new(); 4589 gen_addr_reg_index(ctx, t0); 4590 gen_helper_icbi(cpu_env, t0); 4591 tcg_temp_free(t0); 4592 } 4593 4594 /* icbiep */ 4595 static void gen_icbiep(DisasContext *ctx) 4596 { 4597 TCGv t0; 4598 gen_set_access_type(ctx, ACCESS_CACHE); 4599 t0 = tcg_temp_new(); 4600 gen_addr_reg_index(ctx, t0); 4601 gen_helper_icbiep(cpu_env, t0); 4602 tcg_temp_free(t0); 4603 } 4604 4605 /* Optional: */ 4606 /* dcba */ 4607 static void gen_dcba(DisasContext *ctx) 4608 { 4609 /* interpreted as no-op */ 4610 /* XXX: specification say this is treated as a store by the MMU 4611 * but does not generate any exception 4612 */ 4613 } 4614 4615 /*** Segment register manipulation ***/ 4616 /* Supervisor only: */ 4617 4618 /* mfsr */ 4619 static void gen_mfsr(DisasContext *ctx) 4620 { 4621 #if defined(CONFIG_USER_ONLY) 4622 GEN_PRIV; 4623 #else 4624 TCGv t0; 4625 4626 CHK_SV; 4627 t0 = tcg_const_tl(SR(ctx->opcode)); 4628 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 4629 tcg_temp_free(t0); 4630 #endif /* defined(CONFIG_USER_ONLY) */ 4631 } 4632 4633 /* mfsrin */ 4634 static void gen_mfsrin(DisasContext *ctx) 4635 { 4636 #if defined(CONFIG_USER_ONLY) 4637 GEN_PRIV; 4638 #else 4639 TCGv t0; 4640 4641 CHK_SV; 4642 t0 = tcg_temp_new(); 4643 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 4644 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 4645 tcg_temp_free(t0); 4646 #endif /* defined(CONFIG_USER_ONLY) */ 4647 } 4648 4649 /* mtsr */ 4650 static void gen_mtsr(DisasContext *ctx) 4651 { 4652 #if defined(CONFIG_USER_ONLY) 4653 GEN_PRIV; 4654 #else 4655 TCGv t0; 4656 4657 CHK_SV; 4658 t0 = tcg_const_tl(SR(ctx->opcode)); 4659 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 4660 tcg_temp_free(t0); 4661 #endif /* defined(CONFIG_USER_ONLY) */ 4662 } 4663 4664 /* mtsrin */ 4665 static void gen_mtsrin(DisasContext *ctx) 4666 { 4667 #if defined(CONFIG_USER_ONLY) 4668 GEN_PRIV; 4669 #else 4670 TCGv t0; 4671 CHK_SV; 4672 4673 t0 = tcg_temp_new(); 4674 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 4675 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rD(ctx->opcode)]); 4676 tcg_temp_free(t0); 4677 #endif /* defined(CONFIG_USER_ONLY) */ 4678 } 4679 4680 #if defined(TARGET_PPC64) 4681 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */ 4682 4683 /* mfsr */ 4684 static void gen_mfsr_64b(DisasContext *ctx) 4685 { 4686 #if defined(CONFIG_USER_ONLY) 4687 GEN_PRIV; 4688 #else 4689 TCGv t0; 4690 4691 CHK_SV; 4692 t0 = tcg_const_tl(SR(ctx->opcode)); 4693 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 4694 tcg_temp_free(t0); 4695 #endif /* defined(CONFIG_USER_ONLY) */ 4696 } 4697 4698 /* mfsrin */ 4699 static void gen_mfsrin_64b(DisasContext *ctx) 4700 { 4701 #if defined(CONFIG_USER_ONLY) 4702 
GEN_PRIV; 4703 #else 4704 TCGv t0; 4705 4706 CHK_SV; 4707 t0 = tcg_temp_new(); 4708 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 4709 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 4710 tcg_temp_free(t0); 4711 #endif /* defined(CONFIG_USER_ONLY) */ 4712 } 4713 4714 /* mtsr */ 4715 static void gen_mtsr_64b(DisasContext *ctx) 4716 { 4717 #if defined(CONFIG_USER_ONLY) 4718 GEN_PRIV; 4719 #else 4720 TCGv t0; 4721 4722 CHK_SV; 4723 t0 = tcg_const_tl(SR(ctx->opcode)); 4724 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 4725 tcg_temp_free(t0); 4726 #endif /* defined(CONFIG_USER_ONLY) */ 4727 } 4728 4729 /* mtsrin */ 4730 static void gen_mtsrin_64b(DisasContext *ctx) 4731 { 4732 #if defined(CONFIG_USER_ONLY) 4733 GEN_PRIV; 4734 #else 4735 TCGv t0; 4736 4737 CHK_SV; 4738 t0 = tcg_temp_new(); 4739 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 4740 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 4741 tcg_temp_free(t0); 4742 #endif /* defined(CONFIG_USER_ONLY) */ 4743 } 4744 4745 /* slbmte */ 4746 static void gen_slbmte(DisasContext *ctx) 4747 { 4748 #if defined(CONFIG_USER_ONLY) 4749 GEN_PRIV; 4750 #else 4751 CHK_SV; 4752 4753 gen_helper_store_slb(cpu_env, cpu_gpr[rB(ctx->opcode)], 4754 cpu_gpr[rS(ctx->opcode)]); 4755 #endif /* defined(CONFIG_USER_ONLY) */ 4756 } 4757 4758 static void gen_slbmfee(DisasContext *ctx) 4759 { 4760 #if defined(CONFIG_USER_ONLY) 4761 GEN_PRIV; 4762 #else 4763 CHK_SV; 4764 4765 gen_helper_load_slb_esid(cpu_gpr[rS(ctx->opcode)], cpu_env, 4766 cpu_gpr[rB(ctx->opcode)]); 4767 #endif /* defined(CONFIG_USER_ONLY) */ 4768 } 4769 4770 static void gen_slbmfev(DisasContext *ctx) 4771 { 4772 #if defined(CONFIG_USER_ONLY) 4773 GEN_PRIV; 4774 #else 4775 CHK_SV; 4776 4777 gen_helper_load_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env, 4778 cpu_gpr[rB(ctx->opcode)]); 4779 #endif /* defined(CONFIG_USER_ONLY) */ 4780 } 4781 4782 static void gen_slbfee_(DisasContext *ctx) 4783 { 4784 #if defined(CONFIG_USER_ONLY) 4785 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); 4786 #else 4787 TCGLabel *l1, *l2; 4788 4789 if (unlikely(ctx->pr)) { 4790 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); 4791 return; 4792 } 4793 gen_helper_find_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env, 4794 cpu_gpr[rB(ctx->opcode)]); 4795 l1 = gen_new_label(); 4796 l2 = gen_new_label(); 4797 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 4798 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rS(ctx->opcode)], -1, l1); 4799 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ); 4800 tcg_gen_br(l2); 4801 gen_set_label(l1); 4802 tcg_gen_movi_tl(cpu_gpr[rS(ctx->opcode)], 0); 4803 gen_set_label(l2); 4804 #endif 4805 } 4806 #endif /* defined(TARGET_PPC64) */ 4807 4808 /*** Lookaside buffer management ***/ 4809 /* Optional & supervisor only: */ 4810 4811 /* tlbia */ 4812 static void gen_tlbia(DisasContext *ctx) 4813 { 4814 #if defined(CONFIG_USER_ONLY) 4815 GEN_PRIV; 4816 #else 4817 CHK_HV; 4818 4819 gen_helper_tlbia(cpu_env); 4820 #endif /* defined(CONFIG_USER_ONLY) */ 4821 } 4822 4823 /* tlbiel */ 4824 static void gen_tlbiel(DisasContext *ctx) 4825 { 4826 #if defined(CONFIG_USER_ONLY) 4827 GEN_PRIV; 4828 #else 4829 CHK_SV; 4830 4831 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]); 4832 #endif /* defined(CONFIG_USER_ONLY) */ 4833 } 4834 4835 /* tlbie */ 4836 static void gen_tlbie(DisasContext *ctx) 4837 { 4838 #if defined(CONFIG_USER_ONLY) 4839 GEN_PRIV; 4840 #else 4841 TCGv_i32 t1; 4842 4843 if (ctx->gtse) { 4844 CHK_SV; /* If gtse is set then tlbie is supervisor privileged */ 4845 } else 
{ 4846 CHK_HV; /* Else hypervisor privileged */ 4847 } 4848 4849 if (NARROW_MODE(ctx)) { 4850 TCGv t0 = tcg_temp_new(); 4851 tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]); 4852 gen_helper_tlbie(cpu_env, t0); 4853 tcg_temp_free(t0); 4854 } else { 4855 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]); 4856 } 4857 t1 = tcg_temp_new_i32(); 4858 tcg_gen_ld_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush)); 4859 tcg_gen_ori_i32(t1, t1, TLB_NEED_GLOBAL_FLUSH); 4860 tcg_gen_st_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush)); 4861 tcg_temp_free_i32(t1); 4862 #endif /* defined(CONFIG_USER_ONLY) */ 4863 } 4864 4865 /* tlbsync */ 4866 static void gen_tlbsync(DisasContext *ctx) 4867 { 4868 #if defined(CONFIG_USER_ONLY) 4869 GEN_PRIV; 4870 #else 4871 4872 if (ctx->gtse) { 4873 CHK_SV; /* If gtse is set then tlbsync is supervisor privileged */ 4874 } else { 4875 CHK_HV; /* Else hypervisor privileged */ 4876 } 4877 4878 /* BookS does both ptesync and tlbsync make tlbsync a nop for server */ 4879 if (ctx->insns_flags & PPC_BOOKE) { 4880 gen_check_tlb_flush(ctx, true); 4881 } 4882 #endif /* defined(CONFIG_USER_ONLY) */ 4883 } 4884 4885 #if defined(TARGET_PPC64) 4886 /* slbia */ 4887 static void gen_slbia(DisasContext *ctx) 4888 { 4889 #if defined(CONFIG_USER_ONLY) 4890 GEN_PRIV; 4891 #else 4892 CHK_SV; 4893 4894 gen_helper_slbia(cpu_env); 4895 #endif /* defined(CONFIG_USER_ONLY) */ 4896 } 4897 4898 /* slbie */ 4899 static void gen_slbie(DisasContext *ctx) 4900 { 4901 #if defined(CONFIG_USER_ONLY) 4902 GEN_PRIV; 4903 #else 4904 CHK_SV; 4905 4906 gen_helper_slbie(cpu_env, cpu_gpr[rB(ctx->opcode)]); 4907 #endif /* defined(CONFIG_USER_ONLY) */ 4908 } 4909 4910 /* slbieg */ 4911 static void gen_slbieg(DisasContext *ctx) 4912 { 4913 #if defined(CONFIG_USER_ONLY) 4914 GEN_PRIV; 4915 #else 4916 CHK_SV; 4917 4918 gen_helper_slbieg(cpu_env, cpu_gpr[rB(ctx->opcode)]); 4919 #endif /* defined(CONFIG_USER_ONLY) */ 4920 } 4921 4922 /* slbsync */ 4923 static void gen_slbsync(DisasContext *ctx) 4924 { 4925 #if defined(CONFIG_USER_ONLY) 4926 GEN_PRIV; 4927 #else 4928 CHK_SV; 4929 gen_check_tlb_flush(ctx, true); 4930 #endif /* defined(CONFIG_USER_ONLY) */ 4931 } 4932 4933 #endif /* defined(TARGET_PPC64) */ 4934 4935 /*** External control ***/ 4936 /* Optional: */ 4937 4938 /* eciwx */ 4939 static void gen_eciwx(DisasContext *ctx) 4940 { 4941 TCGv t0; 4942 /* Should check EAR[E] ! */ 4943 gen_set_access_type(ctx, ACCESS_EXT); 4944 t0 = tcg_temp_new(); 4945 gen_addr_reg_index(ctx, t0); 4946 tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx, 4947 DEF_MEMOP(MO_UL | MO_ALIGN)); 4948 tcg_temp_free(t0); 4949 } 4950 4951 /* ecowx */ 4952 static void gen_ecowx(DisasContext *ctx) 4953 { 4954 TCGv t0; 4955 /* Should check EAR[E] ! */ 4956 gen_set_access_type(ctx, ACCESS_EXT); 4957 t0 = tcg_temp_new(); 4958 gen_addr_reg_index(ctx, t0); 4959 tcg_gen_qemu_st_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx, 4960 DEF_MEMOP(MO_UL | MO_ALIGN)); 4961 tcg_temp_free(t0); 4962 } 4963 4964 /* PowerPC 601 specific instructions */ 4965 4966 /* abs - abs. 
*/ 4967 static void gen_abs(DisasContext *ctx) 4968 { 4969 TCGLabel *l1 = gen_new_label(); 4970 TCGLabel *l2 = gen_new_label(); 4971 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l1); 4972 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4973 tcg_gen_br(l2); 4974 gen_set_label(l1); 4975 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4976 gen_set_label(l2); 4977 if (unlikely(Rc(ctx->opcode) != 0)) 4978 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4979 } 4980 4981 /* abso - abso. */ 4982 static void gen_abso(DisasContext *ctx) 4983 { 4984 TCGLabel *l1 = gen_new_label(); 4985 TCGLabel *l2 = gen_new_label(); 4986 TCGLabel *l3 = gen_new_label(); 4987 /* Start with XER OV disabled, the most likely case */ 4988 tcg_gen_movi_tl(cpu_ov, 0); 4989 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l2); 4990 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[rA(ctx->opcode)], 0x80000000, l1); 4991 tcg_gen_movi_tl(cpu_ov, 1); 4992 tcg_gen_movi_tl(cpu_so, 1); 4993 tcg_gen_br(l2); 4994 gen_set_label(l1); 4995 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4996 tcg_gen_br(l3); 4997 gen_set_label(l2); 4998 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4999 gen_set_label(l3); 5000 if (unlikely(Rc(ctx->opcode) != 0)) 5001 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5002 } 5003 5004 /* clcs */ 5005 static void gen_clcs(DisasContext *ctx) 5006 { 5007 TCGv_i32 t0 = tcg_const_i32(rA(ctx->opcode)); 5008 gen_helper_clcs(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5009 tcg_temp_free_i32(t0); 5010 /* Rc=1 sets CR0 to an undefined state */ 5011 } 5012 5013 /* div - div. */ 5014 static void gen_div(DisasContext *ctx) 5015 { 5016 gen_helper_div(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 5017 cpu_gpr[rB(ctx->opcode)]); 5018 if (unlikely(Rc(ctx->opcode) != 0)) 5019 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5020 } 5021 5022 /* divo - divo. */ 5023 static void gen_divo(DisasContext *ctx) 5024 { 5025 gen_helper_divo(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 5026 cpu_gpr[rB(ctx->opcode)]); 5027 if (unlikely(Rc(ctx->opcode) != 0)) 5028 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5029 } 5030 5031 /* divs - divs. */ 5032 static void gen_divs(DisasContext *ctx) 5033 { 5034 gen_helper_divs(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 5035 cpu_gpr[rB(ctx->opcode)]); 5036 if (unlikely(Rc(ctx->opcode) != 0)) 5037 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5038 } 5039 5040 /* divso - divso. */ 5041 static void gen_divso(DisasContext *ctx) 5042 { 5043 gen_helper_divso(cpu_gpr[rD(ctx->opcode)], cpu_env, 5044 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 5045 if (unlikely(Rc(ctx->opcode) != 0)) 5046 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5047 } 5048 5049 /* doz - doz. */ 5050 static void gen_doz(DisasContext *ctx) 5051 { 5052 TCGLabel *l1 = gen_new_label(); 5053 TCGLabel *l2 = gen_new_label(); 5054 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1); 5055 tcg_gen_sub_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5056 tcg_gen_br(l2); 5057 gen_set_label(l1); 5058 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 5059 gen_set_label(l2); 5060 if (unlikely(Rc(ctx->opcode) != 0)) 5061 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5062 } 5063 5064 /* dozo - dozo. 
*/ 5065 static void gen_dozo(DisasContext *ctx) 5066 { 5067 TCGLabel *l1 = gen_new_label(); 5068 TCGLabel *l2 = gen_new_label(); 5069 TCGv t0 = tcg_temp_new(); 5070 TCGv t1 = tcg_temp_new(); 5071 TCGv t2 = tcg_temp_new(); 5072 /* Start with XER OV disabled, the most likely case */ 5073 tcg_gen_movi_tl(cpu_ov, 0); 5074 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1); 5075 tcg_gen_sub_tl(t0, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5076 tcg_gen_xor_tl(t1, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5077 tcg_gen_xor_tl(t2, cpu_gpr[rA(ctx->opcode)], t0); 5078 tcg_gen_andc_tl(t1, t1, t2); 5079 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0); 5080 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2); 5081 tcg_gen_movi_tl(cpu_ov, 1); 5082 tcg_gen_movi_tl(cpu_so, 1); 5083 tcg_gen_br(l2); 5084 gen_set_label(l1); 5085 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 5086 gen_set_label(l2); 5087 tcg_temp_free(t0); 5088 tcg_temp_free(t1); 5089 tcg_temp_free(t2); 5090 if (unlikely(Rc(ctx->opcode) != 0)) 5091 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5092 } 5093 5094 /* dozi */ 5095 static void gen_dozi(DisasContext *ctx) 5096 { 5097 target_long simm = SIMM(ctx->opcode); 5098 TCGLabel *l1 = gen_new_label(); 5099 TCGLabel *l2 = gen_new_label(); 5100 tcg_gen_brcondi_tl(TCG_COND_LT, cpu_gpr[rA(ctx->opcode)], simm, l1); 5101 tcg_gen_subfi_tl(cpu_gpr[rD(ctx->opcode)], simm, cpu_gpr[rA(ctx->opcode)]); 5102 tcg_gen_br(l2); 5103 gen_set_label(l1); 5104 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 5105 gen_set_label(l2); 5106 if (unlikely(Rc(ctx->opcode) != 0)) 5107 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5108 } 5109 5110 /* lscbx - lscbx. */ 5111 static void gen_lscbx(DisasContext *ctx) 5112 { 5113 TCGv t0 = tcg_temp_new(); 5114 TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode)); 5115 TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode)); 5116 TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode)); 5117 5118 gen_addr_reg_index(ctx, t0); 5119 gen_helper_lscbx(t0, cpu_env, t0, t1, t2, t3); 5120 tcg_temp_free_i32(t1); 5121 tcg_temp_free_i32(t2); 5122 tcg_temp_free_i32(t3); 5123 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F); 5124 tcg_gen_or_tl(cpu_xer, cpu_xer, t0); 5125 if (unlikely(Rc(ctx->opcode) != 0)) 5126 gen_set_Rc0(ctx, t0); 5127 tcg_temp_free(t0); 5128 } 5129 5130 /* maskg - maskg. */ 5131 static void gen_maskg(DisasContext *ctx) 5132 { 5133 TCGLabel *l1 = gen_new_label(); 5134 TCGv t0 = tcg_temp_new(); 5135 TCGv t1 = tcg_temp_new(); 5136 TCGv t2 = tcg_temp_new(); 5137 TCGv t3 = tcg_temp_new(); 5138 tcg_gen_movi_tl(t3, 0xFFFFFFFF); 5139 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5140 tcg_gen_andi_tl(t1, cpu_gpr[rS(ctx->opcode)], 0x1F); 5141 tcg_gen_addi_tl(t2, t0, 1); 5142 tcg_gen_shr_tl(t2, t3, t2); 5143 tcg_gen_shr_tl(t3, t3, t1); 5144 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], t2, t3); 5145 tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1); 5146 tcg_gen_neg_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5147 gen_set_label(l1); 5148 tcg_temp_free(t0); 5149 tcg_temp_free(t1); 5150 tcg_temp_free(t2); 5151 tcg_temp_free(t3); 5152 if (unlikely(Rc(ctx->opcode) != 0)) 5153 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5154 } 5155 5156 /* maskir - maskir. 
*/ 5157 static void gen_maskir(DisasContext *ctx) 5158 { 5159 TCGv t0 = tcg_temp_new(); 5160 TCGv t1 = tcg_temp_new(); 5161 tcg_gen_and_tl(t0, cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 5162 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 5163 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5164 tcg_temp_free(t0); 5165 tcg_temp_free(t1); 5166 if (unlikely(Rc(ctx->opcode) != 0)) 5167 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5168 } 5169 5170 /* mul - mul. */ 5171 static void gen_mul(DisasContext *ctx) 5172 { 5173 TCGv_i64 t0 = tcg_temp_new_i64(); 5174 TCGv_i64 t1 = tcg_temp_new_i64(); 5175 TCGv t2 = tcg_temp_new(); 5176 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]); 5177 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]); 5178 tcg_gen_mul_i64(t0, t0, t1); 5179 tcg_gen_trunc_i64_tl(t2, t0); 5180 gen_store_spr(SPR_MQ, t2); 5181 tcg_gen_shri_i64(t1, t0, 32); 5182 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1); 5183 tcg_temp_free_i64(t0); 5184 tcg_temp_free_i64(t1); 5185 tcg_temp_free(t2); 5186 if (unlikely(Rc(ctx->opcode) != 0)) 5187 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5188 } 5189 5190 /* mulo - mulo. */ 5191 static void gen_mulo(DisasContext *ctx) 5192 { 5193 TCGLabel *l1 = gen_new_label(); 5194 TCGv_i64 t0 = tcg_temp_new_i64(); 5195 TCGv_i64 t1 = tcg_temp_new_i64(); 5196 TCGv t2 = tcg_temp_new(); 5197 /* Start with XER OV disabled, the most likely case */ 5198 tcg_gen_movi_tl(cpu_ov, 0); 5199 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]); 5200 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]); 5201 tcg_gen_mul_i64(t0, t0, t1); 5202 tcg_gen_trunc_i64_tl(t2, t0); 5203 gen_store_spr(SPR_MQ, t2); 5204 tcg_gen_shri_i64(t1, t0, 32); 5205 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1); 5206 tcg_gen_ext32s_i64(t1, t0); 5207 tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1); 5208 tcg_gen_movi_tl(cpu_ov, 1); 5209 tcg_gen_movi_tl(cpu_so, 1); 5210 gen_set_label(l1); 5211 tcg_temp_free_i64(t0); 5212 tcg_temp_free_i64(t1); 5213 tcg_temp_free(t2); 5214 if (unlikely(Rc(ctx->opcode) != 0)) 5215 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5216 } 5217 5218 /* nabs - nabs. */ 5219 static void gen_nabs(DisasContext *ctx) 5220 { 5221 TCGLabel *l1 = gen_new_label(); 5222 TCGLabel *l2 = gen_new_label(); 5223 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1); 5224 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5225 tcg_gen_br(l2); 5226 gen_set_label(l1); 5227 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5228 gen_set_label(l2); 5229 if (unlikely(Rc(ctx->opcode) != 0)) 5230 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5231 } 5232 5233 /* nabso - nabso. */ 5234 static void gen_nabso(DisasContext *ctx) 5235 { 5236 TCGLabel *l1 = gen_new_label(); 5237 TCGLabel *l2 = gen_new_label(); 5238 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1); 5239 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5240 tcg_gen_br(l2); 5241 gen_set_label(l1); 5242 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5243 gen_set_label(l2); 5244 /* nabs never overflows */ 5245 tcg_gen_movi_tl(cpu_ov, 0); 5246 if (unlikely(Rc(ctx->opcode) != 0)) 5247 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5248 } 5249 5250 /* rlmi - rlmi. 
*/ 5251 static void gen_rlmi(DisasContext *ctx) 5252 { 5253 uint32_t mb = MB(ctx->opcode); 5254 uint32_t me = ME(ctx->opcode); 5255 TCGv t0 = tcg_temp_new(); 5256 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5257 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 5258 tcg_gen_andi_tl(t0, t0, MASK(mb, me)); 5259 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~MASK(mb, me)); 5260 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], t0); 5261 tcg_temp_free(t0); 5262 if (unlikely(Rc(ctx->opcode) != 0)) 5263 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5264 } 5265 5266 /* rrib - rrib. */ 5267 static void gen_rrib(DisasContext *ctx) 5268 { 5269 TCGv t0 = tcg_temp_new(); 5270 TCGv t1 = tcg_temp_new(); 5271 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5272 tcg_gen_movi_tl(t1, 0x80000000); 5273 tcg_gen_shr_tl(t1, t1, t0); 5274 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 5275 tcg_gen_and_tl(t0, t0, t1); 5276 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], t1); 5277 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5278 tcg_temp_free(t0); 5279 tcg_temp_free(t1); 5280 if (unlikely(Rc(ctx->opcode) != 0)) 5281 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5282 } 5283 5284 /* sle - sle. */ 5285 static void gen_sle(DisasContext *ctx) 5286 { 5287 TCGv t0 = tcg_temp_new(); 5288 TCGv t1 = tcg_temp_new(); 5289 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5290 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5291 tcg_gen_subfi_tl(t1, 32, t1); 5292 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 5293 tcg_gen_or_tl(t1, t0, t1); 5294 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5295 gen_store_spr(SPR_MQ, t1); 5296 tcg_temp_free(t0); 5297 tcg_temp_free(t1); 5298 if (unlikely(Rc(ctx->opcode) != 0)) 5299 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5300 } 5301 5302 /* sleq - sleq. */ 5303 static void gen_sleq(DisasContext *ctx) 5304 { 5305 TCGv t0 = tcg_temp_new(); 5306 TCGv t1 = tcg_temp_new(); 5307 TCGv t2 = tcg_temp_new(); 5308 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5309 tcg_gen_movi_tl(t2, 0xFFFFFFFF); 5310 tcg_gen_shl_tl(t2, t2, t0); 5311 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 5312 gen_load_spr(t1, SPR_MQ); 5313 gen_store_spr(SPR_MQ, t0); 5314 tcg_gen_and_tl(t0, t0, t2); 5315 tcg_gen_andc_tl(t1, t1, t2); 5316 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5317 tcg_temp_free(t0); 5318 tcg_temp_free(t1); 5319 tcg_temp_free(t2); 5320 if (unlikely(Rc(ctx->opcode) != 0)) 5321 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5322 } 5323 5324 /* sliq - sliq. */ 5325 static void gen_sliq(DisasContext *ctx) 5326 { 5327 int sh = SH(ctx->opcode); 5328 TCGv t0 = tcg_temp_new(); 5329 TCGv t1 = tcg_temp_new(); 5330 tcg_gen_shli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5331 tcg_gen_shri_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 5332 tcg_gen_or_tl(t1, t0, t1); 5333 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5334 gen_store_spr(SPR_MQ, t1); 5335 tcg_temp_free(t0); 5336 tcg_temp_free(t1); 5337 if (unlikely(Rc(ctx->opcode) != 0)) 5338 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5339 } 5340 5341 /* slliq - slliq. 
*/ 5342 static void gen_slliq(DisasContext *ctx) 5343 { 5344 int sh = SH(ctx->opcode); 5345 TCGv t0 = tcg_temp_new(); 5346 TCGv t1 = tcg_temp_new(); 5347 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5348 gen_load_spr(t1, SPR_MQ); 5349 gen_store_spr(SPR_MQ, t0); 5350 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU << sh)); 5351 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU << sh)); 5352 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5353 tcg_temp_free(t0); 5354 tcg_temp_free(t1); 5355 if (unlikely(Rc(ctx->opcode) != 0)) 5356 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5357 } 5358 5359 /* sllq - sllq. */ 5360 static void gen_sllq(DisasContext *ctx) 5361 { 5362 TCGLabel *l1 = gen_new_label(); 5363 TCGLabel *l2 = gen_new_label(); 5364 TCGv t0 = tcg_temp_local_new(); 5365 TCGv t1 = tcg_temp_local_new(); 5366 TCGv t2 = tcg_temp_local_new(); 5367 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 5368 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 5369 tcg_gen_shl_tl(t1, t1, t2); 5370 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 5371 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 5372 gen_load_spr(t0, SPR_MQ); 5373 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5374 tcg_gen_br(l2); 5375 gen_set_label(l1); 5376 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 5377 gen_load_spr(t2, SPR_MQ); 5378 tcg_gen_andc_tl(t1, t2, t1); 5379 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5380 gen_set_label(l2); 5381 tcg_temp_free(t0); 5382 tcg_temp_free(t1); 5383 tcg_temp_free(t2); 5384 if (unlikely(Rc(ctx->opcode) != 0)) 5385 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5386 } 5387 5388 /* slq - slq. */ 5389 static void gen_slq(DisasContext *ctx) 5390 { 5391 TCGLabel *l1 = gen_new_label(); 5392 TCGv t0 = tcg_temp_new(); 5393 TCGv t1 = tcg_temp_new(); 5394 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5395 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5396 tcg_gen_subfi_tl(t1, 32, t1); 5397 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 5398 tcg_gen_or_tl(t1, t0, t1); 5399 gen_store_spr(SPR_MQ, t1); 5400 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20); 5401 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5402 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1); 5403 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 5404 gen_set_label(l1); 5405 tcg_temp_free(t0); 5406 tcg_temp_free(t1); 5407 if (unlikely(Rc(ctx->opcode) != 0)) 5408 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5409 } 5410 5411 /* sraiq - sraiq. */ 5412 static void gen_sraiq(DisasContext *ctx) 5413 { 5414 int sh = SH(ctx->opcode); 5415 TCGLabel *l1 = gen_new_label(); 5416 TCGv t0 = tcg_temp_new(); 5417 TCGv t1 = tcg_temp_new(); 5418 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5419 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 5420 tcg_gen_or_tl(t0, t0, t1); 5421 gen_store_spr(SPR_MQ, t0); 5422 tcg_gen_movi_tl(cpu_ca, 0); 5423 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1); 5424 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1); 5425 tcg_gen_movi_tl(cpu_ca, 1); 5426 gen_set_label(l1); 5427 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh); 5428 tcg_temp_free(t0); 5429 tcg_temp_free(t1); 5430 if (unlikely(Rc(ctx->opcode) != 0)) 5431 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5432 } 5433 5434 /* sraq - sraq. 
*/ 5435 static void gen_sraq(DisasContext *ctx) 5436 { 5437 TCGLabel *l1 = gen_new_label(); 5438 TCGLabel *l2 = gen_new_label(); 5439 TCGv t0 = tcg_temp_new(); 5440 TCGv t1 = tcg_temp_local_new(); 5441 TCGv t2 = tcg_temp_local_new(); 5442 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 5443 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 5444 tcg_gen_sar_tl(t1, cpu_gpr[rS(ctx->opcode)], t2); 5445 tcg_gen_subfi_tl(t2, 32, t2); 5446 tcg_gen_shl_tl(t2, cpu_gpr[rS(ctx->opcode)], t2); 5447 tcg_gen_or_tl(t0, t0, t2); 5448 gen_store_spr(SPR_MQ, t0); 5449 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 5450 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l1); 5451 tcg_gen_mov_tl(t2, cpu_gpr[rS(ctx->opcode)]); 5452 tcg_gen_sari_tl(t1, cpu_gpr[rS(ctx->opcode)], 31); 5453 gen_set_label(l1); 5454 tcg_temp_free(t0); 5455 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t1); 5456 tcg_gen_movi_tl(cpu_ca, 0); 5457 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2); 5458 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l2); 5459 tcg_gen_movi_tl(cpu_ca, 1); 5460 gen_set_label(l2); 5461 tcg_temp_free(t1); 5462 tcg_temp_free(t2); 5463 if (unlikely(Rc(ctx->opcode) != 0)) 5464 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5465 } 5466 5467 /* sre - sre. */ 5468 static void gen_sre(DisasContext *ctx) 5469 { 5470 TCGv t0 = tcg_temp_new(); 5471 TCGv t1 = tcg_temp_new(); 5472 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5473 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5474 tcg_gen_subfi_tl(t1, 32, t1); 5475 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 5476 tcg_gen_or_tl(t1, t0, t1); 5477 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5478 gen_store_spr(SPR_MQ, t1); 5479 tcg_temp_free(t0); 5480 tcg_temp_free(t1); 5481 if (unlikely(Rc(ctx->opcode) != 0)) 5482 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5483 } 5484 5485 /* srea - srea. 
*/ 5486 static void gen_srea(DisasContext *ctx) 5487 { 5488 TCGv t0 = tcg_temp_new(); 5489 TCGv t1 = tcg_temp_new(); 5490 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5491 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5492 gen_store_spr(SPR_MQ, t0); 5493 tcg_gen_sar_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t1); 5494 tcg_temp_free(t0); 5495 tcg_temp_free(t1); 5496 if (unlikely(Rc(ctx->opcode) != 0)) 5497 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5498 } 5499 5500 /* sreq */ 5501 static void gen_sreq(DisasContext *ctx) 5502 { 5503 TCGv t0 = tcg_temp_new(); 5504 TCGv t1 = tcg_temp_new(); 5505 TCGv t2 = tcg_temp_new(); 5506 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5507 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 5508 tcg_gen_shr_tl(t1, t1, t0); 5509 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 5510 gen_load_spr(t2, SPR_MQ); 5511 gen_store_spr(SPR_MQ, t0); 5512 tcg_gen_and_tl(t0, t0, t1); 5513 tcg_gen_andc_tl(t2, t2, t1); 5514 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t2); 5515 tcg_temp_free(t0); 5516 tcg_temp_free(t1); 5517 tcg_temp_free(t2); 5518 if (unlikely(Rc(ctx->opcode) != 0)) 5519 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5520 } 5521 5522 /* sriq */ 5523 static void gen_sriq(DisasContext *ctx) 5524 { 5525 int sh = SH(ctx->opcode); 5526 TCGv t0 = tcg_temp_new(); 5527 TCGv t1 = tcg_temp_new(); 5528 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5529 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 5530 tcg_gen_or_tl(t1, t0, t1); 5531 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5532 gen_store_spr(SPR_MQ, t1); 5533 tcg_temp_free(t0); 5534 tcg_temp_free(t1); 5535 if (unlikely(Rc(ctx->opcode) != 0)) 5536 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5537 } 5538 5539 /* srliq */ 5540 static void gen_srliq(DisasContext *ctx) 5541 { 5542 int sh = SH(ctx->opcode); 5543 TCGv t0 = tcg_temp_new(); 5544 TCGv t1 = tcg_temp_new(); 5545 tcg_gen_rotri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5546 gen_load_spr(t1, SPR_MQ); 5547 gen_store_spr(SPR_MQ, t0); 5548 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU >> sh)); 5549 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU >> sh)); 5550 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5551 tcg_temp_free(t0); 5552 tcg_temp_free(t1); 5553 if (unlikely(Rc(ctx->opcode) != 0)) 5554 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5555 } 5556 5557 /* srlq */ 5558 static void gen_srlq(DisasContext *ctx) 5559 { 5560 TCGLabel *l1 = gen_new_label(); 5561 TCGLabel *l2 = gen_new_label(); 5562 TCGv t0 = tcg_temp_local_new(); 5563 TCGv t1 = tcg_temp_local_new(); 5564 TCGv t2 = tcg_temp_local_new(); 5565 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 5566 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 5567 tcg_gen_shr_tl(t2, t1, t2); 5568 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 5569 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 5570 gen_load_spr(t0, SPR_MQ); 5571 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t2); 5572 tcg_gen_br(l2); 5573 gen_set_label(l1); 5574 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 5575 tcg_gen_and_tl(t0, t0, t2); 5576 gen_load_spr(t1, SPR_MQ); 5577 tcg_gen_andc_tl(t1, t1, t2); 5578 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5579 gen_set_label(l2); 5580 tcg_temp_free(t0); 5581 tcg_temp_free(t1); 5582 tcg_temp_free(t2); 5583 if (unlikely(Rc(ctx->opcode) != 0)) 5584 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5585 } 5586 5587 /* srq */ 5588 static void gen_srq(DisasContext *ctx) 5589 { 5590 TCGLabel *l1 = gen_new_label(); 5591 TCGv t0 = tcg_temp_new(); 5592 TCGv t1 = tcg_temp_new(); 5593 tcg_gen_andi_tl(t1, 
cpu_gpr[rB(ctx->opcode)], 0x1F); 5594 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5595 tcg_gen_subfi_tl(t1, 32, t1); 5596 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 5597 tcg_gen_or_tl(t1, t0, t1); 5598 gen_store_spr(SPR_MQ, t1); 5599 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20); 5600 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5601 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 5602 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 5603 gen_set_label(l1); 5604 tcg_temp_free(t0); 5605 tcg_temp_free(t1); 5606 if (unlikely(Rc(ctx->opcode) != 0)) 5607 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5608 } 5609 5610 /* PowerPC 602 specific instructions */ 5611 5612 /* dsa */ 5613 static void gen_dsa(DisasContext *ctx) 5614 { 5615 /* XXX: TODO */ 5616 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5617 } 5618 5619 /* esa */ 5620 static void gen_esa(DisasContext *ctx) 5621 { 5622 /* XXX: TODO */ 5623 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5624 } 5625 5626 /* mfrom */ 5627 static void gen_mfrom(DisasContext *ctx) 5628 { 5629 #if defined(CONFIG_USER_ONLY) 5630 GEN_PRIV; 5631 #else 5632 CHK_SV; 5633 gen_helper_602_mfrom(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5634 #endif /* defined(CONFIG_USER_ONLY) */ 5635 } 5636 5637 /* 602 - 603 - G2 TLB management */ 5638 5639 /* tlbld */ 5640 static void gen_tlbld_6xx(DisasContext *ctx) 5641 { 5642 #if defined(CONFIG_USER_ONLY) 5643 GEN_PRIV; 5644 #else 5645 CHK_SV; 5646 gen_helper_6xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5647 #endif /* defined(CONFIG_USER_ONLY) */ 5648 } 5649 5650 /* tlbli */ 5651 static void gen_tlbli_6xx(DisasContext *ctx) 5652 { 5653 #if defined(CONFIG_USER_ONLY) 5654 GEN_PRIV; 5655 #else 5656 CHK_SV; 5657 gen_helper_6xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5658 #endif /* defined(CONFIG_USER_ONLY) */ 5659 } 5660 5661 /* 74xx TLB management */ 5662 5663 /* tlbld */ 5664 static void gen_tlbld_74xx(DisasContext *ctx) 5665 { 5666 #if defined(CONFIG_USER_ONLY) 5667 GEN_PRIV; 5668 #else 5669 CHK_SV; 5670 gen_helper_74xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5671 #endif /* defined(CONFIG_USER_ONLY) */ 5672 } 5673 5674 /* tlbli */ 5675 static void gen_tlbli_74xx(DisasContext *ctx) 5676 { 5677 #if defined(CONFIG_USER_ONLY) 5678 GEN_PRIV; 5679 #else 5680 CHK_SV; 5681 gen_helper_74xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5682 #endif /* defined(CONFIG_USER_ONLY) */ 5683 } 5684 5685 /* POWER instructions not in PowerPC 601 */ 5686 5687 /* clf */ 5688 static void gen_clf(DisasContext *ctx) 5689 { 5690 /* Cache line flush: implemented as no-op */ 5691 } 5692 5693 /* cli */ 5694 static void gen_cli(DisasContext *ctx) 5695 { 5696 #if defined(CONFIG_USER_ONLY) 5697 GEN_PRIV; 5698 #else 5699 /* Cache line invalidate: privileged and treated as no-op */ 5700 CHK_SV; 5701 #endif /* defined(CONFIG_USER_ONLY) */ 5702 } 5703 5704 /* dclst */ 5705 static void gen_dclst(DisasContext *ctx) 5706 { 5707 /* Data cache line store: treated as no-op */ 5708 } 5709 5710 static void gen_mfsri(DisasContext *ctx) 5711 { 5712 #if defined(CONFIG_USER_ONLY) 5713 GEN_PRIV; 5714 #else 5715 int ra = rA(ctx->opcode); 5716 int rd = rD(ctx->opcode); 5717 TCGv t0; 5718 5719 CHK_SV; 5720 t0 = tcg_temp_new(); 5721 gen_addr_reg_index(ctx, t0); 5722 tcg_gen_extract_tl(t0, t0, 28, 4); 5723 gen_helper_load_sr(cpu_gpr[rd], cpu_env, t0); 5724 tcg_temp_free(t0); 5725 if (ra != 0 && ra != rd) 5726 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rd]); 5727 #endif /* defined(CONFIG_USER_ONLY) */ 5728 } 5729 5730 static void gen_rac(DisasContext *ctx) 5731 
{ 5732 #if defined(CONFIG_USER_ONLY) 5733 GEN_PRIV; 5734 #else 5735 TCGv t0; 5736 5737 CHK_SV; 5738 t0 = tcg_temp_new(); 5739 gen_addr_reg_index(ctx, t0); 5740 gen_helper_rac(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5741 tcg_temp_free(t0); 5742 #endif /* defined(CONFIG_USER_ONLY) */ 5743 } 5744 5745 static void gen_rfsvc(DisasContext *ctx) 5746 { 5747 #if defined(CONFIG_USER_ONLY) 5748 GEN_PRIV; 5749 #else 5750 CHK_SV; 5751 5752 gen_helper_rfsvc(cpu_env); 5753 gen_sync_exception(ctx); 5754 #endif /* defined(CONFIG_USER_ONLY) */ 5755 } 5756 5757 /* svc is not implemented for now */ 5758 5759 /* BookE specific instructions */ 5760 5761 /* XXX: not implemented on 440 ? */ 5762 static void gen_mfapidi(DisasContext *ctx) 5763 { 5764 /* XXX: TODO */ 5765 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5766 } 5767 5768 /* XXX: not implemented on 440 ? */ 5769 static void gen_tlbiva(DisasContext *ctx) 5770 { 5771 #if defined(CONFIG_USER_ONLY) 5772 GEN_PRIV; 5773 #else 5774 TCGv t0; 5775 5776 CHK_SV; 5777 t0 = tcg_temp_new(); 5778 gen_addr_reg_index(ctx, t0); 5779 gen_helper_tlbiva(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5780 tcg_temp_free(t0); 5781 #endif /* defined(CONFIG_USER_ONLY) */ 5782 } 5783 5784 /* All 405 MAC instructions are translated here */ 5785 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3, 5786 int ra, int rb, int rt, int Rc) 5787 { 5788 TCGv t0, t1; 5789 5790 t0 = tcg_temp_local_new(); 5791 t1 = tcg_temp_local_new(); 5792 5793 switch (opc3 & 0x0D) { 5794 case 0x05: 5795 /* macchw - macchw. - macchwo - macchwo. */ 5796 /* macchws - macchws. - macchwso - macchwso. */ 5797 /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */ 5798 /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */ 5799 /* mulchw - mulchw. */ 5800 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]); 5801 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16); 5802 tcg_gen_ext16s_tl(t1, t1); 5803 break; 5804 case 0x04: 5805 /* macchwu - macchwu. - macchwuo - macchwuo. */ 5806 /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */ 5807 /* mulchwu - mulchwu. */ 5808 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]); 5809 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16); 5810 tcg_gen_ext16u_tl(t1, t1); 5811 break; 5812 case 0x01: 5813 /* machhw - machhw. - machhwo - machhwo. */ 5814 /* machhws - machhws. - machhwso - machhwso. */ 5815 /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */ 5816 /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */ 5817 /* mulhhw - mulhhw. */ 5818 tcg_gen_sari_tl(t0, cpu_gpr[ra], 16); 5819 tcg_gen_ext16s_tl(t0, t0); 5820 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16); 5821 tcg_gen_ext16s_tl(t1, t1); 5822 break; 5823 case 0x00: 5824 /* machhwu - machhwu. - machhwuo - machhwuo. */ 5825 /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */ 5826 /* mulhhwu - mulhhwu. */ 5827 tcg_gen_shri_tl(t0, cpu_gpr[ra], 16); 5828 tcg_gen_ext16u_tl(t0, t0); 5829 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16); 5830 tcg_gen_ext16u_tl(t1, t1); 5831 break; 5832 case 0x0D: 5833 /* maclhw - maclhw. - maclhwo - maclhwo. */ 5834 /* maclhws - maclhws. - maclhwso - maclhwso. */ 5835 /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */ 5836 /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */ 5837 /* mullhw - mullhw. */ 5838 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]); 5839 tcg_gen_ext16s_tl(t1, cpu_gpr[rb]); 5840 break; 5841 case 0x0C: 5842 /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */ 5843 /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */ 5844 /* mullhwu - mullhwu. 
*/ 5845 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]); 5846 tcg_gen_ext16u_tl(t1, cpu_gpr[rb]); 5847 break; 5848 } 5849 if (opc2 & 0x04) { 5850 /* (n)multiply-and-accumulate (0x0C / 0x0E) */ 5851 tcg_gen_mul_tl(t1, t0, t1); 5852 if (opc2 & 0x02) { 5853 /* nmultiply-and-accumulate (0x0E) */ 5854 tcg_gen_sub_tl(t0, cpu_gpr[rt], t1); 5855 } else { 5856 /* multiply-and-accumulate (0x0C) */ 5857 tcg_gen_add_tl(t0, cpu_gpr[rt], t1); 5858 } 5859 5860 if (opc3 & 0x12) { 5861 /* Check overflow and/or saturate */ 5862 TCGLabel *l1 = gen_new_label(); 5863 5864 if (opc3 & 0x10) { 5865 /* Start with XER OV disabled, the most likely case */ 5866 tcg_gen_movi_tl(cpu_ov, 0); 5867 } 5868 if (opc3 & 0x01) { 5869 /* Signed */ 5870 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1); 5871 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1); 5872 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0); 5873 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1); 5874 if (opc3 & 0x02) { 5875 /* Saturate */ 5876 tcg_gen_sari_tl(t0, cpu_gpr[rt], 31); 5877 tcg_gen_xori_tl(t0, t0, 0x7fffffff); 5878 } 5879 } else { 5880 /* Unsigned */ 5881 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1); 5882 if (opc3 & 0x02) { 5883 /* Saturate */ 5884 tcg_gen_movi_tl(t0, UINT32_MAX); 5885 } 5886 } 5887 if (opc3 & 0x10) { 5888 /* Check overflow */ 5889 tcg_gen_movi_tl(cpu_ov, 1); 5890 tcg_gen_movi_tl(cpu_so, 1); 5891 } 5892 gen_set_label(l1); 5893 tcg_gen_mov_tl(cpu_gpr[rt], t0); 5894 } 5895 } else { 5896 tcg_gen_mul_tl(cpu_gpr[rt], t0, t1); 5897 } 5898 tcg_temp_free(t0); 5899 tcg_temp_free(t1); 5900 if (unlikely(Rc) != 0) { 5901 /* Update Rc0 */ 5902 gen_set_Rc0(ctx, cpu_gpr[rt]); 5903 } 5904 } 5905 5906 #define GEN_MAC_HANDLER(name, opc2, opc3) \ 5907 static void glue(gen_, name)(DisasContext *ctx) \ 5908 { \ 5909 gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode), \ 5910 rD(ctx->opcode), Rc(ctx->opcode)); \ 5911 } 5912 5913 /* macchw - macchw. */ 5914 GEN_MAC_HANDLER(macchw, 0x0C, 0x05); 5915 /* macchwo - macchwo. */ 5916 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15); 5917 /* macchws - macchws. */ 5918 GEN_MAC_HANDLER(macchws, 0x0C, 0x07); 5919 /* macchwso - macchwso. */ 5920 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17); 5921 /* macchwsu - macchwsu. */ 5922 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06); 5923 /* macchwsuo - macchwsuo. */ 5924 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16); 5925 /* macchwu - macchwu. */ 5926 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04); 5927 /* macchwuo - macchwuo. */ 5928 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14); 5929 /* machhw - machhw. */ 5930 GEN_MAC_HANDLER(machhw, 0x0C, 0x01); 5931 /* machhwo - machhwo. */ 5932 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11); 5933 /* machhws - machhws. */ 5934 GEN_MAC_HANDLER(machhws, 0x0C, 0x03); 5935 /* machhwso - machhwso. */ 5936 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13); 5937 /* machhwsu - machhwsu. */ 5938 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02); 5939 /* machhwsuo - machhwsuo. */ 5940 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12); 5941 /* machhwu - machhwu. */ 5942 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00); 5943 /* machhwuo - machhwuo. */ 5944 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10); 5945 /* maclhw - maclhw. */ 5946 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D); 5947 /* maclhwo - maclhwo. */ 5948 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D); 5949 /* maclhws - maclhws. */ 5950 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F); 5951 /* maclhwso - maclhwso. */ 5952 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F); 5953 /* maclhwu - maclhwu. */ 5954 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C); 5955 /* maclhwuo - maclhwuo. */ 5956 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C); 5957 /* maclhwsu - maclhwsu. 
*/ 5958 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E); 5959 /* maclhwsuo - maclhwsuo. */ 5960 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E); 5961 /* nmacchw - nmacchw. */ 5962 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05); 5963 /* nmacchwo - nmacchwo. */ 5964 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15); 5965 /* nmacchws - nmacchws. */ 5966 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07); 5967 /* nmacchwso - nmacchwso. */ 5968 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17); 5969 /* nmachhw - nmachhw. */ 5970 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01); 5971 /* nmachhwo - nmachhwo. */ 5972 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11); 5973 /* nmachhws - nmachhws. */ 5974 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03); 5975 /* nmachhwso - nmachhwso. */ 5976 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13); 5977 /* nmaclhw - nmaclhw. */ 5978 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D); 5979 /* nmaclhwo - nmaclhwo. */ 5980 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D); 5981 /* nmaclhws - nmaclhws. */ 5982 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F); 5983 /* nmaclhwso - nmaclhwso. */ 5984 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F); 5985 5986 /* mulchw - mulchw. */ 5987 GEN_MAC_HANDLER(mulchw, 0x08, 0x05); 5988 /* mulchwu - mulchwu. */ 5989 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04); 5990 /* mulhhw - mulhhw. */ 5991 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01); 5992 /* mulhhwu - mulhhwu. */ 5993 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00); 5994 /* mullhw - mullhw. */ 5995 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D); 5996 /* mullhwu - mullhwu. */ 5997 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C); 5998 5999 /* mfdcr */ 6000 static void gen_mfdcr(DisasContext *ctx) 6001 { 6002 #if defined(CONFIG_USER_ONLY) 6003 GEN_PRIV; 6004 #else 6005 TCGv dcrn; 6006 6007 CHK_SV; 6008 dcrn = tcg_const_tl(SPR(ctx->opcode)); 6009 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, dcrn); 6010 tcg_temp_free(dcrn); 6011 #endif /* defined(CONFIG_USER_ONLY) */ 6012 } 6013 6014 /* mtdcr */ 6015 static void gen_mtdcr(DisasContext *ctx) 6016 { 6017 #if defined(CONFIG_USER_ONLY) 6018 GEN_PRIV; 6019 #else 6020 TCGv dcrn; 6021 6022 CHK_SV; 6023 dcrn = tcg_const_tl(SPR(ctx->opcode)); 6024 gen_helper_store_dcr(cpu_env, dcrn, cpu_gpr[rS(ctx->opcode)]); 6025 tcg_temp_free(dcrn); 6026 #endif /* defined(CONFIG_USER_ONLY) */ 6027 } 6028 6029 /* mfdcrx */ 6030 /* XXX: not implemented on 440 ? */ 6031 static void gen_mfdcrx(DisasContext *ctx) 6032 { 6033 #if defined(CONFIG_USER_ONLY) 6034 GEN_PRIV; 6035 #else 6036 CHK_SV; 6037 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, 6038 cpu_gpr[rA(ctx->opcode)]); 6039 /* Note: Rc update flag set leads to undefined state of Rc0 */ 6040 #endif /* defined(CONFIG_USER_ONLY) */ 6041 } 6042 6043 /* mtdcrx */ 6044 /* XXX: not implemented on 440 ? 
 */
static void gen_mtdcrx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
                         cpu_gpr[rS(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mfdcrux (PPC 460): user-mode access to DCR */
static void gen_mfdcrux(DisasContext *ctx)
{
    gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
                        cpu_gpr[rA(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
}

/* mtdcrux (PPC 460): user-mode access to DCR */
static void gen_mtdcrux(DisasContext *ctx)
{
    gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
                         cpu_gpr[rS(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
}

/* dccci */
static void gen_dccci(DisasContext *ctx)
{
    CHK_SV;
    /* interpreted as no-op */
}

/* dcread */
static void gen_dcread(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv EA, val;

    CHK_SV;
    gen_set_access_type(ctx, ACCESS_CACHE);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    val = tcg_temp_new();
    gen_qemu_ld32u(ctx, val, EA);
    tcg_temp_free(val);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
    tcg_temp_free(EA);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* icbt */
static void gen_icbt_40x(DisasContext *ctx)
{
    /* interpreted as no-op */
    /* XXX: the specification says this is treated as a load by the MMU
     *      but does not generate any exception
     */
}

/* iccci */
static void gen_iccci(DisasContext *ctx)
{
    CHK_SV;
    /* interpreted as no-op */
}

/* icread */
static void gen_icread(DisasContext *ctx)
{
    CHK_SV;
    /* interpreted as no-op */
}

/* rfci (supervisor only) */
static void gen_rfci_40x(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* Restore CPU state */
    gen_helper_40x_rfci(cpu_env);
    gen_sync_exception(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}

static void gen_rfci(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* Restore CPU state */
    gen_helper_rfci(cpu_env);
    gen_sync_exception(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* BookE specific */

/* XXX: not implemented on 440? */
static void gen_rfdi(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* Restore CPU state */
    gen_helper_rfdi(cpu_env);
    gen_sync_exception(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* XXX: not implemented on 440?
*/ 6164 static void gen_rfmci(DisasContext *ctx) 6165 { 6166 #if defined(CONFIG_USER_ONLY) 6167 GEN_PRIV; 6168 #else 6169 CHK_SV; 6170 /* Restore CPU state */ 6171 gen_helper_rfmci(cpu_env); 6172 gen_sync_exception(ctx); 6173 #endif /* defined(CONFIG_USER_ONLY) */ 6174 } 6175 6176 /* TLB management - PowerPC 405 implementation */ 6177 6178 /* tlbre */ 6179 static void gen_tlbre_40x(DisasContext *ctx) 6180 { 6181 #if defined(CONFIG_USER_ONLY) 6182 GEN_PRIV; 6183 #else 6184 CHK_SV; 6185 switch (rB(ctx->opcode)) { 6186 case 0: 6187 gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_env, 6188 cpu_gpr[rA(ctx->opcode)]); 6189 break; 6190 case 1: 6191 gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_env, 6192 cpu_gpr[rA(ctx->opcode)]); 6193 break; 6194 default: 6195 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6196 break; 6197 } 6198 #endif /* defined(CONFIG_USER_ONLY) */ 6199 } 6200 6201 /* tlbsx - tlbsx. */ 6202 static void gen_tlbsx_40x(DisasContext *ctx) 6203 { 6204 #if defined(CONFIG_USER_ONLY) 6205 GEN_PRIV; 6206 #else 6207 TCGv t0; 6208 6209 CHK_SV; 6210 t0 = tcg_temp_new(); 6211 gen_addr_reg_index(ctx, t0); 6212 gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 6213 tcg_temp_free(t0); 6214 if (Rc(ctx->opcode)) { 6215 TCGLabel *l1 = gen_new_label(); 6216 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 6217 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1); 6218 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02); 6219 gen_set_label(l1); 6220 } 6221 #endif /* defined(CONFIG_USER_ONLY) */ 6222 } 6223 6224 /* tlbwe */ 6225 static void gen_tlbwe_40x(DisasContext *ctx) 6226 { 6227 #if defined(CONFIG_USER_ONLY) 6228 GEN_PRIV; 6229 #else 6230 CHK_SV; 6231 6232 switch (rB(ctx->opcode)) { 6233 case 0: 6234 gen_helper_4xx_tlbwe_hi(cpu_env, cpu_gpr[rA(ctx->opcode)], 6235 cpu_gpr[rS(ctx->opcode)]); 6236 break; 6237 case 1: 6238 gen_helper_4xx_tlbwe_lo(cpu_env, cpu_gpr[rA(ctx->opcode)], 6239 cpu_gpr[rS(ctx->opcode)]); 6240 break; 6241 default: 6242 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6243 break; 6244 } 6245 #endif /* defined(CONFIG_USER_ONLY) */ 6246 } 6247 6248 /* TLB management - PowerPC 440 implementation */ 6249 6250 /* tlbre */ 6251 static void gen_tlbre_440(DisasContext *ctx) 6252 { 6253 #if defined(CONFIG_USER_ONLY) 6254 GEN_PRIV; 6255 #else 6256 CHK_SV; 6257 6258 switch (rB(ctx->opcode)) { 6259 case 0: 6260 case 1: 6261 case 2: 6262 { 6263 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode)); 6264 gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], cpu_env, 6265 t0, cpu_gpr[rA(ctx->opcode)]); 6266 tcg_temp_free_i32(t0); 6267 } 6268 break; 6269 default: 6270 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6271 break; 6272 } 6273 #endif /* defined(CONFIG_USER_ONLY) */ 6274 } 6275 6276 /* tlbsx - tlbsx. 
*/ 6277 static void gen_tlbsx_440(DisasContext *ctx) 6278 { 6279 #if defined(CONFIG_USER_ONLY) 6280 GEN_PRIV; 6281 #else 6282 TCGv t0; 6283 6284 CHK_SV; 6285 t0 = tcg_temp_new(); 6286 gen_addr_reg_index(ctx, t0); 6287 gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 6288 tcg_temp_free(t0); 6289 if (Rc(ctx->opcode)) { 6290 TCGLabel *l1 = gen_new_label(); 6291 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 6292 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1); 6293 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02); 6294 gen_set_label(l1); 6295 } 6296 #endif /* defined(CONFIG_USER_ONLY) */ 6297 } 6298 6299 /* tlbwe */ 6300 static void gen_tlbwe_440(DisasContext *ctx) 6301 { 6302 #if defined(CONFIG_USER_ONLY) 6303 GEN_PRIV; 6304 #else 6305 CHK_SV; 6306 switch (rB(ctx->opcode)) { 6307 case 0: 6308 case 1: 6309 case 2: 6310 { 6311 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode)); 6312 gen_helper_440_tlbwe(cpu_env, t0, cpu_gpr[rA(ctx->opcode)], 6313 cpu_gpr[rS(ctx->opcode)]); 6314 tcg_temp_free_i32(t0); 6315 } 6316 break; 6317 default: 6318 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6319 break; 6320 } 6321 #endif /* defined(CONFIG_USER_ONLY) */ 6322 } 6323 6324 /* TLB management - PowerPC BookE 2.06 implementation */ 6325 6326 /* tlbre */ 6327 static void gen_tlbre_booke206(DisasContext *ctx) 6328 { 6329 #if defined(CONFIG_USER_ONLY) 6330 GEN_PRIV; 6331 #else 6332 CHK_SV; 6333 gen_helper_booke206_tlbre(cpu_env); 6334 #endif /* defined(CONFIG_USER_ONLY) */ 6335 } 6336 6337 /* tlbsx - tlbsx. */ 6338 static void gen_tlbsx_booke206(DisasContext *ctx) 6339 { 6340 #if defined(CONFIG_USER_ONLY) 6341 GEN_PRIV; 6342 #else 6343 TCGv t0; 6344 6345 CHK_SV; 6346 if (rA(ctx->opcode)) { 6347 t0 = tcg_temp_new(); 6348 tcg_gen_mov_tl(t0, cpu_gpr[rD(ctx->opcode)]); 6349 } else { 6350 t0 = tcg_const_tl(0); 6351 } 6352 6353 tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]); 6354 gen_helper_booke206_tlbsx(cpu_env, t0); 6355 tcg_temp_free(t0); 6356 #endif /* defined(CONFIG_USER_ONLY) */ 6357 } 6358 6359 /* tlbwe */ 6360 static void gen_tlbwe_booke206(DisasContext *ctx) 6361 { 6362 #if defined(CONFIG_USER_ONLY) 6363 GEN_PRIV; 6364 #else 6365 CHK_SV; 6366 gen_helper_booke206_tlbwe(cpu_env); 6367 #endif /* defined(CONFIG_USER_ONLY) */ 6368 } 6369 6370 static void gen_tlbivax_booke206(DisasContext *ctx) 6371 { 6372 #if defined(CONFIG_USER_ONLY) 6373 GEN_PRIV; 6374 #else 6375 TCGv t0; 6376 6377 CHK_SV; 6378 t0 = tcg_temp_new(); 6379 gen_addr_reg_index(ctx, t0); 6380 gen_helper_booke206_tlbivax(cpu_env, t0); 6381 tcg_temp_free(t0); 6382 #endif /* defined(CONFIG_USER_ONLY) */ 6383 } 6384 6385 static void gen_tlbilx_booke206(DisasContext *ctx) 6386 { 6387 #if defined(CONFIG_USER_ONLY) 6388 GEN_PRIV; 6389 #else 6390 TCGv t0; 6391 6392 CHK_SV; 6393 t0 = tcg_temp_new(); 6394 gen_addr_reg_index(ctx, t0); 6395 6396 switch((ctx->opcode >> 21) & 0x3) { 6397 case 0: 6398 gen_helper_booke206_tlbilx0(cpu_env, t0); 6399 break; 6400 case 1: 6401 gen_helper_booke206_tlbilx1(cpu_env, t0); 6402 break; 6403 case 3: 6404 gen_helper_booke206_tlbilx3(cpu_env, t0); 6405 break; 6406 default: 6407 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6408 break; 6409 } 6410 6411 tcg_temp_free(t0); 6412 #endif /* defined(CONFIG_USER_ONLY) */ 6413 } 6414 6415 6416 /* wrtee */ 6417 static void gen_wrtee(DisasContext *ctx) 6418 { 6419 #if defined(CONFIG_USER_ONLY) 6420 GEN_PRIV; 6421 #else 6422 TCGv t0; 6423 6424 CHK_SV; 6425 t0 = tcg_temp_new(); 6426 tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE)); 6427 
tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE)); 6428 tcg_gen_or_tl(cpu_msr, cpu_msr, t0); 6429 tcg_temp_free(t0); 6430 /* Stop translation to have a chance to raise an exception 6431 * if we just set msr_ee to 1 6432 */ 6433 gen_stop_exception(ctx); 6434 #endif /* defined(CONFIG_USER_ONLY) */ 6435 } 6436 6437 /* wrteei */ 6438 static void gen_wrteei(DisasContext *ctx) 6439 { 6440 #if defined(CONFIG_USER_ONLY) 6441 GEN_PRIV; 6442 #else 6443 CHK_SV; 6444 if (ctx->opcode & 0x00008000) { 6445 tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE)); 6446 /* Stop translation to have a chance to raise an exception */ 6447 gen_stop_exception(ctx); 6448 } else { 6449 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE)); 6450 } 6451 #endif /* defined(CONFIG_USER_ONLY) */ 6452 } 6453 6454 /* PowerPC 440 specific instructions */ 6455 6456 /* dlmzb */ 6457 static void gen_dlmzb(DisasContext *ctx) 6458 { 6459 TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode)); 6460 gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_env, 6461 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); 6462 tcg_temp_free_i32(t0); 6463 } 6464 6465 /* mbar replaces eieio on 440 */ 6466 static void gen_mbar(DisasContext *ctx) 6467 { 6468 /* interpreted as no-op */ 6469 } 6470 6471 /* msync replaces sync on 440 */ 6472 static void gen_msync_4xx(DisasContext *ctx) 6473 { 6474 /* Only e500 seems to treat reserved bits as invalid */ 6475 if ((ctx->insns_flags2 & PPC2_BOOKE206) && 6476 (ctx->opcode & 0x03FFF801)) { 6477 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6478 } 6479 /* otherwise interpreted as no-op */ 6480 } 6481 6482 /* icbt */ 6483 static void gen_icbt_440(DisasContext *ctx) 6484 { 6485 /* interpreted as no-op */ 6486 /* XXX: specification say this is treated as a load by the MMU 6487 * but does not generate any exception 6488 */ 6489 } 6490 6491 /* Embedded.Processor Control */ 6492 6493 static void gen_msgclr(DisasContext *ctx) 6494 { 6495 #if defined(CONFIG_USER_ONLY) 6496 GEN_PRIV; 6497 #else 6498 CHK_HV; 6499 /* 64-bit server processors compliant with arch 2.x */ 6500 if (ctx->insns_flags & PPC_SEGMENT_64B) { 6501 gen_helper_book3s_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]); 6502 } else { 6503 gen_helper_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]); 6504 } 6505 #endif /* defined(CONFIG_USER_ONLY) */ 6506 } 6507 6508 static void gen_msgsnd(DisasContext *ctx) 6509 { 6510 #if defined(CONFIG_USER_ONLY) 6511 GEN_PRIV; 6512 #else 6513 CHK_HV; 6514 /* 64-bit server processors compliant with arch 2.x */ 6515 if (ctx->insns_flags & PPC_SEGMENT_64B) { 6516 gen_helper_book3s_msgsnd(cpu_gpr[rB(ctx->opcode)]); 6517 } else { 6518 gen_helper_msgsnd(cpu_gpr[rB(ctx->opcode)]); 6519 } 6520 #endif /* defined(CONFIG_USER_ONLY) */ 6521 } 6522 6523 static void gen_msgsync(DisasContext *ctx) 6524 { 6525 #if defined(CONFIG_USER_ONLY) 6526 GEN_PRIV; 6527 #else 6528 CHK_HV; 6529 #endif /* defined(CONFIG_USER_ONLY) */ 6530 /* interpreted as no-op */ 6531 } 6532 6533 #if defined(TARGET_PPC64) 6534 static void gen_maddld(DisasContext *ctx) 6535 { 6536 TCGv_i64 t1 = tcg_temp_new_i64(); 6537 6538 tcg_gen_mul_i64(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 6539 tcg_gen_add_i64(cpu_gpr[rD(ctx->opcode)], t1, cpu_gpr[rC(ctx->opcode)]); 6540 tcg_temp_free_i64(t1); 6541 } 6542 6543 /* maddhd maddhdu */ 6544 static void gen_maddhd_maddhdu(DisasContext *ctx) 6545 { 6546 TCGv_i64 lo = tcg_temp_new_i64(); 6547 TCGv_i64 hi = tcg_temp_new_i64(); 6548 TCGv_i64 t1 = tcg_temp_new_i64(); 6549 6550 if (Rc(ctx->opcode)) { 6551 tcg_gen_mulu2_i64(lo, hi, 
cpu_gpr[rA(ctx->opcode)], 6552 cpu_gpr[rB(ctx->opcode)]); 6553 tcg_gen_movi_i64(t1, 0); 6554 } else { 6555 tcg_gen_muls2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)], 6556 cpu_gpr[rB(ctx->opcode)]); 6557 tcg_gen_sari_i64(t1, cpu_gpr[rC(ctx->opcode)], 63); 6558 } 6559 tcg_gen_add2_i64(t1, cpu_gpr[rD(ctx->opcode)], lo, hi, 6560 cpu_gpr[rC(ctx->opcode)], t1); 6561 tcg_temp_free_i64(lo); 6562 tcg_temp_free_i64(hi); 6563 tcg_temp_free_i64(t1); 6564 } 6565 #endif /* defined(TARGET_PPC64) */ 6566 6567 static void gen_tbegin(DisasContext *ctx) 6568 { 6569 if (unlikely(!ctx->tm_enabled)) { 6570 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); 6571 return; 6572 } 6573 gen_helper_tbegin(cpu_env); 6574 } 6575 6576 #define GEN_TM_NOOP(name) \ 6577 static inline void gen_##name(DisasContext *ctx) \ 6578 { \ 6579 if (unlikely(!ctx->tm_enabled)) { \ 6580 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \ 6581 return; \ 6582 } \ 6583 /* Because tbegin always fails in QEMU, these user \ 6584 * space instructions all have a simple implementation: \ 6585 * \ 6586 * CR[0] = 0b0 || MSR[TS] || 0b0 \ 6587 * = 0b0 || 0b00 || 0b0 \ 6588 */ \ 6589 tcg_gen_movi_i32(cpu_crf[0], 0); \ 6590 } 6591 6592 GEN_TM_NOOP(tend); 6593 GEN_TM_NOOP(tabort); 6594 GEN_TM_NOOP(tabortwc); 6595 GEN_TM_NOOP(tabortwci); 6596 GEN_TM_NOOP(tabortdc); 6597 GEN_TM_NOOP(tabortdci); 6598 GEN_TM_NOOP(tsr); 6599 static inline void gen_cp_abort(DisasContext *ctx) 6600 { 6601 // Do Nothing 6602 } 6603 6604 #define GEN_CP_PASTE_NOOP(name) \ 6605 static inline void gen_##name(DisasContext *ctx) \ 6606 { \ 6607 /* Generate invalid exception until \ 6608 * we have an implementation of the copy \ 6609 * paste facility \ 6610 */ \ 6611 gen_invalid(ctx); \ 6612 } 6613 6614 GEN_CP_PASTE_NOOP(copy) 6615 GEN_CP_PASTE_NOOP(paste) 6616 6617 static void gen_tcheck(DisasContext *ctx) 6618 { 6619 if (unlikely(!ctx->tm_enabled)) { 6620 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); 6621 return; 6622 } 6623 /* Because tbegin always fails, the tcheck implementation 6624 * is simple: 6625 * 6626 * CR[CRF] = TDOOMED || MSR[TS] || 0b0 6627 * = 0b1 || 0b00 || 0b0 6628 */ 6629 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0x8); 6630 } 6631 6632 #if defined(CONFIG_USER_ONLY) 6633 #define GEN_TM_PRIV_NOOP(name) \ 6634 static inline void gen_##name(DisasContext *ctx) \ 6635 { \ 6636 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); \ 6637 } 6638 6639 #else 6640 6641 #define GEN_TM_PRIV_NOOP(name) \ 6642 static inline void gen_##name(DisasContext *ctx) \ 6643 { \ 6644 CHK_SV; \ 6645 if (unlikely(!ctx->tm_enabled)) { \ 6646 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \ 6647 return; \ 6648 } \ 6649 /* Because tbegin always fails, the implementation is \ 6650 * simple: \ 6651 * \ 6652 * CR[0] = 0b0 || MSR[TS] || 0b0 \ 6653 * = 0b0 || 0b00 | 0b0 \ 6654 */ \ 6655 tcg_gen_movi_i32(cpu_crf[0], 0); \ 6656 } 6657 6658 #endif 6659 6660 GEN_TM_PRIV_NOOP(treclaim); 6661 GEN_TM_PRIV_NOOP(trechkpt); 6662 6663 static inline void get_fpr(TCGv_i64 dst, int regno) 6664 { 6665 tcg_gen_ld_i64(dst, cpu_env, offsetof(CPUPPCState, vsr[regno].u64[0])); 6666 } 6667 6668 static inline void set_fpr(int regno, TCGv_i64 src) 6669 { 6670 tcg_gen_st_i64(src, cpu_env, offsetof(CPUPPCState, vsr[regno].u64[0])); 6671 } 6672 6673 static inline void get_avr64(TCGv_i64 dst, int regno, bool high) 6674 { 6675 #ifdef HOST_WORDS_BIGENDIAN 6676 tcg_gen_ld_i64(dst, cpu_env, offsetof(CPUPPCState, 6677 vsr[32 + regno].u64[(high ? 
0 : 1)])); 6678 #else 6679 tcg_gen_ld_i64(dst, cpu_env, offsetof(CPUPPCState, 6680 vsr[32 + regno].u64[(high ? 1 : 0)])); 6681 #endif 6682 } 6683 6684 static inline void set_avr64(int regno, TCGv_i64 src, bool high) 6685 { 6686 #ifdef HOST_WORDS_BIGENDIAN 6687 tcg_gen_st_i64(src, cpu_env, offsetof(CPUPPCState, 6688 vsr[32 + regno].u64[(high ? 0 : 1)])); 6689 #else 6690 tcg_gen_st_i64(src, cpu_env, offsetof(CPUPPCState, 6691 vsr[32 + regno].u64[(high ? 1 : 0)])); 6692 #endif 6693 } 6694 6695 #include "translate/fp-impl.inc.c" 6696 6697 #include "translate/vmx-impl.inc.c" 6698 6699 #include "translate/vsx-impl.inc.c" 6700 6701 #include "translate/dfp-impl.inc.c" 6702 6703 #include "translate/spe-impl.inc.c" 6704 6705 /* Handles lfdp, lxsd, lxssp */ 6706 static void gen_dform39(DisasContext *ctx) 6707 { 6708 switch (ctx->opcode & 0x3) { 6709 case 0: /* lfdp */ 6710 if (ctx->insns_flags2 & PPC2_ISA205) { 6711 return gen_lfdp(ctx); 6712 } 6713 break; 6714 case 2: /* lxsd */ 6715 if (ctx->insns_flags2 & PPC2_ISA300) { 6716 return gen_lxsd(ctx); 6717 } 6718 break; 6719 case 3: /* lxssp */ 6720 if (ctx->insns_flags2 & PPC2_ISA300) { 6721 return gen_lxssp(ctx); 6722 } 6723 break; 6724 } 6725 return gen_invalid(ctx); 6726 } 6727 6728 /* handles stfdp, lxv, stxsd, stxssp lxvx */ 6729 static void gen_dform3D(DisasContext *ctx) 6730 { 6731 if ((ctx->opcode & 3) == 1) { /* DQ-FORM */ 6732 switch (ctx->opcode & 0x7) { 6733 case 1: /* lxv */ 6734 if (ctx->insns_flags2 & PPC2_ISA300) { 6735 return gen_lxv(ctx); 6736 } 6737 break; 6738 case 5: /* stxv */ 6739 if (ctx->insns_flags2 & PPC2_ISA300) { 6740 return gen_stxv(ctx); 6741 } 6742 break; 6743 } 6744 } else { /* DS-FORM */ 6745 switch (ctx->opcode & 0x3) { 6746 case 0: /* stfdp */ 6747 if (ctx->insns_flags2 & PPC2_ISA205) { 6748 return gen_stfdp(ctx); 6749 } 6750 break; 6751 case 2: /* stxsd */ 6752 if (ctx->insns_flags2 & PPC2_ISA300) { 6753 return gen_stxsd(ctx); 6754 } 6755 break; 6756 case 3: /* stxssp */ 6757 if (ctx->insns_flags2 & PPC2_ISA300) { 6758 return gen_stxssp(ctx); 6759 } 6760 break; 6761 } 6762 } 6763 return gen_invalid(ctx); 6764 } 6765 6766 static opcode_t opcodes[] = { 6767 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE), 6768 GEN_HANDLER(cmp, 0x1F, 0x00, 0x00, 0x00400000, PPC_INTEGER), 6769 GEN_HANDLER(cmpi, 0x0B, 0xFF, 0xFF, 0x00400000, PPC_INTEGER), 6770 GEN_HANDLER(cmpl, 0x1F, 0x00, 0x01, 0x00400001, PPC_INTEGER), 6771 GEN_HANDLER(cmpli, 0x0A, 0xFF, 0xFF, 0x00400000, PPC_INTEGER), 6772 #if defined(TARGET_PPC64) 6773 GEN_HANDLER_E(cmpeqb, 0x1F, 0x00, 0x07, 0x00600000, PPC_NONE, PPC2_ISA300), 6774 #endif 6775 GEN_HANDLER_E(cmpb, 0x1F, 0x1C, 0x0F, 0x00000001, PPC_NONE, PPC2_ISA205), 6776 GEN_HANDLER_E(cmprb, 0x1F, 0x00, 0x06, 0x00400001, PPC_NONE, PPC2_ISA300), 6777 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL), 6778 GEN_HANDLER(addi, 0x0E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6779 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6780 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6781 GEN_HANDLER(addis, 0x0F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6782 GEN_HANDLER_E(addpcis, 0x13, 0x2, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300), 6783 GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER), 6784 GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER), 6785 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER), 6786 GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER), 6787 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 
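/*
 * Each entry below binds a gen_<name>() routine to an opcode:
 * GEN_HANDLER(name, opc1, opc2, opc3, inval, type) decodes on the primary
 * opcode opc1 and the extended opcode fields opc2/opc3; "inval" is a mask of
 * opcode bits that must be zero for the encoding to be accepted (see the
 * inval check in ppc_tr_translate_insn below), and the trailing type flag(s)
 * restrict the entry to CPU models that advertise the matching
 * instruction-set features.
 */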
6788 #if defined(TARGET_PPC64) 6789 GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B), 6790 #endif 6791 GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER), 6792 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER), 6793 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6794 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6795 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6796 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER), 6797 GEN_HANDLER_E(cnttzw, 0x1F, 0x1A, 0x10, 0x00000000, PPC_NONE, PPC2_ISA300), 6798 GEN_HANDLER_E(copy, 0x1F, 0x06, 0x18, 0x03C00001, PPC_NONE, PPC2_ISA300), 6799 GEN_HANDLER_E(cp_abort, 0x1F, 0x06, 0x1A, 0x03FFF801, PPC_NONE, PPC2_ISA300), 6800 GEN_HANDLER_E(paste, 0x1F, 0x06, 0x1C, 0x03C00000, PPC_NONE, PPC2_ISA300), 6801 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER), 6802 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER), 6803 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6804 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6805 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6806 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6807 GEN_HANDLER(popcntb, 0x1F, 0x1A, 0x03, 0x0000F801, PPC_POPCNTB), 6808 GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD), 6809 GEN_HANDLER_E(prtyw, 0x1F, 0x1A, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA205), 6810 #if defined(TARGET_PPC64) 6811 GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD), 6812 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B), 6813 GEN_HANDLER_E(cnttzd, 0x1F, 0x1A, 0x11, 0x00000000, PPC_NONE, PPC2_ISA300), 6814 GEN_HANDLER_E(darn, 0x1F, 0x13, 0x17, 0x001CF801, PPC_NONE, PPC2_ISA300), 6815 GEN_HANDLER_E(prtyd, 0x1F, 0x1A, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA205), 6816 GEN_HANDLER_E(bpermd, 0x1F, 0x1C, 0x07, 0x00000001, PPC_NONE, PPC2_PERM_ISA206), 6817 #endif 6818 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6819 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6820 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6821 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER), 6822 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER), 6823 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER), 6824 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER), 6825 #if defined(TARGET_PPC64) 6826 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B), 6827 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B), 6828 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B), 6829 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B), 6830 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B), 6831 GEN_HANDLER2_E(extswsli0, "extswsli", 0x1F, 0x1A, 0x1B, 0x00000000, 6832 PPC_NONE, PPC2_ISA300), 6833 GEN_HANDLER2_E(extswsli1, "extswsli", 0x1F, 0x1B, 0x1B, 0x00000000, 6834 PPC_NONE, PPC2_ISA300), 6835 #endif 6836 #if defined(TARGET_PPC64) 6837 GEN_HANDLER(ld, 0x3A, 0xFF, 0xFF, 0x00000000, PPC_64B), 6838 GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX), 6839 GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B), 6840 #endif 6841 /* handles lfdp, lxsd, lxssp */ 6842 GEN_HANDLER_E(dform39, 0x39, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205), 6843 /* handles stfdp, lxv, stxsd, stxssp, stxv */ 6844 GEN_HANDLER_E(dform3D, 0x3D, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205), 6845 GEN_HANDLER(lmw, 0x2E, 0xFF, 
0xFF, 0x00000000, PPC_INTEGER), 6846 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6847 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING), 6848 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING), 6849 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING), 6850 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING), 6851 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x01FFF801, PPC_MEM_EIEIO), 6852 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM), 6853 GEN_HANDLER_E(lbarx, 0x1F, 0x14, 0x01, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6854 GEN_HANDLER_E(lharx, 0x1F, 0x14, 0x03, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6855 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES), 6856 GEN_HANDLER_E(lwat, 0x1F, 0x06, 0x12, 0x00000001, PPC_NONE, PPC2_ISA300), 6857 GEN_HANDLER_E(stwat, 0x1F, 0x06, 0x16, 0x00000001, PPC_NONE, PPC2_ISA300), 6858 GEN_HANDLER_E(stbcx_, 0x1F, 0x16, 0x15, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6859 GEN_HANDLER_E(sthcx_, 0x1F, 0x16, 0x16, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6860 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES), 6861 #if defined(TARGET_PPC64) 6862 GEN_HANDLER_E(ldat, 0x1F, 0x06, 0x13, 0x00000001, PPC_NONE, PPC2_ISA300), 6863 GEN_HANDLER_E(stdat, 0x1F, 0x06, 0x17, 0x00000001, PPC_NONE, PPC2_ISA300), 6864 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B), 6865 GEN_HANDLER_E(lqarx, 0x1F, 0x14, 0x08, 0, PPC_NONE, PPC2_LSQ_ISA207), 6866 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B), 6867 GEN_HANDLER_E(stqcx_, 0x1F, 0x16, 0x05, 0, PPC_NONE, PPC2_LSQ_ISA207), 6868 #endif 6869 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC), 6870 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT), 6871 GEN_HANDLER_E(wait, 0x1F, 0x1E, 0x00, 0x039FF801, PPC_NONE, PPC2_ISA300), 6872 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 6873 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 6874 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW), 6875 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW), 6876 GEN_HANDLER_E(bctar, 0x13, 0x10, 0x11, 0x0000E000, PPC_NONE, PPC2_BCTAR_ISA207), 6877 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER), 6878 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW), 6879 #if defined(TARGET_PPC64) 6880 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B), 6881 GEN_HANDLER_E(stop, 0x13, 0x12, 0x0b, 0x03FFF801, PPC_NONE, PPC2_ISA300), 6882 GEN_HANDLER_E(doze, 0x13, 0x12, 0x0c, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6883 GEN_HANDLER_E(nap, 0x13, 0x12, 0x0d, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6884 GEN_HANDLER_E(sleep, 0x13, 0x12, 0x0e, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6885 GEN_HANDLER_E(rvwinkle, 0x13, 0x12, 0x0f, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6886 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H), 6887 #endif 6888 GEN_HANDLER(sc, 0x11, 0xFF, 0xFF, 0x03FFF01D, PPC_FLOW), 6889 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW), 6890 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 6891 #if defined(TARGET_PPC64) 6892 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B), 6893 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B), 6894 #endif 6895 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC), 6896 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC), 6897 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC), 6898 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC), 6899 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB), 6900 
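/* Moves to CR/MSR/SPR, cache management, segment register and TLB entries */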
GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC), 6901 #if defined(TARGET_PPC64) 6902 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B), 6903 GEN_HANDLER_E(setb, 0x1F, 0x00, 0x04, 0x0003F801, PPC_NONE, PPC2_ISA300), 6904 GEN_HANDLER_E(mcrxrx, 0x1F, 0x00, 0x12, 0x007FF801, PPC_NONE, PPC2_ISA300), 6905 #endif 6906 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001EF801, PPC_MISC), 6907 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000000, PPC_MISC), 6908 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE), 6909 GEN_HANDLER_E(dcbfep, 0x1F, 0x1F, 0x03, 0x03C00001, PPC_NONE, PPC2_BOOKE206), 6910 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE), 6911 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE), 6912 GEN_HANDLER_E(dcbstep, 0x1F, 0x1F, 0x01, 0x03E00001, PPC_NONE, PPC2_BOOKE206), 6913 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x00000001, PPC_CACHE), 6914 GEN_HANDLER_E(dcbtep, 0x1F, 0x1F, 0x09, 0x00000001, PPC_NONE, PPC2_BOOKE206), 6915 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x00000001, PPC_CACHE), 6916 GEN_HANDLER_E(dcbtstep, 0x1F, 0x1F, 0x07, 0x00000001, PPC_NONE, PPC2_BOOKE206), 6917 GEN_HANDLER_E(dcbtls, 0x1F, 0x06, 0x05, 0x02000001, PPC_BOOKE, PPC2_BOOKE206), 6918 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ), 6919 GEN_HANDLER_E(dcbzep, 0x1F, 0x1F, 0x1F, 0x03C00001, PPC_NONE, PPC2_BOOKE206), 6920 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC), 6921 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x01800001, PPC_ALTIVEC), 6922 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC), 6923 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI), 6924 GEN_HANDLER_E(icbiep, 0x1F, 0x1F, 0x1E, 0x03E00001, PPC_NONE, PPC2_BOOKE206), 6925 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA), 6926 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT), 6927 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT), 6928 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT), 6929 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT), 6930 #if defined(TARGET_PPC64) 6931 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B), 6932 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001, 6933 PPC_SEGMENT_64B), 6934 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B), 6935 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001, 6936 PPC_SEGMENT_64B), 6937 GEN_HANDLER2(slbmte, "slbmte", 0x1F, 0x12, 0x0C, 0x001F0001, PPC_SEGMENT_64B), 6938 GEN_HANDLER2(slbmfee, "slbmfee", 0x1F, 0x13, 0x1C, 0x001F0001, PPC_SEGMENT_64B), 6939 GEN_HANDLER2(slbmfev, "slbmfev", 0x1F, 0x13, 0x1A, 0x001F0001, PPC_SEGMENT_64B), 6940 GEN_HANDLER2(slbfee_, "slbfee.", 0x1F, 0x13, 0x1E, 0x001F0000, PPC_SEGMENT_64B), 6941 #endif 6942 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA), 6943 /* XXX Those instructions will need to be handled differently for 6944 * different ISA versions */ 6945 GEN_HANDLER(tlbiel, 0x1F, 0x12, 0x08, 0x001F0001, PPC_MEM_TLBIE), 6946 GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x001F0001, PPC_MEM_TLBIE), 6947 GEN_HANDLER_E(tlbiel, 0x1F, 0x12, 0x08, 0x00100001, PPC_NONE, PPC2_ISA300), 6948 GEN_HANDLER_E(tlbie, 0x1F, 0x12, 0x09, 0x00100001, PPC_NONE, PPC2_ISA300), 6949 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC), 6950 #if defined(TARGET_PPC64) 6951 GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x031FFC01, PPC_SLBI), 6952 GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI), 6953 GEN_HANDLER_E(slbieg, 0x1F, 0x12, 0x0E, 
0x001F0001, PPC_NONE, PPC2_ISA300), 6954 GEN_HANDLER_E(slbsync, 0x1F, 0x12, 0x0A, 0x03FFF801, PPC_NONE, PPC2_ISA300), 6955 #endif 6956 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN), 6957 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN), 6958 GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR), 6959 GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR), 6960 GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR), 6961 GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR), 6962 GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR), 6963 GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR), 6964 GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR), 6965 GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR), 6966 GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR), 6967 GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR), 6968 GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR), 6969 GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR), 6970 GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR), 6971 GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR), 6972 GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR), 6973 GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR), 6974 GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR), 6975 GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR), 6976 GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR), 6977 GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR), 6978 GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR), 6979 GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR), 6980 GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR), 6981 GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR), 6982 GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR), 6983 GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR), 6984 GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR), 6985 GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR), 6986 GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR), 6987 GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR), 6988 GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR), 6989 GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR), 6990 GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR), 6991 GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR), 6992 GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC), 6993 GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC), 6994 GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC), 6995 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB), 6996 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB), 6997 GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB), 6998 GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB), 6999 GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER), 7000 GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER), 7001 GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER), 7002 GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER), 7003 GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER), 7004 GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER), 7005 GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, 
PPC_POWER2), 7006 GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 7007 GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2), 7008 GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2), 7009 GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 7010 GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 7011 GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2), 7012 GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2), 7013 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI), 7014 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA), 7015 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR), 7016 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR), 7017 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX), 7018 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX), 7019 GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX), 7020 GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX), 7021 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON), 7022 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON), 7023 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT), 7024 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON), 7025 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON), 7026 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP), 7027 GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206), 7028 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI), 7029 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI), 7030 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB), 7031 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB), 7032 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB), 7033 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE), 7034 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE), 7035 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE), 7036 GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, 7037 PPC_NONE, PPC2_BOOKE206), 7038 GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, 7039 PPC_NONE, PPC2_BOOKE206), 7040 GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, 7041 PPC_NONE, PPC2_BOOKE206), 7042 GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001, 7043 PPC_NONE, PPC2_BOOKE206), 7044 GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001, 7045 PPC_NONE, PPC2_BOOKE206), 7046 GEN_HANDLER2_E(msgsnd, "msgsnd", 0x1F, 0x0E, 0x06, 0x03ff0001, 7047 PPC_NONE, PPC2_PRCNTL), 7048 GEN_HANDLER2_E(msgclr, "msgclr", 0x1F, 0x0E, 0x07, 0x03ff0001, 7049 PPC_NONE, PPC2_PRCNTL), 7050 GEN_HANDLER2_E(msgsync, "msgsync", 0x1F, 0x16, 0x1B, 0x00000000, 7051 PPC_NONE, PPC2_PRCNTL), 7052 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE), 7053 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE), 7054 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC), 7055 GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801, 7056 PPC_BOOKE, PPC2_BOOKE206), 7057 GEN_HANDLER(msync_4xx, 0x1F, 0x16, 0x12, 0x039FF801, PPC_BOOKE), 7058 GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001, 7059 PPC_BOOKE, PPC2_BOOKE206), 7060 GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, 7061 PPC_440_SPEC), 7062 
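/*
 * AltiVec lvsl/lvsr, VSCR access and the 64-bit multiply-add entries come
 * next.  After them, the GEN_* convenience macros used earlier in this file
 * to generate the handler functions are redefined to emit GEN_HANDLER table
 * entries instead, so the arithmetic, logical, rotate, load/store,
 * CR-logical and 405 MAC handlers are registered without listing every
 * opcode again by hand.
 */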
GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC), 7063 GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC), 7064 GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC), 7065 GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC), 7066 GEN_HANDLER(vmladduhm, 0x04, 0x11, 0xFF, 0x00000000, PPC_ALTIVEC), 7067 #if defined(TARGET_PPC64) 7068 GEN_HANDLER_E(maddhd_maddhdu, 0x04, 0x18, 0xFF, 0x00000000, PPC_NONE, 7069 PPC2_ISA300), 7070 GEN_HANDLER_E(maddld, 0x04, 0x19, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300), 7071 #endif 7072 7073 #undef GEN_INT_ARITH_ADD 7074 #undef GEN_INT_ARITH_ADD_CONST 7075 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \ 7076 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER), 7077 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \ 7078 add_ca, compute_ca, compute_ov) \ 7079 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER), 7080 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0) 7081 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1) 7082 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0) 7083 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1) 7084 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0) 7085 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1) 7086 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0) 7087 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1) 7088 GEN_HANDLER_E(addex, 0x1F, 0x0A, 0x05, 0x00000000, PPC_NONE, PPC2_ISA300), 7089 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0) 7090 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1) 7091 7092 #undef GEN_INT_ARITH_DIVW 7093 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \ 7094 GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER) 7095 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0), 7096 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1), 7097 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0), 7098 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1), 7099 GEN_HANDLER_E(divwe, 0x1F, 0x0B, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206), 7100 GEN_HANDLER_E(divweo, 0x1F, 0x0B, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206), 7101 GEN_HANDLER_E(divweu, 0x1F, 0x0B, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206), 7102 GEN_HANDLER_E(divweuo, 0x1F, 0x0B, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206), 7103 GEN_HANDLER_E(modsw, 0x1F, 0x0B, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300), 7104 GEN_HANDLER_E(moduw, 0x1F, 0x0B, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300), 7105 7106 #if defined(TARGET_PPC64) 7107 #undef GEN_INT_ARITH_DIVD 7108 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \ 7109 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B) 7110 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0), 7111 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1), 7112 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0), 7113 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1), 7114 7115 GEN_HANDLER_E(divdeu, 0x1F, 0x09, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206), 7116 GEN_HANDLER_E(divdeuo, 0x1F, 0x09, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206), 7117 GEN_HANDLER_E(divde, 0x1F, 0x09, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206), 7118 GEN_HANDLER_E(divdeo, 0x1F, 0x09, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206), 7119 GEN_HANDLER_E(modsd, 0x1F, 0x09, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300), 7120 GEN_HANDLER_E(modud, 0x1F, 0x09, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300), 7121 7122 #undef GEN_INT_ARITH_MUL_HELPER 7123 #define GEN_INT_ARITH_MUL_HELPER(name, opc3) \ 7124 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B) 7125 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00), 7126 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02), 7127 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17), 7128 #endif 7129 7130 #undef GEN_INT_ARITH_SUBF 7131 #undef GEN_INT_ARITH_SUBF_CONST 7132 #define 
GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \ 7133 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER), 7134 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \ 7135 add_ca, compute_ca, compute_ov) \ 7136 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER), 7137 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0) 7138 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1) 7139 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0) 7140 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1) 7141 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0) 7142 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1) 7143 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0) 7144 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1) 7145 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0) 7146 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1) 7147 7148 #undef GEN_LOGICAL1 7149 #undef GEN_LOGICAL2 7150 #define GEN_LOGICAL2(name, tcg_op, opc, type) \ 7151 GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type) 7152 #define GEN_LOGICAL1(name, tcg_op, opc, type) \ 7153 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type) 7154 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER), 7155 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER), 7156 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER), 7157 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER), 7158 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER), 7159 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER), 7160 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER), 7161 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER), 7162 #if defined(TARGET_PPC64) 7163 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B), 7164 #endif 7165 7166 #if defined(TARGET_PPC64) 7167 #undef GEN_PPC64_R2 7168 #undef GEN_PPC64_R4 7169 #define GEN_PPC64_R2(name, opc1, opc2) \ 7170 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\ 7171 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \ 7172 PPC_64B) 7173 #define GEN_PPC64_R4(name, opc1, opc2) \ 7174 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\ 7175 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \ 7176 PPC_64B), \ 7177 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \ 7178 PPC_64B), \ 7179 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \ 7180 PPC_64B) 7181 GEN_PPC64_R4(rldicl, 0x1E, 0x00), 7182 GEN_PPC64_R4(rldicr, 0x1E, 0x02), 7183 GEN_PPC64_R4(rldic, 0x1E, 0x04), 7184 GEN_PPC64_R2(rldcl, 0x1E, 0x08), 7185 GEN_PPC64_R2(rldcr, 0x1E, 0x09), 7186 GEN_PPC64_R4(rldimi, 0x1E, 0x06), 7187 #endif 7188 7189 #undef GEN_LD 7190 #undef GEN_LDU 7191 #undef GEN_LDUX 7192 #undef GEN_LDX_E 7193 #undef GEN_LDS 7194 #define GEN_LD(name, ldop, opc, type) \ 7195 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type), 7196 #define GEN_LDU(name, ldop, opc, type) \ 7197 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type), 7198 #define GEN_LDUX(name, ldop, opc2, opc3, type) \ 7199 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type), 7200 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \ 7201 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2), 7202 #define GEN_LDS(name, ldop, op, type) \ 7203 GEN_LD(name, ldop, op | 0x20, type) \ 7204 GEN_LDU(name, ldop, op | 0x21, type) \ 7205 GEN_LDUX(name, ldop, 0x17, op | 0x01, type) \ 7206 GEN_LDX(name, ldop, 0x17, op | 0x00, type) 7207 7208 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER) 7209 GEN_LDS(lha, ld16s, 0x0A, 
PPC_INTEGER) 7210 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER) 7211 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER) 7212 #if defined(TARGET_PPC64) 7213 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B) 7214 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B) 7215 GEN_LDUX(ld, ld64_i64, 0x15, 0x01, PPC_64B) 7216 GEN_LDX(ld, ld64_i64, 0x15, 0x00, PPC_64B) 7217 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE) 7218 7219 /* HV/P7 and later only */ 7220 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST) 7221 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x18, PPC_CILDST) 7222 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST) 7223 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST) 7224 #endif 7225 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER) 7226 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER) 7227 7228 /* External PID based load */ 7229 #undef GEN_LDEPX 7230 #define GEN_LDEPX(name, ldop, opc2, opc3) \ 7231 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3, \ 7232 0x00000001, PPC_NONE, PPC2_BOOKE206), 7233 7234 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02) 7235 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08) 7236 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00) 7237 #if defined(TARGET_PPC64) 7238 GEN_LDEPX(ld, DEF_MEMOP(MO_Q), 0x1D, 0x00) 7239 #endif 7240 7241 #undef GEN_ST 7242 #undef GEN_STU 7243 #undef GEN_STUX 7244 #undef GEN_STX_E 7245 #undef GEN_STS 7246 #define GEN_ST(name, stop, opc, type) \ 7247 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type), 7248 #define GEN_STU(name, stop, opc, type) \ 7249 GEN_HANDLER(stop##u, opc, 0xFF, 0xFF, 0x00000000, type), 7250 #define GEN_STUX(name, stop, opc2, opc3, type) \ 7251 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type), 7252 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \ 7253 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000000, type, type2), 7254 #define GEN_STS(name, stop, op, type) \ 7255 GEN_ST(name, stop, op | 0x20, type) \ 7256 GEN_STU(name, stop, op | 0x21, type) \ 7257 GEN_STUX(name, stop, 0x17, op | 0x01, type) \ 7258 GEN_STX(name, stop, 0x17, op | 0x00, type) 7259 7260 GEN_STS(stb, st8, 0x06, PPC_INTEGER) 7261 GEN_STS(sth, st16, 0x0C, PPC_INTEGER) 7262 GEN_STS(stw, st32, 0x04, PPC_INTEGER) 7263 #if defined(TARGET_PPC64) 7264 GEN_STUX(std, st64_i64, 0x15, 0x05, PPC_64B) 7265 GEN_STX(std, st64_i64, 0x15, 0x04, PPC_64B) 7266 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE) 7267 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST) 7268 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST) 7269 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST) 7270 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST) 7271 #endif 7272 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER) 7273 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER) 7274 7275 #undef GEN_STEPX 7276 #define GEN_STEPX(name, ldop, opc2, opc3) \ 7277 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3, \ 7278 0x00000001, PPC_NONE, PPC2_BOOKE206), 7279 7280 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06) 7281 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C) 7282 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04) 7283 #if defined(TARGET_PPC64) 7284 GEN_STEPX(std, DEF_MEMOP(MO_Q), 0x1D, 0x04) 7285 #endif 7286 7287 #undef GEN_CRLOGIC 7288 #define GEN_CRLOGIC(name, tcg_op, opc) \ 7289 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER) 7290 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08), 7291 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04), 7292 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09), 7293 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07), 7294 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01), 7295 GEN_CRLOGIC(cror, tcg_gen_or_i32, 
0x0E), 7296 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D), 7297 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06), 7298 7299 #undef GEN_MAC_HANDLER 7300 #define GEN_MAC_HANDLER(name, opc2, opc3) \ 7301 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC) 7302 GEN_MAC_HANDLER(macchw, 0x0C, 0x05), 7303 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15), 7304 GEN_MAC_HANDLER(macchws, 0x0C, 0x07), 7305 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17), 7306 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06), 7307 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16), 7308 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04), 7309 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14), 7310 GEN_MAC_HANDLER(machhw, 0x0C, 0x01), 7311 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11), 7312 GEN_MAC_HANDLER(machhws, 0x0C, 0x03), 7313 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13), 7314 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02), 7315 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12), 7316 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00), 7317 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10), 7318 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D), 7319 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D), 7320 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F), 7321 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F), 7322 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C), 7323 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C), 7324 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E), 7325 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E), 7326 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05), 7327 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15), 7328 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07), 7329 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17), 7330 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01), 7331 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11), 7332 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03), 7333 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13), 7334 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D), 7335 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D), 7336 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F), 7337 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F), 7338 GEN_MAC_HANDLER(mulchw, 0x08, 0x05), 7339 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04), 7340 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01), 7341 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00), 7342 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D), 7343 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C), 7344 7345 GEN_HANDLER2_E(tbegin, "tbegin", 0x1F, 0x0E, 0x14, 0x01DFF800, \ 7346 PPC_NONE, PPC2_TM), 7347 GEN_HANDLER2_E(tend, "tend", 0x1F, 0x0E, 0x15, 0x01FFF800, \ 7348 PPC_NONE, PPC2_TM), 7349 GEN_HANDLER2_E(tabort, "tabort", 0x1F, 0x0E, 0x1C, 0x03E0F800, \ 7350 PPC_NONE, PPC2_TM), 7351 GEN_HANDLER2_E(tabortwc, "tabortwc", 0x1F, 0x0E, 0x18, 0x00000000, \ 7352 PPC_NONE, PPC2_TM), 7353 GEN_HANDLER2_E(tabortwci, "tabortwci", 0x1F, 0x0E, 0x1A, 0x00000000, \ 7354 PPC_NONE, PPC2_TM), 7355 GEN_HANDLER2_E(tabortdc, "tabortdc", 0x1F, 0x0E, 0x19, 0x00000000, \ 7356 PPC_NONE, PPC2_TM), 7357 GEN_HANDLER2_E(tabortdci, "tabortdci", 0x1F, 0x0E, 0x1B, 0x00000000, \ 7358 PPC_NONE, PPC2_TM), 7359 GEN_HANDLER2_E(tsr, "tsr", 0x1F, 0x0E, 0x17, 0x03DFF800, \ 7360 PPC_NONE, PPC2_TM), 7361 GEN_HANDLER2_E(tcheck, "tcheck", 0x1F, 0x0E, 0x16, 0x007FF800, \ 7362 PPC_NONE, PPC2_TM), 7363 GEN_HANDLER2_E(treclaim, "treclaim", 0x1F, 0x0E, 0x1D, 0x03E0F800, \ 7364 PPC_NONE, PPC2_TM), 7365 GEN_HANDLER2_E(trechkpt, "trechkpt", 0x1F, 0x0E, 0x1F, 0x03FFF800, \ 7366 PPC_NONE, PPC2_TM), 7367 7368 #include "translate/fp-ops.inc.c" 7369 7370 #include "translate/vmx-ops.inc.c" 7371 7372 #include "translate/vsx-ops.inc.c" 7373 7374 #include "translate/dfp-ops.inc.c" 7375 7376 #include "translate/spe-ops.inc.c" 7377 }; 7378 7379 #include "helper_regs.h" 7380 #include "translate_init.inc.c" 7381 7382 /*****************************************************************************/ 7383 /* Misc 
PowerPC helpers */ 7384 void ppc_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf, 7385 int flags) 7386 { 7387 #define RGPL 4 7388 #define RFPL 4 7389 7390 PowerPCCPU *cpu = POWERPC_CPU(cs); 7391 CPUPPCState *env = &cpu->env; 7392 int i; 7393 7394 cpu_fprintf(f, "NIP " TARGET_FMT_lx " LR " TARGET_FMT_lx " CTR " 7395 TARGET_FMT_lx " XER " TARGET_FMT_lx " CPU#%d\n", 7396 env->nip, env->lr, env->ctr, cpu_read_xer(env), 7397 cs->cpu_index); 7398 cpu_fprintf(f, "MSR " TARGET_FMT_lx " HID0 " TARGET_FMT_lx " HF " 7399 TARGET_FMT_lx " iidx %d didx %d\n", 7400 env->msr, env->spr[SPR_HID0], 7401 env->hflags, env->immu_idx, env->dmmu_idx); 7402 #if !defined(NO_TIMER_DUMP) 7403 cpu_fprintf(f, "TB %08" PRIu32 " %08" PRIu64 7404 #if !defined(CONFIG_USER_ONLY) 7405 " DECR %08" PRIu32 7406 #endif 7407 "\n", 7408 cpu_ppc_load_tbu(env), cpu_ppc_load_tbl(env) 7409 #if !defined(CONFIG_USER_ONLY) 7410 , cpu_ppc_load_decr(env) 7411 #endif 7412 ); 7413 #endif 7414 for (i = 0; i < 32; i++) { 7415 if ((i & (RGPL - 1)) == 0) 7416 cpu_fprintf(f, "GPR%02d", i); 7417 cpu_fprintf(f, " %016" PRIx64, ppc_dump_gpr(env, i)); 7418 if ((i & (RGPL - 1)) == (RGPL - 1)) 7419 cpu_fprintf(f, "\n"); 7420 } 7421 cpu_fprintf(f, "CR "); 7422 for (i = 0; i < 8; i++) 7423 cpu_fprintf(f, "%01x", env->crf[i]); 7424 cpu_fprintf(f, " ["); 7425 for (i = 0; i < 8; i++) { 7426 char a = '-'; 7427 if (env->crf[i] & 0x08) 7428 a = 'L'; 7429 else if (env->crf[i] & 0x04) 7430 a = 'G'; 7431 else if (env->crf[i] & 0x02) 7432 a = 'E'; 7433 cpu_fprintf(f, " %c%c", a, env->crf[i] & 0x01 ? 'O' : ' '); 7434 } 7435 cpu_fprintf(f, " ] RES " TARGET_FMT_lx "\n", 7436 env->reserve_addr); 7437 7438 if (flags & CPU_DUMP_FPU) { 7439 for (i = 0; i < 32; i++) { 7440 if ((i & (RFPL - 1)) == 0) { 7441 cpu_fprintf(f, "FPR%02d", i); 7442 } 7443 cpu_fprintf(f, " %016" PRIx64, *cpu_fpr_ptr(env, i)); 7444 if ((i & (RFPL - 1)) == (RFPL - 1)) { 7445 cpu_fprintf(f, "\n"); 7446 } 7447 } 7448 cpu_fprintf(f, "FPSCR " TARGET_FMT_lx "\n", env->fpscr); 7449 } 7450 7451 #if !defined(CONFIG_USER_ONLY) 7452 cpu_fprintf(f, " SRR0 " TARGET_FMT_lx " SRR1 " TARGET_FMT_lx 7453 " PVR " TARGET_FMT_lx " VRSAVE " TARGET_FMT_lx "\n", 7454 env->spr[SPR_SRR0], env->spr[SPR_SRR1], 7455 env->spr[SPR_PVR], env->spr[SPR_VRSAVE]); 7456 7457 cpu_fprintf(f, "SPRG0 " TARGET_FMT_lx " SPRG1 " TARGET_FMT_lx 7458 " SPRG2 " TARGET_FMT_lx " SPRG3 " TARGET_FMT_lx "\n", 7459 env->spr[SPR_SPRG0], env->spr[SPR_SPRG1], 7460 env->spr[SPR_SPRG2], env->spr[SPR_SPRG3]); 7461 7462 cpu_fprintf(f, "SPRG4 " TARGET_FMT_lx " SPRG5 " TARGET_FMT_lx 7463 " SPRG6 " TARGET_FMT_lx " SPRG7 " TARGET_FMT_lx "\n", 7464 env->spr[SPR_SPRG4], env->spr[SPR_SPRG5], 7465 env->spr[SPR_SPRG6], env->spr[SPR_SPRG7]); 7466 7467 #if defined(TARGET_PPC64) 7468 if (env->excp_model == POWERPC_EXCP_POWER7 || 7469 env->excp_model == POWERPC_EXCP_POWER8) { 7470 cpu_fprintf(f, "HSRR0 " TARGET_FMT_lx " HSRR1 " TARGET_FMT_lx "\n", 7471 env->spr[SPR_HSRR0], env->spr[SPR_HSRR1]); 7472 } 7473 #endif 7474 if (env->excp_model == POWERPC_EXCP_BOOKE) { 7475 cpu_fprintf(f, "CSRR0 " TARGET_FMT_lx " CSRR1 " TARGET_FMT_lx 7476 " MCSRR0 " TARGET_FMT_lx " MCSRR1 " TARGET_FMT_lx "\n", 7477 env->spr[SPR_BOOKE_CSRR0], env->spr[SPR_BOOKE_CSRR1], 7478 env->spr[SPR_BOOKE_MCSRR0], env->spr[SPR_BOOKE_MCSRR1]); 7479 7480 cpu_fprintf(f, " TCR " TARGET_FMT_lx " TSR " TARGET_FMT_lx 7481 " ESR " TARGET_FMT_lx " DEAR " TARGET_FMT_lx "\n", 7482 env->spr[SPR_BOOKE_TCR], env->spr[SPR_BOOKE_TSR], 7483 env->spr[SPR_BOOKE_ESR], env->spr[SPR_BOOKE_DEAR]); 7484 7485 
cpu_fprintf(f, " PIR " TARGET_FMT_lx " DECAR " TARGET_FMT_lx 7486 " IVPR " TARGET_FMT_lx " EPCR " TARGET_FMT_lx "\n", 7487 env->spr[SPR_BOOKE_PIR], env->spr[SPR_BOOKE_DECAR], 7488 env->spr[SPR_BOOKE_IVPR], env->spr[SPR_BOOKE_EPCR]); 7489 7490 cpu_fprintf(f, " MCSR " TARGET_FMT_lx " SPRG8 " TARGET_FMT_lx 7491 " EPR " TARGET_FMT_lx "\n", 7492 env->spr[SPR_BOOKE_MCSR], env->spr[SPR_BOOKE_SPRG8], 7493 env->spr[SPR_BOOKE_EPR]); 7494 7495 /* FSL-specific */ 7496 cpu_fprintf(f, " MCAR " TARGET_FMT_lx " PID1 " TARGET_FMT_lx 7497 " PID2 " TARGET_FMT_lx " SVR " TARGET_FMT_lx "\n", 7498 env->spr[SPR_Exxx_MCAR], env->spr[SPR_BOOKE_PID1], 7499 env->spr[SPR_BOOKE_PID2], env->spr[SPR_E500_SVR]); 7500 7501 /* 7502 * IVORs are left out as they are large and do not change often -- 7503 * they can be read with "p $ivor0", "p $ivor1", etc. 7504 */ 7505 } 7506 7507 #if defined(TARGET_PPC64) 7508 if (env->flags & POWERPC_FLAG_CFAR) { 7509 cpu_fprintf(f, " CFAR " TARGET_FMT_lx"\n", env->cfar); 7510 } 7511 #endif 7512 7513 if (env->spr_cb[SPR_LPCR].name) 7514 cpu_fprintf(f, " LPCR " TARGET_FMT_lx "\n", env->spr[SPR_LPCR]); 7515 7516 switch (env->mmu_model) { 7517 case POWERPC_MMU_32B: 7518 case POWERPC_MMU_601: 7519 case POWERPC_MMU_SOFT_6xx: 7520 case POWERPC_MMU_SOFT_74xx: 7521 #if defined(TARGET_PPC64) 7522 case POWERPC_MMU_64B: 7523 case POWERPC_MMU_2_03: 7524 case POWERPC_MMU_2_06: 7525 case POWERPC_MMU_2_07: 7526 case POWERPC_MMU_3_00: 7527 #endif 7528 if (env->spr_cb[SPR_SDR1].name) { /* SDR1 Exists */ 7529 cpu_fprintf(f, " SDR1 " TARGET_FMT_lx " ", env->spr[SPR_SDR1]); 7530 } 7531 if (env->spr_cb[SPR_PTCR].name) { /* PTCR Exists */ 7532 cpu_fprintf(f, " PTCR " TARGET_FMT_lx " ", env->spr[SPR_PTCR]); 7533 } 7534 cpu_fprintf(f, " DAR " TARGET_FMT_lx " DSISR " TARGET_FMT_lx "\n", 7535 env->spr[SPR_DAR], env->spr[SPR_DSISR]); 7536 break; 7537 case POWERPC_MMU_BOOKE206: 7538 cpu_fprintf(f, " MAS0 " TARGET_FMT_lx " MAS1 " TARGET_FMT_lx 7539 " MAS2 " TARGET_FMT_lx " MAS3 " TARGET_FMT_lx "\n", 7540 env->spr[SPR_BOOKE_MAS0], env->spr[SPR_BOOKE_MAS1], 7541 env->spr[SPR_BOOKE_MAS2], env->spr[SPR_BOOKE_MAS3]); 7542 7543 cpu_fprintf(f, " MAS4 " TARGET_FMT_lx " MAS6 " TARGET_FMT_lx 7544 " MAS7 " TARGET_FMT_lx " PID " TARGET_FMT_lx "\n", 7545 env->spr[SPR_BOOKE_MAS4], env->spr[SPR_BOOKE_MAS6], 7546 env->spr[SPR_BOOKE_MAS7], env->spr[SPR_BOOKE_PID]); 7547 7548 cpu_fprintf(f, "MMUCFG " TARGET_FMT_lx " TLB0CFG " TARGET_FMT_lx 7549 " TLB1CFG " TARGET_FMT_lx "\n", 7550 env->spr[SPR_MMUCFG], env->spr[SPR_BOOKE_TLB0CFG], 7551 env->spr[SPR_BOOKE_TLB1CFG]); 7552 break; 7553 default: 7554 break; 7555 } 7556 #endif 7557 7558 #undef RGPL 7559 #undef RFPL 7560 } 7561 7562 void ppc_cpu_dump_statistics(CPUState *cs, FILE*f, 7563 fprintf_function cpu_fprintf, int flags) 7564 { 7565 #if defined(DO_PPC_STATISTICS) 7566 PowerPCCPU *cpu = POWERPC_CPU(cs); 7567 opc_handler_t **t1, **t2, **t3, *handler; 7568 int op1, op2, op3; 7569 7570 t1 = cpu->env.opcodes; 7571 for (op1 = 0; op1 < 64; op1++) { 7572 handler = t1[op1]; 7573 if (is_indirect_opcode(handler)) { 7574 t2 = ind_table(handler); 7575 for (op2 = 0; op2 < 32; op2++) { 7576 handler = t2[op2]; 7577 if (is_indirect_opcode(handler)) { 7578 t3 = ind_table(handler); 7579 for (op3 = 0; op3 < 32; op3++) { 7580 handler = t3[op3]; 7581 if (handler->count == 0) 7582 continue; 7583 cpu_fprintf(f, "%02x %02x %02x (%02x %04d) %16s: " 7584 "%016" PRIx64 " %" PRId64 "\n", 7585 op1, op2, op3, op1, (op3 << 5) | op2, 7586 handler->oname, 7587 handler->count, handler->count); 7588 } 7589 } else { 7590 
                    if (handler->count == 0)
                        continue;
                    cpu_fprintf(f, "%02x %02x (%02x %04d) %16s: "
                                "%016" PRIx64 " %" PRId64 "\n",
                                op1, op2, op1, op2, handler->oname,
                                handler->count, handler->count);
                }
            }
        } else {
            if (handler->count == 0)
                continue;
            cpu_fprintf(f, "%02x (%02x ) %16s: %016" PRIx64
                        " %" PRId64 "\n",
                        op1, op1, handler->oname,
                        handler->count, handler->count);
        }
    }
#endif
}

static void ppc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    CPUPPCState *env = cs->env_ptr;
    int bound;

    ctx->exception = POWERPC_EXCP_NONE;
    ctx->spr_cb = env->spr_cb;
    ctx->pr = msr_pr;
    ctx->mem_idx = env->dmmu_idx;
    ctx->dr = msr_dr;
#if !defined(CONFIG_USER_ONLY)
    ctx->hv = msr_hv || !env->has_hv_mode;
#endif
    ctx->insns_flags = env->insns_flags;
    ctx->insns_flags2 = env->insns_flags2;
    ctx->access_type = -1;
    ctx->need_access_type = !(env->mmu_model & POWERPC_MMU_64B);
    ctx->le_mode = !!(env->hflags & (1 << MSR_LE));
    ctx->default_tcg_memop_mask = ctx->le_mode ? MO_LE : MO_BE;
    ctx->flags = env->flags;
#if defined(TARGET_PPC64)
    ctx->sf_mode = msr_is_64bit(env, env->msr);
    ctx->has_cfar = !!(env->flags & POWERPC_FLAG_CFAR);
#endif
    ctx->lazy_tlb_flush = env->mmu_model == POWERPC_MMU_32B
        || env->mmu_model == POWERPC_MMU_601
        || (env->mmu_model & POWERPC_MMU_64B);

    ctx->fpu_enabled = !!msr_fp;
    if ((env->flags & POWERPC_FLAG_SPE) && msr_spe)
        ctx->spe_enabled = !!msr_spe;
    else
        ctx->spe_enabled = false;
    if ((env->flags & POWERPC_FLAG_VRE) && msr_vr)
        ctx->altivec_enabled = !!msr_vr;
    else
        ctx->altivec_enabled = false;
    if ((env->flags & POWERPC_FLAG_VSX) && msr_vsx) {
        ctx->vsx_enabled = !!msr_vsx;
    } else {
        ctx->vsx_enabled = false;
    }
#if defined(TARGET_PPC64)
    if ((env->flags & POWERPC_FLAG_TM) && msr_tm) {
        ctx->tm_enabled = !!msr_tm;
    } else {
        ctx->tm_enabled = false;
    }
#endif
    ctx->gtse = !!(env->spr[SPR_LPCR] & LPCR_GTSE);
    if ((env->flags & POWERPC_FLAG_SE) && msr_se)
        ctx->singlestep_enabled = CPU_SINGLE_STEP;
    else
        ctx->singlestep_enabled = 0;
    if ((env->flags & POWERPC_FLAG_BE) && msr_be)
        ctx->singlestep_enabled |= CPU_BRANCH_STEP;
    if ((env->flags & POWERPC_FLAG_DE) && msr_de) {
        ctx->singlestep_enabled = 0;
        target_ulong dbcr0 = env->spr[SPR_BOOKE_DBCR0];
        if (dbcr0 & DBCR0_ICMP) {
            ctx->singlestep_enabled |= CPU_SINGLE_STEP;
        }
        if (dbcr0 & DBCR0_BRT) {
            ctx->singlestep_enabled |= CPU_BRANCH_STEP;
        }
    }
    if (unlikely(ctx->base.singlestep_enabled)) {
        ctx->singlestep_enabled |= GDBSTUB_SINGLE_STEP;
    }
#if defined(DO_SINGLE_STEP) && 0
    /* Single step trace mode */
    msr_se = 1;
#endif

    bound = -(ctx->base.pc_first | TARGET_PAGE_MASK) / 4;
    ctx->base.max_insns = MIN(ctx->base.max_insns, bound);
}

static void ppc_tr_tb_start(DisasContextBase *db, CPUState *cs)
{
}

static void ppc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
{
    tcg_gen_insn_start(dcbase->pc_next);
}

static bool ppc_tr_breakpoint_check(DisasContextBase *dcbase, CPUState *cs,
                                    const CPUBreakpoint *bp)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
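
    /*
     * A guest breakpoint matched the address being translated: raise a
     * debug exception and end the translation block here.
     */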
    gen_debug_exception(ctx);
    dcbase->is_jmp = DISAS_NORETURN;
    /*
     * The address covered by the breakpoint must be included in
     * [tb->pc, tb->pc + tb->size) in order for it to be properly
     * cleared -- thus we increment the PC here so that the logic
     * setting tb->size below does the right thing.
     */
    ctx->base.pc_next += 4;
    return true;
}

static void ppc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    CPUPPCState *env = cs->env_ptr;
    opc_handler_t **table, *handler;

    LOG_DISAS("----------------\n");
    LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
              ctx->base.pc_next, ctx->mem_idx, (int)msr_ir);

    if (unlikely(need_byteswap(ctx))) {
        ctx->opcode = bswap32(cpu_ldl_code(env, ctx->base.pc_next));
    } else {
        ctx->opcode = cpu_ldl_code(env, ctx->base.pc_next);
    }
    LOG_DISAS("translate opcode %08x (%02x %02x %02x %02x) (%s)\n",
              ctx->opcode, opc1(ctx->opcode), opc2(ctx->opcode),
              opc3(ctx->opcode), opc4(ctx->opcode),
              ctx->le_mode ? "little" : "big");
    ctx->base.pc_next += 4;
    table = env->opcodes;
    handler = table[opc1(ctx->opcode)];
    if (is_indirect_opcode(handler)) {
        table = ind_table(handler);
        handler = table[opc2(ctx->opcode)];
        if (is_indirect_opcode(handler)) {
            table = ind_table(handler);
            handler = table[opc3(ctx->opcode)];
            if (is_indirect_opcode(handler)) {
                table = ind_table(handler);
                handler = table[opc4(ctx->opcode)];
            }
        }
    }
    /* Is opcode *REALLY* valid ? */
    if (unlikely(handler->handler == &gen_invalid)) {
        qemu_log_mask(LOG_GUEST_ERROR, "invalid/unsupported opcode: "
                      "%02x - %02x - %02x - %02x (%08x) "
                      TARGET_FMT_lx " %d\n",
                      opc1(ctx->opcode), opc2(ctx->opcode),
                      opc3(ctx->opcode), opc4(ctx->opcode),
                      ctx->opcode, ctx->base.pc_next - 4, (int)msr_ir);
    } else {
        uint32_t inval;

        if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE)
                     && Rc(ctx->opcode))) {
            inval = handler->inval2;
        } else {
            inval = handler->inval1;
        }

        if (unlikely((ctx->opcode & inval) != 0)) {
            qemu_log_mask(LOG_GUEST_ERROR, "invalid bits: %08x for opcode: "
                          "%02x - %02x - %02x - %02x (%08x) "
                          TARGET_FMT_lx "\n", ctx->opcode & inval,
                          opc1(ctx->opcode), opc2(ctx->opcode),
                          opc3(ctx->opcode), opc4(ctx->opcode),
                          ctx->opcode, ctx->base.pc_next - 4);
            gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
            ctx->base.is_jmp = DISAS_NORETURN;
            return;
        }
    }
    (*(handler->handler))(ctx);
#if defined(DO_PPC_STATISTICS)
    handler->count++;
#endif
    /* Check trace mode exceptions */
    if (unlikely(ctx->singlestep_enabled & CPU_SINGLE_STEP &&
                 (ctx->base.pc_next <= 0x100 || ctx->base.pc_next > 0xF00) &&
                 ctx->exception != POWERPC_SYSCALL &&
                 ctx->exception != POWERPC_EXCP_TRAP &&
                 ctx->exception != POWERPC_EXCP_BRANCH)) {
        uint32_t excp = gen_prep_dbgex(ctx);
        gen_exception_nip(ctx, excp, ctx->base.pc_next);
    }

    if (tcg_check_temp_count()) {
        qemu_log("Opcode %02x %02x %02x %02x (%08x) leaked "
                 "temporaries\n", opc1(ctx->opcode), opc2(ctx->opcode),
                 opc3(ctx->opcode), opc4(ctx->opcode), ctx->opcode);
    }

    ctx->base.is_jmp = ctx->exception == POWERPC_EXCP_NONE ?
        DISAS_NEXT : DISAS_NORETURN;
}

static void ppc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    if (ctx->exception == POWERPC_EXCP_NONE) {
        gen_goto_tb(ctx, 0, ctx->base.pc_next);
    } else if (ctx->exception != POWERPC_EXCP_BRANCH) {
        if (unlikely(ctx->base.singlestep_enabled)) {
            gen_debug_exception(ctx);
        }
        /* Generate the return instruction */
        tcg_gen_exit_tb(NULL, 0);
    }
}

static void ppc_tr_disas_log(const DisasContextBase *dcbase, CPUState *cs)
{
    qemu_log("IN: %s\n", lookup_symbol(dcbase->pc_first));
    log_target_disas(cs, dcbase->pc_first, dcbase->tb->size);
}

static const TranslatorOps ppc_tr_ops = {
    .init_disas_context = ppc_tr_init_disas_context,
    .tb_start           = ppc_tr_tb_start,
    .insn_start         = ppc_tr_insn_start,
    .breakpoint_check   = ppc_tr_breakpoint_check,
    .translate_insn     = ppc_tr_translate_insn,
    .tb_stop            = ppc_tr_tb_stop,
    .disas_log          = ppc_tr_disas_log,
};

void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb)
{
    DisasContext ctx;

    translator_loop(&ppc_tr_ops, &ctx.base, cs, tb);
}

void restore_state_to_opc(CPUPPCState *env, TranslationBlock *tb,
                          target_ulong *data)
{
    env->nip = data[0];
}