1 /* 2 * PowerPC emulation for qemu: main translation routines. 3 * 4 * Copyright (c) 2003-2007 Jocelyn Mayer 5 * Copyright (C) 2011 Freescale Semiconductor, Inc. 6 * 7 * This library is free software; you can redistribute it and/or 8 * modify it under the terms of the GNU Lesser General Public 9 * License as published by the Free Software Foundation; either 10 * version 2 of the License, or (at your option) any later version. 11 * 12 * This library is distributed in the hope that it will be useful, 13 * but WITHOUT ANY WARRANTY; without even the implied warranty of 14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 15 * Lesser General Public License for more details. 16 * 17 * You should have received a copy of the GNU Lesser General Public 18 * License along with this library; if not, see <http://www.gnu.org/licenses/>. 19 */ 20 21 #include "qemu/osdep.h" 22 #include "cpu.h" 23 #include "internal.h" 24 #include "disas/disas.h" 25 #include "exec/exec-all.h" 26 #include "tcg-op.h" 27 #include "qemu/host-utils.h" 28 #include "exec/cpu_ldst.h" 29 30 #include "exec/helper-proto.h" 31 #include "exec/helper-gen.h" 32 33 #include "trace-tcg.h" 34 #include "exec/log.h" 35 36 37 #define CPU_SINGLE_STEP 0x1 38 #define CPU_BRANCH_STEP 0x2 39 #define GDBSTUB_SINGLE_STEP 0x4 40 41 /* Include definitions for instructions classes and implementations flags */ 42 //#define PPC_DEBUG_DISAS 43 //#define DO_PPC_STATISTICS 44 45 #ifdef PPC_DEBUG_DISAS 46 # define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__) 47 #else 48 # define LOG_DISAS(...) do { } while (0) 49 #endif 50 /*****************************************************************************/ 51 /* Code translation helpers */ 52 53 /* global register indexes */ 54 static TCGv_env cpu_env; 55 static char cpu_reg_names[10*3 + 22*4 /* GPR */ 56 + 10*4 + 22*5 /* SPE GPRh */ 57 + 10*4 + 22*5 /* FPR */ 58 + 2*(10*6 + 22*7) /* AVRh, AVRl */ 59 + 10*5 + 22*6 /* VSR */ 60 + 8*5 /* CRF */]; 61 static TCGv cpu_gpr[32]; 62 static TCGv cpu_gprh[32]; 63 static TCGv_i64 cpu_fpr[32]; 64 static TCGv_i64 cpu_avrh[32], cpu_avrl[32]; 65 static TCGv_i64 cpu_vsr[32]; 66 static TCGv_i32 cpu_crf[8]; 67 static TCGv cpu_nip; 68 static TCGv cpu_msr; 69 static TCGv cpu_ctr; 70 static TCGv cpu_lr; 71 #if defined(TARGET_PPC64) 72 static TCGv cpu_cfar; 73 #endif 74 static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca, cpu_ov32, cpu_ca32; 75 static TCGv cpu_reserve; 76 static TCGv cpu_fpscr; 77 static TCGv_i32 cpu_access_type; 78 79 #include "exec/gen-icount.h" 80 81 void ppc_translate_init(void) 82 { 83 int i; 84 char* p; 85 size_t cpu_reg_names_size; 86 static int done_init = 0; 87 88 if (done_init) 89 return; 90 91 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env"); 92 tcg_ctx.tcg_env = cpu_env; 93 94 p = cpu_reg_names; 95 cpu_reg_names_size = sizeof(cpu_reg_names); 96 97 for (i = 0; i < 8; i++) { 98 snprintf(p, cpu_reg_names_size, "crf%d", i); 99 cpu_crf[i] = tcg_global_mem_new_i32(cpu_env, 100 offsetof(CPUPPCState, crf[i]), p); 101 p += 5; 102 cpu_reg_names_size -= 5; 103 } 104 105 for (i = 0; i < 32; i++) { 106 snprintf(p, cpu_reg_names_size, "r%d", i); 107 cpu_gpr[i] = tcg_global_mem_new(cpu_env, 108 offsetof(CPUPPCState, gpr[i]), p); 109 p += (i < 10) ? 3 : 4; 110 cpu_reg_names_size -= (i < 10) ? 3 : 4; 111 snprintf(p, cpu_reg_names_size, "r%dH", i); 112 cpu_gprh[i] = tcg_global_mem_new(cpu_env, 113 offsetof(CPUPPCState, gprh[i]), p); 114 p += (i < 10) ? 4 : 5; 115 cpu_reg_names_size -= (i < 10) ? 
4 : 5; 116 117 snprintf(p, cpu_reg_names_size, "fp%d", i); 118 cpu_fpr[i] = tcg_global_mem_new_i64(cpu_env, 119 offsetof(CPUPPCState, fpr[i]), p); 120 p += (i < 10) ? 4 : 5; 121 cpu_reg_names_size -= (i < 10) ? 4 : 5; 122 123 snprintf(p, cpu_reg_names_size, "avr%dH", i); 124 #ifdef HOST_WORDS_BIGENDIAN 125 cpu_avrh[i] = tcg_global_mem_new_i64(cpu_env, 126 offsetof(CPUPPCState, avr[i].u64[0]), p); 127 #else 128 cpu_avrh[i] = tcg_global_mem_new_i64(cpu_env, 129 offsetof(CPUPPCState, avr[i].u64[1]), p); 130 #endif 131 p += (i < 10) ? 6 : 7; 132 cpu_reg_names_size -= (i < 10) ? 6 : 7; 133 134 snprintf(p, cpu_reg_names_size, "avr%dL", i); 135 #ifdef HOST_WORDS_BIGENDIAN 136 cpu_avrl[i] = tcg_global_mem_new_i64(cpu_env, 137 offsetof(CPUPPCState, avr[i].u64[1]), p); 138 #else 139 cpu_avrl[i] = tcg_global_mem_new_i64(cpu_env, 140 offsetof(CPUPPCState, avr[i].u64[0]), p); 141 #endif 142 p += (i < 10) ? 6 : 7; 143 cpu_reg_names_size -= (i < 10) ? 6 : 7; 144 snprintf(p, cpu_reg_names_size, "vsr%d", i); 145 cpu_vsr[i] = tcg_global_mem_new_i64(cpu_env, 146 offsetof(CPUPPCState, vsr[i]), p); 147 p += (i < 10) ? 5 : 6; 148 cpu_reg_names_size -= (i < 10) ? 5 : 6; 149 } 150 151 cpu_nip = tcg_global_mem_new(cpu_env, 152 offsetof(CPUPPCState, nip), "nip"); 153 154 cpu_msr = tcg_global_mem_new(cpu_env, 155 offsetof(CPUPPCState, msr), "msr"); 156 157 cpu_ctr = tcg_global_mem_new(cpu_env, 158 offsetof(CPUPPCState, ctr), "ctr"); 159 160 cpu_lr = tcg_global_mem_new(cpu_env, 161 offsetof(CPUPPCState, lr), "lr"); 162 163 #if defined(TARGET_PPC64) 164 cpu_cfar = tcg_global_mem_new(cpu_env, 165 offsetof(CPUPPCState, cfar), "cfar"); 166 #endif 167 168 cpu_xer = tcg_global_mem_new(cpu_env, 169 offsetof(CPUPPCState, xer), "xer"); 170 cpu_so = tcg_global_mem_new(cpu_env, 171 offsetof(CPUPPCState, so), "SO"); 172 cpu_ov = tcg_global_mem_new(cpu_env, 173 offsetof(CPUPPCState, ov), "OV"); 174 cpu_ca = tcg_global_mem_new(cpu_env, 175 offsetof(CPUPPCState, ca), "CA"); 176 cpu_ov32 = tcg_global_mem_new(cpu_env, 177 offsetof(CPUPPCState, ov32), "OV32"); 178 cpu_ca32 = tcg_global_mem_new(cpu_env, 179 offsetof(CPUPPCState, ca32), "CA32"); 180 181 cpu_reserve = tcg_global_mem_new(cpu_env, 182 offsetof(CPUPPCState, reserve_addr), 183 "reserve_addr"); 184 185 cpu_fpscr = tcg_global_mem_new(cpu_env, 186 offsetof(CPUPPCState, fpscr), "fpscr"); 187 188 cpu_access_type = tcg_global_mem_new_i32(cpu_env, 189 offsetof(CPUPPCState, access_type), "access_type"); 190 191 done_init = 1; 192 } 193 194 /* internal defines */ 195 struct DisasContext { 196 struct TranslationBlock *tb; 197 target_ulong nip; 198 uint32_t opcode; 199 uint32_t exception; 200 /* Routine used to access memory */ 201 bool pr, hv, dr, le_mode; 202 bool lazy_tlb_flush; 203 bool need_access_type; 204 int mem_idx; 205 int access_type; 206 /* Translation flags */ 207 TCGMemOp default_tcg_memop_mask; 208 #if defined(TARGET_PPC64) 209 bool sf_mode; 210 bool has_cfar; 211 #endif 212 bool fpu_enabled; 213 bool altivec_enabled; 214 bool vsx_enabled; 215 bool spe_enabled; 216 bool tm_enabled; 217 ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */ 218 int singlestep_enabled; 219 uint64_t insns_flags; 220 uint64_t insns_flags2; 221 }; 222 223 /* Return true iff byteswap is needed in a scalar memop */ 224 static inline bool need_byteswap(const DisasContext *ctx) 225 { 226 #if defined(TARGET_WORDS_BIGENDIAN) 227 return ctx->le_mode; 228 #else 229 return !ctx->le_mode; 230 #endif 231 } 232 233 /* True when active word size < size of target_long. 
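   In practice this means a 64-bit implementation running in 32-bit
   (MSR[SF] = 0) mode, which is what the sf_mode flag in DisasContext tracks.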
*/ 234 #ifdef TARGET_PPC64 235 # define NARROW_MODE(C) (!(C)->sf_mode) 236 #else 237 # define NARROW_MODE(C) 0 238 #endif 239 240 struct opc_handler_t { 241 /* invalid bits for instruction 1 (Rc(opcode) == 0) */ 242 uint32_t inval1; 243 /* invalid bits for instruction 2 (Rc(opcode) == 1) */ 244 uint32_t inval2; 245 /* instruction type */ 246 uint64_t type; 247 /* extended instruction type */ 248 uint64_t type2; 249 /* handler */ 250 void (*handler)(DisasContext *ctx); 251 #if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU) 252 const char *oname; 253 #endif 254 #if defined(DO_PPC_STATISTICS) 255 uint64_t count; 256 #endif 257 }; 258 259 static inline void gen_set_access_type(DisasContext *ctx, int access_type) 260 { 261 if (ctx->need_access_type && ctx->access_type != access_type) { 262 tcg_gen_movi_i32(cpu_access_type, access_type); 263 ctx->access_type = access_type; 264 } 265 } 266 267 static inline void gen_update_nip(DisasContext *ctx, target_ulong nip) 268 { 269 if (NARROW_MODE(ctx)) { 270 nip = (uint32_t)nip; 271 } 272 tcg_gen_movi_tl(cpu_nip, nip); 273 } 274 275 static void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error) 276 { 277 TCGv_i32 t0, t1; 278 279 /* These are all synchronous exceptions, we set the PC back to 280 * the faulting instruction 281 */ 282 if (ctx->exception == POWERPC_EXCP_NONE) { 283 gen_update_nip(ctx, ctx->nip - 4); 284 } 285 t0 = tcg_const_i32(excp); 286 t1 = tcg_const_i32(error); 287 gen_helper_raise_exception_err(cpu_env, t0, t1); 288 tcg_temp_free_i32(t0); 289 tcg_temp_free_i32(t1); 290 ctx->exception = (excp); 291 } 292 293 static void gen_exception(DisasContext *ctx, uint32_t excp) 294 { 295 TCGv_i32 t0; 296 297 /* These are all synchronous exceptions, we set the PC back to 298 * the faulting instruction 299 */ 300 if (ctx->exception == POWERPC_EXCP_NONE) { 301 gen_update_nip(ctx, ctx->nip - 4); 302 } 303 t0 = tcg_const_i32(excp); 304 gen_helper_raise_exception(cpu_env, t0); 305 tcg_temp_free_i32(t0); 306 ctx->exception = (excp); 307 } 308 309 static void gen_exception_nip(DisasContext *ctx, uint32_t excp, 310 target_ulong nip) 311 { 312 TCGv_i32 t0; 313 314 gen_update_nip(ctx, nip); 315 t0 = tcg_const_i32(excp); 316 gen_helper_raise_exception(cpu_env, t0); 317 tcg_temp_free_i32(t0); 318 ctx->exception = (excp); 319 } 320 321 static void gen_debug_exception(DisasContext *ctx) 322 { 323 TCGv_i32 t0; 324 325 /* These are all synchronous exceptions, we set the PC back to 326 * the faulting instruction 327 */ 328 if ((ctx->exception != POWERPC_EXCP_BRANCH) && 329 (ctx->exception != POWERPC_EXCP_SYNC)) { 330 gen_update_nip(ctx, ctx->nip); 331 } 332 t0 = tcg_const_i32(EXCP_DEBUG); 333 gen_helper_raise_exception(cpu_env, t0); 334 tcg_temp_free_i32(t0); 335 } 336 337 static inline void gen_inval_exception(DisasContext *ctx, uint32_t error) 338 { 339 /* Will be converted to program check if needed */ 340 gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_INVAL | error); 341 } 342 343 static inline void gen_priv_exception(DisasContext *ctx, uint32_t error) 344 { 345 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_PRIV | error); 346 } 347 348 static inline void gen_hvpriv_exception(DisasContext *ctx, uint32_t error) 349 { 350 /* Will be converted to program check if needed */ 351 gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_PRIV | error); 352 } 353 354 /* Stop translation */ 355 static inline void gen_stop_exception(DisasContext *ctx) 356 { 357 gen_update_nip(ctx, ctx->nip); 358 ctx->exception = POWERPC_EXCP_STOP; 
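    /* A non-NONE exception code tells the main translation loop to stop
     * emitting code after the current instruction. */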
359 } 360 361 #ifndef CONFIG_USER_ONLY 362 /* No need to update nip here, as execution flow will change */ 363 static inline void gen_sync_exception(DisasContext *ctx) 364 { 365 ctx->exception = POWERPC_EXCP_SYNC; 366 } 367 #endif 368 369 #define GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \ 370 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE) 371 372 #define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2) \ 373 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2) 374 375 #define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type) \ 376 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE) 377 378 #define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2) \ 379 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2) 380 381 #define GEN_HANDLER_E_2(name, opc1, opc2, opc3, opc4, inval, type, type2) \ 382 GEN_OPCODE3(name, opc1, opc2, opc3, opc4, inval, type, type2) 383 384 #define GEN_HANDLER2_E_2(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) \ 385 GEN_OPCODE4(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) 386 387 typedef struct opcode_t { 388 unsigned char opc1, opc2, opc3, opc4; 389 #if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */ 390 unsigned char pad[4]; 391 #endif 392 opc_handler_t handler; 393 const char *oname; 394 } opcode_t; 395 396 /* Helpers for priv. check */ 397 #define GEN_PRIV \ 398 do { \ 399 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); return; \ 400 } while (0) 401 402 #if defined(CONFIG_USER_ONLY) 403 #define CHK_HV GEN_PRIV 404 #define CHK_SV GEN_PRIV 405 #define CHK_HVRM GEN_PRIV 406 #else 407 #define CHK_HV \ 408 do { \ 409 if (unlikely(ctx->pr || !ctx->hv)) { \ 410 GEN_PRIV; \ 411 } \ 412 } while (0) 413 #define CHK_SV \ 414 do { \ 415 if (unlikely(ctx->pr)) { \ 416 GEN_PRIV; \ 417 } \ 418 } while (0) 419 #define CHK_HVRM \ 420 do { \ 421 if (unlikely(ctx->pr || !ctx->hv || ctx->dr)) { \ 422 GEN_PRIV; \ 423 } \ 424 } while (0) 425 #endif 426 427 #define CHK_NONE 428 429 /*****************************************************************************/ 430 /* PowerPC instructions table */ 431 432 #if defined(DO_PPC_STATISTICS) 433 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \ 434 { \ 435 .opc1 = op1, \ 436 .opc2 = op2, \ 437 .opc3 = op3, \ 438 .opc4 = 0xff, \ 439 .handler = { \ 440 .inval1 = invl, \ 441 .type = _typ, \ 442 .type2 = _typ2, \ 443 .handler = &gen_##name, \ 444 .oname = stringify(name), \ 445 }, \ 446 .oname = stringify(name), \ 447 } 448 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \ 449 { \ 450 .opc1 = op1, \ 451 .opc2 = op2, \ 452 .opc3 = op3, \ 453 .opc4 = 0xff, \ 454 .handler = { \ 455 .inval1 = invl1, \ 456 .inval2 = invl2, \ 457 .type = _typ, \ 458 .type2 = _typ2, \ 459 .handler = &gen_##name, \ 460 .oname = stringify(name), \ 461 }, \ 462 .oname = stringify(name), \ 463 } 464 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \ 465 { \ 466 .opc1 = op1, \ 467 .opc2 = op2, \ 468 .opc3 = op3, \ 469 .opc4 = 0xff, \ 470 .handler = { \ 471 .inval1 = invl, \ 472 .type = _typ, \ 473 .type2 = _typ2, \ 474 .handler = &gen_##name, \ 475 .oname = onam, \ 476 }, \ 477 .oname = onam, \ 478 } 479 #define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2) \ 480 { \ 481 .opc1 = op1, \ 482 .opc2 = op2, \ 483 .opc3 = op3, \ 484 .opc4 = op4, \ 485 .handler = { \ 486 .inval1 = invl, \ 487 .type = _typ, \ 488 .type2 = _typ2, \ 489 .handler = &gen_##name, \ 490 .oname = stringify(name), \ 491 }, \ 492 .oname = stringify(name), 
\ 493 } 494 #define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2) \ 495 { \ 496 .opc1 = op1, \ 497 .opc2 = op2, \ 498 .opc3 = op3, \ 499 .opc4 = op4, \ 500 .handler = { \ 501 .inval1 = invl, \ 502 .type = _typ, \ 503 .type2 = _typ2, \ 504 .handler = &gen_##name, \ 505 .oname = onam, \ 506 }, \ 507 .oname = onam, \ 508 } 509 #else 510 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \ 511 { \ 512 .opc1 = op1, \ 513 .opc2 = op2, \ 514 .opc3 = op3, \ 515 .opc4 = 0xff, \ 516 .handler = { \ 517 .inval1 = invl, \ 518 .type = _typ, \ 519 .type2 = _typ2, \ 520 .handler = &gen_##name, \ 521 }, \ 522 .oname = stringify(name), \ 523 } 524 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \ 525 { \ 526 .opc1 = op1, \ 527 .opc2 = op2, \ 528 .opc3 = op3, \ 529 .opc4 = 0xff, \ 530 .handler = { \ 531 .inval1 = invl1, \ 532 .inval2 = invl2, \ 533 .type = _typ, \ 534 .type2 = _typ2, \ 535 .handler = &gen_##name, \ 536 }, \ 537 .oname = stringify(name), \ 538 } 539 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \ 540 { \ 541 .opc1 = op1, \ 542 .opc2 = op2, \ 543 .opc3 = op3, \ 544 .opc4 = 0xff, \ 545 .handler = { \ 546 .inval1 = invl, \ 547 .type = _typ, \ 548 .type2 = _typ2, \ 549 .handler = &gen_##name, \ 550 }, \ 551 .oname = onam, \ 552 } 553 #define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2) \ 554 { \ 555 .opc1 = op1, \ 556 .opc2 = op2, \ 557 .opc3 = op3, \ 558 .opc4 = op4, \ 559 .handler = { \ 560 .inval1 = invl, \ 561 .type = _typ, \ 562 .type2 = _typ2, \ 563 .handler = &gen_##name, \ 564 }, \ 565 .oname = stringify(name), \ 566 } 567 #define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2) \ 568 { \ 569 .opc1 = op1, \ 570 .opc2 = op2, \ 571 .opc3 = op3, \ 572 .opc4 = op4, \ 573 .handler = { \ 574 .inval1 = invl, \ 575 .type = _typ, \ 576 .type2 = _typ2, \ 577 .handler = &gen_##name, \ 578 }, \ 579 .oname = onam, \ 580 } 581 #endif 582 583 /* SPR load/store helpers */ 584 static inline void gen_load_spr(TCGv t, int reg) 585 { 586 tcg_gen_ld_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg])); 587 } 588 589 static inline void gen_store_spr(int reg, TCGv t) 590 { 591 tcg_gen_st_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg])); 592 } 593 594 /* Invalid instruction */ 595 static void gen_invalid(DisasContext *ctx) 596 { 597 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 598 } 599 600 static opc_handler_t invalid_handler = { 601 .inval1 = 0xFFFFFFFF, 602 .inval2 = 0xFFFFFFFF, 603 .type = PPC_NONE, 604 .type2 = PPC_NONE, 605 .handler = gen_invalid, 606 }; 607 608 /*** Integer comparison ***/ 609 610 static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf) 611 { 612 TCGv t0 = tcg_temp_new(); 613 TCGv_i32 t1 = tcg_temp_new_i32(); 614 615 tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so); 616 617 tcg_gen_setcond_tl((s ? TCG_COND_LT: TCG_COND_LTU), t0, arg0, arg1); 618 tcg_gen_trunc_tl_i32(t1, t0); 619 tcg_gen_shli_i32(t1, t1, CRF_LT_BIT); 620 tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t1); 621 622 tcg_gen_setcond_tl((s ? 
TCG_COND_GT: TCG_COND_GTU), t0, arg0, arg1);
    tcg_gen_trunc_tl_i32(t1, t0);
    tcg_gen_shli_i32(t1, t1, CRF_GT_BIT);
    tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t1);

    tcg_gen_setcond_tl(TCG_COND_EQ, t0, arg0, arg1);
    tcg_gen_trunc_tl_i32(t1, t0);
    tcg_gen_shli_i32(t1, t1, CRF_EQ_BIT);
    tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t1);

    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}

static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
{
    TCGv t0 = tcg_const_tl(arg1);
    gen_op_cmp(arg0, t0, s, crf);
    tcg_temp_free(t0);
}

static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
{
    TCGv t0, t1;
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    if (s) {
        tcg_gen_ext32s_tl(t0, arg0);
        tcg_gen_ext32s_tl(t1, arg1);
    } else {
        tcg_gen_ext32u_tl(t0, arg0);
        tcg_gen_ext32u_tl(t1, arg1);
    }
    gen_op_cmp(t0, t1, s, crf);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
}

static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
{
    TCGv t0 = tcg_const_tl(arg1);
    gen_op_cmp32(arg0, t0, s, crf);
    tcg_temp_free(t0);
}

static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
{
    if (NARROW_MODE(ctx)) {
        gen_op_cmpi32(reg, 0, 1, 0);
    } else {
        gen_op_cmpi(reg, 0, 1, 0);
    }
}

/* cmp */
static void gen_cmp(DisasContext *ctx)
{
    if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
        gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                   1, crfD(ctx->opcode));
    } else {
        gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                     1, crfD(ctx->opcode));
    }
}

/* cmpi */
static void gen_cmpi(DisasContext *ctx)
{
    if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
        gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
                    1, crfD(ctx->opcode));
    } else {
        gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
                      1, crfD(ctx->opcode));
    }
}

/* cmpl */
static void gen_cmpl(DisasContext *ctx)
{
    if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
        gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                   0, crfD(ctx->opcode));
    } else {
        gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                     0, crfD(ctx->opcode));
    }
}

/* cmpli */
static void gen_cmpli(DisasContext *ctx)
{
    if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
        gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
                    0, crfD(ctx->opcode));
    } else {
        gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
                      0, crfD(ctx->opcode));
    }
}

/* cmprb - range comparison: isupper, isalpha, islower */
static void gen_cmprb(DisasContext *ctx)
{
    TCGv_i32 src1 = tcg_temp_new_i32();
    TCGv_i32 src2 = tcg_temp_new_i32();
    TCGv_i32 src2lo = tcg_temp_new_i32();
    TCGv_i32 src2hi = tcg_temp_new_i32();
    TCGv_i32 crf = cpu_crf[crfD(ctx->opcode)];

    tcg_gen_trunc_tl_i32(src1, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_trunc_tl_i32(src2, cpu_gpr[rB(ctx->opcode)]);

    tcg_gen_andi_i32(src1, src1, 0xFF);
    tcg_gen_ext8u_i32(src2lo, src2);
    tcg_gen_shri_i32(src2, src2, 8);
    tcg_gen_ext8u_i32(src2hi, src2);

    tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
    tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
    tcg_gen_and_i32(crf, src2lo, src2hi);
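
    /* crf now holds the "in range" test of src1 against the first byte pair
     * of rB (low byte = lower bound, next byte = upper bound).  When the L
     * field is set, a second byte pair is tested below and the results are
     * OR-ed; the final value is then shifted into the CR field's GT bit. */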
745 if (ctx->opcode & 0x00200000) { 746 tcg_gen_shri_i32(src2, src2, 8); 747 tcg_gen_ext8u_i32(src2lo, src2); 748 tcg_gen_shri_i32(src2, src2, 8); 749 tcg_gen_ext8u_i32(src2hi, src2); 750 tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1); 751 tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi); 752 tcg_gen_and_i32(src2lo, src2lo, src2hi); 753 tcg_gen_or_i32(crf, crf, src2lo); 754 } 755 tcg_gen_shli_i32(crf, crf, CRF_GT_BIT); 756 tcg_temp_free_i32(src1); 757 tcg_temp_free_i32(src2); 758 tcg_temp_free_i32(src2lo); 759 tcg_temp_free_i32(src2hi); 760 } 761 762 #if defined(TARGET_PPC64) 763 /* cmpeqb */ 764 static void gen_cmpeqb(DisasContext *ctx) 765 { 766 gen_helper_cmpeqb(cpu_crf[crfD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 767 cpu_gpr[rB(ctx->opcode)]); 768 } 769 #endif 770 771 /* isel (PowerPC 2.03 specification) */ 772 static void gen_isel(DisasContext *ctx) 773 { 774 uint32_t bi = rC(ctx->opcode); 775 uint32_t mask = 0x08 >> (bi & 0x03); 776 TCGv t0 = tcg_temp_new(); 777 TCGv zr; 778 779 tcg_gen_extu_i32_tl(t0, cpu_crf[bi >> 2]); 780 tcg_gen_andi_tl(t0, t0, mask); 781 782 zr = tcg_const_tl(0); 783 tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rD(ctx->opcode)], t0, zr, 784 rA(ctx->opcode) ? cpu_gpr[rA(ctx->opcode)] : zr, 785 cpu_gpr[rB(ctx->opcode)]); 786 tcg_temp_free(zr); 787 tcg_temp_free(t0); 788 } 789 790 /* cmpb: PowerPC 2.05 specification */ 791 static void gen_cmpb(DisasContext *ctx) 792 { 793 gen_helper_cmpb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 794 cpu_gpr[rB(ctx->opcode)]); 795 } 796 797 /*** Integer arithmetic ***/ 798 799 static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0, 800 TCGv arg1, TCGv arg2, int sub) 801 { 802 TCGv t0 = tcg_temp_new(); 803 804 tcg_gen_xor_tl(cpu_ov, arg0, arg2); 805 tcg_gen_xor_tl(t0, arg1, arg2); 806 if (sub) { 807 tcg_gen_and_tl(cpu_ov, cpu_ov, t0); 808 } else { 809 tcg_gen_andc_tl(cpu_ov, cpu_ov, t0); 810 } 811 tcg_temp_free(t0); 812 if (NARROW_MODE(ctx)) { 813 tcg_gen_extract_tl(cpu_ov, cpu_ov, 31, 1); 814 if (is_isa300(ctx)) { 815 tcg_gen_mov_tl(cpu_ov32, cpu_ov); 816 } 817 } else { 818 if (is_isa300(ctx)) { 819 tcg_gen_extract_tl(cpu_ov32, cpu_ov, 31, 1); 820 } 821 tcg_gen_extract_tl(cpu_ov, cpu_ov, TARGET_LONG_BITS - 1, 1); 822 } 823 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 824 } 825 826 static inline void gen_op_arith_compute_ca32(DisasContext *ctx, 827 TCGv res, TCGv arg0, TCGv arg1, 828 int sub) 829 { 830 TCGv t0; 831 832 if (!is_isa300(ctx)) { 833 return; 834 } 835 836 t0 = tcg_temp_new(); 837 if (sub) { 838 tcg_gen_eqv_tl(t0, arg0, arg1); 839 } else { 840 tcg_gen_xor_tl(t0, arg0, arg1); 841 } 842 tcg_gen_xor_tl(t0, t0, res); 843 tcg_gen_extract_tl(cpu_ca32, t0, 32, 1); 844 tcg_temp_free(t0); 845 } 846 847 /* Common add function */ 848 static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1, 849 TCGv arg2, bool add_ca, bool compute_ca, 850 bool compute_ov, bool compute_rc0) 851 { 852 TCGv t0 = ret; 853 854 if (compute_ca || compute_ov) { 855 t0 = tcg_temp_new(); 856 } 857 858 if (compute_ca) { 859 if (NARROW_MODE(ctx)) { 860 /* Caution: a non-obvious corner case of the spec is that we 861 must produce the *entire* 64-bit addition, but produce the 862 carry into bit 32. 
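               The code below does the full-width add and then recovers the
               carry out of the low 32 bits by XOR-ing the result with the
               carry-less sum (arg1 ^ arg2): bit 32 of that XOR is the carry
               into bit 32.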
*/ 863 TCGv t1 = tcg_temp_new(); 864 tcg_gen_xor_tl(t1, arg1, arg2); /* add without carry */ 865 tcg_gen_add_tl(t0, arg1, arg2); 866 if (add_ca) { 867 tcg_gen_add_tl(t0, t0, cpu_ca); 868 } 869 tcg_gen_xor_tl(cpu_ca, t0, t1); /* bits changed w/ carry */ 870 tcg_temp_free(t1); 871 tcg_gen_shri_tl(cpu_ca, cpu_ca, 32); /* extract bit 32 */ 872 tcg_gen_andi_tl(cpu_ca, cpu_ca, 1); 873 if (is_isa300(ctx)) { 874 tcg_gen_mov_tl(cpu_ca32, cpu_ca); 875 } 876 } else { 877 TCGv zero = tcg_const_tl(0); 878 if (add_ca) { 879 tcg_gen_add2_tl(t0, cpu_ca, arg1, zero, cpu_ca, zero); 880 tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, arg2, zero); 881 } else { 882 tcg_gen_add2_tl(t0, cpu_ca, arg1, zero, arg2, zero); 883 } 884 gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, 0); 885 tcg_temp_free(zero); 886 } 887 } else { 888 tcg_gen_add_tl(t0, arg1, arg2); 889 if (add_ca) { 890 tcg_gen_add_tl(t0, t0, cpu_ca); 891 } 892 } 893 894 if (compute_ov) { 895 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0); 896 } 897 if (unlikely(compute_rc0)) { 898 gen_set_Rc0(ctx, t0); 899 } 900 901 if (!TCGV_EQUAL(t0, ret)) { 902 tcg_gen_mov_tl(ret, t0); 903 tcg_temp_free(t0); 904 } 905 } 906 /* Add functions with two operands */ 907 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \ 908 static void glue(gen_, name)(DisasContext *ctx) \ 909 { \ 910 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \ 911 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 912 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 913 } 914 /* Add functions with one operand and one immediate */ 915 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \ 916 add_ca, compute_ca, compute_ov) \ 917 static void glue(gen_, name)(DisasContext *ctx) \ 918 { \ 919 TCGv t0 = tcg_const_tl(const_val); \ 920 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \ 921 cpu_gpr[rA(ctx->opcode)], t0, \ 922 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 923 tcg_temp_free(t0); \ 924 } 925 926 /* add add. addo addo. */ 927 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0) 928 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1) 929 /* addc addc. addco addco. */ 930 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0) 931 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1) 932 /* adde adde. addeo addeo. */ 933 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0) 934 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1) 935 /* addme addme. addmeo addmeo. */ 936 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0) 937 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1) 938 /* addze addze. 
addzeo addzeo.*/ 939 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0) 940 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1) 941 /* addi */ 942 static void gen_addi(DisasContext *ctx) 943 { 944 target_long simm = SIMM(ctx->opcode); 945 946 if (rA(ctx->opcode) == 0) { 947 /* li case */ 948 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm); 949 } else { 950 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], 951 cpu_gpr[rA(ctx->opcode)], simm); 952 } 953 } 954 /* addic addic.*/ 955 static inline void gen_op_addic(DisasContext *ctx, bool compute_rc0) 956 { 957 TCGv c = tcg_const_tl(SIMM(ctx->opcode)); 958 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 959 c, 0, 1, 0, compute_rc0); 960 tcg_temp_free(c); 961 } 962 963 static void gen_addic(DisasContext *ctx) 964 { 965 gen_op_addic(ctx, 0); 966 } 967 968 static void gen_addic_(DisasContext *ctx) 969 { 970 gen_op_addic(ctx, 1); 971 } 972 973 /* addis */ 974 static void gen_addis(DisasContext *ctx) 975 { 976 target_long simm = SIMM(ctx->opcode); 977 978 if (rA(ctx->opcode) == 0) { 979 /* lis case */ 980 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm << 16); 981 } else { 982 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], 983 cpu_gpr[rA(ctx->opcode)], simm << 16); 984 } 985 } 986 987 /* addpcis */ 988 static void gen_addpcis(DisasContext *ctx) 989 { 990 target_long d = DX(ctx->opcode); 991 992 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], ctx->nip + (d << 16)); 993 } 994 995 static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1, 996 TCGv arg2, int sign, int compute_ov) 997 { 998 TCGv_i32 t0 = tcg_temp_new_i32(); 999 TCGv_i32 t1 = tcg_temp_new_i32(); 1000 TCGv_i32 t2 = tcg_temp_new_i32(); 1001 TCGv_i32 t3 = tcg_temp_new_i32(); 1002 1003 tcg_gen_trunc_tl_i32(t0, arg1); 1004 tcg_gen_trunc_tl_i32(t1, arg2); 1005 if (sign) { 1006 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN); 1007 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1); 1008 tcg_gen_and_i32(t2, t2, t3); 1009 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0); 1010 tcg_gen_or_i32(t2, t2, t3); 1011 tcg_gen_movi_i32(t3, 0); 1012 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1); 1013 tcg_gen_div_i32(t3, t0, t1); 1014 tcg_gen_extu_i32_tl(ret, t3); 1015 } else { 1016 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t1, 0); 1017 tcg_gen_movi_i32(t3, 0); 1018 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1); 1019 tcg_gen_divu_i32(t3, t0, t1); 1020 tcg_gen_extu_i32_tl(ret, t3); 1021 } 1022 if (compute_ov) { 1023 tcg_gen_extu_i32_tl(cpu_ov, t2); 1024 if (is_isa300(ctx)) { 1025 tcg_gen_extu_i32_tl(cpu_ov32, t2); 1026 } 1027 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 1028 } 1029 tcg_temp_free_i32(t0); 1030 tcg_temp_free_i32(t1); 1031 tcg_temp_free_i32(t2); 1032 tcg_temp_free_i32(t3); 1033 1034 if (unlikely(Rc(ctx->opcode) != 0)) 1035 gen_set_Rc0(ctx, ret); 1036 } 1037 /* Div functions */ 1038 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \ 1039 static void glue(gen_, name)(DisasContext *ctx) \ 1040 { \ 1041 gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)], \ 1042 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 1043 sign, compute_ov); \ 1044 } 1045 /* divwu divwu. divwuo divwuo. */ 1046 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0); 1047 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1); 1048 /* divw divw. divwo divwo. */ 1049 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0); 1050 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1); 1051 1052 /* div[wd]eu[o][.] 
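   (divide word/doubleword "extended": the dividend is the RA value
   concatenated with 32 or 64 zero bits, so these forms are implemented
   with helpers rather than a single TCG op)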
*/ 1053 #define GEN_DIVE(name, hlpr, compute_ov) \ 1054 static void gen_##name(DisasContext *ctx) \ 1055 { \ 1056 TCGv_i32 t0 = tcg_const_i32(compute_ov); \ 1057 gen_helper_##hlpr(cpu_gpr[rD(ctx->opcode)], cpu_env, \ 1058 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); \ 1059 tcg_temp_free_i32(t0); \ 1060 if (unlikely(Rc(ctx->opcode) != 0)) { \ 1061 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); \ 1062 } \ 1063 } 1064 1065 GEN_DIVE(divweu, divweu, 0); 1066 GEN_DIVE(divweuo, divweu, 1); 1067 GEN_DIVE(divwe, divwe, 0); 1068 GEN_DIVE(divweo, divwe, 1); 1069 1070 #if defined(TARGET_PPC64) 1071 static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1, 1072 TCGv arg2, int sign, int compute_ov) 1073 { 1074 TCGv_i64 t0 = tcg_temp_new_i64(); 1075 TCGv_i64 t1 = tcg_temp_new_i64(); 1076 TCGv_i64 t2 = tcg_temp_new_i64(); 1077 TCGv_i64 t3 = tcg_temp_new_i64(); 1078 1079 tcg_gen_mov_i64(t0, arg1); 1080 tcg_gen_mov_i64(t1, arg2); 1081 if (sign) { 1082 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN); 1083 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1); 1084 tcg_gen_and_i64(t2, t2, t3); 1085 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0); 1086 tcg_gen_or_i64(t2, t2, t3); 1087 tcg_gen_movi_i64(t3, 0); 1088 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1); 1089 tcg_gen_div_i64(ret, t0, t1); 1090 } else { 1091 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t1, 0); 1092 tcg_gen_movi_i64(t3, 0); 1093 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1); 1094 tcg_gen_divu_i64(ret, t0, t1); 1095 } 1096 if (compute_ov) { 1097 tcg_gen_mov_tl(cpu_ov, t2); 1098 if (is_isa300(ctx)) { 1099 tcg_gen_mov_tl(cpu_ov32, t2); 1100 } 1101 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 1102 } 1103 tcg_temp_free_i64(t0); 1104 tcg_temp_free_i64(t1); 1105 tcg_temp_free_i64(t2); 1106 tcg_temp_free_i64(t3); 1107 1108 if (unlikely(Rc(ctx->opcode) != 0)) 1109 gen_set_Rc0(ctx, ret); 1110 } 1111 1112 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \ 1113 static void glue(gen_, name)(DisasContext *ctx) \ 1114 { \ 1115 gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)], \ 1116 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 1117 sign, compute_ov); \ 1118 } 1119 /* divdu divdu. divduo divduo. */ 1120 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0); 1121 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1); 1122 /* divd divd. divdo divdo. 
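   (as with divw above, divide-by-zero and INT64_MIN / -1 are detected up
   front and the divisor is patched so the host division cannot trap; the
   o-forms copy that condition into OV)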
*/ 1123 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0); 1124 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1); 1125 1126 GEN_DIVE(divdeu, divdeu, 0); 1127 GEN_DIVE(divdeuo, divdeu, 1); 1128 GEN_DIVE(divde, divde, 0); 1129 GEN_DIVE(divdeo, divde, 1); 1130 #endif 1131 1132 static inline void gen_op_arith_modw(DisasContext *ctx, TCGv ret, TCGv arg1, 1133 TCGv arg2, int sign) 1134 { 1135 TCGv_i32 t0 = tcg_temp_new_i32(); 1136 TCGv_i32 t1 = tcg_temp_new_i32(); 1137 1138 tcg_gen_trunc_tl_i32(t0, arg1); 1139 tcg_gen_trunc_tl_i32(t1, arg2); 1140 if (sign) { 1141 TCGv_i32 t2 = tcg_temp_new_i32(); 1142 TCGv_i32 t3 = tcg_temp_new_i32(); 1143 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN); 1144 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1); 1145 tcg_gen_and_i32(t2, t2, t3); 1146 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0); 1147 tcg_gen_or_i32(t2, t2, t3); 1148 tcg_gen_movi_i32(t3, 0); 1149 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1); 1150 tcg_gen_rem_i32(t3, t0, t1); 1151 tcg_gen_ext_i32_tl(ret, t3); 1152 tcg_temp_free_i32(t2); 1153 tcg_temp_free_i32(t3); 1154 } else { 1155 TCGv_i32 t2 = tcg_const_i32(1); 1156 TCGv_i32 t3 = tcg_const_i32(0); 1157 tcg_gen_movcond_i32(TCG_COND_EQ, t1, t1, t3, t2, t1); 1158 tcg_gen_remu_i32(t3, t0, t1); 1159 tcg_gen_extu_i32_tl(ret, t3); 1160 tcg_temp_free_i32(t2); 1161 tcg_temp_free_i32(t3); 1162 } 1163 tcg_temp_free_i32(t0); 1164 tcg_temp_free_i32(t1); 1165 } 1166 1167 #define GEN_INT_ARITH_MODW(name, opc3, sign) \ 1168 static void glue(gen_, name)(DisasContext *ctx) \ 1169 { \ 1170 gen_op_arith_modw(ctx, cpu_gpr[rD(ctx->opcode)], \ 1171 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 1172 sign); \ 1173 } 1174 1175 GEN_INT_ARITH_MODW(moduw, 0x08, 0); 1176 GEN_INT_ARITH_MODW(modsw, 0x18, 1); 1177 1178 #if defined(TARGET_PPC64) 1179 static inline void gen_op_arith_modd(DisasContext *ctx, TCGv ret, TCGv arg1, 1180 TCGv arg2, int sign) 1181 { 1182 TCGv_i64 t0 = tcg_temp_new_i64(); 1183 TCGv_i64 t1 = tcg_temp_new_i64(); 1184 1185 tcg_gen_mov_i64(t0, arg1); 1186 tcg_gen_mov_i64(t1, arg2); 1187 if (sign) { 1188 TCGv_i64 t2 = tcg_temp_new_i64(); 1189 TCGv_i64 t3 = tcg_temp_new_i64(); 1190 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN); 1191 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1); 1192 tcg_gen_and_i64(t2, t2, t3); 1193 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0); 1194 tcg_gen_or_i64(t2, t2, t3); 1195 tcg_gen_movi_i64(t3, 0); 1196 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1); 1197 tcg_gen_rem_i64(ret, t0, t1); 1198 tcg_temp_free_i64(t2); 1199 tcg_temp_free_i64(t3); 1200 } else { 1201 TCGv_i64 t2 = tcg_const_i64(1); 1202 TCGv_i64 t3 = tcg_const_i64(0); 1203 tcg_gen_movcond_i64(TCG_COND_EQ, t1, t1, t3, t2, t1); 1204 tcg_gen_remu_i64(ret, t0, t1); 1205 tcg_temp_free_i64(t2); 1206 tcg_temp_free_i64(t3); 1207 } 1208 tcg_temp_free_i64(t0); 1209 tcg_temp_free_i64(t1); 1210 } 1211 1212 #define GEN_INT_ARITH_MODD(name, opc3, sign) \ 1213 static void glue(gen_, name)(DisasContext *ctx) \ 1214 { \ 1215 gen_op_arith_modd(ctx, cpu_gpr[rD(ctx->opcode)], \ 1216 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 1217 sign); \ 1218 } 1219 1220 GEN_INT_ARITH_MODD(modud, 0x08, 0); 1221 GEN_INT_ARITH_MODD(modsd, 0x18, 1); 1222 #endif 1223 1224 /* mulhw mulhw. 
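   (high 32 bits of the signed 32 x 32-bit product)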
*/ 1225 static void gen_mulhw(DisasContext *ctx) 1226 { 1227 TCGv_i32 t0 = tcg_temp_new_i32(); 1228 TCGv_i32 t1 = tcg_temp_new_i32(); 1229 1230 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); 1231 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); 1232 tcg_gen_muls2_i32(t0, t1, t0, t1); 1233 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1); 1234 tcg_temp_free_i32(t0); 1235 tcg_temp_free_i32(t1); 1236 if (unlikely(Rc(ctx->opcode) != 0)) 1237 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1238 } 1239 1240 /* mulhwu mulhwu. */ 1241 static void gen_mulhwu(DisasContext *ctx) 1242 { 1243 TCGv_i32 t0 = tcg_temp_new_i32(); 1244 TCGv_i32 t1 = tcg_temp_new_i32(); 1245 1246 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); 1247 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); 1248 tcg_gen_mulu2_i32(t0, t1, t0, t1); 1249 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1); 1250 tcg_temp_free_i32(t0); 1251 tcg_temp_free_i32(t1); 1252 if (unlikely(Rc(ctx->opcode) != 0)) 1253 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1254 } 1255 1256 /* mullw mullw. */ 1257 static void gen_mullw(DisasContext *ctx) 1258 { 1259 #if defined(TARGET_PPC64) 1260 TCGv_i64 t0, t1; 1261 t0 = tcg_temp_new_i64(); 1262 t1 = tcg_temp_new_i64(); 1263 tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]); 1264 tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]); 1265 tcg_gen_mul_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); 1266 tcg_temp_free(t0); 1267 tcg_temp_free(t1); 1268 #else 1269 tcg_gen_mul_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1270 cpu_gpr[rB(ctx->opcode)]); 1271 #endif 1272 if (unlikely(Rc(ctx->opcode) != 0)) 1273 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1274 } 1275 1276 /* mullwo mullwo. */ 1277 static void gen_mullwo(DisasContext *ctx) 1278 { 1279 TCGv_i32 t0 = tcg_temp_new_i32(); 1280 TCGv_i32 t1 = tcg_temp_new_i32(); 1281 1282 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); 1283 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); 1284 tcg_gen_muls2_i32(t0, t1, t0, t1); 1285 #if defined(TARGET_PPC64) 1286 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); 1287 #else 1288 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], t0); 1289 #endif 1290 1291 tcg_gen_sari_i32(t0, t0, 31); 1292 tcg_gen_setcond_i32(TCG_COND_NE, t0, t0, t1); 1293 tcg_gen_extu_i32_tl(cpu_ov, t0); 1294 if (is_isa300(ctx)) { 1295 tcg_gen_mov_tl(cpu_ov32, cpu_ov); 1296 } 1297 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 1298 1299 tcg_temp_free_i32(t0); 1300 tcg_temp_free_i32(t1); 1301 if (unlikely(Rc(ctx->opcode) != 0)) 1302 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1303 } 1304 1305 /* mulli */ 1306 static void gen_mulli(DisasContext *ctx) 1307 { 1308 tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1309 SIMM(ctx->opcode)); 1310 } 1311 1312 #if defined(TARGET_PPC64) 1313 /* mulhd mulhd. */ 1314 static void gen_mulhd(DisasContext *ctx) 1315 { 1316 TCGv lo = tcg_temp_new(); 1317 tcg_gen_muls2_tl(lo, cpu_gpr[rD(ctx->opcode)], 1318 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 1319 tcg_temp_free(lo); 1320 if (unlikely(Rc(ctx->opcode) != 0)) { 1321 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1322 } 1323 } 1324 1325 /* mulhdu mulhdu. */ 1326 static void gen_mulhdu(DisasContext *ctx) 1327 { 1328 TCGv lo = tcg_temp_new(); 1329 tcg_gen_mulu2_tl(lo, cpu_gpr[rD(ctx->opcode)], 1330 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 1331 tcg_temp_free(lo); 1332 if (unlikely(Rc(ctx->opcode) != 0)) { 1333 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1334 } 1335 } 1336 1337 /* mulld mulld. 
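   (low 64 bits of the product; only mulldo below needs the full 128-bit
   result in order to compute OV)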
 */
static void gen_mulld(DisasContext *ctx)
{
    tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                   cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* mulldo mulldo. */
static void gen_mulldo(DisasContext *ctx)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_muls2_i64(t0, t1, cpu_gpr[rA(ctx->opcode)],
                      cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], t0);

    tcg_gen_sari_i64(t0, t0, 63);
    tcg_gen_setcond_i64(TCG_COND_NE, cpu_ov, t0, t1);
    if (is_isa300(ctx)) {
        tcg_gen_mov_tl(cpu_ov32, cpu_ov);
    }
    tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}
#endif

/* Common subf function */
static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, bool add_ca, bool compute_ca,
                                     bool compute_ov, bool compute_rc0)
{
    TCGv t0 = ret;

    if (compute_ca || compute_ov) {
        t0 = tcg_temp_new();
    }

    if (compute_ca) {
        /* dest = ~arg1 + arg2 [+ ca]. */
        if (NARROW_MODE(ctx)) {
            /* Caution: a non-obvious corner case of the spec is that we
               must produce the *entire* 64-bit addition, but produce the
               carry into bit 32. */
            TCGv inv1 = tcg_temp_new();
            TCGv t1 = tcg_temp_new();
            tcg_gen_not_tl(inv1, arg1);
            if (add_ca) {
                tcg_gen_add_tl(t0, arg2, cpu_ca);
            } else {
                tcg_gen_addi_tl(t0, arg2, 1);
            }
            tcg_gen_xor_tl(t1, arg2, inv1);       /* add without carry */
            tcg_gen_add_tl(t0, t0, inv1);
            tcg_temp_free(inv1);
            tcg_gen_xor_tl(cpu_ca, t0, t1);       /* bits changed w/ carry */
            tcg_temp_free(t1);
            tcg_gen_shri_tl(cpu_ca, cpu_ca, 32);  /* extract bit 32 */
            tcg_gen_andi_tl(cpu_ca, cpu_ca, 1);
            if (is_isa300(ctx)) {
                tcg_gen_mov_tl(cpu_ca32, cpu_ca);
            }
        } else if (add_ca) {
            TCGv zero, inv1 = tcg_temp_new();
            tcg_gen_not_tl(inv1, arg1);
            zero = tcg_const_tl(0);
            tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero);
            tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero);
            gen_op_arith_compute_ca32(ctx, t0, inv1, arg2, 0);
            tcg_temp_free(zero);
            tcg_temp_free(inv1);
        } else {
            tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1);
            tcg_gen_sub_tl(t0, arg2, arg1);
            gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, 1);
        }
    } else if (add_ca) {
        /* Since we're ignoring carry-out, we can simplify the
           standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1.
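           (By two's complement, ~arg1 = -arg1 - 1, so
           ~arg1 + arg2 + ca = arg2 - arg1 + ca - 1.)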
         */
        tcg_gen_sub_tl(t0, arg2, arg1);
        tcg_gen_add_tl(t0, t0, cpu_ca);
        tcg_gen_subi_tl(t0, t0, 1);
    } else {
        tcg_gen_sub_tl(t0, arg2, arg1);
    }

    if (compute_ov) {
        gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
    }
    if (unlikely(compute_rc0)) {
        gen_set_Rc0(ctx, t0);
    }

    if (!TCGV_EQUAL(t0, ret)) {
        tcg_gen_mov_tl(ret, t0);
        tcg_temp_free(t0);
    }
}
/* Sub functions with two operands */
#define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
                      add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
}
/* Sub functions with one operand and one immediate */
#define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
                                 add_ca, compute_ca, compute_ov) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv t0 = tcg_const_tl(const_val); \
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
                      cpu_gpr[rA(ctx->opcode)], t0, \
                      add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
    tcg_temp_free(t0); \
}
/* subf subf. subfo subfo. */
GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
/* subfc subfc. subfco subfco. */
GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
/* subfe subfe. subfeo subfeo. */
GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
/* subfme subfme. subfmeo subfmeo. */
GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
/* subfze subfze. subfzeo subfzeo. */
GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)

/* subfic */
static void gen_subfic(DisasContext *ctx)
{
    TCGv c = tcg_const_tl(SIMM(ctx->opcode));
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                      c, 0, 1, 0, 0);
    tcg_temp_free(c);
}

/* neg neg. nego nego. */
static inline void gen_op_arith_neg(DisasContext *ctx, bool compute_ov)
{
    TCGv zero = tcg_const_tl(0);
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                      zero, 0, 0, compute_ov, Rc(ctx->opcode));
    tcg_temp_free(zero);
}

static void gen_neg(DisasContext *ctx)
{
    tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}

static void gen_nego(DisasContext *ctx)
{
    gen_op_arith_neg(ctx, 1);
}

/*** Integer logical ***/
#define GEN_LOGICAL2(name, tcg_op, opc, type) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], \
           cpu_gpr[rB(ctx->opcode)]); \
    if (unlikely(Rc(ctx->opcode) != 0)) \
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \
}

#define GEN_LOGICAL1(name, tcg_op, opc, type) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); \
    if (unlikely(Rc(ctx->opcode) != 0)) \
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \
}

/* and & and.
*/ 1528 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER); 1529 /* andc & andc. */ 1530 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER); 1531 1532 /* andi. */ 1533 static void gen_andi_(DisasContext *ctx) 1534 { 1535 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode)); 1536 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 1537 } 1538 1539 /* andis. */ 1540 static void gen_andis_(DisasContext *ctx) 1541 { 1542 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode) << 16); 1543 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 1544 } 1545 1546 /* cntlzw */ 1547 static void gen_cntlzw(DisasContext *ctx) 1548 { 1549 TCGv_i32 t = tcg_temp_new_i32(); 1550 1551 tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]); 1552 tcg_gen_clzi_i32(t, t, 32); 1553 tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t); 1554 tcg_temp_free_i32(t); 1555 1556 if (unlikely(Rc(ctx->opcode) != 0)) 1557 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 1558 } 1559 1560 /* cnttzw */ 1561 static void gen_cnttzw(DisasContext *ctx) 1562 { 1563 TCGv_i32 t = tcg_temp_new_i32(); 1564 1565 tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]); 1566 tcg_gen_ctzi_i32(t, t, 32); 1567 tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t); 1568 tcg_temp_free_i32(t); 1569 1570 if (unlikely(Rc(ctx->opcode) != 0)) { 1571 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 1572 } 1573 } 1574 1575 /* eqv & eqv. */ 1576 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER); 1577 /* extsb & extsb. */ 1578 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER); 1579 /* extsh & extsh. */ 1580 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER); 1581 /* nand & nand. */ 1582 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER); 1583 /* nor & nor. */ 1584 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER); 1585 1586 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) 1587 static void gen_pause(DisasContext *ctx) 1588 { 1589 TCGv_i32 t0 = tcg_const_i32(0); 1590 tcg_gen_st_i32(t0, cpu_env, 1591 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted)); 1592 tcg_temp_free_i32(t0); 1593 1594 /* Stop translation, this gives other CPUs a chance to run */ 1595 gen_exception_nip(ctx, EXCP_HLT, ctx->nip); 1596 } 1597 #endif /* defined(TARGET_PPC64) */ 1598 1599 /* or & or. */ 1600 static void gen_or(DisasContext *ctx) 1601 { 1602 int rs, ra, rb; 1603 1604 rs = rS(ctx->opcode); 1605 ra = rA(ctx->opcode); 1606 rb = rB(ctx->opcode); 1607 /* Optimisation for mr. 
ri case */
    if (rs != ra || rs != rb) {
        if (rs != rb)
            tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
        else
            tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);
        if (unlikely(Rc(ctx->opcode) != 0))
            gen_set_Rc0(ctx, cpu_gpr[ra]);
    } else if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rs]);
#if defined(TARGET_PPC64)
    } else if (rs != 0) { /* 0 is nop */
        int prio = 0;

        switch (rs) {
        case 1:
            /* Set process priority to low */
            prio = 2;
            break;
        case 6:
            /* Set process priority to medium-low */
            prio = 3;
            break;
        case 2:
            /* Set process priority to normal */
            prio = 4;
            break;
#if !defined(CONFIG_USER_ONLY)
        case 31:
            if (!ctx->pr) {
                /* Set process priority to very low */
                prio = 1;
            }
            break;
        case 5:
            if (!ctx->pr) {
                /* Set process priority to medium-high */
                prio = 5;
            }
            break;
        case 3:
            if (!ctx->pr) {
                /* Set process priority to high */
                prio = 6;
            }
            break;
        case 7:
            if (ctx->hv && !ctx->pr) {
                /* Set process priority to very high */
                prio = 7;
            }
            break;
#endif
        default:
            break;
        }
        if (prio) {
            TCGv t0 = tcg_temp_new();
            gen_load_spr(t0, SPR_PPR);
            tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
            tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
            gen_store_spr(SPR_PPR, t0);
            tcg_temp_free(t0);
        }
#if !defined(CONFIG_USER_ONLY)
        /* Pause out of TCG otherwise spin loops with smt_low eat too much
         * CPU and the kernel hangs.  This applies to all encodings other
         * than no-op, e.g., miso(rs=26), yield(27), mdoio(29), mdoom(30),
         * and all currently undefined.
         */
        gen_pause(ctx);
#endif
#endif
    }
}
/* orc & orc. */
GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);

/* xor & xor.
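   (xor with rS == rB always yields zero, the classic register-clearing
   idiom, so that case below folds to loading zero)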
*/ 1686 static void gen_xor(DisasContext *ctx) 1687 { 1688 /* Optimisation for "set to zero" case */ 1689 if (rS(ctx->opcode) != rB(ctx->opcode)) 1690 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 1691 else 1692 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 1693 if (unlikely(Rc(ctx->opcode) != 0)) 1694 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 1695 } 1696 1697 /* ori */ 1698 static void gen_ori(DisasContext *ctx) 1699 { 1700 target_ulong uimm = UIMM(ctx->opcode); 1701 1702 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 1703 return; 1704 } 1705 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm); 1706 } 1707 1708 /* oris */ 1709 static void gen_oris(DisasContext *ctx) 1710 { 1711 target_ulong uimm = UIMM(ctx->opcode); 1712 1713 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 1714 /* NOP */ 1715 return; 1716 } 1717 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16); 1718 } 1719 1720 /* xori */ 1721 static void gen_xori(DisasContext *ctx) 1722 { 1723 target_ulong uimm = UIMM(ctx->opcode); 1724 1725 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 1726 /* NOP */ 1727 return; 1728 } 1729 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm); 1730 } 1731 1732 /* xoris */ 1733 static void gen_xoris(DisasContext *ctx) 1734 { 1735 target_ulong uimm = UIMM(ctx->opcode); 1736 1737 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 1738 /* NOP */ 1739 return; 1740 } 1741 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16); 1742 } 1743 1744 /* popcntb : PowerPC 2.03 specification */ 1745 static void gen_popcntb(DisasContext *ctx) 1746 { 1747 gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 1748 } 1749 1750 static void gen_popcntw(DisasContext *ctx) 1751 { 1752 #if defined(TARGET_PPC64) 1753 gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 1754 #else 1755 tcg_gen_ctpop_i32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 1756 #endif 1757 } 1758 1759 #if defined(TARGET_PPC64) 1760 /* popcntd: PowerPC 2.06 specification */ 1761 static void gen_popcntd(DisasContext *ctx) 1762 { 1763 tcg_gen_ctpop_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 1764 } 1765 #endif 1766 1767 /* prtyw: PowerPC 2.05 specification */ 1768 static void gen_prtyw(DisasContext *ctx) 1769 { 1770 TCGv ra = cpu_gpr[rA(ctx->opcode)]; 1771 TCGv rs = cpu_gpr[rS(ctx->opcode)]; 1772 TCGv t0 = tcg_temp_new(); 1773 tcg_gen_shri_tl(t0, rs, 16); 1774 tcg_gen_xor_tl(ra, rs, t0); 1775 tcg_gen_shri_tl(t0, ra, 8); 1776 tcg_gen_xor_tl(ra, ra, t0); 1777 tcg_gen_andi_tl(ra, ra, (target_ulong)0x100000001ULL); 1778 tcg_temp_free(t0); 1779 } 1780 1781 #if defined(TARGET_PPC64) 1782 /* prtyd: PowerPC 2.05 specification */ 1783 static void gen_prtyd(DisasContext *ctx) 1784 { 1785 TCGv ra = cpu_gpr[rA(ctx->opcode)]; 1786 TCGv rs = cpu_gpr[rS(ctx->opcode)]; 1787 TCGv t0 = tcg_temp_new(); 1788 tcg_gen_shri_tl(t0, rs, 32); 1789 tcg_gen_xor_tl(ra, rs, t0); 1790 tcg_gen_shri_tl(t0, ra, 16); 1791 tcg_gen_xor_tl(ra, ra, t0); 1792 tcg_gen_shri_tl(t0, ra, 8); 1793 tcg_gen_xor_tl(ra, ra, t0); 1794 tcg_gen_andi_tl(ra, ra, 1); 1795 tcg_temp_free(t0); 1796 } 1797 #endif 1798 1799 #if defined(TARGET_PPC64) 1800 /* bpermd */ 1801 static void gen_bpermd(DisasContext *ctx) 1802 { 1803 gen_helper_bpermd(cpu_gpr[rA(ctx->opcode)], 1804 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 1805 } 1806 #endif 1807 1808 #if defined(TARGET_PPC64) 1809 /* 
extsw & extsw. */ 1810 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B); 1811 1812 /* cntlzd */ 1813 static void gen_cntlzd(DisasContext *ctx) 1814 { 1815 tcg_gen_clzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64); 1816 if (unlikely(Rc(ctx->opcode) != 0)) 1817 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 1818 } 1819 1820 /* cnttzd */ 1821 static void gen_cnttzd(DisasContext *ctx) 1822 { 1823 tcg_gen_ctzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64); 1824 if (unlikely(Rc(ctx->opcode) != 0)) { 1825 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 1826 } 1827 } 1828 1829 /* darn */ 1830 static void gen_darn(DisasContext *ctx) 1831 { 1832 int l = L(ctx->opcode); 1833 1834 if (l == 0) { 1835 gen_helper_darn32(cpu_gpr[rD(ctx->opcode)]); 1836 } else if (l <= 2) { 1837 /* Return 64-bit random for both CRN and RRN */ 1838 gen_helper_darn64(cpu_gpr[rD(ctx->opcode)]); 1839 } else { 1840 tcg_gen_movi_i64(cpu_gpr[rD(ctx->opcode)], -1); 1841 } 1842 } 1843 #endif 1844 1845 /*** Integer rotate ***/ 1846 1847 /* rlwimi & rlwimi. */ 1848 static void gen_rlwimi(DisasContext *ctx) 1849 { 1850 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 1851 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 1852 uint32_t sh = SH(ctx->opcode); 1853 uint32_t mb = MB(ctx->opcode); 1854 uint32_t me = ME(ctx->opcode); 1855 1856 if (sh == (31-me) && mb <= me) { 1857 tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1); 1858 } else { 1859 target_ulong mask; 1860 TCGv t1; 1861 1862 #if defined(TARGET_PPC64) 1863 mb += 32; 1864 me += 32; 1865 #endif 1866 mask = MASK(mb, me); 1867 1868 t1 = tcg_temp_new(); 1869 if (mask <= 0xffffffffu) { 1870 TCGv_i32 t0 = tcg_temp_new_i32(); 1871 tcg_gen_trunc_tl_i32(t0, t_rs); 1872 tcg_gen_rotli_i32(t0, t0, sh); 1873 tcg_gen_extu_i32_tl(t1, t0); 1874 tcg_temp_free_i32(t0); 1875 } else { 1876 #if defined(TARGET_PPC64) 1877 tcg_gen_deposit_i64(t1, t_rs, t_rs, 32, 32); 1878 tcg_gen_rotli_i64(t1, t1, sh); 1879 #else 1880 g_assert_not_reached(); 1881 #endif 1882 } 1883 1884 tcg_gen_andi_tl(t1, t1, mask); 1885 tcg_gen_andi_tl(t_ra, t_ra, ~mask); 1886 tcg_gen_or_tl(t_ra, t_ra, t1); 1887 tcg_temp_free(t1); 1888 } 1889 if (unlikely(Rc(ctx->opcode) != 0)) { 1890 gen_set_Rc0(ctx, t_ra); 1891 } 1892 } 1893 1894 /* rlwinm & rlwinm. 
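   (rotate left word immediate then AND with mask; the shift-into-field and
   plain-extract special cases below map directly onto the deposit/extract
   TCG ops, everything else falls back to rotate + mask)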
*/ 1895 static void gen_rlwinm(DisasContext *ctx) 1896 { 1897 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 1898 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 1899 int sh = SH(ctx->opcode); 1900 int mb = MB(ctx->opcode); 1901 int me = ME(ctx->opcode); 1902 int len = me - mb + 1; 1903 int rsh = (32 - sh) & 31; 1904 1905 if (sh != 0 && len > 0 && me == (31 - sh)) { 1906 tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len); 1907 } else if (me == 31 && rsh + len <= 32) { 1908 tcg_gen_extract_tl(t_ra, t_rs, rsh, len); 1909 } else { 1910 target_ulong mask; 1911 #if defined(TARGET_PPC64) 1912 mb += 32; 1913 me += 32; 1914 #endif 1915 mask = MASK(mb, me); 1916 if (sh == 0) { 1917 tcg_gen_andi_tl(t_ra, t_rs, mask); 1918 } else if (mask <= 0xffffffffu) { 1919 TCGv_i32 t0 = tcg_temp_new_i32(); 1920 tcg_gen_trunc_tl_i32(t0, t_rs); 1921 tcg_gen_rotli_i32(t0, t0, sh); 1922 tcg_gen_andi_i32(t0, t0, mask); 1923 tcg_gen_extu_i32_tl(t_ra, t0); 1924 tcg_temp_free_i32(t0); 1925 } else { 1926 #if defined(TARGET_PPC64) 1927 tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32); 1928 tcg_gen_rotli_i64(t_ra, t_ra, sh); 1929 tcg_gen_andi_i64(t_ra, t_ra, mask); 1930 #else 1931 g_assert_not_reached(); 1932 #endif 1933 } 1934 } 1935 if (unlikely(Rc(ctx->opcode) != 0)) { 1936 gen_set_Rc0(ctx, t_ra); 1937 } 1938 } 1939 1940 /* rlwnm & rlwnm. */ 1941 static void gen_rlwnm(DisasContext *ctx) 1942 { 1943 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 1944 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 1945 TCGv t_rb = cpu_gpr[rB(ctx->opcode)]; 1946 uint32_t mb = MB(ctx->opcode); 1947 uint32_t me = ME(ctx->opcode); 1948 target_ulong mask; 1949 1950 #if defined(TARGET_PPC64) 1951 mb += 32; 1952 me += 32; 1953 #endif 1954 mask = MASK(mb, me); 1955 1956 if (mask <= 0xffffffffu) { 1957 TCGv_i32 t0 = tcg_temp_new_i32(); 1958 TCGv_i32 t1 = tcg_temp_new_i32(); 1959 tcg_gen_trunc_tl_i32(t0, t_rb); 1960 tcg_gen_trunc_tl_i32(t1, t_rs); 1961 tcg_gen_andi_i32(t0, t0, 0x1f); 1962 tcg_gen_rotl_i32(t1, t1, t0); 1963 tcg_gen_extu_i32_tl(t_ra, t1); 1964 tcg_temp_free_i32(t0); 1965 tcg_temp_free_i32(t1); 1966 } else { 1967 #if defined(TARGET_PPC64) 1968 TCGv_i64 t0 = tcg_temp_new_i64(); 1969 tcg_gen_andi_i64(t0, t_rb, 0x1f); 1970 tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32); 1971 tcg_gen_rotl_i64(t_ra, t_ra, t0); 1972 tcg_temp_free_i64(t0); 1973 #else 1974 g_assert_not_reached(); 1975 #endif 1976 } 1977 1978 tcg_gen_andi_tl(t_ra, t_ra, mask); 1979 1980 if (unlikely(Rc(ctx->opcode) != 0)) { 1981 gen_set_Rc0(ctx, t_ra); 1982 } 1983 } 1984 1985 #if defined(TARGET_PPC64) 1986 #define GEN_PPC64_R2(name, opc1, opc2) \ 1987 static void glue(gen_, name##0)(DisasContext *ctx) \ 1988 { \ 1989 gen_##name(ctx, 0); \ 1990 } \ 1991 \ 1992 static void glue(gen_, name##1)(DisasContext *ctx) \ 1993 { \ 1994 gen_##name(ctx, 1); \ 1995 } 1996 #define GEN_PPC64_R4(name, opc1, opc2) \ 1997 static void glue(gen_, name##0)(DisasContext *ctx) \ 1998 { \ 1999 gen_##name(ctx, 0, 0); \ 2000 } \ 2001 \ 2002 static void glue(gen_, name##1)(DisasContext *ctx) \ 2003 { \ 2004 gen_##name(ctx, 0, 1); \ 2005 } \ 2006 \ 2007 static void glue(gen_, name##2)(DisasContext *ctx) \ 2008 { \ 2009 gen_##name(ctx, 1, 0); \ 2010 } \ 2011 \ 2012 static void glue(gen_, name##3)(DisasContext *ctx) \ 2013 { \ 2014 gen_##name(ctx, 1, 1); \ 2015 } 2016 2017 static void gen_rldinm(DisasContext *ctx, int mb, int me, int sh) 2018 { 2019 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2020 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2021 int len = me - mb + 1; 2022 int rsh = (64 - sh) & 63; 2023 2024 if (sh != 0 && len > 0 && me == (63 - sh)) { 2025 
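        /* A rotate by sh whose mask ends at bit 63 - sh simply moves the
         * low len bits of rS up by sh into an otherwise zero result,
         * which is exactly what deposit_z does. */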
tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len); 2026 } else if (me == 63 && rsh + len <= 64) { 2027 tcg_gen_extract_tl(t_ra, t_rs, rsh, len); 2028 } else { 2029 tcg_gen_rotli_tl(t_ra, t_rs, sh); 2030 tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me)); 2031 } 2032 if (unlikely(Rc(ctx->opcode) != 0)) { 2033 gen_set_Rc0(ctx, t_ra); 2034 } 2035 } 2036 2037 /* rldicl - rldicl. */ 2038 static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn) 2039 { 2040 uint32_t sh, mb; 2041 2042 sh = SH(ctx->opcode) | (shn << 5); 2043 mb = MB(ctx->opcode) | (mbn << 5); 2044 gen_rldinm(ctx, mb, 63, sh); 2045 } 2046 GEN_PPC64_R4(rldicl, 0x1E, 0x00); 2047 2048 /* rldicr - rldicr. */ 2049 static inline void gen_rldicr(DisasContext *ctx, int men, int shn) 2050 { 2051 uint32_t sh, me; 2052 2053 sh = SH(ctx->opcode) | (shn << 5); 2054 me = MB(ctx->opcode) | (men << 5); 2055 gen_rldinm(ctx, 0, me, sh); 2056 } 2057 GEN_PPC64_R4(rldicr, 0x1E, 0x02); 2058 2059 /* rldic - rldic. */ 2060 static inline void gen_rldic(DisasContext *ctx, int mbn, int shn) 2061 { 2062 uint32_t sh, mb; 2063 2064 sh = SH(ctx->opcode) | (shn << 5); 2065 mb = MB(ctx->opcode) | (mbn << 5); 2066 gen_rldinm(ctx, mb, 63 - sh, sh); 2067 } 2068 GEN_PPC64_R4(rldic, 0x1E, 0x04); 2069 2070 static void gen_rldnm(DisasContext *ctx, int mb, int me) 2071 { 2072 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2073 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2074 TCGv t_rb = cpu_gpr[rB(ctx->opcode)]; 2075 TCGv t0; 2076 2077 t0 = tcg_temp_new(); 2078 tcg_gen_andi_tl(t0, t_rb, 0x3f); 2079 tcg_gen_rotl_tl(t_ra, t_rs, t0); 2080 tcg_temp_free(t0); 2081 2082 tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me)); 2083 if (unlikely(Rc(ctx->opcode) != 0)) { 2084 gen_set_Rc0(ctx, t_ra); 2085 } 2086 } 2087 2088 /* rldcl - rldcl. */ 2089 static inline void gen_rldcl(DisasContext *ctx, int mbn) 2090 { 2091 uint32_t mb; 2092 2093 mb = MB(ctx->opcode) | (mbn << 5); 2094 gen_rldnm(ctx, mb, 63); 2095 } 2096 GEN_PPC64_R2(rldcl, 0x1E, 0x08); 2097 2098 /* rldcr - rldcr. */ 2099 static inline void gen_rldcr(DisasContext *ctx, int men) 2100 { 2101 uint32_t me; 2102 2103 me = MB(ctx->opcode) | (men << 5); 2104 gen_rldnm(ctx, 0, me); 2105 } 2106 GEN_PPC64_R2(rldcr, 0x1E, 0x09); 2107 2108 /* rldimi - rldimi. */ 2109 static void gen_rldimi(DisasContext *ctx, int mbn, int shn) 2110 { 2111 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2112 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2113 uint32_t sh = SH(ctx->opcode) | (shn << 5); 2114 uint32_t mb = MB(ctx->opcode) | (mbn << 5); 2115 uint32_t me = 63 - sh; 2116 2117 if (mb <= me) { 2118 tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1); 2119 } else { 2120 target_ulong mask = MASK(mb, me); 2121 TCGv t1 = tcg_temp_new(); 2122 2123 tcg_gen_rotli_tl(t1, t_rs, sh); 2124 tcg_gen_andi_tl(t1, t1, mask); 2125 tcg_gen_andi_tl(t_ra, t_ra, ~mask); 2126 tcg_gen_or_tl(t_ra, t_ra, t1); 2127 tcg_temp_free(t1); 2128 } 2129 if (unlikely(Rc(ctx->opcode) != 0)) { 2130 gen_set_Rc0(ctx, t_ra); 2131 } 2132 } 2133 GEN_PPC64_R4(rldimi, 0x1E, 0x06); 2134 #endif 2135 2136 /*** Integer shift ***/ 2137 2138 /* slw & slw. 
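 * The shift count is the low six bits of rB, and counts of 32..63 must
 * give a zero result.  The code below turns bit 0x20 of rB into an
 * all-ones/all-zeroes mask (shift it up to the sign bit, arithmetic
 * shift back down) and uses andc so rS is cleared first when the count
 * is >= 32; the actual shift then only uses the low five bits.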
*/ 2139 static void gen_slw(DisasContext *ctx) 2140 { 2141 TCGv t0, t1; 2142 2143 t0 = tcg_temp_new(); 2144 /* AND rS with a mask that is 0 when rB >= 0x20 */ 2145 #if defined(TARGET_PPC64) 2146 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a); 2147 tcg_gen_sari_tl(t0, t0, 0x3f); 2148 #else 2149 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a); 2150 tcg_gen_sari_tl(t0, t0, 0x1f); 2151 #endif 2152 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 2153 t1 = tcg_temp_new(); 2154 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f); 2155 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 2156 tcg_temp_free(t1); 2157 tcg_temp_free(t0); 2158 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 2159 if (unlikely(Rc(ctx->opcode) != 0)) 2160 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2161 } 2162 2163 /* sraw & sraw. */ 2164 static void gen_sraw(DisasContext *ctx) 2165 { 2166 gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], cpu_env, 2167 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2168 if (unlikely(Rc(ctx->opcode) != 0)) 2169 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2170 } 2171 2172 /* srawi & srawi. */ 2173 static void gen_srawi(DisasContext *ctx) 2174 { 2175 int sh = SH(ctx->opcode); 2176 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 2177 TCGv src = cpu_gpr[rS(ctx->opcode)]; 2178 if (sh == 0) { 2179 tcg_gen_ext32s_tl(dst, src); 2180 tcg_gen_movi_tl(cpu_ca, 0); 2181 } else { 2182 TCGv t0; 2183 tcg_gen_ext32s_tl(dst, src); 2184 tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1); 2185 t0 = tcg_temp_new(); 2186 tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1); 2187 tcg_gen_and_tl(cpu_ca, cpu_ca, t0); 2188 tcg_temp_free(t0); 2189 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0); 2190 tcg_gen_sari_tl(dst, dst, sh); 2191 } 2192 if (unlikely(Rc(ctx->opcode) != 0)) { 2193 gen_set_Rc0(ctx, dst); 2194 } 2195 } 2196 2197 /* srw & srw. */ 2198 static void gen_srw(DisasContext *ctx) 2199 { 2200 TCGv t0, t1; 2201 2202 t0 = tcg_temp_new(); 2203 /* AND rS with a mask that is 0 when rB >= 0x20 */ 2204 #if defined(TARGET_PPC64) 2205 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a); 2206 tcg_gen_sari_tl(t0, t0, 0x3f); 2207 #else 2208 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a); 2209 tcg_gen_sari_tl(t0, t0, 0x1f); 2210 #endif 2211 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 2212 tcg_gen_ext32u_tl(t0, t0); 2213 t1 = tcg_temp_new(); 2214 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f); 2215 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 2216 tcg_temp_free(t1); 2217 tcg_temp_free(t0); 2218 if (unlikely(Rc(ctx->opcode) != 0)) 2219 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2220 } 2221 2222 #if defined(TARGET_PPC64) 2223 /* sld & sld. */ 2224 static void gen_sld(DisasContext *ctx) 2225 { 2226 TCGv t0, t1; 2227 2228 t0 = tcg_temp_new(); 2229 /* AND rS with a mask that is 0 when rB >= 0x40 */ 2230 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39); 2231 tcg_gen_sari_tl(t0, t0, 0x3f); 2232 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 2233 t1 = tcg_temp_new(); 2234 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f); 2235 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 2236 tcg_temp_free(t1); 2237 tcg_temp_free(t0); 2238 if (unlikely(Rc(ctx->opcode) != 0)) 2239 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2240 } 2241 2242 /* srad & srad. 
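 * The helper also computes CA: for srad, CA is set iff rS is negative
 * and any 1 bits were shifted out of the low-order positions.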
*/ 2243 static void gen_srad(DisasContext *ctx) 2244 { 2245 gen_helper_srad(cpu_gpr[rA(ctx->opcode)], cpu_env, 2246 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2247 if (unlikely(Rc(ctx->opcode) != 0)) 2248 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2249 } 2250 /* sradi & sradi. */ 2251 static inline void gen_sradi(DisasContext *ctx, int n) 2252 { 2253 int sh = SH(ctx->opcode) + (n << 5); 2254 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 2255 TCGv src = cpu_gpr[rS(ctx->opcode)]; 2256 if (sh == 0) { 2257 tcg_gen_mov_tl(dst, src); 2258 tcg_gen_movi_tl(cpu_ca, 0); 2259 } else { 2260 TCGv t0; 2261 tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1); 2262 t0 = tcg_temp_new(); 2263 tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1); 2264 tcg_gen_and_tl(cpu_ca, cpu_ca, t0); 2265 tcg_temp_free(t0); 2266 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0); 2267 tcg_gen_sari_tl(dst, src, sh); 2268 } 2269 if (unlikely(Rc(ctx->opcode) != 0)) { 2270 gen_set_Rc0(ctx, dst); 2271 } 2272 } 2273 2274 static void gen_sradi0(DisasContext *ctx) 2275 { 2276 gen_sradi(ctx, 0); 2277 } 2278 2279 static void gen_sradi1(DisasContext *ctx) 2280 { 2281 gen_sradi(ctx, 1); 2282 } 2283 2284 /* extswsli & extswsli. */ 2285 static inline void gen_extswsli(DisasContext *ctx, int n) 2286 { 2287 int sh = SH(ctx->opcode) + (n << 5); 2288 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 2289 TCGv src = cpu_gpr[rS(ctx->opcode)]; 2290 2291 tcg_gen_ext32s_tl(dst, src); 2292 tcg_gen_shli_tl(dst, dst, sh); 2293 if (unlikely(Rc(ctx->opcode) != 0)) { 2294 gen_set_Rc0(ctx, dst); 2295 } 2296 } 2297 2298 static void gen_extswsli0(DisasContext *ctx) 2299 { 2300 gen_extswsli(ctx, 0); 2301 } 2302 2303 static void gen_extswsli1(DisasContext *ctx) 2304 { 2305 gen_extswsli(ctx, 1); 2306 } 2307 2308 /* srd & srd. */ 2309 static void gen_srd(DisasContext *ctx) 2310 { 2311 TCGv t0, t1; 2312 2313 t0 = tcg_temp_new(); 2314 /* AND rS with a mask that is 0 when rB >= 0x40 */ 2315 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39); 2316 tcg_gen_sari_tl(t0, t0, 0x3f); 2317 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 2318 t1 = tcg_temp_new(); 2319 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f); 2320 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 2321 tcg_temp_free(t1); 2322 tcg_temp_free(t0); 2323 if (unlikely(Rc(ctx->opcode) != 0)) 2324 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2325 } 2326 #endif 2327 2328 /*** Addressing modes ***/ 2329 /* Register indirect with immediate index : EA = (rA|0) + SIMM */ 2330 static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA, 2331 target_long maskl) 2332 { 2333 target_long simm = SIMM(ctx->opcode); 2334 2335 simm &= ~maskl; 2336 if (rA(ctx->opcode) == 0) { 2337 if (NARROW_MODE(ctx)) { 2338 simm = (uint32_t)simm; 2339 } 2340 tcg_gen_movi_tl(EA, simm); 2341 } else if (likely(simm != 0)) { 2342 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm); 2343 if (NARROW_MODE(ctx)) { 2344 tcg_gen_ext32u_tl(EA, EA); 2345 } 2346 } else { 2347 if (NARROW_MODE(ctx)) { 2348 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]); 2349 } else { 2350 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]); 2351 } 2352 } 2353 } 2354 2355 static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA) 2356 { 2357 if (rA(ctx->opcode) == 0) { 2358 if (NARROW_MODE(ctx)) { 2359 tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]); 2360 } else { 2361 tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]); 2362 } 2363 } else { 2364 tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2365 if (NARROW_MODE(ctx)) { 2366 tcg_gen_ext32u_tl(EA, EA); 
2367 } 2368 } 2369 } 2370 2371 static inline void gen_addr_register(DisasContext *ctx, TCGv EA) 2372 { 2373 if (rA(ctx->opcode) == 0) { 2374 tcg_gen_movi_tl(EA, 0); 2375 } else if (NARROW_MODE(ctx)) { 2376 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]); 2377 } else { 2378 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]); 2379 } 2380 } 2381 2382 static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1, 2383 target_long val) 2384 { 2385 tcg_gen_addi_tl(ret, arg1, val); 2386 if (NARROW_MODE(ctx)) { 2387 tcg_gen_ext32u_tl(ret, ret); 2388 } 2389 } 2390 2391 static inline void gen_check_align(DisasContext *ctx, TCGv EA, int mask) 2392 { 2393 TCGLabel *l1 = gen_new_label(); 2394 TCGv t0 = tcg_temp_new(); 2395 TCGv_i32 t1, t2; 2396 tcg_gen_andi_tl(t0, EA, mask); 2397 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 2398 t1 = tcg_const_i32(POWERPC_EXCP_ALIGN); 2399 t2 = tcg_const_i32(ctx->opcode & 0x03FF0000); 2400 gen_update_nip(ctx, ctx->nip - 4); 2401 gen_helper_raise_exception_err(cpu_env, t1, t2); 2402 tcg_temp_free_i32(t1); 2403 tcg_temp_free_i32(t2); 2404 gen_set_label(l1); 2405 tcg_temp_free(t0); 2406 } 2407 2408 static inline void gen_align_no_le(DisasContext *ctx) 2409 { 2410 gen_exception_err(ctx, POWERPC_EXCP_ALIGN, 2411 (ctx->opcode & 0x03FF0000) | POWERPC_EXCP_ALIGN_LE); 2412 } 2413 2414 /*** Integer load ***/ 2415 #define DEF_MEMOP(op) ((op) | ctx->default_tcg_memop_mask) 2416 #define BSWAP_MEMOP(op) ((op) | (ctx->default_tcg_memop_mask ^ MO_BSWAP)) 2417 2418 #define GEN_QEMU_LOAD_TL(ldop, op) \ 2419 static void glue(gen_qemu_, ldop)(DisasContext *ctx, \ 2420 TCGv val, \ 2421 TCGv addr) \ 2422 { \ 2423 tcg_gen_qemu_ld_tl(val, addr, ctx->mem_idx, op); \ 2424 } 2425 2426 GEN_QEMU_LOAD_TL(ld8u, DEF_MEMOP(MO_UB)) 2427 GEN_QEMU_LOAD_TL(ld16u, DEF_MEMOP(MO_UW)) 2428 GEN_QEMU_LOAD_TL(ld16s, DEF_MEMOP(MO_SW)) 2429 GEN_QEMU_LOAD_TL(ld32u, DEF_MEMOP(MO_UL)) 2430 GEN_QEMU_LOAD_TL(ld32s, DEF_MEMOP(MO_SL)) 2431 2432 GEN_QEMU_LOAD_TL(ld16ur, BSWAP_MEMOP(MO_UW)) 2433 GEN_QEMU_LOAD_TL(ld32ur, BSWAP_MEMOP(MO_UL)) 2434 2435 #define GEN_QEMU_LOAD_64(ldop, op) \ 2436 static void glue(gen_qemu_, glue(ldop, _i64))(DisasContext *ctx, \ 2437 TCGv_i64 val, \ 2438 TCGv addr) \ 2439 { \ 2440 tcg_gen_qemu_ld_i64(val, addr, ctx->mem_idx, op); \ 2441 } 2442 2443 GEN_QEMU_LOAD_64(ld8u, DEF_MEMOP(MO_UB)) 2444 GEN_QEMU_LOAD_64(ld16u, DEF_MEMOP(MO_UW)) 2445 GEN_QEMU_LOAD_64(ld32u, DEF_MEMOP(MO_UL)) 2446 GEN_QEMU_LOAD_64(ld32s, DEF_MEMOP(MO_SL)) 2447 GEN_QEMU_LOAD_64(ld64, DEF_MEMOP(MO_Q)) 2448 2449 #if defined(TARGET_PPC64) 2450 GEN_QEMU_LOAD_64(ld64ur, BSWAP_MEMOP(MO_Q)) 2451 #endif 2452 2453 #define GEN_QEMU_STORE_TL(stop, op) \ 2454 static void glue(gen_qemu_, stop)(DisasContext *ctx, \ 2455 TCGv val, \ 2456 TCGv addr) \ 2457 { \ 2458 tcg_gen_qemu_st_tl(val, addr, ctx->mem_idx, op); \ 2459 } 2460 2461 GEN_QEMU_STORE_TL(st8, DEF_MEMOP(MO_UB)) 2462 GEN_QEMU_STORE_TL(st16, DEF_MEMOP(MO_UW)) 2463 GEN_QEMU_STORE_TL(st32, DEF_MEMOP(MO_UL)) 2464 2465 GEN_QEMU_STORE_TL(st16r, BSWAP_MEMOP(MO_UW)) 2466 GEN_QEMU_STORE_TL(st32r, BSWAP_MEMOP(MO_UL)) 2467 2468 #define GEN_QEMU_STORE_64(stop, op) \ 2469 static void glue(gen_qemu_, glue(stop, _i64))(DisasContext *ctx, \ 2470 TCGv_i64 val, \ 2471 TCGv addr) \ 2472 { \ 2473 tcg_gen_qemu_st_i64(val, addr, ctx->mem_idx, op); \ 2474 } 2475 2476 GEN_QEMU_STORE_64(st8, DEF_MEMOP(MO_UB)) 2477 GEN_QEMU_STORE_64(st16, DEF_MEMOP(MO_UW)) 2478 GEN_QEMU_STORE_64(st32, DEF_MEMOP(MO_UL)) 2479 GEN_QEMU_STORE_64(st64, DEF_MEMOP(MO_Q)) 2480 2481 #if defined(TARGET_PPC64) 2482 
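/* Byte-reversed 64-bit store, used by stdbrx below: BSWAP_MEMOP flips
 * MO_BSWAP relative to default_tcg_memop_mask, so the access always has
 * the opposite byte order of a normal store in the current mode. */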
GEN_QEMU_STORE_64(st64r, BSWAP_MEMOP(MO_Q)) 2483 #endif 2484 2485 #define GEN_LD(name, ldop, opc, type) \ 2486 static void glue(gen_, name)(DisasContext *ctx) \ 2487 { \ 2488 TCGv EA; \ 2489 gen_set_access_type(ctx, ACCESS_INT); \ 2490 EA = tcg_temp_new(); \ 2491 gen_addr_imm_index(ctx, EA, 0); \ 2492 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 2493 tcg_temp_free(EA); \ 2494 } 2495 2496 #define GEN_LDU(name, ldop, opc, type) \ 2497 static void glue(gen_, name##u)(DisasContext *ctx) \ 2498 { \ 2499 TCGv EA; \ 2500 if (unlikely(rA(ctx->opcode) == 0 || \ 2501 rA(ctx->opcode) == rD(ctx->opcode))) { \ 2502 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 2503 return; \ 2504 } \ 2505 gen_set_access_type(ctx, ACCESS_INT); \ 2506 EA = tcg_temp_new(); \ 2507 if (type == PPC_64B) \ 2508 gen_addr_imm_index(ctx, EA, 0x03); \ 2509 else \ 2510 gen_addr_imm_index(ctx, EA, 0); \ 2511 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 2512 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 2513 tcg_temp_free(EA); \ 2514 } 2515 2516 #define GEN_LDUX(name, ldop, opc2, opc3, type) \ 2517 static void glue(gen_, name##ux)(DisasContext *ctx) \ 2518 { \ 2519 TCGv EA; \ 2520 if (unlikely(rA(ctx->opcode) == 0 || \ 2521 rA(ctx->opcode) == rD(ctx->opcode))) { \ 2522 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 2523 return; \ 2524 } \ 2525 gen_set_access_type(ctx, ACCESS_INT); \ 2526 EA = tcg_temp_new(); \ 2527 gen_addr_reg_index(ctx, EA); \ 2528 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 2529 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 2530 tcg_temp_free(EA); \ 2531 } 2532 2533 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \ 2534 static void glue(gen_, name##x)(DisasContext *ctx) \ 2535 { \ 2536 TCGv EA; \ 2537 chk; \ 2538 gen_set_access_type(ctx, ACCESS_INT); \ 2539 EA = tcg_temp_new(); \ 2540 gen_addr_reg_index(ctx, EA); \ 2541 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 2542 tcg_temp_free(EA); \ 2543 } 2544 2545 #define GEN_LDX(name, ldop, opc2, opc3, type) \ 2546 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_NONE) 2547 2548 #define GEN_LDX_HVRM(name, ldop, opc2, opc3, type) \ 2549 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_HVRM) 2550 2551 #define GEN_LDS(name, ldop, op, type) \ 2552 GEN_LD(name, ldop, op | 0x20, type); \ 2553 GEN_LDU(name, ldop, op | 0x21, type); \ 2554 GEN_LDUX(name, ldop, 0x17, op | 0x01, type); \ 2555 GEN_LDX(name, ldop, 0x17, op | 0x00, type) 2556 2557 /* lbz lbzu lbzux lbzx */ 2558 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER); 2559 /* lha lhau lhaux lhax */ 2560 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER); 2561 /* lhz lhzu lhzux lhzx */ 2562 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER); 2563 /* lwz lwzu lwzux lwzx */ 2564 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER); 2565 #if defined(TARGET_PPC64) 2566 /* lwaux */ 2567 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B); 2568 /* lwax */ 2569 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B); 2570 /* ldux */ 2571 GEN_LDUX(ld, ld64_i64, 0x15, 0x01, PPC_64B); 2572 /* ldx */ 2573 GEN_LDX(ld, ld64_i64, 0x15, 0x00, PPC_64B); 2574 2575 /* CI load/store variants */ 2576 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST) 2577 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x15, PPC_CILDST) 2578 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST) 2579 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST) 2580 2581 static void gen_ld(DisasContext *ctx) 2582 { 2583 TCGv EA; 2584 if (Rc(ctx->opcode)) { 2585 if (unlikely(rA(ctx->opcode) == 0 || 2586 rA(ctx->opcode) == rD(ctx->opcode))) { 2587 gen_inval_exception(ctx, 
POWERPC_EXCP_INVAL_INVAL); 2588 return; 2589 } 2590 } 2591 gen_set_access_type(ctx, ACCESS_INT); 2592 EA = tcg_temp_new(); 2593 gen_addr_imm_index(ctx, EA, 0x03); 2594 if (ctx->opcode & 0x02) { 2595 /* lwa (lwau is undefined) */ 2596 gen_qemu_ld32s(ctx, cpu_gpr[rD(ctx->opcode)], EA); 2597 } else { 2598 /* ld - ldu */ 2599 gen_qemu_ld64_i64(ctx, cpu_gpr[rD(ctx->opcode)], EA); 2600 } 2601 if (Rc(ctx->opcode)) 2602 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); 2603 tcg_temp_free(EA); 2604 } 2605 2606 /* lq */ 2607 static void gen_lq(DisasContext *ctx) 2608 { 2609 int ra, rd; 2610 TCGv EA; 2611 2612 /* lq is a legal user mode instruction starting in ISA 2.07 */ 2613 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2614 bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2615 2616 if (!legal_in_user_mode && ctx->pr) { 2617 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); 2618 return; 2619 } 2620 2621 if (!le_is_supported && ctx->le_mode) { 2622 gen_align_no_le(ctx); 2623 return; 2624 } 2625 ra = rA(ctx->opcode); 2626 rd = rD(ctx->opcode); 2627 if (unlikely((rd & 1) || rd == ra)) { 2628 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2629 return; 2630 } 2631 2632 gen_set_access_type(ctx, ACCESS_INT); 2633 EA = tcg_temp_new(); 2634 gen_addr_imm_index(ctx, EA, 0x0F); 2635 2636 /* We only need to swap high and low halves. gen_qemu_ld64_i64 does 2637 necessary 64-bit byteswap already. */ 2638 if (unlikely(ctx->le_mode)) { 2639 gen_qemu_ld64_i64(ctx, cpu_gpr[rd + 1], EA); 2640 gen_addr_add(ctx, EA, EA, 8); 2641 gen_qemu_ld64_i64(ctx, cpu_gpr[rd], EA); 2642 } else { 2643 gen_qemu_ld64_i64(ctx, cpu_gpr[rd], EA); 2644 gen_addr_add(ctx, EA, EA, 8); 2645 gen_qemu_ld64_i64(ctx, cpu_gpr[rd + 1], EA); 2646 } 2647 tcg_temp_free(EA); 2648 } 2649 #endif 2650 2651 /*** Integer store ***/ 2652 #define GEN_ST(name, stop, opc, type) \ 2653 static void glue(gen_, name)(DisasContext *ctx) \ 2654 { \ 2655 TCGv EA; \ 2656 gen_set_access_type(ctx, ACCESS_INT); \ 2657 EA = tcg_temp_new(); \ 2658 gen_addr_imm_index(ctx, EA, 0); \ 2659 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2660 tcg_temp_free(EA); \ 2661 } 2662 2663 #define GEN_STU(name, stop, opc, type) \ 2664 static void glue(gen_, stop##u)(DisasContext *ctx) \ 2665 { \ 2666 TCGv EA; \ 2667 if (unlikely(rA(ctx->opcode) == 0)) { \ 2668 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 2669 return; \ 2670 } \ 2671 gen_set_access_type(ctx, ACCESS_INT); \ 2672 EA = tcg_temp_new(); \ 2673 if (type == PPC_64B) \ 2674 gen_addr_imm_index(ctx, EA, 0x03); \ 2675 else \ 2676 gen_addr_imm_index(ctx, EA, 0); \ 2677 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2678 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 2679 tcg_temp_free(EA); \ 2680 } 2681 2682 #define GEN_STUX(name, stop, opc2, opc3, type) \ 2683 static void glue(gen_, name##ux)(DisasContext *ctx) \ 2684 { \ 2685 TCGv EA; \ 2686 if (unlikely(rA(ctx->opcode) == 0)) { \ 2687 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 2688 return; \ 2689 } \ 2690 gen_set_access_type(ctx, ACCESS_INT); \ 2691 EA = tcg_temp_new(); \ 2692 gen_addr_reg_index(ctx, EA); \ 2693 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2694 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 2695 tcg_temp_free(EA); \ 2696 } 2697 2698 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \ 2699 static void glue(gen_, name##x)(DisasContext *ctx) \ 2700 { \ 2701 TCGv EA; \ 2702 chk; \ 2703 gen_set_access_type(ctx, ACCESS_INT); \ 2704 EA = tcg_temp_new(); \ 2705 
gen_addr_reg_index(ctx, EA); \ 2706 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2707 tcg_temp_free(EA); \ 2708 } 2709 #define GEN_STX(name, stop, opc2, opc3, type) \ 2710 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_NONE) 2711 2712 #define GEN_STX_HVRM(name, stop, opc2, opc3, type) \ 2713 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_HVRM) 2714 2715 #define GEN_STS(name, stop, op, type) \ 2716 GEN_ST(name, stop, op | 0x20, type); \ 2717 GEN_STU(name, stop, op | 0x21, type); \ 2718 GEN_STUX(name, stop, 0x17, op | 0x01, type); \ 2719 GEN_STX(name, stop, 0x17, op | 0x00, type) 2720 2721 /* stb stbu stbux stbx */ 2722 GEN_STS(stb, st8, 0x06, PPC_INTEGER); 2723 /* sth sthu sthux sthx */ 2724 GEN_STS(sth, st16, 0x0C, PPC_INTEGER); 2725 /* stw stwu stwux stwx */ 2726 GEN_STS(stw, st32, 0x04, PPC_INTEGER); 2727 #if defined(TARGET_PPC64) 2728 GEN_STUX(std, st64_i64, 0x15, 0x05, PPC_64B); 2729 GEN_STX(std, st64_i64, 0x15, 0x04, PPC_64B); 2730 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST) 2731 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST) 2732 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST) 2733 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST) 2734 2735 static void gen_std(DisasContext *ctx) 2736 { 2737 int rs; 2738 TCGv EA; 2739 2740 rs = rS(ctx->opcode); 2741 if ((ctx->opcode & 0x3) == 0x2) { /* stq */ 2742 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2743 bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2744 2745 if (!(ctx->insns_flags & PPC_64BX)) { 2746 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); return; 2747 } 2748 2749 if (!legal_in_user_mode && ctx->pr) { 2750 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); 2751 return; 2752 } 2753 2754 if (!le_is_supported && ctx->le_mode) { 2755 gen_align_no_le(ctx); 2756 return; 2757 } 2758 2759 if (unlikely(rs & 1)) { 2760 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2761 return; 2762 } 2763 gen_set_access_type(ctx, ACCESS_INT); 2764 EA = tcg_temp_new(); 2765 gen_addr_imm_index(ctx, EA, 0x03); 2766 2767 /* We only need to swap high and low halves. gen_qemu_st64_i64 does 2768 necessary 64-bit byteswap already.
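   E.g. stq r6 in LE mode stores the doubleword from r7 at EA and the one
   from r6 at EA+8; the per-doubleword byte order is handled by the memop.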
*/ 2769 if (unlikely(ctx->le_mode)) { 2770 gen_qemu_st64_i64(ctx, cpu_gpr[rs + 1], EA); 2771 gen_addr_add(ctx, EA, EA, 8); 2772 gen_qemu_st64_i64(ctx, cpu_gpr[rs], EA); 2773 } else { 2774 gen_qemu_st64_i64(ctx, cpu_gpr[rs], EA); 2775 gen_addr_add(ctx, EA, EA, 8); 2776 gen_qemu_st64_i64(ctx, cpu_gpr[rs + 1], EA); 2777 } 2778 tcg_temp_free(EA); 2779 } else { 2780 /* std / stdu*/ 2781 if (Rc(ctx->opcode)) { 2782 if (unlikely(rA(ctx->opcode) == 0)) { 2783 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2784 return; 2785 } 2786 } 2787 gen_set_access_type(ctx, ACCESS_INT); 2788 EA = tcg_temp_new(); 2789 gen_addr_imm_index(ctx, EA, 0x03); 2790 gen_qemu_st64_i64(ctx, cpu_gpr[rs], EA); 2791 if (Rc(ctx->opcode)) 2792 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); 2793 tcg_temp_free(EA); 2794 } 2795 } 2796 #endif 2797 /*** Integer load and store with byte reverse ***/ 2798 2799 /* lhbrx */ 2800 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER); 2801 2802 /* lwbrx */ 2803 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER); 2804 2805 #if defined(TARGET_PPC64) 2806 /* ldbrx */ 2807 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE); 2808 /* stdbrx */ 2809 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE); 2810 #endif /* TARGET_PPC64 */ 2811 2812 /* sthbrx */ 2813 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER); 2814 /* stwbrx */ 2815 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER); 2816 2817 /*** Integer load and store multiple ***/ 2818 2819 /* lmw */ 2820 static void gen_lmw(DisasContext *ctx) 2821 { 2822 TCGv t0; 2823 TCGv_i32 t1; 2824 2825 if (ctx->le_mode) { 2826 gen_align_no_le(ctx); 2827 return; 2828 } 2829 gen_set_access_type(ctx, ACCESS_INT); 2830 t0 = tcg_temp_new(); 2831 t1 = tcg_const_i32(rD(ctx->opcode)); 2832 gen_addr_imm_index(ctx, t0, 0); 2833 gen_helper_lmw(cpu_env, t0, t1); 2834 tcg_temp_free(t0); 2835 tcg_temp_free_i32(t1); 2836 } 2837 2838 /* stmw */ 2839 static void gen_stmw(DisasContext *ctx) 2840 { 2841 TCGv t0; 2842 TCGv_i32 t1; 2843 2844 if (ctx->le_mode) { 2845 gen_align_no_le(ctx); 2846 return; 2847 } 2848 gen_set_access_type(ctx, ACCESS_INT); 2849 t0 = tcg_temp_new(); 2850 t1 = tcg_const_i32(rS(ctx->opcode)); 2851 gen_addr_imm_index(ctx, t0, 0); 2852 gen_helper_stmw(cpu_env, t0, t1); 2853 tcg_temp_free(t0); 2854 tcg_temp_free_i32(t1); 2855 } 2856 2857 /*** Integer load and store strings ***/ 2858 2859 /* lswi */ 2860 /* PowerPC32 specification says we must generate an exception if 2861 * rA is in the range of registers to be loaded. 2862 * In an other hand, IBM says this is valid, but rA won't be loaded. 2863 * For now, I'll follow the spec... 
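 * E.g. lswi r5,r6,12 would load three words into r5..r7; since rA=r6
 * falls inside that range it is rejected below with an invalid
 * instruction exception.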
2864 */ 2865 static void gen_lswi(DisasContext *ctx) 2866 { 2867 TCGv t0; 2868 TCGv_i32 t1, t2; 2869 int nb = NB(ctx->opcode); 2870 int start = rD(ctx->opcode); 2871 int ra = rA(ctx->opcode); 2872 int nr; 2873 2874 if (ctx->le_mode) { 2875 gen_align_no_le(ctx); 2876 return; 2877 } 2878 if (nb == 0) 2879 nb = 32; 2880 nr = (nb + 3) / 4; 2881 if (unlikely(lsw_reg_in_range(start, nr, ra))) { 2882 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX); 2883 return; 2884 } 2885 gen_set_access_type(ctx, ACCESS_INT); 2886 t0 = tcg_temp_new(); 2887 gen_addr_register(ctx, t0); 2888 t1 = tcg_const_i32(nb); 2889 t2 = tcg_const_i32(start); 2890 gen_helper_lsw(cpu_env, t0, t1, t2); 2891 tcg_temp_free(t0); 2892 tcg_temp_free_i32(t1); 2893 tcg_temp_free_i32(t2); 2894 } 2895 2896 /* lswx */ 2897 static void gen_lswx(DisasContext *ctx) 2898 { 2899 TCGv t0; 2900 TCGv_i32 t1, t2, t3; 2901 2902 if (ctx->le_mode) { 2903 gen_align_no_le(ctx); 2904 return; 2905 } 2906 gen_set_access_type(ctx, ACCESS_INT); 2907 t0 = tcg_temp_new(); 2908 gen_addr_reg_index(ctx, t0); 2909 t1 = tcg_const_i32(rD(ctx->opcode)); 2910 t2 = tcg_const_i32(rA(ctx->opcode)); 2911 t3 = tcg_const_i32(rB(ctx->opcode)); 2912 gen_helper_lswx(cpu_env, t0, t1, t2, t3); 2913 tcg_temp_free(t0); 2914 tcg_temp_free_i32(t1); 2915 tcg_temp_free_i32(t2); 2916 tcg_temp_free_i32(t3); 2917 } 2918 2919 /* stswi */ 2920 static void gen_stswi(DisasContext *ctx) 2921 { 2922 TCGv t0; 2923 TCGv_i32 t1, t2; 2924 int nb = NB(ctx->opcode); 2925 2926 if (ctx->le_mode) { 2927 gen_align_no_le(ctx); 2928 return; 2929 } 2930 gen_set_access_type(ctx, ACCESS_INT); 2931 t0 = tcg_temp_new(); 2932 gen_addr_register(ctx, t0); 2933 if (nb == 0) 2934 nb = 32; 2935 t1 = tcg_const_i32(nb); 2936 t2 = tcg_const_i32(rS(ctx->opcode)); 2937 gen_helper_stsw(cpu_env, t0, t1, t2); 2938 tcg_temp_free(t0); 2939 tcg_temp_free_i32(t1); 2940 tcg_temp_free_i32(t2); 2941 } 2942 2943 /* stswx */ 2944 static void gen_stswx(DisasContext *ctx) 2945 { 2946 TCGv t0; 2947 TCGv_i32 t1, t2; 2948 2949 if (ctx->le_mode) { 2950 gen_align_no_le(ctx); 2951 return; 2952 } 2953 gen_set_access_type(ctx, ACCESS_INT); 2954 t0 = tcg_temp_new(); 2955 gen_addr_reg_index(ctx, t0); 2956 t1 = tcg_temp_new_i32(); 2957 tcg_gen_trunc_tl_i32(t1, cpu_xer); 2958 tcg_gen_andi_i32(t1, t1, 0x7F); 2959 t2 = tcg_const_i32(rS(ctx->opcode)); 2960 gen_helper_stsw(cpu_env, t0, t1, t2); 2961 tcg_temp_free(t0); 2962 tcg_temp_free_i32(t1); 2963 tcg_temp_free_i32(t2); 2964 } 2965 2966 /*** Memory synchronisation ***/ 2967 /* eieio */ 2968 static void gen_eieio(DisasContext *ctx) 2969 { 2970 } 2971 2972 #if !defined(CONFIG_USER_ONLY) 2973 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) 2974 { 2975 TCGv_i32 t; 2976 TCGLabel *l; 2977 2978 if (!ctx->lazy_tlb_flush) { 2979 return; 2980 } 2981 l = gen_new_label(); 2982 t = tcg_temp_new_i32(); 2983 tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, tlb_need_flush)); 2984 tcg_gen_brcondi_i32(TCG_COND_EQ, t, 0, l); 2985 if (global) { 2986 gen_helper_check_tlb_flush_global(cpu_env); 2987 } else { 2988 gen_helper_check_tlb_flush_local(cpu_env); 2989 } 2990 gen_set_label(l); 2991 tcg_temp_free_i32(t); 2992 } 2993 #else 2994 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) { } 2995 #endif 2996 2997 /* isync */ 2998 static void gen_isync(DisasContext *ctx) 2999 { 3000 /* 3001 * We need to check for a pending TLB flush. 
This can only happen in 3002 * kernel mode however so check MSR_PR 3003 */ 3004 if (!ctx->pr) { 3005 gen_check_tlb_flush(ctx, false); 3006 } 3007 gen_stop_exception(ctx); 3008 } 3009 3010 #define MEMOP_GET_SIZE(x) (1 << ((x) & MO_SIZE)) 3011 3012 #define LARX(name, memop) \ 3013 static void gen_##name(DisasContext *ctx) \ 3014 { \ 3015 TCGv t0; \ 3016 TCGv gpr = cpu_gpr[rD(ctx->opcode)]; \ 3017 int len = MEMOP_GET_SIZE(memop); \ 3018 gen_set_access_type(ctx, ACCESS_RES); \ 3019 t0 = tcg_temp_local_new(); \ 3020 gen_addr_reg_index(ctx, t0); \ 3021 if ((len) > 1) { \ 3022 gen_check_align(ctx, t0, (len)-1); \ 3023 } \ 3024 tcg_gen_qemu_ld_tl(gpr, t0, ctx->mem_idx, memop); \ 3025 tcg_gen_mov_tl(cpu_reserve, t0); \ 3026 tcg_gen_st_tl(gpr, cpu_env, offsetof(CPUPPCState, reserve_val)); \ 3027 tcg_temp_free(t0); \ 3028 } 3029 3030 /* lwarx */ 3031 LARX(lbarx, DEF_MEMOP(MO_UB)) 3032 LARX(lharx, DEF_MEMOP(MO_UW)) 3033 LARX(lwarx, DEF_MEMOP(MO_UL)) 3034 3035 #define LD_ATOMIC(name, memop, tp, op, eop) \ 3036 static void gen_##name(DisasContext *ctx) \ 3037 { \ 3038 int len = MEMOP_GET_SIZE(memop); \ 3039 uint32_t gpr_FC = FC(ctx->opcode); \ 3040 TCGv EA = tcg_temp_local_new(); \ 3041 TCGv_##tp t0, t1; \ 3042 \ 3043 gen_addr_register(ctx, EA); \ 3044 if (len > 1) { \ 3045 gen_check_align(ctx, EA, len - 1); \ 3046 } \ 3047 t0 = tcg_temp_new_##tp(); \ 3048 t1 = tcg_temp_new_##tp(); \ 3049 tcg_gen_##op(t0, cpu_gpr[rD(ctx->opcode) + 1]); \ 3050 \ 3051 switch (gpr_FC) { \ 3052 case 0: /* Fetch and add */ \ 3053 tcg_gen_atomic_fetch_add_##tp(t1, EA, t0, ctx->mem_idx, memop); \ 3054 break; \ 3055 case 1: /* Fetch and xor */ \ 3056 tcg_gen_atomic_fetch_xor_##tp(t1, EA, t0, ctx->mem_idx, memop); \ 3057 break; \ 3058 case 2: /* Fetch and or */ \ 3059 tcg_gen_atomic_fetch_or_##tp(t1, EA, t0, ctx->mem_idx, memop); \ 3060 break; \ 3061 case 3: /* Fetch and 'and' */ \ 3062 tcg_gen_atomic_fetch_and_##tp(t1, EA, t0, ctx->mem_idx, memop); \ 3063 break; \ 3064 case 8: /* Swap */ \ 3065 tcg_gen_atomic_xchg_##tp(t1, EA, t0, ctx->mem_idx, memop); \ 3066 break; \ 3067 case 4: /* Fetch and max unsigned */ \ 3068 case 5: /* Fetch and max signed */ \ 3069 case 6: /* Fetch and min unsigned */ \ 3070 case 7: /* Fetch and min signed */ \ 3071 case 16: /* compare and swap not equal */ \ 3072 case 24: /* Fetch and increment bounded */ \ 3073 case 25: /* Fetch and increment equal */ \ 3074 case 28: /* Fetch and decrement bounded */ \ 3075 gen_invalid(ctx); \ 3076 break; \ 3077 default: \ 3078 /* invoke data storage error handler */ \ 3079 gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL); \ 3080 } \ 3081 tcg_gen_##eop(cpu_gpr[rD(ctx->opcode)], t1); \ 3082 tcg_temp_free_##tp(t0); \ 3083 tcg_temp_free_##tp(t1); \ 3084 tcg_temp_free(EA); \ 3085 } 3086 3087 LD_ATOMIC(lwat, DEF_MEMOP(MO_UL), i32, trunc_tl_i32, extu_i32_tl) 3088 #if defined(TARGET_PPC64) 3089 LD_ATOMIC(ldat, DEF_MEMOP(MO_Q), i64, mov_i64, mov_i64) 3090 #endif 3091 3092 #define ST_ATOMIC(name, memop, tp, op) \ 3093 static void gen_##name(DisasContext *ctx) \ 3094 { \ 3095 int len = MEMOP_GET_SIZE(memop); \ 3096 uint32_t gpr_FC = FC(ctx->opcode); \ 3097 TCGv EA = tcg_temp_local_new(); \ 3098 TCGv_##tp t0, t1; \ 3099 \ 3100 gen_addr_register(ctx, EA); \ 3101 if (len > 1) { \ 3102 gen_check_align(ctx, EA, len - 1); \ 3103 } \ 3104 t0 = tcg_temp_new_##tp(); \ 3105 t1 = tcg_temp_new_##tp(); \ 3106 tcg_gen_##op(t0, cpu_gpr[rD(ctx->opcode) + 1]); \ 3107 \ 3108 switch (gpr_FC) { \ 3109 case 0: /* add and Store */ \ 3110 tcg_gen_atomic_add_fetch_##tp(t1, EA, t0, 
ctx->mem_idx, memop); \ 3111 break; \ 3112 case 1: /* xor and Store */ \ 3113 tcg_gen_atomic_xor_fetch_##tp(t1, EA, t0, ctx->mem_idx, memop); \ 3114 break; \ 3115 case 2: /* Or and Store */ \ 3116 tcg_gen_atomic_or_fetch_##tp(t1, EA, t0, ctx->mem_idx, memop); \ 3117 break; \ 3118 case 3: /* 'and' and Store */ \ 3119 tcg_gen_atomic_and_fetch_##tp(t1, EA, t0, ctx->mem_idx, memop); \ 3120 break; \ 3121 case 4: /* Store max unsigned */ \ 3122 case 5: /* Store max signed */ \ 3123 case 6: /* Store min unsigned */ \ 3124 case 7: /* Store min signed */ \ 3125 case 24: /* Store twin */ \ 3126 gen_invalid(ctx); \ 3127 break; \ 3128 default: \ 3129 /* invoke data storage error handler */ \ 3130 gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL); \ 3131 } \ 3132 tcg_temp_free_##tp(t0); \ 3133 tcg_temp_free_##tp(t1); \ 3134 tcg_temp_free(EA); \ 3135 } 3136 3137 ST_ATOMIC(stwat, DEF_MEMOP(MO_UL), i32, trunc_tl_i32) 3138 #if defined(TARGET_PPC64) 3139 ST_ATOMIC(stdat, DEF_MEMOP(MO_Q), i64, mov_i64) 3140 #endif 3141 3142 #if defined(CONFIG_USER_ONLY) 3143 static void gen_conditional_store(DisasContext *ctx, TCGv EA, 3144 int reg, int memop) 3145 { 3146 TCGv t0 = tcg_temp_new(); 3147 3148 tcg_gen_st_tl(EA, cpu_env, offsetof(CPUPPCState, reserve_ea)); 3149 tcg_gen_movi_tl(t0, (MEMOP_GET_SIZE(memop) << 5) | reg); 3150 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, reserve_info)); 3151 tcg_temp_free(t0); 3152 gen_exception_err(ctx, POWERPC_EXCP_STCX, 0); 3153 } 3154 #else 3155 static void gen_conditional_store(DisasContext *ctx, TCGv EA, 3156 int reg, int memop) 3157 { 3158 TCGLabel *l1; 3159 3160 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 3161 l1 = gen_new_label(); 3162 tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, l1); 3163 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ); 3164 tcg_gen_qemu_st_tl(cpu_gpr[reg], EA, ctx->mem_idx, memop); 3165 gen_set_label(l1); 3166 tcg_gen_movi_tl(cpu_reserve, -1); 3167 } 3168 #endif 3169 3170 #define STCX(name, memop) \ 3171 static void gen_##name(DisasContext *ctx) \ 3172 { \ 3173 TCGv t0; \ 3174 int len = MEMOP_GET_SIZE(memop); \ 3175 gen_set_access_type(ctx, ACCESS_RES); \ 3176 t0 = tcg_temp_local_new(); \ 3177 gen_addr_reg_index(ctx, t0); \ 3178 if (len > 1) { \ 3179 gen_check_align(ctx, t0, (len) - 1); \ 3180 } \ 3181 gen_conditional_store(ctx, t0, rS(ctx->opcode), memop); \ 3182 tcg_temp_free(t0); \ 3183 } 3184 3185 STCX(stbcx_, DEF_MEMOP(MO_UB)) 3186 STCX(sthcx_, DEF_MEMOP(MO_UW)) 3187 STCX(stwcx_, DEF_MEMOP(MO_UL)) 3188 3189 #if defined(TARGET_PPC64) 3190 /* ldarx */ 3191 LARX(ldarx, DEF_MEMOP(MO_Q)) 3192 /* stdcx. 
*/ 3193 STCX(stdcx_, DEF_MEMOP(MO_Q)) 3194 3195 /* lqarx */ 3196 static void gen_lqarx(DisasContext *ctx) 3197 { 3198 TCGv EA; 3199 int rd = rD(ctx->opcode); 3200 TCGv gpr1, gpr2; 3201 3202 if (unlikely((rd & 1) || (rd == rA(ctx->opcode)) || 3203 (rd == rB(ctx->opcode)))) { 3204 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3205 return; 3206 } 3207 3208 gen_set_access_type(ctx, ACCESS_RES); 3209 EA = tcg_temp_local_new(); 3210 gen_addr_reg_index(ctx, EA); 3211 gen_check_align(ctx, EA, 15); 3212 if (unlikely(ctx->le_mode)) { 3213 gpr1 = cpu_gpr[rd+1]; 3214 gpr2 = cpu_gpr[rd]; 3215 } else { 3216 gpr1 = cpu_gpr[rd]; 3217 gpr2 = cpu_gpr[rd+1]; 3218 } 3219 tcg_gen_qemu_ld_i64(gpr1, EA, ctx->mem_idx, DEF_MEMOP(MO_Q)); 3220 tcg_gen_mov_tl(cpu_reserve, EA); 3221 gen_addr_add(ctx, EA, EA, 8); 3222 tcg_gen_qemu_ld_i64(gpr2, EA, ctx->mem_idx, DEF_MEMOP(MO_Q)); 3223 3224 tcg_gen_st_tl(gpr1, cpu_env, offsetof(CPUPPCState, reserve_val)); 3225 tcg_gen_st_tl(gpr2, cpu_env, offsetof(CPUPPCState, reserve_val2)); 3226 tcg_temp_free(EA); 3227 } 3228 3229 /* stqcx. */ 3230 static void gen_stqcx_(DisasContext *ctx) 3231 { 3232 TCGv EA; 3233 int reg = rS(ctx->opcode); 3234 int len = 16; 3235 #if !defined(CONFIG_USER_ONLY) 3236 TCGLabel *l1; 3237 TCGv gpr1, gpr2; 3238 #endif 3239 3240 if (unlikely((rD(ctx->opcode) & 1))) { 3241 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3242 return; 3243 } 3244 gen_set_access_type(ctx, ACCESS_RES); 3245 EA = tcg_temp_local_new(); 3246 gen_addr_reg_index(ctx, EA); 3247 if (len > 1) { 3248 gen_check_align(ctx, EA, (len) - 1); 3249 } 3250 3251 #if defined(CONFIG_USER_ONLY) 3252 gen_conditional_store(ctx, EA, reg, 16); 3253 #else 3254 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 3255 l1 = gen_new_label(); 3256 tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, l1); 3257 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ); 3258 3259 if (unlikely(ctx->le_mode)) { 3260 gpr1 = cpu_gpr[reg + 1]; 3261 gpr2 = cpu_gpr[reg]; 3262 } else { 3263 gpr1 = cpu_gpr[reg]; 3264 gpr2 = cpu_gpr[reg + 1]; 3265 } 3266 tcg_gen_qemu_st_tl(gpr1, EA, ctx->mem_idx, DEF_MEMOP(MO_Q)); 3267 gen_addr_add(ctx, EA, EA, 8); 3268 tcg_gen_qemu_st_tl(gpr2, EA, ctx->mem_idx, DEF_MEMOP(MO_Q)); 3269 3270 gen_set_label(l1); 3271 tcg_gen_movi_tl(cpu_reserve, -1); 3272 #endif 3273 tcg_temp_free(EA); 3274 } 3275 3276 #endif /* defined(TARGET_PPC64) */ 3277 3278 /* sync */ 3279 static void gen_sync(DisasContext *ctx) 3280 { 3281 uint32_t l = (ctx->opcode >> 21) & 3; 3282 3283 /* 3284 * We may need to check for a pending TLB flush. 3285 * 3286 * We do this on ptesync (l == 2) on ppc64 and any sync pn ppc32. 3287 * 3288 * Additionally, this can only happen in kernel mode however so 3289 * check MSR_PR as well. 
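     *
     * (With the lazy-flush scheme, tlbie only marks tlb_need_flush; the
     * actual flush is deferred to a synchronisation point such as this
     * one, where the invalidation has to become visible.)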
3290 */ 3291 if (((l == 2) || !(ctx->insns_flags & PPC_64B)) && !ctx->pr) { 3292 gen_check_tlb_flush(ctx, true); 3293 } 3294 } 3295 3296 /* wait */ 3297 static void gen_wait(DisasContext *ctx) 3298 { 3299 TCGv_i32 t0 = tcg_const_i32(1); 3300 tcg_gen_st_i32(t0, cpu_env, 3301 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted)); 3302 tcg_temp_free_i32(t0); 3303 /* Stop translation, as the CPU is supposed to sleep from now */ 3304 gen_exception_nip(ctx, EXCP_HLT, ctx->nip); 3305 } 3306 3307 #if defined(TARGET_PPC64) 3308 static void gen_doze(DisasContext *ctx) 3309 { 3310 #if defined(CONFIG_USER_ONLY) 3311 GEN_PRIV; 3312 #else 3313 TCGv_i32 t; 3314 3315 CHK_HV; 3316 t = tcg_const_i32(PPC_PM_DOZE); 3317 gen_helper_pminsn(cpu_env, t); 3318 tcg_temp_free_i32(t); 3319 gen_stop_exception(ctx); 3320 #endif /* defined(CONFIG_USER_ONLY) */ 3321 } 3322 3323 static void gen_nap(DisasContext *ctx) 3324 { 3325 #if defined(CONFIG_USER_ONLY) 3326 GEN_PRIV; 3327 #else 3328 TCGv_i32 t; 3329 3330 CHK_HV; 3331 t = tcg_const_i32(PPC_PM_NAP); 3332 gen_helper_pminsn(cpu_env, t); 3333 tcg_temp_free_i32(t); 3334 gen_stop_exception(ctx); 3335 #endif /* defined(CONFIG_USER_ONLY) */ 3336 } 3337 3338 static void gen_stop(DisasContext *ctx) 3339 { 3340 gen_nap(ctx); 3341 } 3342 3343 static void gen_sleep(DisasContext *ctx) 3344 { 3345 #if defined(CONFIG_USER_ONLY) 3346 GEN_PRIV; 3347 #else 3348 TCGv_i32 t; 3349 3350 CHK_HV; 3351 t = tcg_const_i32(PPC_PM_SLEEP); 3352 gen_helper_pminsn(cpu_env, t); 3353 tcg_temp_free_i32(t); 3354 gen_stop_exception(ctx); 3355 #endif /* defined(CONFIG_USER_ONLY) */ 3356 } 3357 3358 static void gen_rvwinkle(DisasContext *ctx) 3359 { 3360 #if defined(CONFIG_USER_ONLY) 3361 GEN_PRIV; 3362 #else 3363 TCGv_i32 t; 3364 3365 CHK_HV; 3366 t = tcg_const_i32(PPC_PM_RVWINKLE); 3367 gen_helper_pminsn(cpu_env, t); 3368 tcg_temp_free_i32(t); 3369 gen_stop_exception(ctx); 3370 #endif /* defined(CONFIG_USER_ONLY) */ 3371 } 3372 #endif /* #if defined(TARGET_PPC64) */ 3373 3374 static inline void gen_update_cfar(DisasContext *ctx, target_ulong nip) 3375 { 3376 #if defined(TARGET_PPC64) 3377 if (ctx->has_cfar) 3378 tcg_gen_movi_tl(cpu_cfar, nip); 3379 #endif 3380 } 3381 3382 static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest) 3383 { 3384 if (unlikely(ctx->singlestep_enabled)) { 3385 return false; 3386 } 3387 3388 #ifndef CONFIG_USER_ONLY 3389 return (ctx->tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK); 3390 #else 3391 return true; 3392 #endif 3393 } 3394 3395 /*** Branch ***/ 3396 static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest) 3397 { 3398 if (NARROW_MODE(ctx)) { 3399 dest = (uint32_t) dest; 3400 } 3401 if (use_goto_tb(ctx, dest)) { 3402 tcg_gen_goto_tb(n); 3403 tcg_gen_movi_tl(cpu_nip, dest & ~3); 3404 tcg_gen_exit_tb((uintptr_t)ctx->tb + n); 3405 } else { 3406 tcg_gen_movi_tl(cpu_nip, dest & ~3); 3407 if (unlikely(ctx->singlestep_enabled)) { 3408 if ((ctx->singlestep_enabled & 3409 (CPU_BRANCH_STEP | CPU_SINGLE_STEP)) && 3410 (ctx->exception == POWERPC_EXCP_BRANCH || 3411 ctx->exception == POWERPC_EXCP_TRACE)) { 3412 gen_exception_nip(ctx, POWERPC_EXCP_TRACE, dest); 3413 } 3414 if (ctx->singlestep_enabled & GDBSTUB_SINGLE_STEP) { 3415 gen_debug_exception(ctx); 3416 } 3417 } 3418 tcg_gen_exit_tb(0); 3419 } 3420 } 3421 3422 static inline void gen_setlr(DisasContext *ctx, target_ulong nip) 3423 { 3424 if (NARROW_MODE(ctx)) { 3425 nip = (uint32_t)nip; 3426 } 3427 tcg_gen_movi_tl(cpu_lr, nip); 3428 } 3429 3430 /* b ba bl bla */ 3431 static void 
gen_b(DisasContext *ctx) 3432 { 3433 target_ulong li, target; 3434 3435 ctx->exception = POWERPC_EXCP_BRANCH; 3436 /* sign extend LI */ 3437 li = LI(ctx->opcode); 3438 li = (li ^ 0x02000000) - 0x02000000; 3439 if (likely(AA(ctx->opcode) == 0)) { 3440 target = ctx->nip + li - 4; 3441 } else { 3442 target = li; 3443 } 3444 if (LK(ctx->opcode)) { 3445 gen_setlr(ctx, ctx->nip); 3446 } 3447 gen_update_cfar(ctx, ctx->nip - 4); 3448 gen_goto_tb(ctx, 0, target); 3449 } 3450 3451 #define BCOND_IM 0 3452 #define BCOND_LR 1 3453 #define BCOND_CTR 2 3454 #define BCOND_TAR 3 3455 3456 static inline void gen_bcond(DisasContext *ctx, int type) 3457 { 3458 uint32_t bo = BO(ctx->opcode); 3459 TCGLabel *l1; 3460 TCGv target; 3461 3462 ctx->exception = POWERPC_EXCP_BRANCH; 3463 if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) { 3464 target = tcg_temp_local_new(); 3465 if (type == BCOND_CTR) 3466 tcg_gen_mov_tl(target, cpu_ctr); 3467 else if (type == BCOND_TAR) 3468 gen_load_spr(target, SPR_TAR); 3469 else 3470 tcg_gen_mov_tl(target, cpu_lr); 3471 } else { 3472 TCGV_UNUSED(target); 3473 } 3474 if (LK(ctx->opcode)) 3475 gen_setlr(ctx, ctx->nip); 3476 l1 = gen_new_label(); 3477 if ((bo & 0x4) == 0) { 3478 /* Decrement and test CTR */ 3479 TCGv temp = tcg_temp_new(); 3480 if (unlikely(type == BCOND_CTR)) { 3481 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3482 return; 3483 } 3484 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1); 3485 if (NARROW_MODE(ctx)) { 3486 tcg_gen_ext32u_tl(temp, cpu_ctr); 3487 } else { 3488 tcg_gen_mov_tl(temp, cpu_ctr); 3489 } 3490 if (bo & 0x2) { 3491 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1); 3492 } else { 3493 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1); 3494 } 3495 tcg_temp_free(temp); 3496 } 3497 if ((bo & 0x10) == 0) { 3498 /* Test CR */ 3499 uint32_t bi = BI(ctx->opcode); 3500 uint32_t mask = 0x08 >> (bi & 0x03); 3501 TCGv_i32 temp = tcg_temp_new_i32(); 3502 3503 if (bo & 0x8) { 3504 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask); 3505 tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1); 3506 } else { 3507 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask); 3508 tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1); 3509 } 3510 tcg_temp_free_i32(temp); 3511 } 3512 gen_update_cfar(ctx, ctx->nip - 4); 3513 if (type == BCOND_IM) { 3514 target_ulong li = (target_long)((int16_t)(BD(ctx->opcode))); 3515 if (likely(AA(ctx->opcode) == 0)) { 3516 gen_goto_tb(ctx, 0, ctx->nip + li - 4); 3517 } else { 3518 gen_goto_tb(ctx, 0, li); 3519 } 3520 if ((bo & 0x14) != 0x14) { 3521 gen_set_label(l1); 3522 gen_goto_tb(ctx, 1, ctx->nip); 3523 } 3524 } else { 3525 if (NARROW_MODE(ctx)) { 3526 tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3); 3527 } else { 3528 tcg_gen_andi_tl(cpu_nip, target, ~3); 3529 } 3530 tcg_gen_exit_tb(0); 3531 if ((bo & 0x14) != 0x14) { 3532 gen_set_label(l1); 3533 gen_update_nip(ctx, ctx->nip); 3534 tcg_gen_exit_tb(0); 3535 } 3536 } 3537 if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) { 3538 tcg_temp_free(target); 3539 } 3540 } 3541 3542 static void gen_bc(DisasContext *ctx) 3543 { 3544 gen_bcond(ctx, BCOND_IM); 3545 } 3546 3547 static void gen_bcctr(DisasContext *ctx) 3548 { 3549 gen_bcond(ctx, BCOND_CTR); 3550 } 3551 3552 static void gen_bclr(DisasContext *ctx) 3553 { 3554 gen_bcond(ctx, BCOND_LR); 3555 } 3556 3557 static void gen_bctar(DisasContext *ctx) 3558 { 3559 gen_bcond(ctx, BCOND_TAR); 3560 } 3561 3562 /*** Condition register logical ***/ 3563 #define GEN_CRLOGIC(name, tcg_op, opc) \ 3564 static void glue(gen_, name)(DisasContext *ctx) \ 3565 { \ 3566 
uint8_t bitmask; \ 3567 int sh; \ 3568 TCGv_i32 t0, t1; \ 3569 sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03); \ 3570 t0 = tcg_temp_new_i32(); \ 3571 if (sh > 0) \ 3572 tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh); \ 3573 else if (sh < 0) \ 3574 tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh); \ 3575 else \ 3576 tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]); \ 3577 t1 = tcg_temp_new_i32(); \ 3578 sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03); \ 3579 if (sh > 0) \ 3580 tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh); \ 3581 else if (sh < 0) \ 3582 tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh); \ 3583 else \ 3584 tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]); \ 3585 tcg_op(t0, t0, t1); \ 3586 bitmask = 0x08 >> (crbD(ctx->opcode) & 0x03); \ 3587 tcg_gen_andi_i32(t0, t0, bitmask); \ 3588 tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask); \ 3589 tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1); \ 3590 tcg_temp_free_i32(t0); \ 3591 tcg_temp_free_i32(t1); \ 3592 } 3593 3594 /* crand */ 3595 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08); 3596 /* crandc */ 3597 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04); 3598 /* creqv */ 3599 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09); 3600 /* crnand */ 3601 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07); 3602 /* crnor */ 3603 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01); 3604 /* cror */ 3605 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E); 3606 /* crorc */ 3607 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D); 3608 /* crxor */ 3609 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06); 3610 3611 /* mcrf */ 3612 static void gen_mcrf(DisasContext *ctx) 3613 { 3614 tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]); 3615 } 3616 3617 /*** System linkage ***/ 3618 3619 /* rfi (supervisor only) */ 3620 static void gen_rfi(DisasContext *ctx) 3621 { 3622 #if defined(CONFIG_USER_ONLY) 3623 GEN_PRIV; 3624 #else 3625 /* This instruction doesn't exist anymore on 64-bit server 3626 * processors compliant with arch 2.x 3627 */ 3628 if (ctx->insns_flags & PPC_SEGMENT_64B) { 3629 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3630 return; 3631 } 3632 /* Restore CPU state */ 3633 CHK_SV; 3634 gen_update_cfar(ctx, ctx->nip - 4); 3635 gen_helper_rfi(cpu_env); 3636 gen_sync_exception(ctx); 3637 #endif 3638 } 3639 3640 #if defined(TARGET_PPC64) 3641 static void gen_rfid(DisasContext *ctx) 3642 { 3643 #if defined(CONFIG_USER_ONLY) 3644 GEN_PRIV; 3645 #else 3646 /* Restore CPU state */ 3647 CHK_SV; 3648 gen_update_cfar(ctx, ctx->nip - 4); 3649 gen_helper_rfid(cpu_env); 3650 gen_sync_exception(ctx); 3651 #endif 3652 } 3653 3654 static void gen_hrfid(DisasContext *ctx) 3655 { 3656 #if defined(CONFIG_USER_ONLY) 3657 GEN_PRIV; 3658 #else 3659 /* Restore CPU state */ 3660 CHK_HV; 3661 gen_helper_hrfid(cpu_env); 3662 gen_sync_exception(ctx); 3663 #endif 3664 } 3665 #endif 3666 3667 /* sc */ 3668 #if defined(CONFIG_USER_ONLY) 3669 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER 3670 #else 3671 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL 3672 #endif 3673 static void gen_sc(DisasContext *ctx) 3674 { 3675 uint32_t lev; 3676 3677 lev = (ctx->opcode >> 5) & 0x7F; 3678 gen_exception_err(ctx, POWERPC_SYSCALL, lev); 3679 } 3680 3681 /*** Trap ***/ 3682 3683 /* Check for unconditional traps (always or never) */ 3684 static bool check_unconditional_trap(DisasContext *ctx) 3685 { 3686 /* Trap never */ 3687 if (TO(ctx->opcode) == 0) { 3688 return true; 3689 } 3690 /* Trap always */ 3691 if 
(TO(ctx->opcode) == 31) { 3692 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_TRAP); 3693 return true; 3694 } 3695 return false; 3696 } 3697 3698 /* tw */ 3699 static void gen_tw(DisasContext *ctx) 3700 { 3701 TCGv_i32 t0; 3702 3703 if (check_unconditional_trap(ctx)) { 3704 return; 3705 } 3706 t0 = tcg_const_i32(TO(ctx->opcode)); 3707 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 3708 t0); 3709 tcg_temp_free_i32(t0); 3710 } 3711 3712 /* twi */ 3713 static void gen_twi(DisasContext *ctx) 3714 { 3715 TCGv t0; 3716 TCGv_i32 t1; 3717 3718 if (check_unconditional_trap(ctx)) { 3719 return; 3720 } 3721 t0 = tcg_const_tl(SIMM(ctx->opcode)); 3722 t1 = tcg_const_i32(TO(ctx->opcode)); 3723 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1); 3724 tcg_temp_free(t0); 3725 tcg_temp_free_i32(t1); 3726 } 3727 3728 #if defined(TARGET_PPC64) 3729 /* td */ 3730 static void gen_td(DisasContext *ctx) 3731 { 3732 TCGv_i32 t0; 3733 3734 if (check_unconditional_trap(ctx)) { 3735 return; 3736 } 3737 t0 = tcg_const_i32(TO(ctx->opcode)); 3738 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 3739 t0); 3740 tcg_temp_free_i32(t0); 3741 } 3742 3743 /* tdi */ 3744 static void gen_tdi(DisasContext *ctx) 3745 { 3746 TCGv t0; 3747 TCGv_i32 t1; 3748 3749 if (check_unconditional_trap(ctx)) { 3750 return; 3751 } 3752 t0 = tcg_const_tl(SIMM(ctx->opcode)); 3753 t1 = tcg_const_i32(TO(ctx->opcode)); 3754 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1); 3755 tcg_temp_free(t0); 3756 tcg_temp_free_i32(t1); 3757 } 3758 #endif 3759 3760 /*** Processor control ***/ 3761 3762 static void gen_read_xer(DisasContext *ctx, TCGv dst) 3763 { 3764 TCGv t0 = tcg_temp_new(); 3765 TCGv t1 = tcg_temp_new(); 3766 TCGv t2 = tcg_temp_new(); 3767 tcg_gen_mov_tl(dst, cpu_xer); 3768 tcg_gen_shli_tl(t0, cpu_so, XER_SO); 3769 tcg_gen_shli_tl(t1, cpu_ov, XER_OV); 3770 tcg_gen_shli_tl(t2, cpu_ca, XER_CA); 3771 tcg_gen_or_tl(t0, t0, t1); 3772 tcg_gen_or_tl(dst, dst, t2); 3773 tcg_gen_or_tl(dst, dst, t0); 3774 if (is_isa300(ctx)) { 3775 tcg_gen_shli_tl(t0, cpu_ov32, XER_OV32); 3776 tcg_gen_or_tl(dst, dst, t0); 3777 tcg_gen_shli_tl(t0, cpu_ca32, XER_CA32); 3778 tcg_gen_or_tl(dst, dst, t0); 3779 } 3780 tcg_temp_free(t0); 3781 tcg_temp_free(t1); 3782 tcg_temp_free(t2); 3783 } 3784 3785 static void gen_write_xer(TCGv src) 3786 { 3787 /* Write all flags, while reading back check for isa300 */ 3788 tcg_gen_andi_tl(cpu_xer, src, 3789 ~((1u << XER_SO) | 3790 (1u << XER_OV) | (1u << XER_OV32) | 3791 (1u << XER_CA) | (1u << XER_CA32))); 3792 tcg_gen_extract_tl(cpu_ov32, src, XER_OV32, 1); 3793 tcg_gen_extract_tl(cpu_ca32, src, XER_CA32, 1); 3794 tcg_gen_extract_tl(cpu_so, src, XER_SO, 1); 3795 tcg_gen_extract_tl(cpu_ov, src, XER_OV, 1); 3796 tcg_gen_extract_tl(cpu_ca, src, XER_CA, 1); 3797 } 3798 3799 /* mcrxr */ 3800 static void gen_mcrxr(DisasContext *ctx) 3801 { 3802 TCGv_i32 t0 = tcg_temp_new_i32(); 3803 TCGv_i32 t1 = tcg_temp_new_i32(); 3804 TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)]; 3805 3806 tcg_gen_trunc_tl_i32(t0, cpu_so); 3807 tcg_gen_trunc_tl_i32(t1, cpu_ov); 3808 tcg_gen_trunc_tl_i32(dst, cpu_ca); 3809 tcg_gen_shli_i32(t0, t0, 3); 3810 tcg_gen_shli_i32(t1, t1, 2); 3811 tcg_gen_shli_i32(dst, dst, 1); 3812 tcg_gen_or_i32(dst, dst, t0); 3813 tcg_gen_or_i32(dst, dst, t1); 3814 tcg_temp_free_i32(t0); 3815 tcg_temp_free_i32(t1); 3816 3817 tcg_gen_movi_tl(cpu_so, 0); 3818 tcg_gen_movi_tl(cpu_ov, 0); 3819 tcg_gen_movi_tl(cpu_ca, 0); 3820 } 3821 3822 #ifdef TARGET_PPC64 3823 /* mcrxrx */ 
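/* mcrxrx copies OV, OV32, CA and CA32 into the target CR field as a
 * 4-bit value with OV in the most significant bit; the shifts below
 * assemble exactly that layout. */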
3824 static void gen_mcrxrx(DisasContext *ctx) 3825 { 3826 TCGv t0 = tcg_temp_new(); 3827 TCGv t1 = tcg_temp_new(); 3828 TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)]; 3829 3830 /* copy OV and OV32 */ 3831 tcg_gen_shli_tl(t0, cpu_ov, 1); 3832 tcg_gen_or_tl(t0, t0, cpu_ov32); 3833 tcg_gen_shli_tl(t0, t0, 2); 3834 /* copy CA and CA32 */ 3835 tcg_gen_shli_tl(t1, cpu_ca, 1); 3836 tcg_gen_or_tl(t1, t1, cpu_ca32); 3837 tcg_gen_or_tl(t0, t0, t1); 3838 tcg_gen_trunc_tl_i32(dst, t0); 3839 tcg_temp_free(t0); 3840 tcg_temp_free(t1); 3841 } 3842 #endif 3843 3844 /* mfcr mfocrf */ 3845 static void gen_mfcr(DisasContext *ctx) 3846 { 3847 uint32_t crm, crn; 3848 3849 if (likely(ctx->opcode & 0x00100000)) { 3850 crm = CRM(ctx->opcode); 3851 if (likely(crm && ((crm & (crm - 1)) == 0))) { 3852 crn = ctz32 (crm); 3853 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]); 3854 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], 3855 cpu_gpr[rD(ctx->opcode)], crn * 4); 3856 } 3857 } else { 3858 TCGv_i32 t0 = tcg_temp_new_i32(); 3859 tcg_gen_mov_i32(t0, cpu_crf[0]); 3860 tcg_gen_shli_i32(t0, t0, 4); 3861 tcg_gen_or_i32(t0, t0, cpu_crf[1]); 3862 tcg_gen_shli_i32(t0, t0, 4); 3863 tcg_gen_or_i32(t0, t0, cpu_crf[2]); 3864 tcg_gen_shli_i32(t0, t0, 4); 3865 tcg_gen_or_i32(t0, t0, cpu_crf[3]); 3866 tcg_gen_shli_i32(t0, t0, 4); 3867 tcg_gen_or_i32(t0, t0, cpu_crf[4]); 3868 tcg_gen_shli_i32(t0, t0, 4); 3869 tcg_gen_or_i32(t0, t0, cpu_crf[5]); 3870 tcg_gen_shli_i32(t0, t0, 4); 3871 tcg_gen_or_i32(t0, t0, cpu_crf[6]); 3872 tcg_gen_shli_i32(t0, t0, 4); 3873 tcg_gen_or_i32(t0, t0, cpu_crf[7]); 3874 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0); 3875 tcg_temp_free_i32(t0); 3876 } 3877 } 3878 3879 /* mfmsr */ 3880 static void gen_mfmsr(DisasContext *ctx) 3881 { 3882 CHK_SV; 3883 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr); 3884 } 3885 3886 static void spr_noaccess(DisasContext *ctx, int gprn, int sprn) 3887 { 3888 #if 0 3889 sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5); 3890 printf("ERROR: try to access SPR %d !\n", sprn); 3891 #endif 3892 } 3893 #define SPR_NOACCESS (&spr_noaccess) 3894 3895 /* mfspr */ 3896 static inline void gen_op_mfspr(DisasContext *ctx) 3897 { 3898 void (*read_cb)(DisasContext *ctx, int gprn, int sprn); 3899 uint32_t sprn = SPR(ctx->opcode); 3900 3901 #if defined(CONFIG_USER_ONLY) 3902 read_cb = ctx->spr_cb[sprn].uea_read; 3903 #else 3904 if (ctx->pr) { 3905 read_cb = ctx->spr_cb[sprn].uea_read; 3906 } else if (ctx->hv) { 3907 read_cb = ctx->spr_cb[sprn].hea_read; 3908 } else { 3909 read_cb = ctx->spr_cb[sprn].oea_read; 3910 } 3911 #endif 3912 if (likely(read_cb != NULL)) { 3913 if (likely(read_cb != SPR_NOACCESS)) { 3914 (*read_cb)(ctx, rD(ctx->opcode), sprn); 3915 } else { 3916 /* Privilege exception */ 3917 /* This is a hack to avoid warnings when running Linux: 3918 * this OS breaks the PowerPC virtualisation model, 3919 * allowing userland application to read the PVR 3920 */ 3921 if (sprn != SPR_PVR) { 3922 fprintf(stderr, "Trying to read privileged spr %d (0x%03x) at " 3923 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4); 3924 if (qemu_log_separate()) { 3925 qemu_log("Trying to read privileged spr %d (0x%03x) at " 3926 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4); 3927 } 3928 } 3929 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG); 3930 } 3931 } else { 3932 /* ISA 2.07 defines these as no-ops */ 3933 if ((ctx->insns_flags2 & PPC2_ISA207S) && 3934 (sprn >= 808 && sprn <= 811)) { 3935 /* This is a nop */ 3936 return; 3937 } 3938 /* Not defined */ 3939 fprintf(stderr, "Trying to read 
invalid spr %d (0x%03x) at " 3940 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4); 3941 if (qemu_log_separate()) { 3942 qemu_log("Trying to read invalid spr %d (0x%03x) at " 3943 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4); 3944 } 3945 3946 /* The behaviour depends on MSR:PR and SPR# bit 0x10, 3947 * it can generate a priv, a hv emu or a no-op 3948 */ 3949 if (sprn & 0x10) { 3950 if (ctx->pr) { 3951 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 3952 } 3953 } else { 3954 if (ctx->pr || sprn == 0 || sprn == 4 || sprn == 5 || sprn == 6) { 3955 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 3956 } 3957 } 3958 } 3959 } 3960 3961 static void gen_mfspr(DisasContext *ctx) 3962 { 3963 gen_op_mfspr(ctx); 3964 } 3965 3966 /* mftb */ 3967 static void gen_mftb(DisasContext *ctx) 3968 { 3969 gen_op_mfspr(ctx); 3970 } 3971 3972 /* mtcrf mtocrf*/ 3973 static void gen_mtcrf(DisasContext *ctx) 3974 { 3975 uint32_t crm, crn; 3976 3977 crm = CRM(ctx->opcode); 3978 if (likely((ctx->opcode & 0x00100000))) { 3979 if (crm && ((crm & (crm - 1)) == 0)) { 3980 TCGv_i32 temp = tcg_temp_new_i32(); 3981 crn = ctz32 (crm); 3982 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]); 3983 tcg_gen_shri_i32(temp, temp, crn * 4); 3984 tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf); 3985 tcg_temp_free_i32(temp); 3986 } 3987 } else { 3988 TCGv_i32 temp = tcg_temp_new_i32(); 3989 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]); 3990 for (crn = 0 ; crn < 8 ; crn++) { 3991 if (crm & (1 << crn)) { 3992 tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4); 3993 tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf); 3994 } 3995 } 3996 tcg_temp_free_i32(temp); 3997 } 3998 } 3999 4000 /* mtmsr */ 4001 #if defined(TARGET_PPC64) 4002 static void gen_mtmsrd(DisasContext *ctx) 4003 { 4004 CHK_SV; 4005 4006 #if !defined(CONFIG_USER_ONLY) 4007 if (ctx->opcode & 0x00010000) { 4008 /* Special form that does not need any synchronisation */ 4009 TCGv t0 = tcg_temp_new(); 4010 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE)); 4011 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE))); 4012 tcg_gen_or_tl(cpu_msr, cpu_msr, t0); 4013 tcg_temp_free(t0); 4014 } else { 4015 /* XXX: we need to update nip before the store 4016 * if we enter power saving mode, we will exit the loop 4017 * directly from ppc_store_msr 4018 */ 4019 gen_update_nip(ctx, ctx->nip); 4020 gen_helper_store_msr(cpu_env, cpu_gpr[rS(ctx->opcode)]); 4021 /* Must stop the translation as machine state (may have) changed */ 4022 /* Note that mtmsr is not always defined as context-synchronizing */ 4023 gen_stop_exception(ctx); 4024 } 4025 #endif /* !defined(CONFIG_USER_ONLY) */ 4026 } 4027 #endif /* defined(TARGET_PPC64) */ 4028 4029 static void gen_mtmsr(DisasContext *ctx) 4030 { 4031 CHK_SV; 4032 4033 #if !defined(CONFIG_USER_ONLY) 4034 if (ctx->opcode & 0x00010000) { 4035 /* Special form that does not need any synchronisation */ 4036 TCGv t0 = tcg_temp_new(); 4037 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE)); 4038 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE))); 4039 tcg_gen_or_tl(cpu_msr, cpu_msr, t0); 4040 tcg_temp_free(t0); 4041 } else { 4042 TCGv msr = tcg_temp_new(); 4043 4044 /* XXX: we need to update nip before the store 4045 * if we enter power saving mode, we will exit the loop 4046 * directly from ppc_store_msr 4047 */ 4048 gen_update_nip(ctx, ctx->nip); 4049 #if defined(TARGET_PPC64) 4050 tcg_gen_deposit_tl(msr, cpu_msr, 
cpu_gpr[rS(ctx->opcode)], 0, 32); 4051 #else 4052 tcg_gen_mov_tl(msr, cpu_gpr[rS(ctx->opcode)]); 4053 #endif 4054 gen_helper_store_msr(cpu_env, msr); 4055 tcg_temp_free(msr); 4056 /* Must stop the translation as machine state (may have) changed */ 4057 /* Note that mtmsr is not always defined as context-synchronizing */ 4058 gen_stop_exception(ctx); 4059 } 4060 #endif 4061 } 4062 4063 /* mtspr */ 4064 static void gen_mtspr(DisasContext *ctx) 4065 { 4066 void (*write_cb)(DisasContext *ctx, int sprn, int gprn); 4067 uint32_t sprn = SPR(ctx->opcode); 4068 4069 #if defined(CONFIG_USER_ONLY) 4070 write_cb = ctx->spr_cb[sprn].uea_write; 4071 #else 4072 if (ctx->pr) { 4073 write_cb = ctx->spr_cb[sprn].uea_write; 4074 } else if (ctx->hv) { 4075 write_cb = ctx->spr_cb[sprn].hea_write; 4076 } else { 4077 write_cb = ctx->spr_cb[sprn].oea_write; 4078 } 4079 #endif 4080 if (likely(write_cb != NULL)) { 4081 if (likely(write_cb != SPR_NOACCESS)) { 4082 (*write_cb)(ctx, sprn, rS(ctx->opcode)); 4083 } else { 4084 /* Privilege exception */ 4085 fprintf(stderr, "Trying to write privileged spr %d (0x%03x) at " 4086 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4); 4087 if (qemu_log_separate()) { 4088 qemu_log("Trying to write privileged spr %d (0x%03x) at " 4089 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4); 4090 } 4091 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG); 4092 } 4093 } else { 4094 /* ISA 2.07 defines these as no-ops */ 4095 if ((ctx->insns_flags2 & PPC2_ISA207S) && 4096 (sprn >= 808 && sprn <= 811)) { 4097 /* This is a nop */ 4098 return; 4099 } 4100 4101 /* Not defined */ 4102 if (qemu_log_separate()) { 4103 qemu_log("Trying to write invalid spr %d (0x%03x) at " 4104 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4); 4105 } 4106 fprintf(stderr, "Trying to write invalid spr %d (0x%03x) at " 4107 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4); 4108 4109 4110 /* The behaviour depends on MSR:PR and SPR# bit 0x10, 4111 * it can generate a priv, a hv emu or a no-op 4112 */ 4113 if (sprn & 0x10) { 4114 if (ctx->pr) { 4115 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 4116 } 4117 } else { 4118 if (ctx->pr || sprn == 0) { 4119 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 4120 } 4121 } 4122 } 4123 } 4124 4125 #if defined(TARGET_PPC64) 4126 /* setb */ 4127 static void gen_setb(DisasContext *ctx) 4128 { 4129 TCGv_i32 t0 = tcg_temp_new_i32(); 4130 TCGv_i32 t8 = tcg_temp_new_i32(); 4131 TCGv_i32 tm1 = tcg_temp_new_i32(); 4132 int crf = crfS(ctx->opcode); 4133 4134 tcg_gen_setcondi_i32(TCG_COND_GEU, t0, cpu_crf[crf], 4); 4135 tcg_gen_movi_i32(t8, 8); 4136 tcg_gen_movi_i32(tm1, -1); 4137 tcg_gen_movcond_i32(TCG_COND_GEU, t0, cpu_crf[crf], t8, tm1, t0); 4138 tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0); 4139 4140 tcg_temp_free_i32(t0); 4141 tcg_temp_free_i32(t8); 4142 tcg_temp_free_i32(tm1); 4143 } 4144 #endif 4145 4146 /*** Cache management ***/ 4147 4148 /* dcbf */ 4149 static void gen_dcbf(DisasContext *ctx) 4150 { 4151 /* XXX: specification says this is treated as a load by the MMU */ 4152 TCGv t0; 4153 gen_set_access_type(ctx, ACCESS_CACHE); 4154 t0 = tcg_temp_new(); 4155 gen_addr_reg_index(ctx, t0); 4156 gen_qemu_ld8u(ctx, t0, t0); 4157 tcg_temp_free(t0); 4158 } 4159 4160 /* dcbi (Supervisor only) */ 4161 static void gen_dcbi(DisasContext *ctx) 4162 { 4163 #if defined(CONFIG_USER_ONLY) 4164 GEN_PRIV; 4165 #else 4166 TCGv EA, val; 4167 4168 CHK_SV; 4169 EA = tcg_temp_new(); 4170 gen_set_access_type(ctx, ACCESS_CACHE); 4171 gen_addr_reg_index(ctx, EA); 4172 val = tcg_temp_new(); 4173 /* XXX: 
specification says this should be treated as a store by the MMU */ 4174 gen_qemu_ld8u(ctx, val, EA); 4175 gen_qemu_st8(ctx, val, EA); 4176 tcg_temp_free(val); 4177 tcg_temp_free(EA); 4178 #endif /* defined(CONFIG_USER_ONLY) */ 4179 } 4180 4181 /* dcdst */ 4182 static void gen_dcbst(DisasContext *ctx) 4183 { 4184 /* XXX: specification say this is treated as a load by the MMU */ 4185 TCGv t0; 4186 gen_set_access_type(ctx, ACCESS_CACHE); 4187 t0 = tcg_temp_new(); 4188 gen_addr_reg_index(ctx, t0); 4189 gen_qemu_ld8u(ctx, t0, t0); 4190 tcg_temp_free(t0); 4191 } 4192 4193 /* dcbt */ 4194 static void gen_dcbt(DisasContext *ctx) 4195 { 4196 /* interpreted as no-op */ 4197 /* XXX: specification say this is treated as a load by the MMU 4198 * but does not generate any exception 4199 */ 4200 } 4201 4202 /* dcbtst */ 4203 static void gen_dcbtst(DisasContext *ctx) 4204 { 4205 /* interpreted as no-op */ 4206 /* XXX: specification say this is treated as a load by the MMU 4207 * but does not generate any exception 4208 */ 4209 } 4210 4211 /* dcbtls */ 4212 static void gen_dcbtls(DisasContext *ctx) 4213 { 4214 /* Always fails locking the cache */ 4215 TCGv t0 = tcg_temp_new(); 4216 gen_load_spr(t0, SPR_Exxx_L1CSR0); 4217 tcg_gen_ori_tl(t0, t0, L1CSR0_CUL); 4218 gen_store_spr(SPR_Exxx_L1CSR0, t0); 4219 tcg_temp_free(t0); 4220 } 4221 4222 /* dcbz */ 4223 static void gen_dcbz(DisasContext *ctx) 4224 { 4225 TCGv tcgv_addr; 4226 TCGv_i32 tcgv_op; 4227 4228 gen_set_access_type(ctx, ACCESS_CACHE); 4229 tcgv_addr = tcg_temp_new(); 4230 tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000); 4231 gen_addr_reg_index(ctx, tcgv_addr); 4232 gen_helper_dcbz(cpu_env, tcgv_addr, tcgv_op); 4233 tcg_temp_free(tcgv_addr); 4234 tcg_temp_free_i32(tcgv_op); 4235 } 4236 4237 /* dst / dstt */ 4238 static void gen_dst(DisasContext *ctx) 4239 { 4240 if (rA(ctx->opcode) == 0) { 4241 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 4242 } else { 4243 /* interpreted as no-op */ 4244 } 4245 } 4246 4247 /* dstst /dststt */ 4248 static void gen_dstst(DisasContext *ctx) 4249 { 4250 if (rA(ctx->opcode) == 0) { 4251 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 4252 } else { 4253 /* interpreted as no-op */ 4254 } 4255 4256 } 4257 4258 /* dss / dssall */ 4259 static void gen_dss(DisasContext *ctx) 4260 { 4261 /* interpreted as no-op */ 4262 } 4263 4264 /* icbi */ 4265 static void gen_icbi(DisasContext *ctx) 4266 { 4267 TCGv t0; 4268 gen_set_access_type(ctx, ACCESS_CACHE); 4269 t0 = tcg_temp_new(); 4270 gen_addr_reg_index(ctx, t0); 4271 gen_helper_icbi(cpu_env, t0); 4272 tcg_temp_free(t0); 4273 } 4274 4275 /* Optional: */ 4276 /* dcba */ 4277 static void gen_dcba(DisasContext *ctx) 4278 { 4279 /* interpreted as no-op */ 4280 /* XXX: specification say this is treated as a store by the MMU 4281 * but does not generate any exception 4282 */ 4283 } 4284 4285 /*** Segment register manipulation ***/ 4286 /* Supervisor only: */ 4287 4288 /* mfsr */ 4289 static void gen_mfsr(DisasContext *ctx) 4290 { 4291 #if defined(CONFIG_USER_ONLY) 4292 GEN_PRIV; 4293 #else 4294 TCGv t0; 4295 4296 CHK_SV; 4297 t0 = tcg_const_tl(SR(ctx->opcode)); 4298 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 4299 tcg_temp_free(t0); 4300 #endif /* defined(CONFIG_USER_ONLY) */ 4301 } 4302 4303 /* mfsrin */ 4304 static void gen_mfsrin(DisasContext *ctx) 4305 { 4306 #if defined(CONFIG_USER_ONLY) 4307 GEN_PRIV; 4308 #else 4309 TCGv t0; 4310 4311 CHK_SV; 4312 t0 = tcg_temp_new(); 4313 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28); 4314 tcg_gen_andi_tl(t0, t0, 
0xF); 4315 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 4316 tcg_temp_free(t0); 4317 #endif /* defined(CONFIG_USER_ONLY) */ 4318 } 4319 4320 /* mtsr */ 4321 static void gen_mtsr(DisasContext *ctx) 4322 { 4323 #if defined(CONFIG_USER_ONLY) 4324 GEN_PRIV; 4325 #else 4326 TCGv t0; 4327 4328 CHK_SV; 4329 t0 = tcg_const_tl(SR(ctx->opcode)); 4330 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 4331 tcg_temp_free(t0); 4332 #endif /* defined(CONFIG_USER_ONLY) */ 4333 } 4334 4335 /* mtsrin */ 4336 static void gen_mtsrin(DisasContext *ctx) 4337 { 4338 #if defined(CONFIG_USER_ONLY) 4339 GEN_PRIV; 4340 #else 4341 TCGv t0; 4342 CHK_SV; 4343 4344 t0 = tcg_temp_new(); 4345 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28); 4346 tcg_gen_andi_tl(t0, t0, 0xF); 4347 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rD(ctx->opcode)]); 4348 tcg_temp_free(t0); 4349 #endif /* defined(CONFIG_USER_ONLY) */ 4350 } 4351 4352 #if defined(TARGET_PPC64) 4353 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */ 4354 4355 /* mfsr */ 4356 static void gen_mfsr_64b(DisasContext *ctx) 4357 { 4358 #if defined(CONFIG_USER_ONLY) 4359 GEN_PRIV; 4360 #else 4361 TCGv t0; 4362 4363 CHK_SV; 4364 t0 = tcg_const_tl(SR(ctx->opcode)); 4365 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 4366 tcg_temp_free(t0); 4367 #endif /* defined(CONFIG_USER_ONLY) */ 4368 } 4369 4370 /* mfsrin */ 4371 static void gen_mfsrin_64b(DisasContext *ctx) 4372 { 4373 #if defined(CONFIG_USER_ONLY) 4374 GEN_PRIV; 4375 #else 4376 TCGv t0; 4377 4378 CHK_SV; 4379 t0 = tcg_temp_new(); 4380 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28); 4381 tcg_gen_andi_tl(t0, t0, 0xF); 4382 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 4383 tcg_temp_free(t0); 4384 #endif /* defined(CONFIG_USER_ONLY) */ 4385 } 4386 4387 /* mtsr */ 4388 static void gen_mtsr_64b(DisasContext *ctx) 4389 { 4390 #if defined(CONFIG_USER_ONLY) 4391 GEN_PRIV; 4392 #else 4393 TCGv t0; 4394 4395 CHK_SV; 4396 t0 = tcg_const_tl(SR(ctx->opcode)); 4397 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 4398 tcg_temp_free(t0); 4399 #endif /* defined(CONFIG_USER_ONLY) */ 4400 } 4401 4402 /* mtsrin */ 4403 static void gen_mtsrin_64b(DisasContext *ctx) 4404 { 4405 #if defined(CONFIG_USER_ONLY) 4406 GEN_PRIV; 4407 #else 4408 TCGv t0; 4409 4410 CHK_SV; 4411 t0 = tcg_temp_new(); 4412 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28); 4413 tcg_gen_andi_tl(t0, t0, 0xF); 4414 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 4415 tcg_temp_free(t0); 4416 #endif /* defined(CONFIG_USER_ONLY) */ 4417 } 4418 4419 /* slbmte */ 4420 static void gen_slbmte(DisasContext *ctx) 4421 { 4422 #if defined(CONFIG_USER_ONLY) 4423 GEN_PRIV; 4424 #else 4425 CHK_SV; 4426 4427 gen_helper_store_slb(cpu_env, cpu_gpr[rB(ctx->opcode)], 4428 cpu_gpr[rS(ctx->opcode)]); 4429 #endif /* defined(CONFIG_USER_ONLY) */ 4430 } 4431 4432 static void gen_slbmfee(DisasContext *ctx) 4433 { 4434 #if defined(CONFIG_USER_ONLY) 4435 GEN_PRIV; 4436 #else 4437 CHK_SV; 4438 4439 gen_helper_load_slb_esid(cpu_gpr[rS(ctx->opcode)], cpu_env, 4440 cpu_gpr[rB(ctx->opcode)]); 4441 #endif /* defined(CONFIG_USER_ONLY) */ 4442 } 4443 4444 static void gen_slbmfev(DisasContext *ctx) 4445 { 4446 #if defined(CONFIG_USER_ONLY) 4447 GEN_PRIV; 4448 #else 4449 CHK_SV; 4450 4451 gen_helper_load_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env, 4452 cpu_gpr[rB(ctx->opcode)]); 4453 #endif /* defined(CONFIG_USER_ONLY) */ 4454 } 4455 4456 static void gen_slbfee_(DisasContext *ctx) 4457 { 4458 #if 
defined(CONFIG_USER_ONLY) 4459 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); 4460 #else 4461 TCGLabel *l1, *l2; 4462 4463 if (unlikely(ctx->pr)) { 4464 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); 4465 return; 4466 } 4467 gen_helper_find_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env, 4468 cpu_gpr[rB(ctx->opcode)]); 4469 l1 = gen_new_label(); 4470 l2 = gen_new_label(); 4471 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 4472 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rS(ctx->opcode)], -1, l1); 4473 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ); 4474 tcg_gen_br(l2); 4475 gen_set_label(l1); 4476 tcg_gen_movi_tl(cpu_gpr[rS(ctx->opcode)], 0); 4477 gen_set_label(l2); 4478 #endif 4479 } 4480 #endif /* defined(TARGET_PPC64) */ 4481 4482 /*** Lookaside buffer management ***/ 4483 /* Optional & supervisor only: */ 4484 4485 /* tlbia */ 4486 static void gen_tlbia(DisasContext *ctx) 4487 { 4488 #if defined(CONFIG_USER_ONLY) 4489 GEN_PRIV; 4490 #else 4491 CHK_HV; 4492 4493 gen_helper_tlbia(cpu_env); 4494 #endif /* defined(CONFIG_USER_ONLY) */ 4495 } 4496 4497 /* tlbiel */ 4498 static void gen_tlbiel(DisasContext *ctx) 4499 { 4500 #if defined(CONFIG_USER_ONLY) 4501 GEN_PRIV; 4502 #else 4503 CHK_SV; 4504 4505 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]); 4506 #endif /* defined(CONFIG_USER_ONLY) */ 4507 } 4508 4509 /* tlbie */ 4510 static void gen_tlbie(DisasContext *ctx) 4511 { 4512 #if defined(CONFIG_USER_ONLY) 4513 GEN_PRIV; 4514 #else 4515 TCGv_i32 t1; 4516 CHK_HV; 4517 4518 if (NARROW_MODE(ctx)) { 4519 TCGv t0 = tcg_temp_new(); 4520 tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]); 4521 gen_helper_tlbie(cpu_env, t0); 4522 tcg_temp_free(t0); 4523 } else { 4524 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]); 4525 } 4526 t1 = tcg_temp_new_i32(); 4527 tcg_gen_ld_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush)); 4528 tcg_gen_ori_i32(t1, t1, TLB_NEED_GLOBAL_FLUSH); 4529 tcg_gen_st_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush)); 4530 tcg_temp_free_i32(t1); 4531 #endif /* defined(CONFIG_USER_ONLY) */ 4532 } 4533 4534 /* tlbsync */ 4535 static void gen_tlbsync(DisasContext *ctx) 4536 { 4537 #if defined(CONFIG_USER_ONLY) 4538 GEN_PRIV; 4539 #else 4540 CHK_HV; 4541 4542 /* BookS does both ptesync and tlbsync make tlbsync a nop for server */ 4543 if (ctx->insns_flags & PPC_BOOKE) { 4544 gen_check_tlb_flush(ctx, true); 4545 } 4546 #endif /* defined(CONFIG_USER_ONLY) */ 4547 } 4548 4549 #if defined(TARGET_PPC64) 4550 /* slbia */ 4551 static void gen_slbia(DisasContext *ctx) 4552 { 4553 #if defined(CONFIG_USER_ONLY) 4554 GEN_PRIV; 4555 #else 4556 CHK_SV; 4557 4558 gen_helper_slbia(cpu_env); 4559 #endif /* defined(CONFIG_USER_ONLY) */ 4560 } 4561 4562 /* slbie */ 4563 static void gen_slbie(DisasContext *ctx) 4564 { 4565 #if defined(CONFIG_USER_ONLY) 4566 GEN_PRIV; 4567 #else 4568 CHK_SV; 4569 4570 gen_helper_slbie(cpu_env, cpu_gpr[rB(ctx->opcode)]); 4571 #endif /* defined(CONFIG_USER_ONLY) */ 4572 } 4573 4574 /* slbieg */ 4575 static void gen_slbieg(DisasContext *ctx) 4576 { 4577 #if defined(CONFIG_USER_ONLY) 4578 GEN_PRIV; 4579 #else 4580 CHK_SV; 4581 4582 gen_helper_slbieg(cpu_env, cpu_gpr[rB(ctx->opcode)]); 4583 #endif /* defined(CONFIG_USER_ONLY) */ 4584 } 4585 4586 /* slbsync */ 4587 static void gen_slbsync(DisasContext *ctx) 4588 { 4589 #if defined(CONFIG_USER_ONLY) 4590 GEN_PRIV; 4591 #else 4592 CHK_SV; 4593 gen_check_tlb_flush(ctx, true); 4594 #endif /* defined(CONFIG_USER_ONLY) */ 4595 } 4596 4597 #endif /* defined(TARGET_PPC64) */ 4598 4599 /*** External control ***/ 4600 /* 
Optional: */ 4601 4602 /* eciwx */ 4603 static void gen_eciwx(DisasContext *ctx) 4604 { 4605 TCGv t0; 4606 /* Should check EAR[E] ! */ 4607 gen_set_access_type(ctx, ACCESS_EXT); 4608 t0 = tcg_temp_new(); 4609 gen_addr_reg_index(ctx, t0); 4610 gen_check_align(ctx, t0, 0x03); 4611 gen_qemu_ld32u(ctx, cpu_gpr[rD(ctx->opcode)], t0); 4612 tcg_temp_free(t0); 4613 } 4614 4615 /* ecowx */ 4616 static void gen_ecowx(DisasContext *ctx) 4617 { 4618 TCGv t0; 4619 /* Should check EAR[E] ! */ 4620 gen_set_access_type(ctx, ACCESS_EXT); 4621 t0 = tcg_temp_new(); 4622 gen_addr_reg_index(ctx, t0); 4623 gen_check_align(ctx, t0, 0x03); 4624 gen_qemu_st32(ctx, cpu_gpr[rD(ctx->opcode)], t0); 4625 tcg_temp_free(t0); 4626 } 4627 4628 /* PowerPC 601 specific instructions */ 4629 4630 /* abs - abs. */ 4631 static void gen_abs(DisasContext *ctx) 4632 { 4633 TCGLabel *l1 = gen_new_label(); 4634 TCGLabel *l2 = gen_new_label(); 4635 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l1); 4636 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4637 tcg_gen_br(l2); 4638 gen_set_label(l1); 4639 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4640 gen_set_label(l2); 4641 if (unlikely(Rc(ctx->opcode) != 0)) 4642 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4643 } 4644 4645 /* abso - abso. */ 4646 static void gen_abso(DisasContext *ctx) 4647 { 4648 TCGLabel *l1 = gen_new_label(); 4649 TCGLabel *l2 = gen_new_label(); 4650 TCGLabel *l3 = gen_new_label(); 4651 /* Start with XER OV disabled, the most likely case */ 4652 tcg_gen_movi_tl(cpu_ov, 0); 4653 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l2); 4654 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[rA(ctx->opcode)], 0x80000000, l1); 4655 tcg_gen_movi_tl(cpu_ov, 1); 4656 tcg_gen_movi_tl(cpu_so, 1); 4657 tcg_gen_br(l2); 4658 gen_set_label(l1); 4659 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4660 tcg_gen_br(l3); 4661 gen_set_label(l2); 4662 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4663 gen_set_label(l3); 4664 if (unlikely(Rc(ctx->opcode) != 0)) 4665 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4666 } 4667 4668 /* clcs */ 4669 static void gen_clcs(DisasContext *ctx) 4670 { 4671 TCGv_i32 t0 = tcg_const_i32(rA(ctx->opcode)); 4672 gen_helper_clcs(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 4673 tcg_temp_free_i32(t0); 4674 /* Rc=1 sets CR0 to an undefined state */ 4675 } 4676 4677 /* div - div. */ 4678 static void gen_div(DisasContext *ctx) 4679 { 4680 gen_helper_div(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 4681 cpu_gpr[rB(ctx->opcode)]); 4682 if (unlikely(Rc(ctx->opcode) != 0)) 4683 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4684 } 4685 4686 /* divo - divo. */ 4687 static void gen_divo(DisasContext *ctx) 4688 { 4689 gen_helper_divo(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 4690 cpu_gpr[rB(ctx->opcode)]); 4691 if (unlikely(Rc(ctx->opcode) != 0)) 4692 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4693 } 4694 4695 /* divs - divs. */ 4696 static void gen_divs(DisasContext *ctx) 4697 { 4698 gen_helper_divs(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 4699 cpu_gpr[rB(ctx->opcode)]); 4700 if (unlikely(Rc(ctx->opcode) != 0)) 4701 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4702 } 4703 4704 /* divso - divso. 
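(601: overflow-enabled divide; like div/divo/divs above, the work is done in a helper, and Rc=1 only updates CR0 here)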
*/ 4705 static void gen_divso(DisasContext *ctx) 4706 { 4707 gen_helper_divso(cpu_gpr[rD(ctx->opcode)], cpu_env, 4708 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 4709 if (unlikely(Rc(ctx->opcode) != 0)) 4710 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4711 } 4712 4713 /* doz - doz. */ 4714 static void gen_doz(DisasContext *ctx) 4715 { 4716 TCGLabel *l1 = gen_new_label(); 4717 TCGLabel *l2 = gen_new_label(); 4718 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1); 4719 tcg_gen_sub_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4720 tcg_gen_br(l2); 4721 gen_set_label(l1); 4722 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 4723 gen_set_label(l2); 4724 if (unlikely(Rc(ctx->opcode) != 0)) 4725 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4726 } 4727 4728 /* dozo - dozo. */ 4729 static void gen_dozo(DisasContext *ctx) 4730 { 4731 TCGLabel *l1 = gen_new_label(); 4732 TCGLabel *l2 = gen_new_label(); 4733 TCGv t0 = tcg_temp_new(); 4734 TCGv t1 = tcg_temp_new(); 4735 TCGv t2 = tcg_temp_new(); 4736 /* Start with XER OV disabled, the most likely case */ 4737 tcg_gen_movi_tl(cpu_ov, 0); 4738 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1); 4739 tcg_gen_sub_tl(t0, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4740 tcg_gen_xor_tl(t1, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4741 tcg_gen_xor_tl(t2, cpu_gpr[rA(ctx->opcode)], t0); 4742 tcg_gen_andc_tl(t1, t1, t2); 4743 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0); 4744 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2); 4745 tcg_gen_movi_tl(cpu_ov, 1); 4746 tcg_gen_movi_tl(cpu_so, 1); 4747 tcg_gen_br(l2); 4748 gen_set_label(l1); 4749 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 4750 gen_set_label(l2); 4751 tcg_temp_free(t0); 4752 tcg_temp_free(t1); 4753 tcg_temp_free(t2); 4754 if (unlikely(Rc(ctx->opcode) != 0)) 4755 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4756 } 4757 4758 /* dozi */ 4759 static void gen_dozi(DisasContext *ctx) 4760 { 4761 target_long simm = SIMM(ctx->opcode); 4762 TCGLabel *l1 = gen_new_label(); 4763 TCGLabel *l2 = gen_new_label(); 4764 tcg_gen_brcondi_tl(TCG_COND_LT, cpu_gpr[rA(ctx->opcode)], simm, l1); 4765 tcg_gen_subfi_tl(cpu_gpr[rD(ctx->opcode)], simm, cpu_gpr[rA(ctx->opcode)]); 4766 tcg_gen_br(l2); 4767 gen_set_label(l1); 4768 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 4769 gen_set_label(l2); 4770 if (unlikely(Rc(ctx->opcode) != 0)) 4771 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4772 } 4773 4774 /* lscbx - lscbx. */ 4775 static void gen_lscbx(DisasContext *ctx) 4776 { 4777 TCGv t0 = tcg_temp_new(); 4778 TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode)); 4779 TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode)); 4780 TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode)); 4781 4782 gen_addr_reg_index(ctx, t0); 4783 gen_helper_lscbx(t0, cpu_env, t0, t1, t2, t3); 4784 tcg_temp_free_i32(t1); 4785 tcg_temp_free_i32(t2); 4786 tcg_temp_free_i32(t3); 4787 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F); 4788 tcg_gen_or_tl(cpu_xer, cpu_xer, t0); 4789 if (unlikely(Rc(ctx->opcode) != 0)) 4790 gen_set_Rc0(ctx, t0); 4791 tcg_temp_free(t0); 4792 } 4793 4794 /* maskg - maskg. 
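(601 mask generate: builds a run of ones delimited by the low 5 bits of rS and rB; the trailing conditional negation covers the two possible orderings of the bounds)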
*/ 4795 static void gen_maskg(DisasContext *ctx) 4796 { 4797 TCGLabel *l1 = gen_new_label(); 4798 TCGv t0 = tcg_temp_new(); 4799 TCGv t1 = tcg_temp_new(); 4800 TCGv t2 = tcg_temp_new(); 4801 TCGv t3 = tcg_temp_new(); 4802 tcg_gen_movi_tl(t3, 0xFFFFFFFF); 4803 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 4804 tcg_gen_andi_tl(t1, cpu_gpr[rS(ctx->opcode)], 0x1F); 4805 tcg_gen_addi_tl(t2, t0, 1); 4806 tcg_gen_shr_tl(t2, t3, t2); 4807 tcg_gen_shr_tl(t3, t3, t1); 4808 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], t2, t3); 4809 tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1); 4810 tcg_gen_neg_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4811 gen_set_label(l1); 4812 tcg_temp_free(t0); 4813 tcg_temp_free(t1); 4814 tcg_temp_free(t2); 4815 tcg_temp_free(t3); 4816 if (unlikely(Rc(ctx->opcode) != 0)) 4817 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 4818 } 4819 4820 /* maskir - maskir. */ 4821 static void gen_maskir(DisasContext *ctx) 4822 { 4823 TCGv t0 = tcg_temp_new(); 4824 TCGv t1 = tcg_temp_new(); 4825 tcg_gen_and_tl(t0, cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 4826 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 4827 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 4828 tcg_temp_free(t0); 4829 tcg_temp_free(t1); 4830 if (unlikely(Rc(ctx->opcode) != 0)) 4831 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 4832 } 4833 4834 /* mul - mul. */ 4835 static void gen_mul(DisasContext *ctx) 4836 { 4837 TCGv_i64 t0 = tcg_temp_new_i64(); 4838 TCGv_i64 t1 = tcg_temp_new_i64(); 4839 TCGv t2 = tcg_temp_new(); 4840 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]); 4841 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]); 4842 tcg_gen_mul_i64(t0, t0, t1); 4843 tcg_gen_trunc_i64_tl(t2, t0); 4844 gen_store_spr(SPR_MQ, t2); 4845 tcg_gen_shri_i64(t1, t0, 32); 4846 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1); 4847 tcg_temp_free_i64(t0); 4848 tcg_temp_free_i64(t1); 4849 tcg_temp_free(t2); 4850 if (unlikely(Rc(ctx->opcode) != 0)) 4851 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4852 } 4853 4854 /* mulo - mulo. */ 4855 static void gen_mulo(DisasContext *ctx) 4856 { 4857 TCGLabel *l1 = gen_new_label(); 4858 TCGv_i64 t0 = tcg_temp_new_i64(); 4859 TCGv_i64 t1 = tcg_temp_new_i64(); 4860 TCGv t2 = tcg_temp_new(); 4861 /* Start with XER OV disabled, the most likely case */ 4862 tcg_gen_movi_tl(cpu_ov, 0); 4863 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]); 4864 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]); 4865 tcg_gen_mul_i64(t0, t0, t1); 4866 tcg_gen_trunc_i64_tl(t2, t0); 4867 gen_store_spr(SPR_MQ, t2); 4868 tcg_gen_shri_i64(t1, t0, 32); 4869 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1); 4870 tcg_gen_ext32s_i64(t1, t0); 4871 tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1); 4872 tcg_gen_movi_tl(cpu_ov, 1); 4873 tcg_gen_movi_tl(cpu_so, 1); 4874 gen_set_label(l1); 4875 tcg_temp_free_i64(t0); 4876 tcg_temp_free_i64(t1); 4877 tcg_temp_free(t2); 4878 if (unlikely(Rc(ctx->opcode) != 0)) 4879 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4880 } 4881 4882 /* nabs - nabs. 
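(601: rD = -|rA|; the plain form never touches XER, and nabso below explicitly clears OV since nabs cannot overflow)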
*/ 4883 static void gen_nabs(DisasContext *ctx) 4884 { 4885 TCGLabel *l1 = gen_new_label(); 4886 TCGLabel *l2 = gen_new_label(); 4887 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1); 4888 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4889 tcg_gen_br(l2); 4890 gen_set_label(l1); 4891 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4892 gen_set_label(l2); 4893 if (unlikely(Rc(ctx->opcode) != 0)) 4894 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4895 } 4896 4897 /* nabso - nabso. */ 4898 static void gen_nabso(DisasContext *ctx) 4899 { 4900 TCGLabel *l1 = gen_new_label(); 4901 TCGLabel *l2 = gen_new_label(); 4902 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1); 4903 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4904 tcg_gen_br(l2); 4905 gen_set_label(l1); 4906 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4907 gen_set_label(l2); 4908 /* nabs never overflows */ 4909 tcg_gen_movi_tl(cpu_ov, 0); 4910 if (unlikely(Rc(ctx->opcode) != 0)) 4911 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4912 } 4913 4914 /* rlmi - rlmi. */ 4915 static void gen_rlmi(DisasContext *ctx) 4916 { 4917 uint32_t mb = MB(ctx->opcode); 4918 uint32_t me = ME(ctx->opcode); 4919 TCGv t0 = tcg_temp_new(); 4920 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 4921 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 4922 tcg_gen_andi_tl(t0, t0, MASK(mb, me)); 4923 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~MASK(mb, me)); 4924 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], t0); 4925 tcg_temp_free(t0); 4926 if (unlikely(Rc(ctx->opcode) != 0)) 4927 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 4928 } 4929 4930 /* rrib - rrib. */ 4931 static void gen_rrib(DisasContext *ctx) 4932 { 4933 TCGv t0 = tcg_temp_new(); 4934 TCGv t1 = tcg_temp_new(); 4935 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 4936 tcg_gen_movi_tl(t1, 0x80000000); 4937 tcg_gen_shr_tl(t1, t1, t0); 4938 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 4939 tcg_gen_and_tl(t0, t0, t1); 4940 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], t1); 4941 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 4942 tcg_temp_free(t0); 4943 tcg_temp_free(t1); 4944 if (unlikely(Rc(ctx->opcode) != 0)) 4945 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 4946 } 4947 4948 /* sle - sle. */ 4949 static void gen_sle(DisasContext *ctx) 4950 { 4951 TCGv t0 = tcg_temp_new(); 4952 TCGv t1 = tcg_temp_new(); 4953 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 4954 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 4955 tcg_gen_subfi_tl(t1, 32, t1); 4956 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 4957 tcg_gen_or_tl(t1, t0, t1); 4958 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 4959 gen_store_spr(SPR_MQ, t1); 4960 tcg_temp_free(t0); 4961 tcg_temp_free(t1); 4962 if (unlikely(Rc(ctx->opcode) != 0)) 4963 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 4964 } 4965 4966 /* sleq - sleq. 
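(601: rotate rS left by the low 5 bits of rB, merge with the old MQ under the generated mask, and leave the rotated value in MQ)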
*/ 4967 static void gen_sleq(DisasContext *ctx) 4968 { 4969 TCGv t0 = tcg_temp_new(); 4970 TCGv t1 = tcg_temp_new(); 4971 TCGv t2 = tcg_temp_new(); 4972 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 4973 tcg_gen_movi_tl(t2, 0xFFFFFFFF); 4974 tcg_gen_shl_tl(t2, t2, t0); 4975 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 4976 gen_load_spr(t1, SPR_MQ); 4977 gen_store_spr(SPR_MQ, t0); 4978 tcg_gen_and_tl(t0, t0, t2); 4979 tcg_gen_andc_tl(t1, t1, t2); 4980 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 4981 tcg_temp_free(t0); 4982 tcg_temp_free(t1); 4983 tcg_temp_free(t2); 4984 if (unlikely(Rc(ctx->opcode) != 0)) 4985 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 4986 } 4987 4988 /* sliq - sliq. */ 4989 static void gen_sliq(DisasContext *ctx) 4990 { 4991 int sh = SH(ctx->opcode); 4992 TCGv t0 = tcg_temp_new(); 4993 TCGv t1 = tcg_temp_new(); 4994 tcg_gen_shli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 4995 tcg_gen_shri_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 4996 tcg_gen_or_tl(t1, t0, t1); 4997 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 4998 gen_store_spr(SPR_MQ, t1); 4999 tcg_temp_free(t0); 5000 tcg_temp_free(t1); 5001 if (unlikely(Rc(ctx->opcode) != 0)) 5002 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5003 } 5004 5005 /* slliq - slliq. */ 5006 static void gen_slliq(DisasContext *ctx) 5007 { 5008 int sh = SH(ctx->opcode); 5009 TCGv t0 = tcg_temp_new(); 5010 TCGv t1 = tcg_temp_new(); 5011 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5012 gen_load_spr(t1, SPR_MQ); 5013 gen_store_spr(SPR_MQ, t0); 5014 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU << sh)); 5015 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU << sh)); 5016 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5017 tcg_temp_free(t0); 5018 tcg_temp_free(t1); 5019 if (unlikely(Rc(ctx->opcode) != 0)) 5020 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5021 } 5022 5023 /* sllq - sllq. */ 5024 static void gen_sllq(DisasContext *ctx) 5025 { 5026 TCGLabel *l1 = gen_new_label(); 5027 TCGLabel *l2 = gen_new_label(); 5028 TCGv t0 = tcg_temp_local_new(); 5029 TCGv t1 = tcg_temp_local_new(); 5030 TCGv t2 = tcg_temp_local_new(); 5031 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 5032 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 5033 tcg_gen_shl_tl(t1, t1, t2); 5034 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 5035 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 5036 gen_load_spr(t0, SPR_MQ); 5037 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5038 tcg_gen_br(l2); 5039 gen_set_label(l1); 5040 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 5041 gen_load_spr(t2, SPR_MQ); 5042 tcg_gen_andc_tl(t1, t2, t1); 5043 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5044 gen_set_label(l2); 5045 tcg_temp_free(t0); 5046 tcg_temp_free(t1); 5047 tcg_temp_free(t2); 5048 if (unlikely(Rc(ctx->opcode) != 0)) 5049 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5050 } 5051 5052 /* slq - slq. 
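(601: like sle above, but rA is cleared when bit 0x20 of the shift amount is set; the rotated source still goes to MQ)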
*/ 5053 static void gen_slq(DisasContext *ctx) 5054 { 5055 TCGLabel *l1 = gen_new_label(); 5056 TCGv t0 = tcg_temp_new(); 5057 TCGv t1 = tcg_temp_new(); 5058 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5059 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5060 tcg_gen_subfi_tl(t1, 32, t1); 5061 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 5062 tcg_gen_or_tl(t1, t0, t1); 5063 gen_store_spr(SPR_MQ, t1); 5064 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20); 5065 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5066 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1); 5067 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 5068 gen_set_label(l1); 5069 tcg_temp_free(t0); 5070 tcg_temp_free(t1); 5071 if (unlikely(Rc(ctx->opcode) != 0)) 5072 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5073 } 5074 5075 /* sraiq - sraiq. */ 5076 static void gen_sraiq(DisasContext *ctx) 5077 { 5078 int sh = SH(ctx->opcode); 5079 TCGLabel *l1 = gen_new_label(); 5080 TCGv t0 = tcg_temp_new(); 5081 TCGv t1 = tcg_temp_new(); 5082 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5083 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 5084 tcg_gen_or_tl(t0, t0, t1); 5085 gen_store_spr(SPR_MQ, t0); 5086 tcg_gen_movi_tl(cpu_ca, 0); 5087 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1); 5088 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1); 5089 tcg_gen_movi_tl(cpu_ca, 1); 5090 gen_set_label(l1); 5091 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh); 5092 tcg_temp_free(t0); 5093 tcg_temp_free(t1); 5094 if (unlikely(Rc(ctx->opcode) != 0)) 5095 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5096 } 5097 5098 /* sraq - sraq. */ 5099 static void gen_sraq(DisasContext *ctx) 5100 { 5101 TCGLabel *l1 = gen_new_label(); 5102 TCGLabel *l2 = gen_new_label(); 5103 TCGv t0 = tcg_temp_new(); 5104 TCGv t1 = tcg_temp_local_new(); 5105 TCGv t2 = tcg_temp_local_new(); 5106 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 5107 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 5108 tcg_gen_sar_tl(t1, cpu_gpr[rS(ctx->opcode)], t2); 5109 tcg_gen_subfi_tl(t2, 32, t2); 5110 tcg_gen_shl_tl(t2, cpu_gpr[rS(ctx->opcode)], t2); 5111 tcg_gen_or_tl(t0, t0, t2); 5112 gen_store_spr(SPR_MQ, t0); 5113 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 5114 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l1); 5115 tcg_gen_mov_tl(t2, cpu_gpr[rS(ctx->opcode)]); 5116 tcg_gen_sari_tl(t1, cpu_gpr[rS(ctx->opcode)], 31); 5117 gen_set_label(l1); 5118 tcg_temp_free(t0); 5119 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t1); 5120 tcg_gen_movi_tl(cpu_ca, 0); 5121 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2); 5122 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l2); 5123 tcg_gen_movi_tl(cpu_ca, 1); 5124 gen_set_label(l2); 5125 tcg_temp_free(t1); 5126 tcg_temp_free(t2); 5127 if (unlikely(Rc(ctx->opcode) != 0)) 5128 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5129 } 5130 5131 /* sre - sre. */ 5132 static void gen_sre(DisasContext *ctx) 5133 { 5134 TCGv t0 = tcg_temp_new(); 5135 TCGv t1 = tcg_temp_new(); 5136 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5137 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5138 tcg_gen_subfi_tl(t1, 32, t1); 5139 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 5140 tcg_gen_or_tl(t1, t0, t1); 5141 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5142 gen_store_spr(SPR_MQ, t1); 5143 tcg_temp_free(t0); 5144 tcg_temp_free(t1); 5145 if (unlikely(Rc(ctx->opcode) != 0)) 5146 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5147 } 5148 5149 /* srea - srea. 
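(601: arithmetic shift right by the low 5 bits of rB, with the rotated source value saved to MQ)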
*/ 5150 static void gen_srea(DisasContext *ctx) 5151 { 5152 TCGv t0 = tcg_temp_new(); 5153 TCGv t1 = tcg_temp_new(); 5154 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5155 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5156 gen_store_spr(SPR_MQ, t0); 5157 tcg_gen_sar_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t1); 5158 tcg_temp_free(t0); 5159 tcg_temp_free(t1); 5160 if (unlikely(Rc(ctx->opcode) != 0)) 5161 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5162 } 5163 5164 /* sreq */ 5165 static void gen_sreq(DisasContext *ctx) 5166 { 5167 TCGv t0 = tcg_temp_new(); 5168 TCGv t1 = tcg_temp_new(); 5169 TCGv t2 = tcg_temp_new(); 5170 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5171 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 5172 tcg_gen_shr_tl(t1, t1, t0); 5173 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 5174 gen_load_spr(t2, SPR_MQ); 5175 gen_store_spr(SPR_MQ, t0); 5176 tcg_gen_and_tl(t0, t0, t1); 5177 tcg_gen_andc_tl(t2, t2, t1); 5178 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t2); 5179 tcg_temp_free(t0); 5180 tcg_temp_free(t1); 5181 tcg_temp_free(t2); 5182 if (unlikely(Rc(ctx->opcode) != 0)) 5183 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5184 } 5185 5186 /* sriq */ 5187 static void gen_sriq(DisasContext *ctx) 5188 { 5189 int sh = SH(ctx->opcode); 5190 TCGv t0 = tcg_temp_new(); 5191 TCGv t1 = tcg_temp_new(); 5192 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5193 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 5194 tcg_gen_or_tl(t1, t0, t1); 5195 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5196 gen_store_spr(SPR_MQ, t1); 5197 tcg_temp_free(t0); 5198 tcg_temp_free(t1); 5199 if (unlikely(Rc(ctx->opcode) != 0)) 5200 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5201 } 5202 5203 /* srliq */ 5204 static void gen_srliq(DisasContext *ctx) 5205 { 5206 int sh = SH(ctx->opcode); 5207 TCGv t0 = tcg_temp_new(); 5208 TCGv t1 = tcg_temp_new(); 5209 tcg_gen_rotri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5210 gen_load_spr(t1, SPR_MQ); 5211 gen_store_spr(SPR_MQ, t0); 5212 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU >> sh)); 5213 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU >> sh)); 5214 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5215 tcg_temp_free(t0); 5216 tcg_temp_free(t1); 5217 if (unlikely(Rc(ctx->opcode) != 0)) 5218 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5219 } 5220 5221 /* srlq */ 5222 static void gen_srlq(DisasContext *ctx) 5223 { 5224 TCGLabel *l1 = gen_new_label(); 5225 TCGLabel *l2 = gen_new_label(); 5226 TCGv t0 = tcg_temp_local_new(); 5227 TCGv t1 = tcg_temp_local_new(); 5228 TCGv t2 = tcg_temp_local_new(); 5229 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 5230 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 5231 tcg_gen_shr_tl(t2, t1, t2); 5232 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 5233 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 5234 gen_load_spr(t0, SPR_MQ); 5235 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t2); 5236 tcg_gen_br(l2); 5237 gen_set_label(l1); 5238 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 5239 tcg_gen_and_tl(t0, t0, t2); 5240 gen_load_spr(t1, SPR_MQ); 5241 tcg_gen_andc_tl(t1, t1, t2); 5242 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5243 gen_set_label(l2); 5244 tcg_temp_free(t0); 5245 tcg_temp_free(t1); 5246 tcg_temp_free(t2); 5247 if (unlikely(Rc(ctx->opcode) != 0)) 5248 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5249 } 5250 5251 /* srq */ 5252 static void gen_srq(DisasContext *ctx) 5253 { 5254 TCGLabel *l1 = gen_new_label(); 5255 TCGv t0 = tcg_temp_new(); 5256 TCGv t1 = tcg_temp_new(); 5257 tcg_gen_andi_tl(t1, 
cpu_gpr[rB(ctx->opcode)], 0x1F); 5258 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5259 tcg_gen_subfi_tl(t1, 32, t1); 5260 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 5261 tcg_gen_or_tl(t1, t0, t1); 5262 gen_store_spr(SPR_MQ, t1); 5263 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20); 5264 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5265 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 5266 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 5267 gen_set_label(l1); 5268 tcg_temp_free(t0); 5269 tcg_temp_free(t1); 5270 if (unlikely(Rc(ctx->opcode) != 0)) 5271 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5272 } 5273 5274 /* PowerPC 602 specific instructions */ 5275 5276 /* dsa */ 5277 static void gen_dsa(DisasContext *ctx) 5278 { 5279 /* XXX: TODO */ 5280 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5281 } 5282 5283 /* esa */ 5284 static void gen_esa(DisasContext *ctx) 5285 { 5286 /* XXX: TODO */ 5287 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5288 } 5289 5290 /* mfrom */ 5291 static void gen_mfrom(DisasContext *ctx) 5292 { 5293 #if defined(CONFIG_USER_ONLY) 5294 GEN_PRIV; 5295 #else 5296 CHK_SV; 5297 gen_helper_602_mfrom(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5298 #endif /* defined(CONFIG_USER_ONLY) */ 5299 } 5300 5301 /* 602 - 603 - G2 TLB management */ 5302 5303 /* tlbld */ 5304 static void gen_tlbld_6xx(DisasContext *ctx) 5305 { 5306 #if defined(CONFIG_USER_ONLY) 5307 GEN_PRIV; 5308 #else 5309 CHK_SV; 5310 gen_helper_6xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5311 #endif /* defined(CONFIG_USER_ONLY) */ 5312 } 5313 5314 /* tlbli */ 5315 static void gen_tlbli_6xx(DisasContext *ctx) 5316 { 5317 #if defined(CONFIG_USER_ONLY) 5318 GEN_PRIV; 5319 #else 5320 CHK_SV; 5321 gen_helper_6xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5322 #endif /* defined(CONFIG_USER_ONLY) */ 5323 } 5324 5325 /* 74xx TLB management */ 5326 5327 /* tlbld */ 5328 static void gen_tlbld_74xx(DisasContext *ctx) 5329 { 5330 #if defined(CONFIG_USER_ONLY) 5331 GEN_PRIV; 5332 #else 5333 CHK_SV; 5334 gen_helper_74xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5335 #endif /* defined(CONFIG_USER_ONLY) */ 5336 } 5337 5338 /* tlbli */ 5339 static void gen_tlbli_74xx(DisasContext *ctx) 5340 { 5341 #if defined(CONFIG_USER_ONLY) 5342 GEN_PRIV; 5343 #else 5344 CHK_SV; 5345 gen_helper_74xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5346 #endif /* defined(CONFIG_USER_ONLY) */ 5347 } 5348 5349 /* POWER instructions not in PowerPC 601 */ 5350 5351 /* clf */ 5352 static void gen_clf(DisasContext *ctx) 5353 { 5354 /* Cache line flush: implemented as no-op */ 5355 } 5356 5357 /* cli */ 5358 static void gen_cli(DisasContext *ctx) 5359 { 5360 #if defined(CONFIG_USER_ONLY) 5361 GEN_PRIV; 5362 #else 5363 /* Cache line invalidate: privileged and treated as no-op */ 5364 CHK_SV; 5365 #endif /* defined(CONFIG_USER_ONLY) */ 5366 } 5367 5368 /* dclst */ 5369 static void gen_dclst(DisasContext *ctx) 5370 { 5371 /* Data cache line store: treated as no-op */ 5372 } 5373 5374 static void gen_mfsri(DisasContext *ctx) 5375 { 5376 #if defined(CONFIG_USER_ONLY) 5377 GEN_PRIV; 5378 #else 5379 int ra = rA(ctx->opcode); 5380 int rd = rD(ctx->opcode); 5381 TCGv t0; 5382 5383 CHK_SV; 5384 t0 = tcg_temp_new(); 5385 gen_addr_reg_index(ctx, t0); 5386 tcg_gen_shri_tl(t0, t0, 28); 5387 tcg_gen_andi_tl(t0, t0, 0xF); 5388 gen_helper_load_sr(cpu_gpr[rd], cpu_env, t0); 5389 tcg_temp_free(t0); 5390 if (ra != 0 && ra != rd) 5391 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rd]); 5392 #endif /* defined(CONFIG_USER_ONLY) */ 5393 } 5394 5395 static void 
gen_rac(DisasContext *ctx) 5396 { 5397 #if defined(CONFIG_USER_ONLY) 5398 GEN_PRIV; 5399 #else 5400 TCGv t0; 5401 5402 CHK_SV; 5403 t0 = tcg_temp_new(); 5404 gen_addr_reg_index(ctx, t0); 5405 gen_helper_rac(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5406 tcg_temp_free(t0); 5407 #endif /* defined(CONFIG_USER_ONLY) */ 5408 } 5409 5410 static void gen_rfsvc(DisasContext *ctx) 5411 { 5412 #if defined(CONFIG_USER_ONLY) 5413 GEN_PRIV; 5414 #else 5415 CHK_SV; 5416 5417 gen_helper_rfsvc(cpu_env); 5418 gen_sync_exception(ctx); 5419 #endif /* defined(CONFIG_USER_ONLY) */ 5420 } 5421 5422 /* svc is not implemented for now */ 5423 5424 /* BookE specific instructions */ 5425 5426 /* XXX: not implemented on 440 ? */ 5427 static void gen_mfapidi(DisasContext *ctx) 5428 { 5429 /* XXX: TODO */ 5430 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5431 } 5432 5433 /* XXX: not implemented on 440 ? */ 5434 static void gen_tlbiva(DisasContext *ctx) 5435 { 5436 #if defined(CONFIG_USER_ONLY) 5437 GEN_PRIV; 5438 #else 5439 TCGv t0; 5440 5441 CHK_SV; 5442 t0 = tcg_temp_new(); 5443 gen_addr_reg_index(ctx, t0); 5444 gen_helper_tlbiva(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5445 tcg_temp_free(t0); 5446 #endif /* defined(CONFIG_USER_ONLY) */ 5447 } 5448 5449 /* All 405 MAC instructions are translated here */ 5450 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3, 5451 int ra, int rb, int rt, int Rc) 5452 { 5453 TCGv t0, t1; 5454 5455 t0 = tcg_temp_local_new(); 5456 t1 = tcg_temp_local_new(); 5457 5458 switch (opc3 & 0x0D) { 5459 case 0x05: 5460 /* macchw - macchw. - macchwo - macchwo. */ 5461 /* macchws - macchws. - macchwso - macchwso. */ 5462 /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */ 5463 /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */ 5464 /* mulchw - mulchw. */ 5465 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]); 5466 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16); 5467 tcg_gen_ext16s_tl(t1, t1); 5468 break; 5469 case 0x04: 5470 /* macchwu - macchwu. - macchwuo - macchwuo. */ 5471 /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */ 5472 /* mulchwu - mulchwu. */ 5473 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]); 5474 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16); 5475 tcg_gen_ext16u_tl(t1, t1); 5476 break; 5477 case 0x01: 5478 /* machhw - machhw. - machhwo - machhwo. */ 5479 /* machhws - machhws. - machhwso - machhwso. */ 5480 /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */ 5481 /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */ 5482 /* mulhhw - mulhhw. */ 5483 tcg_gen_sari_tl(t0, cpu_gpr[ra], 16); 5484 tcg_gen_ext16s_tl(t0, t0); 5485 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16); 5486 tcg_gen_ext16s_tl(t1, t1); 5487 break; 5488 case 0x00: 5489 /* machhwu - machhwu. - machhwuo - machhwuo. */ 5490 /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */ 5491 /* mulhhwu - mulhhwu. */ 5492 tcg_gen_shri_tl(t0, cpu_gpr[ra], 16); 5493 tcg_gen_ext16u_tl(t0, t0); 5494 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16); 5495 tcg_gen_ext16u_tl(t1, t1); 5496 break; 5497 case 0x0D: 5498 /* maclhw - maclhw. - maclhwo - maclhwo. */ 5499 /* maclhws - maclhws. - maclhwso - maclhwso. */ 5500 /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */ 5501 /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */ 5502 /* mullhw - mullhw. */ 5503 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]); 5504 tcg_gen_ext16s_tl(t1, cpu_gpr[rb]); 5505 break; 5506 case 0x0C: 5507 /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */ 5508 /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */ 5509 /* mullhwu - mullhwu. 
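(these all take both operands from the unsigned low halfwords of rA and rB)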
*/ 5510 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]); 5511 tcg_gen_ext16u_tl(t1, cpu_gpr[rb]); 5512 break; 5513 } 5514 if (opc2 & 0x04) { 5515 /* (n)multiply-and-accumulate (0x0C / 0x0E) */ 5516 tcg_gen_mul_tl(t1, t0, t1); 5517 if (opc2 & 0x02) { 5518 /* nmultiply-and-accumulate (0x0E) */ 5519 tcg_gen_sub_tl(t0, cpu_gpr[rt], t1); 5520 } else { 5521 /* multiply-and-accumulate (0x0C) */ 5522 tcg_gen_add_tl(t0, cpu_gpr[rt], t1); 5523 } 5524 5525 if (opc3 & 0x12) { 5526 /* Check overflow and/or saturate */ 5527 TCGLabel *l1 = gen_new_label(); 5528 5529 if (opc3 & 0x10) { 5530 /* Start with XER OV disabled, the most likely case */ 5531 tcg_gen_movi_tl(cpu_ov, 0); 5532 } 5533 if (opc3 & 0x01) { 5534 /* Signed */ 5535 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1); 5536 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1); 5537 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0); 5538 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1); 5539 if (opc3 & 0x02) { 5540 /* Saturate */ 5541 tcg_gen_sari_tl(t0, cpu_gpr[rt], 31); 5542 tcg_gen_xori_tl(t0, t0, 0x7fffffff); 5543 } 5544 } else { 5545 /* Unsigned */ 5546 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1); 5547 if (opc3 & 0x02) { 5548 /* Saturate */ 5549 tcg_gen_movi_tl(t0, UINT32_MAX); 5550 } 5551 } 5552 if (opc3 & 0x10) { 5553 /* Check overflow */ 5554 tcg_gen_movi_tl(cpu_ov, 1); 5555 tcg_gen_movi_tl(cpu_so, 1); 5556 } 5557 gen_set_label(l1); 5558 tcg_gen_mov_tl(cpu_gpr[rt], t0); 5559 } 5560 } else { 5561 tcg_gen_mul_tl(cpu_gpr[rt], t0, t1); 5562 } 5563 tcg_temp_free(t0); 5564 tcg_temp_free(t1); 5565 if (unlikely(Rc) != 0) { 5566 /* Update Rc0 */ 5567 gen_set_Rc0(ctx, cpu_gpr[rt]); 5568 } 5569 } 5570 5571 #define GEN_MAC_HANDLER(name, opc2, opc3) \ 5572 static void glue(gen_, name)(DisasContext *ctx) \ 5573 { \ 5574 gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode), \ 5575 rD(ctx->opcode), Rc(ctx->opcode)); \ 5576 } 5577 5578 /* macchw - macchw. */ 5579 GEN_MAC_HANDLER(macchw, 0x0C, 0x05); 5580 /* macchwo - macchwo. */ 5581 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15); 5582 /* macchws - macchws. */ 5583 GEN_MAC_HANDLER(macchws, 0x0C, 0x07); 5584 /* macchwso - macchwso. */ 5585 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17); 5586 /* macchwsu - macchwsu. */ 5587 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06); 5588 /* macchwsuo - macchwsuo. */ 5589 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16); 5590 /* macchwu - macchwu. */ 5591 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04); 5592 /* macchwuo - macchwuo. */ 5593 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14); 5594 /* machhw - machhw. */ 5595 GEN_MAC_HANDLER(machhw, 0x0C, 0x01); 5596 /* machhwo - machhwo. */ 5597 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11); 5598 /* machhws - machhws. */ 5599 GEN_MAC_HANDLER(machhws, 0x0C, 0x03); 5600 /* machhwso - machhwso. */ 5601 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13); 5602 /* machhwsu - machhwsu. */ 5603 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02); 5604 /* machhwsuo - machhwsuo. */ 5605 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12); 5606 /* machhwu - machhwu. */ 5607 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00); 5608 /* machhwuo - machhwuo. */ 5609 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10); 5610 /* maclhw - maclhw. */ 5611 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D); 5612 /* maclhwo - maclhwo. */ 5613 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D); 5614 /* maclhws - maclhws. */ 5615 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F); 5616 /* maclhwso - maclhwso. */ 5617 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F); 5618 /* maclhwu - maclhwu. */ 5619 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C); 5620 /* maclhwuo - maclhwuo. */ 5621 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C); 5622 /* maclhwsu - maclhwsu. 
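(unsigned low-halfword multiply-accumulate that saturates but does not set XER[OV]; see the opc3 decode above)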
*/ 5623 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E); 5624 /* maclhwsuo - maclhwsuo. */ 5625 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E); 5626 /* nmacchw - nmacchw. */ 5627 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05); 5628 /* nmacchwo - nmacchwo. */ 5629 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15); 5630 /* nmacchws - nmacchws. */ 5631 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07); 5632 /* nmacchwso - nmacchwso. */ 5633 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17); 5634 /* nmachhw - nmachhw. */ 5635 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01); 5636 /* nmachhwo - nmachhwo. */ 5637 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11); 5638 /* nmachhws - nmachhws. */ 5639 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03); 5640 /* nmachhwso - nmachhwso. */ 5641 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13); 5642 /* nmaclhw - nmaclhw. */ 5643 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D); 5644 /* nmaclhwo - nmaclhwo. */ 5645 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D); 5646 /* nmaclhws - nmaclhws. */ 5647 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F); 5648 /* nmaclhwso - nmaclhwso. */ 5649 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F); 5650 5651 /* mulchw - mulchw. */ 5652 GEN_MAC_HANDLER(mulchw, 0x08, 0x05); 5653 /* mulchwu - mulchwu. */ 5654 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04); 5655 /* mulhhw - mulhhw. */ 5656 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01); 5657 /* mulhhwu - mulhhwu. */ 5658 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00); 5659 /* mullhw - mullhw. */ 5660 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D); 5661 /* mullhwu - mullhwu. */ 5662 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C); 5663 5664 /* mfdcr */ 5665 static void gen_mfdcr(DisasContext *ctx) 5666 { 5667 #if defined(CONFIG_USER_ONLY) 5668 GEN_PRIV; 5669 #else 5670 TCGv dcrn; 5671 5672 CHK_SV; 5673 dcrn = tcg_const_tl(SPR(ctx->opcode)); 5674 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, dcrn); 5675 tcg_temp_free(dcrn); 5676 #endif /* defined(CONFIG_USER_ONLY) */ 5677 } 5678 5679 /* mtdcr */ 5680 static void gen_mtdcr(DisasContext *ctx) 5681 { 5682 #if defined(CONFIG_USER_ONLY) 5683 GEN_PRIV; 5684 #else 5685 TCGv dcrn; 5686 5687 CHK_SV; 5688 dcrn = tcg_const_tl(SPR(ctx->opcode)); 5689 gen_helper_store_dcr(cpu_env, dcrn, cpu_gpr[rS(ctx->opcode)]); 5690 tcg_temp_free(dcrn); 5691 #endif /* defined(CONFIG_USER_ONLY) */ 5692 } 5693 5694 /* mfdcrx */ 5695 /* XXX: not implemented on 440 ? */ 5696 static void gen_mfdcrx(DisasContext *ctx) 5697 { 5698 #if defined(CONFIG_USER_ONLY) 5699 GEN_PRIV; 5700 #else 5701 CHK_SV; 5702 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, 5703 cpu_gpr[rA(ctx->opcode)]); 5704 /* Note: Rc update flag set leads to undefined state of Rc0 */ 5705 #endif /* defined(CONFIG_USER_ONLY) */ 5706 } 5707 5708 /* mtdcrx */ 5709 /* XXX: not implemented on 440 ? 
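(indirect form: the DCR number is taken from rA at run time instead of from the instruction encoding)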
*/ 5710 static void gen_mtdcrx(DisasContext *ctx) 5711 { 5712 #if defined(CONFIG_USER_ONLY) 5713 GEN_PRIV; 5714 #else 5715 CHK_SV; 5716 gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)], 5717 cpu_gpr[rS(ctx->opcode)]); 5718 /* Note: Rc update flag set leads to undefined state of Rc0 */ 5719 #endif /* defined(CONFIG_USER_ONLY) */ 5720 } 5721 5722 /* mfdcrux (PPC 460) : user-mode access to DCR */ 5723 static void gen_mfdcrux(DisasContext *ctx) 5724 { 5725 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, 5726 cpu_gpr[rA(ctx->opcode)]); 5727 /* Note: Rc update flag set leads to undefined state of Rc0 */ 5728 } 5729 5730 /* mtdcrux (PPC 460) : user-mode access to DCR */ 5731 static void gen_mtdcrux(DisasContext *ctx) 5732 { 5733 gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)], 5734 cpu_gpr[rS(ctx->opcode)]); 5735 /* Note: Rc update flag set leads to undefined state of Rc0 */ 5736 } 5737 5738 /* dccci */ 5739 static void gen_dccci(DisasContext *ctx) 5740 { 5741 CHK_SV; 5742 /* interpreted as no-op */ 5743 } 5744 5745 /* dcread */ 5746 static void gen_dcread(DisasContext *ctx) 5747 { 5748 #if defined(CONFIG_USER_ONLY) 5749 GEN_PRIV; 5750 #else 5751 TCGv EA, val; 5752 5753 CHK_SV; 5754 gen_set_access_type(ctx, ACCESS_CACHE); 5755 EA = tcg_temp_new(); 5756 gen_addr_reg_index(ctx, EA); 5757 val = tcg_temp_new(); 5758 gen_qemu_ld32u(ctx, val, EA); 5759 tcg_temp_free(val); 5760 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA); 5761 tcg_temp_free(EA); 5762 #endif /* defined(CONFIG_USER_ONLY) */ 5763 } 5764 5765 /* icbt */ 5766 static void gen_icbt_40x(DisasContext *ctx) 5767 { 5768 /* interpreted as no-op */ 5769 /* XXX: specification say this is treated as a load by the MMU 5770 * but does not generate any exception 5771 */ 5772 } 5773 5774 /* iccci */ 5775 static void gen_iccci(DisasContext *ctx) 5776 { 5777 CHK_SV; 5778 /* interpreted as no-op */ 5779 } 5780 5781 /* icread */ 5782 static void gen_icread(DisasContext *ctx) 5783 { 5784 CHK_SV; 5785 /* interpreted as no-op */ 5786 } 5787 5788 /* rfci (supervisor only) */ 5789 static void gen_rfci_40x(DisasContext *ctx) 5790 { 5791 #if defined(CONFIG_USER_ONLY) 5792 GEN_PRIV; 5793 #else 5794 CHK_SV; 5795 /* Restore CPU state */ 5796 gen_helper_40x_rfci(cpu_env); 5797 gen_sync_exception(ctx); 5798 #endif /* defined(CONFIG_USER_ONLY) */ 5799 } 5800 5801 static void gen_rfci(DisasContext *ctx) 5802 { 5803 #if defined(CONFIG_USER_ONLY) 5804 GEN_PRIV; 5805 #else 5806 CHK_SV; 5807 /* Restore CPU state */ 5808 gen_helper_rfci(cpu_env); 5809 gen_sync_exception(ctx); 5810 #endif /* defined(CONFIG_USER_ONLY) */ 5811 } 5812 5813 /* BookE specific */ 5814 5815 /* XXX: not implemented on 440 ? */ 5816 static void gen_rfdi(DisasContext *ctx) 5817 { 5818 #if defined(CONFIG_USER_ONLY) 5819 GEN_PRIV; 5820 #else 5821 CHK_SV; 5822 /* Restore CPU state */ 5823 gen_helper_rfdi(cpu_env); 5824 gen_sync_exception(ctx); 5825 #endif /* defined(CONFIG_USER_ONLY) */ 5826 } 5827 5828 /* XXX: not implemented on 440 ? 
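(return from machine check interrupt; the whole state restore is delegated to the rfmci helper, followed by a sync exception to end the TB)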
*/ 5829 static void gen_rfmci(DisasContext *ctx) 5830 { 5831 #if defined(CONFIG_USER_ONLY) 5832 GEN_PRIV; 5833 #else 5834 CHK_SV; 5835 /* Restore CPU state */ 5836 gen_helper_rfmci(cpu_env); 5837 gen_sync_exception(ctx); 5838 #endif /* defined(CONFIG_USER_ONLY) */ 5839 } 5840 5841 /* TLB management - PowerPC 405 implementation */ 5842 5843 /* tlbre */ 5844 static void gen_tlbre_40x(DisasContext *ctx) 5845 { 5846 #if defined(CONFIG_USER_ONLY) 5847 GEN_PRIV; 5848 #else 5849 CHK_SV; 5850 switch (rB(ctx->opcode)) { 5851 case 0: 5852 gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_env, 5853 cpu_gpr[rA(ctx->opcode)]); 5854 break; 5855 case 1: 5856 gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_env, 5857 cpu_gpr[rA(ctx->opcode)]); 5858 break; 5859 default: 5860 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5861 break; 5862 } 5863 #endif /* defined(CONFIG_USER_ONLY) */ 5864 } 5865 5866 /* tlbsx - tlbsx. */ 5867 static void gen_tlbsx_40x(DisasContext *ctx) 5868 { 5869 #if defined(CONFIG_USER_ONLY) 5870 GEN_PRIV; 5871 #else 5872 TCGv t0; 5873 5874 CHK_SV; 5875 t0 = tcg_temp_new(); 5876 gen_addr_reg_index(ctx, t0); 5877 gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5878 tcg_temp_free(t0); 5879 if (Rc(ctx->opcode)) { 5880 TCGLabel *l1 = gen_new_label(); 5881 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 5882 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1); 5883 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02); 5884 gen_set_label(l1); 5885 } 5886 #endif /* defined(CONFIG_USER_ONLY) */ 5887 } 5888 5889 /* tlbwe */ 5890 static void gen_tlbwe_40x(DisasContext *ctx) 5891 { 5892 #if defined(CONFIG_USER_ONLY) 5893 GEN_PRIV; 5894 #else 5895 CHK_SV; 5896 5897 switch (rB(ctx->opcode)) { 5898 case 0: 5899 gen_helper_4xx_tlbwe_hi(cpu_env, cpu_gpr[rA(ctx->opcode)], 5900 cpu_gpr[rS(ctx->opcode)]); 5901 break; 5902 case 1: 5903 gen_helper_4xx_tlbwe_lo(cpu_env, cpu_gpr[rA(ctx->opcode)], 5904 cpu_gpr[rS(ctx->opcode)]); 5905 break; 5906 default: 5907 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5908 break; 5909 } 5910 #endif /* defined(CONFIG_USER_ONLY) */ 5911 } 5912 5913 /* TLB management - PowerPC 440 implementation */ 5914 5915 /* tlbre */ 5916 static void gen_tlbre_440(DisasContext *ctx) 5917 { 5918 #if defined(CONFIG_USER_ONLY) 5919 GEN_PRIV; 5920 #else 5921 CHK_SV; 5922 5923 switch (rB(ctx->opcode)) { 5924 case 0: 5925 case 1: 5926 case 2: 5927 { 5928 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode)); 5929 gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], cpu_env, 5930 t0, cpu_gpr[rA(ctx->opcode)]); 5931 tcg_temp_free_i32(t0); 5932 } 5933 break; 5934 default: 5935 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5936 break; 5937 } 5938 #endif /* defined(CONFIG_USER_ONLY) */ 5939 } 5940 5941 /* tlbsx - tlbsx. 
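(440 TLB search by effective address: the helper returns the matching entry index, or -1 on a miss; with Rc=1, CR0 is set from SO and CR0[EQ] is set only when a match was found)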
*/ 5942 static void gen_tlbsx_440(DisasContext *ctx) 5943 { 5944 #if defined(CONFIG_USER_ONLY) 5945 GEN_PRIV; 5946 #else 5947 TCGv t0; 5948 5949 CHK_SV; 5950 t0 = tcg_temp_new(); 5951 gen_addr_reg_index(ctx, t0); 5952 gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5953 tcg_temp_free(t0); 5954 if (Rc(ctx->opcode)) { 5955 TCGLabel *l1 = gen_new_label(); 5956 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 5957 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1); 5958 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02); 5959 gen_set_label(l1); 5960 } 5961 #endif /* defined(CONFIG_USER_ONLY) */ 5962 } 5963 5964 /* tlbwe */ 5965 static void gen_tlbwe_440(DisasContext *ctx) 5966 { 5967 #if defined(CONFIG_USER_ONLY) 5968 GEN_PRIV; 5969 #else 5970 CHK_SV; 5971 switch (rB(ctx->opcode)) { 5972 case 0: 5973 case 1: 5974 case 2: 5975 { 5976 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode)); 5977 gen_helper_440_tlbwe(cpu_env, t0, cpu_gpr[rA(ctx->opcode)], 5978 cpu_gpr[rS(ctx->opcode)]); 5979 tcg_temp_free_i32(t0); 5980 } 5981 break; 5982 default: 5983 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5984 break; 5985 } 5986 #endif /* defined(CONFIG_USER_ONLY) */ 5987 } 5988 5989 /* TLB management - PowerPC BookE 2.06 implementation */ 5990 5991 /* tlbre */ 5992 static void gen_tlbre_booke206(DisasContext *ctx) 5993 { 5994 #if defined(CONFIG_USER_ONLY) 5995 GEN_PRIV; 5996 #else 5997 CHK_SV; 5998 gen_helper_booke206_tlbre(cpu_env); 5999 #endif /* defined(CONFIG_USER_ONLY) */ 6000 } 6001 6002 /* tlbsx - tlbsx. */ 6003 static void gen_tlbsx_booke206(DisasContext *ctx) 6004 { 6005 #if defined(CONFIG_USER_ONLY) 6006 GEN_PRIV; 6007 #else 6008 TCGv t0; 6009 6010 CHK_SV; 6011 if (rA(ctx->opcode)) { 6012 t0 = tcg_temp_new(); 6013 tcg_gen_mov_tl(t0, cpu_gpr[rD(ctx->opcode)]); 6014 } else { 6015 t0 = tcg_const_tl(0); 6016 } 6017 6018 tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]); 6019 gen_helper_booke206_tlbsx(cpu_env, t0); 6020 tcg_temp_free(t0); 6021 #endif /* defined(CONFIG_USER_ONLY) */ 6022 } 6023 6024 /* tlbwe */ 6025 static void gen_tlbwe_booke206(DisasContext *ctx) 6026 { 6027 #if defined(CONFIG_USER_ONLY) 6028 GEN_PRIV; 6029 #else 6030 CHK_SV; 6031 gen_helper_booke206_tlbwe(cpu_env); 6032 #endif /* defined(CONFIG_USER_ONLY) */ 6033 } 6034 6035 static void gen_tlbivax_booke206(DisasContext *ctx) 6036 { 6037 #if defined(CONFIG_USER_ONLY) 6038 GEN_PRIV; 6039 #else 6040 TCGv t0; 6041 6042 CHK_SV; 6043 t0 = tcg_temp_new(); 6044 gen_addr_reg_index(ctx, t0); 6045 gen_helper_booke206_tlbivax(cpu_env, t0); 6046 tcg_temp_free(t0); 6047 #endif /* defined(CONFIG_USER_ONLY) */ 6048 } 6049 6050 static void gen_tlbilx_booke206(DisasContext *ctx) 6051 { 6052 #if defined(CONFIG_USER_ONLY) 6053 GEN_PRIV; 6054 #else 6055 TCGv t0; 6056 6057 CHK_SV; 6058 t0 = tcg_temp_new(); 6059 gen_addr_reg_index(ctx, t0); 6060 6061 switch((ctx->opcode >> 21) & 0x3) { 6062 case 0: 6063 gen_helper_booke206_tlbilx0(cpu_env, t0); 6064 break; 6065 case 1: 6066 gen_helper_booke206_tlbilx1(cpu_env, t0); 6067 break; 6068 case 3: 6069 gen_helper_booke206_tlbilx3(cpu_env, t0); 6070 break; 6071 default: 6072 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6073 break; 6074 } 6075 6076 tcg_temp_free(t0); 6077 #endif /* defined(CONFIG_USER_ONLY) */ 6078 } 6079 6080 6081 /* wrtee */ 6082 static void gen_wrtee(DisasContext *ctx) 6083 { 6084 #if defined(CONFIG_USER_ONLY) 6085 GEN_PRIV; 6086 #else 6087 TCGv t0; 6088 6089 CHK_SV; 6090 t0 = tcg_temp_new(); 6091 tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE)); 6092 
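/* Replace MSR[EE] with the EE bit taken from rD */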
tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE)); 6093 tcg_gen_or_tl(cpu_msr, cpu_msr, t0); 6094 tcg_temp_free(t0); 6095 /* Stop translation to have a chance to raise an exception 6096 * if we just set msr_ee to 1 6097 */ 6098 gen_stop_exception(ctx); 6099 #endif /* defined(CONFIG_USER_ONLY) */ 6100 } 6101 6102 /* wrteei */ 6103 static void gen_wrteei(DisasContext *ctx) 6104 { 6105 #if defined(CONFIG_USER_ONLY) 6106 GEN_PRIV; 6107 #else 6108 CHK_SV; 6109 if (ctx->opcode & 0x00008000) { 6110 tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE)); 6111 /* Stop translation to have a chance to raise an exception */ 6112 gen_stop_exception(ctx); 6113 } else { 6114 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE)); 6115 } 6116 #endif /* defined(CONFIG_USER_ONLY) */ 6117 } 6118 6119 /* PowerPC 440 specific instructions */ 6120 6121 /* dlmzb */ 6122 static void gen_dlmzb(DisasContext *ctx) 6123 { 6124 TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode)); 6125 gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_env, 6126 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); 6127 tcg_temp_free_i32(t0); 6128 } 6129 6130 /* mbar replaces eieio on 440 */ 6131 static void gen_mbar(DisasContext *ctx) 6132 { 6133 /* interpreted as no-op */ 6134 } 6135 6136 /* msync replaces sync on 440 */ 6137 static void gen_msync_4xx(DisasContext *ctx) 6138 { 6139 /* interpreted as no-op */ 6140 } 6141 6142 /* icbt */ 6143 static void gen_icbt_440(DisasContext *ctx) 6144 { 6145 /* interpreted as no-op */ 6146 /* XXX: specification say this is treated as a load by the MMU 6147 * but does not generate any exception 6148 */ 6149 } 6150 6151 /* Embedded.Processor Control */ 6152 6153 static void gen_msgclr(DisasContext *ctx) 6154 { 6155 #if defined(CONFIG_USER_ONLY) 6156 GEN_PRIV; 6157 #else 6158 CHK_SV; 6159 gen_helper_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]); 6160 #endif /* defined(CONFIG_USER_ONLY) */ 6161 } 6162 6163 static void gen_msgsnd(DisasContext *ctx) 6164 { 6165 #if defined(CONFIG_USER_ONLY) 6166 GEN_PRIV; 6167 #else 6168 CHK_SV; 6169 gen_helper_msgsnd(cpu_gpr[rB(ctx->opcode)]); 6170 #endif /* defined(CONFIG_USER_ONLY) */ 6171 } 6172 6173 6174 #if defined(TARGET_PPC64) 6175 static void gen_maddld(DisasContext *ctx) 6176 { 6177 TCGv_i64 t1 = tcg_temp_new_i64(); 6178 6179 tcg_gen_mul_i64(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 6180 tcg_gen_add_i64(cpu_gpr[rD(ctx->opcode)], t1, cpu_gpr[rC(ctx->opcode)]); 6181 tcg_temp_free_i64(t1); 6182 } 6183 6184 /* maddhd maddhdu */ 6185 static void gen_maddhd_maddhdu(DisasContext *ctx) 6186 { 6187 TCGv_i64 lo = tcg_temp_new_i64(); 6188 TCGv_i64 hi = tcg_temp_new_i64(); 6189 TCGv_i64 t1 = tcg_temp_new_i64(); 6190 6191 if (Rc(ctx->opcode)) { 6192 tcg_gen_mulu2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)], 6193 cpu_gpr[rB(ctx->opcode)]); 6194 tcg_gen_movi_i64(t1, 0); 6195 } else { 6196 tcg_gen_muls2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)], 6197 cpu_gpr[rB(ctx->opcode)]); 6198 tcg_gen_sari_i64(t1, cpu_gpr[rC(ctx->opcode)], 63); 6199 } 6200 tcg_gen_add2_i64(t1, cpu_gpr[rD(ctx->opcode)], lo, hi, 6201 cpu_gpr[rC(ctx->opcode)], t1); 6202 tcg_temp_free_i64(lo); 6203 tcg_temp_free_i64(hi); 6204 tcg_temp_free_i64(t1); 6205 } 6206 #endif /* defined(TARGET_PPC64) */ 6207 6208 static void gen_tbegin(DisasContext *ctx) 6209 { 6210 if (unlikely(!ctx->tm_enabled)) { 6211 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); 6212 return; 6213 } 6214 gen_helper_tbegin(cpu_env); 6215 } 6216 6217 #define GEN_TM_NOOP(name) \ 6218 static inline void gen_##name(DisasContext *ctx) \ 6219 { \ 6220 if 
(unlikely(!ctx->tm_enabled)) { \ 6221 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \ 6222 return; \ 6223 } \ 6224 /* Because tbegin always fails in QEMU, these user \ 6225 * space instructions all have a simple implementation: \ 6226 * \ 6227 * CR[0] = 0b0 || MSR[TS] || 0b0 \ 6228 * = 0b0 || 0b00 || 0b0 \ 6229 */ \ 6230 tcg_gen_movi_i32(cpu_crf[0], 0); \ 6231 } 6232 6233 GEN_TM_NOOP(tend); 6234 GEN_TM_NOOP(tabort); 6235 GEN_TM_NOOP(tabortwc); 6236 GEN_TM_NOOP(tabortwci); 6237 GEN_TM_NOOP(tabortdc); 6238 GEN_TM_NOOP(tabortdci); 6239 GEN_TM_NOOP(tsr); 6240 static inline void gen_cp_abort(DisasContext *ctx) 6241 { 6242 // Do Nothing 6243 } 6244 6245 #define GEN_CP_PASTE_NOOP(name) \ 6246 static inline void gen_##name(DisasContext *ctx) \ 6247 { \ 6248 /* Generate invalid exception until \ 6249 * we have an implementation of the copy \ 6250 * paste facility \ 6251 */ \ 6252 gen_invalid(ctx); \ 6253 } 6254 6255 GEN_CP_PASTE_NOOP(copy) 6256 GEN_CP_PASTE_NOOP(paste) 6257 6258 static void gen_tcheck(DisasContext *ctx) 6259 { 6260 if (unlikely(!ctx->tm_enabled)) { 6261 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); 6262 return; 6263 } 6264 /* Because tbegin always fails, the tcheck implementation 6265 * is simple: 6266 * 6267 * CR[CRF] = TDOOMED || MSR[TS] || 0b0 6268 * = 0b1 || 0b00 || 0b0 6269 */ 6270 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0x8); 6271 } 6272 6273 #if defined(CONFIG_USER_ONLY) 6274 #define GEN_TM_PRIV_NOOP(name) \ 6275 static inline void gen_##name(DisasContext *ctx) \ 6276 { \ 6277 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); \ 6278 } 6279 6280 #else 6281 6282 #define GEN_TM_PRIV_NOOP(name) \ 6283 static inline void gen_##name(DisasContext *ctx) \ 6284 { \ 6285 CHK_SV; \ 6286 if (unlikely(!ctx->tm_enabled)) { \ 6287 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \ 6288 return; \ 6289 } \ 6290 /* Because tbegin always fails, the implementation is \ 6291 * simple: \ 6292 * \ 6293 * CR[0] = 0b0 || MSR[TS] || 0b0 \ 6294 * = 0b0 || 0b00 | 0b0 \ 6295 */ \ 6296 tcg_gen_movi_i32(cpu_crf[0], 0); \ 6297 } 6298 6299 #endif 6300 6301 GEN_TM_PRIV_NOOP(treclaim); 6302 GEN_TM_PRIV_NOOP(trechkpt); 6303 6304 #include "translate/fp-impl.inc.c" 6305 6306 #include "translate/vmx-impl.inc.c" 6307 6308 #include "translate/vsx-impl.inc.c" 6309 6310 #include "translate/dfp-impl.inc.c" 6311 6312 #include "translate/spe-impl.inc.c" 6313 6314 /* Handles lfdp, lxsd, lxssp */ 6315 static void gen_dform39(DisasContext *ctx) 6316 { 6317 switch (ctx->opcode & 0x3) { 6318 case 0: /* lfdp */ 6319 if (ctx->insns_flags2 & PPC2_ISA205) { 6320 return gen_lfdp(ctx); 6321 } 6322 break; 6323 case 2: /* lxsd */ 6324 if (ctx->insns_flags2 & PPC2_ISA300) { 6325 return gen_lxsd(ctx); 6326 } 6327 break; 6328 case 3: /* lxssp */ 6329 if (ctx->insns_flags2 & PPC2_ISA300) { 6330 return gen_lxssp(ctx); 6331 } 6332 break; 6333 } 6334 return gen_invalid(ctx); 6335 } 6336 6337 /* handles stfdp, lxv, stxsd, stxssp lxvx */ 6338 static void gen_dform3D(DisasContext *ctx) 6339 { 6340 if ((ctx->opcode & 3) == 1) { /* DQ-FORM */ 6341 switch (ctx->opcode & 0x7) { 6342 case 1: /* lxv */ 6343 if (ctx->insns_flags2 & PPC2_ISA300) { 6344 return gen_lxv(ctx); 6345 } 6346 break; 6347 case 5: /* stxv */ 6348 if (ctx->insns_flags2 & PPC2_ISA300) { 6349 return gen_stxv(ctx); 6350 } 6351 break; 6352 } 6353 } else { /* DS-FORM */ 6354 switch (ctx->opcode & 0x3) { 6355 case 0: /* stfdp */ 6356 if (ctx->insns_flags2 & PPC2_ISA205) { 6357 return gen_stfdp(ctx); 6358 } 6359 break; 6360 case 2: /* stxsd */ 6361 if 
(ctx->insns_flags2 & PPC2_ISA300) { 6362 return gen_stxsd(ctx); 6363 } 6364 break; 6365 case 3: /* stxssp */ 6366 if (ctx->insns_flags2 & PPC2_ISA300) { 6367 return gen_stxssp(ctx); 6368 } 6369 break; 6370 } 6371 } 6372 return gen_invalid(ctx); 6373 } 6374 6375 static opcode_t opcodes[] = { 6376 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE), 6377 GEN_HANDLER(cmp, 0x1F, 0x00, 0x00, 0x00400000, PPC_INTEGER), 6378 GEN_HANDLER(cmpi, 0x0B, 0xFF, 0xFF, 0x00400000, PPC_INTEGER), 6379 GEN_HANDLER(cmpl, 0x1F, 0x00, 0x01, 0x00400001, PPC_INTEGER), 6380 GEN_HANDLER(cmpli, 0x0A, 0xFF, 0xFF, 0x00400000, PPC_INTEGER), 6381 #if defined(TARGET_PPC64) 6382 GEN_HANDLER_E(cmpeqb, 0x1F, 0x00, 0x07, 0x00600000, PPC_NONE, PPC2_ISA300), 6383 #endif 6384 GEN_HANDLER_E(cmpb, 0x1F, 0x1C, 0x0F, 0x00000001, PPC_NONE, PPC2_ISA205), 6385 GEN_HANDLER_E(cmprb, 0x1F, 0x00, 0x06, 0x00400001, PPC_NONE, PPC2_ISA300), 6386 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL), 6387 GEN_HANDLER(addi, 0x0E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6388 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6389 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6390 GEN_HANDLER(addis, 0x0F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6391 GEN_HANDLER_E(addpcis, 0x13, 0x2, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300), 6392 GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER), 6393 GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER), 6394 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER), 6395 GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER), 6396 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6397 #if defined(TARGET_PPC64) 6398 GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B), 6399 #endif 6400 GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER), 6401 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER), 6402 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6403 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6404 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6405 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER), 6406 GEN_HANDLER_E(cnttzw, 0x1F, 0x1A, 0x10, 0x00000000, PPC_NONE, PPC2_ISA300), 6407 GEN_HANDLER_E(copy, 0x1F, 0x06, 0x18, 0x03C00001, PPC_NONE, PPC2_ISA300), 6408 GEN_HANDLER_E(cp_abort, 0x1F, 0x06, 0x1A, 0x03FFF801, PPC_NONE, PPC2_ISA300), 6409 GEN_HANDLER_E(paste, 0x1F, 0x06, 0x1C, 0x03C00000, PPC_NONE, PPC2_ISA300), 6410 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER), 6411 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER), 6412 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6413 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6414 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6415 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6416 GEN_HANDLER(popcntb, 0x1F, 0x1A, 0x03, 0x0000F801, PPC_POPCNTB), 6417 GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD), 6418 GEN_HANDLER_E(prtyw, 0x1F, 0x1A, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA205), 6419 #if defined(TARGET_PPC64) 6420 GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD), 6421 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B), 6422 GEN_HANDLER_E(cnttzd, 0x1F, 0x1A, 0x11, 0x00000000, PPC_NONE, PPC2_ISA300), 6423 GEN_HANDLER_E(darn, 0x1F, 0x13, 0x17, 0x001CF801, PPC_NONE, PPC2_ISA300), 6424 GEN_HANDLER_E(prtyd, 0x1F, 0x1A, 0x05, 0x0000F801, PPC_NONE, 
PPC2_ISA205), 6425 GEN_HANDLER_E(bpermd, 0x1F, 0x1C, 0x07, 0x00000001, PPC_NONE, PPC2_PERM_ISA206), 6426 #endif 6427 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6428 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6429 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6430 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER), 6431 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER), 6432 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER), 6433 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER), 6434 #if defined(TARGET_PPC64) 6435 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B), 6436 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B), 6437 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B), 6438 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B), 6439 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B), 6440 GEN_HANDLER2_E(extswsli0, "extswsli", 0x1F, 0x1A, 0x1B, 0x00000000, 6441 PPC_NONE, PPC2_ISA300), 6442 GEN_HANDLER2_E(extswsli1, "extswsli", 0x1F, 0x1B, 0x1B, 0x00000000, 6443 PPC_NONE, PPC2_ISA300), 6444 #endif 6445 #if defined(TARGET_PPC64) 6446 GEN_HANDLER(ld, 0x3A, 0xFF, 0xFF, 0x00000000, PPC_64B), 6447 GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX), 6448 GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B), 6449 #endif 6450 /* handles lfdp, lxsd, lxssp */ 6451 GEN_HANDLER_E(dform39, 0x39, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205), 6452 /* handles stfdp, lxv, stxsd, stxssp, stxv */ 6453 GEN_HANDLER_E(dform3D, 0x3D, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205), 6454 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6455 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6456 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING), 6457 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING), 6458 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING), 6459 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING), 6460 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x03FFF801, PPC_MEM_EIEIO), 6461 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM), 6462 GEN_HANDLER_E(lbarx, 0x1F, 0x14, 0x01, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6463 GEN_HANDLER_E(lharx, 0x1F, 0x14, 0x03, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6464 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES), 6465 GEN_HANDLER_E(lwat, 0x1F, 0x06, 0x12, 0x00000001, PPC_NONE, PPC2_ISA300), 6466 GEN_HANDLER_E(stwat, 0x1F, 0x06, 0x16, 0x00000001, PPC_NONE, PPC2_ISA300), 6467 GEN_HANDLER_E(stbcx_, 0x1F, 0x16, 0x15, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6468 GEN_HANDLER_E(sthcx_, 0x1F, 0x16, 0x16, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6469 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES), 6470 #if defined(TARGET_PPC64) 6471 GEN_HANDLER_E(ldat, 0x1F, 0x06, 0x13, 0x00000001, PPC_NONE, PPC2_ISA300), 6472 GEN_HANDLER_E(stdat, 0x1F, 0x06, 0x17, 0x00000001, PPC_NONE, PPC2_ISA300), 6473 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B), 6474 GEN_HANDLER_E(lqarx, 0x1F, 0x14, 0x08, 0, PPC_NONE, PPC2_LSQ_ISA207), 6475 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B), 6476 GEN_HANDLER_E(stqcx_, 0x1F, 0x16, 0x05, 0, PPC_NONE, PPC2_LSQ_ISA207), 6477 #endif 6478 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC), 6479 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT), 6480 GEN_HANDLER_E(wait, 0x1F, 0x1E, 0x00, 0x039FF801, PPC_NONE, PPC2_ISA300), 6481 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, 
PPC_FLOW), 6482 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 6483 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW), 6484 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW), 6485 GEN_HANDLER_E(bctar, 0x13, 0x10, 0x11, 0x0000E000, PPC_NONE, PPC2_BCTAR_ISA207), 6486 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER), 6487 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW), 6488 #if defined(TARGET_PPC64) 6489 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B), 6490 GEN_HANDLER_E(stop, 0x13, 0x12, 0x0b, 0x03FFF801, PPC_NONE, PPC2_ISA300), 6491 GEN_HANDLER_E(doze, 0x13, 0x12, 0x0c, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6492 GEN_HANDLER_E(nap, 0x13, 0x12, 0x0d, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6493 GEN_HANDLER_E(sleep, 0x13, 0x12, 0x0e, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6494 GEN_HANDLER_E(rvwinkle, 0x13, 0x12, 0x0f, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6495 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H), 6496 #endif 6497 GEN_HANDLER(sc, 0x11, 0xFF, 0xFF, 0x03FFF01D, PPC_FLOW), 6498 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW), 6499 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 6500 #if defined(TARGET_PPC64) 6501 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B), 6502 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B), 6503 #endif 6504 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC), 6505 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC), 6506 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC), 6507 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC), 6508 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB), 6509 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC), 6510 #if defined(TARGET_PPC64) 6511 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B), 6512 GEN_HANDLER_E(setb, 0x1F, 0x00, 0x04, 0x0003F801, PPC_NONE, PPC2_ISA300), 6513 GEN_HANDLER_E(mcrxrx, 0x1F, 0x00, 0x12, 0x007FF801, PPC_NONE, PPC2_ISA300), 6514 #endif 6515 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001EF801, PPC_MISC), 6516 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000000, PPC_MISC), 6517 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE), 6518 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE), 6519 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE), 6520 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x00000001, PPC_CACHE), 6521 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x00000001, PPC_CACHE), 6522 GEN_HANDLER_E(dcbtls, 0x1F, 0x06, 0x05, 0x02000001, PPC_BOOKE, PPC2_BOOKE206), 6523 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ), 6524 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC), 6525 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x02000001, PPC_ALTIVEC), 6526 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC), 6527 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI), 6528 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA), 6529 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT), 6530 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT), 6531 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT), 6532 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT), 6533 #if defined(TARGET_PPC64) 6534 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B), 6535 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001, 6536 PPC_SEGMENT_64B), 6537 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B), 6538 GEN_HANDLER2(mtsrin_64b, 
"mtsrin", 0x1F, 0x12, 0x07, 0x001F0001, 6539 PPC_SEGMENT_64B), 6540 GEN_HANDLER2(slbmte, "slbmte", 0x1F, 0x12, 0x0C, 0x001F0001, PPC_SEGMENT_64B), 6541 GEN_HANDLER2(slbmfee, "slbmfee", 0x1F, 0x13, 0x1C, 0x001F0001, PPC_SEGMENT_64B), 6542 GEN_HANDLER2(slbmfev, "slbmfev", 0x1F, 0x13, 0x1A, 0x001F0001, PPC_SEGMENT_64B), 6543 GEN_HANDLER2(slbfee_, "slbfee.", 0x1F, 0x13, 0x1E, 0x001F0000, PPC_SEGMENT_64B), 6544 #endif 6545 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA), 6546 /* XXX Those instructions will need to be handled differently for 6547 * different ISA versions */ 6548 GEN_HANDLER(tlbiel, 0x1F, 0x12, 0x08, 0x001F0001, PPC_MEM_TLBIE), 6549 GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x001F0001, PPC_MEM_TLBIE), 6550 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC), 6551 #if defined(TARGET_PPC64) 6552 GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x031FFC01, PPC_SLBI), 6553 GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI), 6554 GEN_HANDLER_E(slbieg, 0x1F, 0x12, 0x0E, 0x001F0001, PPC_NONE, PPC2_ISA300), 6555 GEN_HANDLER_E(slbsync, 0x1F, 0x12, 0x0A, 0x03FFF801, PPC_NONE, PPC2_ISA300), 6556 #endif 6557 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN), 6558 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN), 6559 GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR), 6560 GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR), 6561 GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR), 6562 GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR), 6563 GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR), 6564 GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR), 6565 GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR), 6566 GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR), 6567 GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR), 6568 GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR), 6569 GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR), 6570 GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR), 6571 GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR), 6572 GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR), 6573 GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR), 6574 GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR), 6575 GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR), 6576 GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR), 6577 GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR), 6578 GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR), 6579 GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR), 6580 GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR), 6581 GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR), 6582 GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR), 6583 GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR), 6584 GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR), 6585 GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR), 6586 GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR), 6587 GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR), 6588 GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR), 6589 GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR), 6590 GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR), 6591 GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR), 6592 GEN_HANDLER(srq, 0x1F, 
0x18, 0x14, 0x00000000, PPC_POWER_BR), 6593 GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC), 6594 GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC), 6595 GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC), 6596 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB), 6597 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB), 6598 GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB), 6599 GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB), 6600 GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER), 6601 GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER), 6602 GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER), 6603 GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER), 6604 GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER), 6605 GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER), 6606 GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 6607 GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 6608 GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2), 6609 GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2), 6610 GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 6611 GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 6612 GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2), 6613 GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2), 6614 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI), 6615 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA), 6616 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR), 6617 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR), 6618 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX), 6619 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX), 6620 GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX), 6621 GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX), 6622 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON), 6623 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON), 6624 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT), 6625 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON), 6626 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON), 6627 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP), 6628 GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206), 6629 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI), 6630 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI), 6631 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB), 6632 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB), 6633 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB), 6634 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE), 6635 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE), 6636 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE), 6637 GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, 6638 PPC_NONE, PPC2_BOOKE206), 6639 GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, 6640 PPC_NONE, PPC2_BOOKE206), 6641 GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, 6642 PPC_NONE, PPC2_BOOKE206), 6643 GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 
0x12, 0x18, 0x00000001, 6644 PPC_NONE, PPC2_BOOKE206), 6645 GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001, 6646 PPC_NONE, PPC2_BOOKE206), 6647 GEN_HANDLER2_E(msgsnd, "msgsnd", 0x1F, 0x0E, 0x06, 0x03ff0001, 6648 PPC_NONE, PPC2_PRCNTL), 6649 GEN_HANDLER2_E(msgclr, "msgclr", 0x1F, 0x0E, 0x07, 0x03ff0001, 6650 PPC_NONE, PPC2_PRCNTL), 6651 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE), 6652 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE), 6653 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC), 6654 GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801, 6655 PPC_BOOKE, PPC2_BOOKE206), 6656 GEN_HANDLER(msync_4xx, 0x1F, 0x16, 0x12, 0x03FFF801, PPC_BOOKE), 6657 GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001, 6658 PPC_BOOKE, PPC2_BOOKE206), 6659 GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC), 6660 GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC), 6661 GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC), 6662 GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC), 6663 GEN_HANDLER(vmladduhm, 0x04, 0x11, 0xFF, 0x00000000, PPC_ALTIVEC), 6664 #if defined(TARGET_PPC64) 6665 GEN_HANDLER_E(maddhd_maddhdu, 0x04, 0x18, 0xFF, 0x00000000, PPC_NONE, 6666 PPC2_ISA300), 6667 GEN_HANDLER_E(maddld, 0x04, 0x19, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300), 6668 #endif 6669 6670 #undef GEN_INT_ARITH_ADD 6671 #undef GEN_INT_ARITH_ADD_CONST 6672 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \ 6673 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER), 6674 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \ 6675 add_ca, compute_ca, compute_ov) \ 6676 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER), 6677 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0) 6678 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1) 6679 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0) 6680 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1) 6681 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0) 6682 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1) 6683 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0) 6684 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1) 6685 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0) 6686 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1) 6687 6688 #undef GEN_INT_ARITH_DIVW 6689 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \ 6690 GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER) 6691 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0), 6692 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1), 6693 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0), 6694 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1), 6695 GEN_HANDLER_E(divwe, 0x1F, 0x0B, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206), 6696 GEN_HANDLER_E(divweo, 0x1F, 0x0B, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206), 6697 GEN_HANDLER_E(divweu, 0x1F, 0x0B, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206), 6698 GEN_HANDLER_E(divweuo, 0x1F, 0x0B, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206), 6699 GEN_HANDLER_E(modsw, 0x1F, 0x0B, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300), 6700 GEN_HANDLER_E(moduw, 0x1F, 0x0B, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300), 6701 6702 #if defined(TARGET_PPC64) 6703 #undef GEN_INT_ARITH_DIVD 6704 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \ 6705 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B) 6706 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0), 6707 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1), 6708 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0), 6709 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1), 6710 6711 GEN_HANDLER_E(divdeu, 0x1F, 0x09, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206), 6712 GEN_HANDLER_E(divdeuo, 0x1F, 0x09, 0x1C, 
0, PPC_NONE, PPC2_DIVE_ISA206), 6713 GEN_HANDLER_E(divde, 0x1F, 0x09, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206), 6714 GEN_HANDLER_E(divdeo, 0x1F, 0x09, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206), 6715 GEN_HANDLER_E(modsd, 0x1F, 0x09, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300), 6716 GEN_HANDLER_E(modud, 0x1F, 0x09, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300), 6717 6718 #undef GEN_INT_ARITH_MUL_HELPER 6719 #define GEN_INT_ARITH_MUL_HELPER(name, opc3) \ 6720 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B) 6721 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00), 6722 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02), 6723 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17), 6724 #endif 6725 6726 #undef GEN_INT_ARITH_SUBF 6727 #undef GEN_INT_ARITH_SUBF_CONST 6728 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \ 6729 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER), 6730 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \ 6731 add_ca, compute_ca, compute_ov) \ 6732 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER), 6733 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0) 6734 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1) 6735 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0) 6736 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1) 6737 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0) 6738 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1) 6739 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0) 6740 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1) 6741 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0) 6742 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1) 6743 6744 #undef GEN_LOGICAL1 6745 #undef GEN_LOGICAL2 6746 #define GEN_LOGICAL2(name, tcg_op, opc, type) \ 6747 GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type) 6748 #define GEN_LOGICAL1(name, tcg_op, opc, type) \ 6749 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type) 6750 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER), 6751 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER), 6752 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER), 6753 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER), 6754 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER), 6755 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER), 6756 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER), 6757 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER), 6758 #if defined(TARGET_PPC64) 6759 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B), 6760 #endif 6761 6762 #if defined(TARGET_PPC64) 6763 #undef GEN_PPC64_R2 6764 #undef GEN_PPC64_R4 6765 #define GEN_PPC64_R2(name, opc1, opc2) \ 6766 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\ 6767 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \ 6768 PPC_64B) 6769 #define GEN_PPC64_R4(name, opc1, opc2) \ 6770 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\ 6771 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \ 6772 PPC_64B), \ 6773 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \ 6774 PPC_64B), \ 6775 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \ 6776 PPC_64B) 6777 GEN_PPC64_R4(rldicl, 0x1E, 0x00), 6778 GEN_PPC64_R4(rldicr, 0x1E, 0x02), 6779 GEN_PPC64_R4(rldic, 0x1E, 0x04), 6780 GEN_PPC64_R2(rldcl, 0x1E, 0x08), 6781 GEN_PPC64_R2(rldcr, 0x1E, 0x09), 6782 GEN_PPC64_R4(rldimi, 0x1E, 0x06), 6783 #endif 6784 6785 #undef GEN_LD 6786 #undef GEN_LDU 6787 #undef GEN_LDUX 6788 #undef GEN_LDX_E 6789 #undef GEN_LDS 6790 #define GEN_LD(name, ldop, opc, type) \ 6791 
GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type), 6792 #define GEN_LDU(name, ldop, opc, type) \ 6793 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type), 6794 #define GEN_LDUX(name, ldop, opc2, opc3, type) \ 6795 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type), 6796 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \ 6797 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2), 6798 #define GEN_LDS(name, ldop, op, type) \ 6799 GEN_LD(name, ldop, op | 0x20, type) \ 6800 GEN_LDU(name, ldop, op | 0x21, type) \ 6801 GEN_LDUX(name, ldop, 0x17, op | 0x01, type) \ 6802 GEN_LDX(name, ldop, 0x17, op | 0x00, type) 6803 6804 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER) 6805 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER) 6806 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER) 6807 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER) 6808 #if defined(TARGET_PPC64) 6809 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B) 6810 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B) 6811 GEN_LDUX(ld, ld64_i64, 0x15, 0x01, PPC_64B) 6812 GEN_LDX(ld, ld64_i64, 0x15, 0x00, PPC_64B) 6813 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE) 6814 6815 /* HV/P7 and later only */ 6816 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST) 6817 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x18, PPC_CILDST) 6818 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST) 6819 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST) 6820 #endif 6821 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER) 6822 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER) 6823 6824 #undef GEN_ST 6825 #undef GEN_STU 6826 #undef GEN_STUX 6827 #undef GEN_STX_E 6828 #undef GEN_STS 6829 #define GEN_ST(name, stop, opc, type) \ 6830 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type), 6831 #define GEN_STU(name, stop, opc, type) \ 6832 GEN_HANDLER(stop##u, opc, 0xFF, 0xFF, 0x00000000, type), 6833 #define GEN_STUX(name, stop, opc2, opc3, type) \ 6834 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type), 6835 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \ 6836 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2), 6837 #define GEN_STS(name, stop, op, type) \ 6838 GEN_ST(name, stop, op | 0x20, type) \ 6839 GEN_STU(name, stop, op | 0x21, type) \ 6840 GEN_STUX(name, stop, 0x17, op | 0x01, type) \ 6841 GEN_STX(name, stop, 0x17, op | 0x00, type) 6842 6843 GEN_STS(stb, st8, 0x06, PPC_INTEGER) 6844 GEN_STS(sth, st16, 0x0C, PPC_INTEGER) 6845 GEN_STS(stw, st32, 0x04, PPC_INTEGER) 6846 #if defined(TARGET_PPC64) 6847 GEN_STUX(std, st64_i64, 0x15, 0x05, PPC_64B) 6848 GEN_STX(std, st64_i64, 0x15, 0x04, PPC_64B) 6849 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE) 6850 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST) 6851 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST) 6852 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST) 6853 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST) 6854 #endif 6855 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER) 6856 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER) 6857 6858 #undef GEN_CRLOGIC 6859 #define GEN_CRLOGIC(name, tcg_op, opc) \ 6860 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER) 6861 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08), 6862 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04), 6863 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09), 6864 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07), 6865 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01), 6866 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E), 6867 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D), 6868 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06), 6869 6870 #undef 
GEN_MAC_HANDLER 6871 #define GEN_MAC_HANDLER(name, opc2, opc3) \ 6872 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC) 6873 GEN_MAC_HANDLER(macchw, 0x0C, 0x05), 6874 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15), 6875 GEN_MAC_HANDLER(macchws, 0x0C, 0x07), 6876 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17), 6877 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06), 6878 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16), 6879 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04), 6880 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14), 6881 GEN_MAC_HANDLER(machhw, 0x0C, 0x01), 6882 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11), 6883 GEN_MAC_HANDLER(machhws, 0x0C, 0x03), 6884 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13), 6885 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02), 6886 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12), 6887 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00), 6888 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10), 6889 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D), 6890 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D), 6891 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F), 6892 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F), 6893 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C), 6894 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C), 6895 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E), 6896 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E), 6897 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05), 6898 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15), 6899 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07), 6900 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17), 6901 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01), 6902 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11), 6903 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03), 6904 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13), 6905 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D), 6906 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D), 6907 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F), 6908 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F), 6909 GEN_MAC_HANDLER(mulchw, 0x08, 0x05), 6910 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04), 6911 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01), 6912 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00), 6913 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D), 6914 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C), 6915 6916 GEN_HANDLER2_E(tbegin, "tbegin", 0x1F, 0x0E, 0x14, 0x01DFF800, \ 6917 PPC_NONE, PPC2_TM), 6918 GEN_HANDLER2_E(tend, "tend", 0x1F, 0x0E, 0x15, 0x01FFF800, \ 6919 PPC_NONE, PPC2_TM), 6920 GEN_HANDLER2_E(tabort, "tabort", 0x1F, 0x0E, 0x1C, 0x03E0F800, \ 6921 PPC_NONE, PPC2_TM), 6922 GEN_HANDLER2_E(tabortwc, "tabortwc", 0x1F, 0x0E, 0x18, 0x00000000, \ 6923 PPC_NONE, PPC2_TM), 6924 GEN_HANDLER2_E(tabortwci, "tabortwci", 0x1F, 0x0E, 0x1A, 0x00000000, \ 6925 PPC_NONE, PPC2_TM), 6926 GEN_HANDLER2_E(tabortdc, "tabortdc", 0x1F, 0x0E, 0x19, 0x00000000, \ 6927 PPC_NONE, PPC2_TM), 6928 GEN_HANDLER2_E(tabortdci, "tabortdci", 0x1F, 0x0E, 0x1B, 0x00000000, \ 6929 PPC_NONE, PPC2_TM), 6930 GEN_HANDLER2_E(tsr, "tsr", 0x1F, 0x0E, 0x17, 0x03DFF800, \ 6931 PPC_NONE, PPC2_TM), 6932 GEN_HANDLER2_E(tcheck, "tcheck", 0x1F, 0x0E, 0x16, 0x007FF800, \ 6933 PPC_NONE, PPC2_TM), 6934 GEN_HANDLER2_E(treclaim, "treclaim", 0x1F, 0x0E, 0x1D, 0x03E0F800, \ 6935 PPC_NONE, PPC2_TM), 6936 GEN_HANDLER2_E(trechkpt, "trechkpt", 0x1F, 0x0E, 0x1F, 0x03FFF800, \ 6937 PPC_NONE, PPC2_TM), 6938 6939 #include "translate/fp-ops.inc.c" 6940 6941 #include "translate/vmx-ops.inc.c" 6942 6943 #include "translate/vsx-ops.inc.c" 6944 6945 #include "translate/dfp-ops.inc.c" 6946 6947 #include "translate/spe-ops.inc.c" 6948 }; 6949 6950 #include "helper_regs.h" 6951 #include "translate_init.c" 6952 6953 /*****************************************************************************/ 6954 /* Misc PowerPC helpers */ 6955 void ppc_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf, 6956 int flags) 6957 { 
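    /* Pretty-print the architected register state to the given stream.
     * RGPL and RFPL are the number of GPR / FPR values printed per line.
     */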
6958 #define RGPL 4 6959 #define RFPL 4 6960 6961 PowerPCCPU *cpu = POWERPC_CPU(cs); 6962 CPUPPCState *env = &cpu->env; 6963 int i; 6964 6965 cpu_fprintf(f, "NIP " TARGET_FMT_lx " LR " TARGET_FMT_lx " CTR " 6966 TARGET_FMT_lx " XER " TARGET_FMT_lx " CPU#%d\n", 6967 env->nip, env->lr, env->ctr, cpu_read_xer(env), 6968 cs->cpu_index); 6969 cpu_fprintf(f, "MSR " TARGET_FMT_lx " HID0 " TARGET_FMT_lx " HF " 6970 TARGET_FMT_lx " iidx %d didx %d\n", 6971 env->msr, env->spr[SPR_HID0], 6972 env->hflags, env->immu_idx, env->dmmu_idx); 6973 #if !defined(NO_TIMER_DUMP) 6974 cpu_fprintf(f, "TB %08" PRIu32 " %08" PRIu64 6975 #if !defined(CONFIG_USER_ONLY) 6976 " DECR %08" PRIu32 6977 #endif 6978 "\n", 6979 cpu_ppc_load_tbu(env), cpu_ppc_load_tbl(env) 6980 #if !defined(CONFIG_USER_ONLY) 6981 , cpu_ppc_load_decr(env) 6982 #endif 6983 ); 6984 #endif 6985 for (i = 0; i < 32; i++) { 6986 if ((i & (RGPL - 1)) == 0) 6987 cpu_fprintf(f, "GPR%02d", i); 6988 cpu_fprintf(f, " %016" PRIx64, ppc_dump_gpr(env, i)); 6989 if ((i & (RGPL - 1)) == (RGPL - 1)) 6990 cpu_fprintf(f, "\n"); 6991 } 6992 cpu_fprintf(f, "CR "); 6993 for (i = 0; i < 8; i++) 6994 cpu_fprintf(f, "%01x", env->crf[i]); 6995 cpu_fprintf(f, " ["); 6996 for (i = 0; i < 8; i++) { 6997 char a = '-'; 6998 if (env->crf[i] & 0x08) 6999 a = 'L'; 7000 else if (env->crf[i] & 0x04) 7001 a = 'G'; 7002 else if (env->crf[i] & 0x02) 7003 a = 'E'; 7004 cpu_fprintf(f, " %c%c", a, env->crf[i] & 0x01 ? 'O' : ' '); 7005 } 7006 cpu_fprintf(f, " ] RES " TARGET_FMT_lx "\n", 7007 env->reserve_addr); 7008 for (i = 0; i < 32; i++) { 7009 if ((i & (RFPL - 1)) == 0) 7010 cpu_fprintf(f, "FPR%02d", i); 7011 cpu_fprintf(f, " %016" PRIx64, *((uint64_t *)&env->fpr[i])); 7012 if ((i & (RFPL - 1)) == (RFPL - 1)) 7013 cpu_fprintf(f, "\n"); 7014 } 7015 cpu_fprintf(f, "FPSCR " TARGET_FMT_lx "\n", env->fpscr); 7016 #if !defined(CONFIG_USER_ONLY) 7017 cpu_fprintf(f, " SRR0 " TARGET_FMT_lx " SRR1 " TARGET_FMT_lx 7018 " PVR " TARGET_FMT_lx " VRSAVE " TARGET_FMT_lx "\n", 7019 env->spr[SPR_SRR0], env->spr[SPR_SRR1], 7020 env->spr[SPR_PVR], env->spr[SPR_VRSAVE]); 7021 7022 cpu_fprintf(f, "SPRG0 " TARGET_FMT_lx " SPRG1 " TARGET_FMT_lx 7023 " SPRG2 " TARGET_FMT_lx " SPRG3 " TARGET_FMT_lx "\n", 7024 env->spr[SPR_SPRG0], env->spr[SPR_SPRG1], 7025 env->spr[SPR_SPRG2], env->spr[SPR_SPRG3]); 7026 7027 cpu_fprintf(f, "SPRG4 " TARGET_FMT_lx " SPRG5 " TARGET_FMT_lx 7028 " SPRG6 " TARGET_FMT_lx " SPRG7 " TARGET_FMT_lx "\n", 7029 env->spr[SPR_SPRG4], env->spr[SPR_SPRG5], 7030 env->spr[SPR_SPRG6], env->spr[SPR_SPRG7]); 7031 7032 #if defined(TARGET_PPC64) 7033 if (env->excp_model == POWERPC_EXCP_POWER7 || 7034 env->excp_model == POWERPC_EXCP_POWER8) { 7035 cpu_fprintf(f, "HSRR0 " TARGET_FMT_lx " HSRR1 " TARGET_FMT_lx "\n", 7036 env->spr[SPR_HSRR0], env->spr[SPR_HSRR1]); 7037 } 7038 #endif 7039 if (env->excp_model == POWERPC_EXCP_BOOKE) { 7040 cpu_fprintf(f, "CSRR0 " TARGET_FMT_lx " CSRR1 " TARGET_FMT_lx 7041 " MCSRR0 " TARGET_FMT_lx " MCSRR1 " TARGET_FMT_lx "\n", 7042 env->spr[SPR_BOOKE_CSRR0], env->spr[SPR_BOOKE_CSRR1], 7043 env->spr[SPR_BOOKE_MCSRR0], env->spr[SPR_BOOKE_MCSRR1]); 7044 7045 cpu_fprintf(f, " TCR " TARGET_FMT_lx " TSR " TARGET_FMT_lx 7046 " ESR " TARGET_FMT_lx " DEAR " TARGET_FMT_lx "\n", 7047 env->spr[SPR_BOOKE_TCR], env->spr[SPR_BOOKE_TSR], 7048 env->spr[SPR_BOOKE_ESR], env->spr[SPR_BOOKE_DEAR]); 7049 7050 cpu_fprintf(f, " PIR " TARGET_FMT_lx " DECAR " TARGET_FMT_lx 7051 " IVPR " TARGET_FMT_lx " EPCR " TARGET_FMT_lx "\n", 7052 env->spr[SPR_BOOKE_PIR], env->spr[SPR_BOOKE_DECAR], 7053 
env->spr[SPR_BOOKE_IVPR], env->spr[SPR_BOOKE_EPCR]); 7054 7055 cpu_fprintf(f, " MCSR " TARGET_FMT_lx " SPRG8 " TARGET_FMT_lx 7056 " EPR " TARGET_FMT_lx "\n", 7057 env->spr[SPR_BOOKE_MCSR], env->spr[SPR_BOOKE_SPRG8], 7058 env->spr[SPR_BOOKE_EPR]); 7059 7060 /* FSL-specific */ 7061 cpu_fprintf(f, " MCAR " TARGET_FMT_lx " PID1 " TARGET_FMT_lx 7062 " PID2 " TARGET_FMT_lx " SVR " TARGET_FMT_lx "\n", 7063 env->spr[SPR_Exxx_MCAR], env->spr[SPR_BOOKE_PID1], 7064 env->spr[SPR_BOOKE_PID2], env->spr[SPR_E500_SVR]); 7065 7066 /* 7067 * IVORs are left out as they are large and do not change often -- 7068 * they can be read with "p $ivor0", "p $ivor1", etc. 7069 */ 7070 } 7071 7072 #if defined(TARGET_PPC64) 7073 if (env->flags & POWERPC_FLAG_CFAR) { 7074 cpu_fprintf(f, " CFAR " TARGET_FMT_lx"\n", env->cfar); 7075 } 7076 #endif 7077 7078 if (env->spr_cb[SPR_LPCR].name) 7079 cpu_fprintf(f, " LPCR " TARGET_FMT_lx "\n", env->spr[SPR_LPCR]); 7080 7081 switch (POWERPC_MMU_VER(env->mmu_model)) { 7082 case POWERPC_MMU_32B: 7083 case POWERPC_MMU_601: 7084 case POWERPC_MMU_SOFT_6xx: 7085 case POWERPC_MMU_SOFT_74xx: 7086 #if defined(TARGET_PPC64) 7087 case POWERPC_MMU_VER_64B: 7088 case POWERPC_MMU_VER_2_03: 7089 case POWERPC_MMU_VER_2_06: 7090 case POWERPC_MMU_VER_2_07: 7091 case POWERPC_MMU_VER_3_00: 7092 #endif 7093 if (env->spr_cb[SPR_SDR1].name) { /* SDR1 Exists */ 7094 cpu_fprintf(f, " SDR1 " TARGET_FMT_lx " ", env->spr[SPR_SDR1]); 7095 } 7096 cpu_fprintf(f, " DAR " TARGET_FMT_lx " DSISR " TARGET_FMT_lx "\n", 7097 env->spr[SPR_DAR], env->spr[SPR_DSISR]); 7098 break; 7099 case POWERPC_MMU_BOOKE206: 7100 cpu_fprintf(f, " MAS0 " TARGET_FMT_lx " MAS1 " TARGET_FMT_lx 7101 " MAS2 " TARGET_FMT_lx " MAS3 " TARGET_FMT_lx "\n", 7102 env->spr[SPR_BOOKE_MAS0], env->spr[SPR_BOOKE_MAS1], 7103 env->spr[SPR_BOOKE_MAS2], env->spr[SPR_BOOKE_MAS3]); 7104 7105 cpu_fprintf(f, " MAS4 " TARGET_FMT_lx " MAS6 " TARGET_FMT_lx 7106 " MAS7 " TARGET_FMT_lx " PID " TARGET_FMT_lx "\n", 7107 env->spr[SPR_BOOKE_MAS4], env->spr[SPR_BOOKE_MAS6], 7108 env->spr[SPR_BOOKE_MAS7], env->spr[SPR_BOOKE_PID]); 7109 7110 cpu_fprintf(f, "MMUCFG " TARGET_FMT_lx " TLB0CFG " TARGET_FMT_lx 7111 " TLB1CFG " TARGET_FMT_lx "\n", 7112 env->spr[SPR_MMUCFG], env->spr[SPR_BOOKE_TLB0CFG], 7113 env->spr[SPR_BOOKE_TLB1CFG]); 7114 break; 7115 default: 7116 break; 7117 } 7118 #endif 7119 7120 #undef RGPL 7121 #undef RFPL 7122 } 7123 7124 void ppc_cpu_dump_statistics(CPUState *cs, FILE*f, 7125 fprintf_function cpu_fprintf, int flags) 7126 { 7127 #if defined(DO_PPC_STATISTICS) 7128 PowerPCCPU *cpu = POWERPC_CPU(cs); 7129 opc_handler_t **t1, **t2, **t3, *handler; 7130 int op1, op2, op3; 7131 7132 t1 = cpu->env.opcodes; 7133 for (op1 = 0; op1 < 64; op1++) { 7134 handler = t1[op1]; 7135 if (is_indirect_opcode(handler)) { 7136 t2 = ind_table(handler); 7137 for (op2 = 0; op2 < 32; op2++) { 7138 handler = t2[op2]; 7139 if (is_indirect_opcode(handler)) { 7140 t3 = ind_table(handler); 7141 for (op3 = 0; op3 < 32; op3++) { 7142 handler = t3[op3]; 7143 if (handler->count == 0) 7144 continue; 7145 cpu_fprintf(f, "%02x %02x %02x (%02x %04d) %16s: " 7146 "%016" PRIx64 " %" PRId64 "\n", 7147 op1, op2, op3, op1, (op3 << 5) | op2, 7148 handler->oname, 7149 handler->count, handler->count); 7150 } 7151 } else { 7152 if (handler->count == 0) 7153 continue; 7154 cpu_fprintf(f, "%02x %02x (%02x %04d) %16s: " 7155 "%016" PRIx64 " %" PRId64 "\n", 7156 op1, op2, op1, op2, handler->oname, 7157 handler->count, handler->count); 7158 } 7159 } 7160 } else { 7161 if (handler->count == 0) 7162 
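            /* opcodes that were never executed are not reported */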
continue; 7163 cpu_fprintf(f, "%02x (%02x ) %16s: %016" PRIx64 7164 " %" PRId64 "\n", 7165 op1, op1, handler->oname, 7166 handler->count, handler->count); 7167 } 7168 } 7169 #endif 7170 } 7171 7172 /*****************************************************************************/ 7173 void gen_intermediate_code(CPUPPCState *env, struct TranslationBlock *tb) 7174 { 7175 PowerPCCPU *cpu = ppc_env_get_cpu(env); 7176 CPUState *cs = CPU(cpu); 7177 DisasContext ctx, *ctxp = &ctx; 7178 opc_handler_t **table, *handler; 7179 target_ulong pc_start; 7180 int num_insns; 7181 int max_insns; 7182 7183 pc_start = tb->pc; 7184 ctx.nip = pc_start; 7185 ctx.tb = tb; 7186 ctx.exception = POWERPC_EXCP_NONE; 7187 ctx.spr_cb = env->spr_cb; 7188 ctx.pr = msr_pr; 7189 ctx.mem_idx = env->dmmu_idx; 7190 ctx.dr = msr_dr; 7191 #if !defined(CONFIG_USER_ONLY) 7192 ctx.hv = msr_hv || !env->has_hv_mode; 7193 #endif 7194 ctx.insns_flags = env->insns_flags; 7195 ctx.insns_flags2 = env->insns_flags2; 7196 ctx.access_type = -1; 7197 ctx.need_access_type = !(env->mmu_model & POWERPC_MMU_64B); 7198 ctx.le_mode = !!(env->hflags & (1 << MSR_LE)); 7199 ctx.default_tcg_memop_mask = ctx.le_mode ? MO_LE : MO_BE; 7200 #if defined(TARGET_PPC64) 7201 ctx.sf_mode = msr_is_64bit(env, env->msr); 7202 ctx.has_cfar = !!(env->flags & POWERPC_FLAG_CFAR); 7203 #endif 7204 if (env->mmu_model == POWERPC_MMU_32B || 7205 env->mmu_model == POWERPC_MMU_601 || 7206 (env->mmu_model & POWERPC_MMU_64B)) 7207 ctx.lazy_tlb_flush = true; 7208 7209 ctx.fpu_enabled = !!msr_fp; 7210 if ((env->flags & POWERPC_FLAG_SPE) && msr_spe) 7211 ctx.spe_enabled = !!msr_spe; 7212 else 7213 ctx.spe_enabled = false; 7214 if ((env->flags & POWERPC_FLAG_VRE) && msr_vr) 7215 ctx.altivec_enabled = !!msr_vr; 7216 else 7217 ctx.altivec_enabled = false; 7218 if ((env->flags & POWERPC_FLAG_VSX) && msr_vsx) { 7219 ctx.vsx_enabled = !!msr_vsx; 7220 } else { 7221 ctx.vsx_enabled = false; 7222 } 7223 #if defined(TARGET_PPC64) 7224 if ((env->flags & POWERPC_FLAG_TM) && msr_tm) { 7225 ctx.tm_enabled = !!msr_tm; 7226 } else { 7227 ctx.tm_enabled = false; 7228 } 7229 #endif 7230 if ((env->flags & POWERPC_FLAG_SE) && msr_se) 7231 ctx.singlestep_enabled = CPU_SINGLE_STEP; 7232 else 7233 ctx.singlestep_enabled = 0; 7234 if ((env->flags & POWERPC_FLAG_BE) && msr_be) 7235 ctx.singlestep_enabled |= CPU_BRANCH_STEP; 7236 if (unlikely(cs->singlestep_enabled)) { 7237 ctx.singlestep_enabled |= GDBSTUB_SINGLE_STEP; 7238 } 7239 #if defined (DO_SINGLE_STEP) && 0 7240 /* Single step trace mode */ 7241 msr_se = 1; 7242 #endif 7243 num_insns = 0; 7244 max_insns = tb->cflags & CF_COUNT_MASK; 7245 if (max_insns == 0) { 7246 max_insns = CF_COUNT_MASK; 7247 } 7248 if (max_insns > TCG_MAX_INSNS) { 7249 max_insns = TCG_MAX_INSNS; 7250 } 7251 7252 gen_tb_start(tb); 7253 tcg_clear_temp_count(); 7254 /* Set env in case of segfault during code fetch */ 7255 while (ctx.exception == POWERPC_EXCP_NONE && !tcg_op_buf_full()) { 7256 tcg_gen_insn_start(ctx.nip); 7257 num_insns++; 7258 7259 if (unlikely(cpu_breakpoint_test(cs, ctx.nip, BP_ANY))) { 7260 gen_debug_exception(ctxp); 7261 /* The address covered by the breakpoint must be included in 7262 [tb->pc, tb->pc + tb->size) in order to for it to be 7263 properly cleared -- thus we increment the PC here so that 7264 the logic setting tb->size below does the right thing. 
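(tb->size is computed below as ctx.nip - pc_start once translation stops, so the incremented NIP keeps the breakpointed instruction inside the TB.)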
*/ 7265 ctx.nip += 4; 7266 break; 7267 } 7268 7269 LOG_DISAS("----------------\n"); 7270 LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n", 7271 ctx.nip, ctx.mem_idx, (int)msr_ir); 7272 if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) 7273 gen_io_start(); 7274 if (unlikely(need_byteswap(&ctx))) { 7275 ctx.opcode = bswap32(cpu_ldl_code(env, ctx.nip)); 7276 } else { 7277 ctx.opcode = cpu_ldl_code(env, ctx.nip); 7278 } 7279 LOG_DISAS("translate opcode %08x (%02x %02x %02x %02x) (%s)\n", 7280 ctx.opcode, opc1(ctx.opcode), opc2(ctx.opcode), 7281 opc3(ctx.opcode), opc4(ctx.opcode), 7282 ctx.le_mode ? "little" : "big"); 7283 ctx.nip += 4; 7284 table = env->opcodes; 7285 handler = table[opc1(ctx.opcode)]; 7286 if (is_indirect_opcode(handler)) { 7287 table = ind_table(handler); 7288 handler = table[opc2(ctx.opcode)]; 7289 if (is_indirect_opcode(handler)) { 7290 table = ind_table(handler); 7291 handler = table[opc3(ctx.opcode)]; 7292 if (is_indirect_opcode(handler)) { 7293 table = ind_table(handler); 7294 handler = table[opc4(ctx.opcode)]; 7295 } 7296 } 7297 } 7298 /* Is opcode *REALLY* valid ? */ 7299 if (unlikely(handler->handler == &gen_invalid)) { 7300 qemu_log_mask(LOG_GUEST_ERROR, "invalid/unsupported opcode: " 7301 "%02x - %02x - %02x - %02x (%08x) " 7302 TARGET_FMT_lx " %d\n", 7303 opc1(ctx.opcode), opc2(ctx.opcode), 7304 opc3(ctx.opcode), opc4(ctx.opcode), 7305 ctx.opcode, ctx.nip - 4, (int)msr_ir); 7306 } else { 7307 uint32_t inval; 7308 7309 if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE) && Rc(ctx.opcode))) { 7310 inval = handler->inval2; 7311 } else { 7312 inval = handler->inval1; 7313 } 7314 7315 if (unlikely((ctx.opcode & inval) != 0)) { 7316 qemu_log_mask(LOG_GUEST_ERROR, "invalid bits: %08x for opcode: " 7317 "%02x - %02x - %02x - %02x (%08x) " 7318 TARGET_FMT_lx "\n", ctx.opcode & inval, 7319 opc1(ctx.opcode), opc2(ctx.opcode), 7320 opc3(ctx.opcode), opc4(ctx.opcode), 7321 ctx.opcode, ctx.nip - 4); 7322 gen_inval_exception(ctxp, POWERPC_EXCP_INVAL_INVAL); 7323 break; 7324 } 7325 } 7326 (*(handler->handler))(&ctx); 7327 #if defined(DO_PPC_STATISTICS) 7328 handler->count++; 7329 #endif 7330 /* Check trace mode exceptions */ 7331 if (unlikely(ctx.singlestep_enabled & CPU_SINGLE_STEP && 7332 (ctx.nip <= 0x100 || ctx.nip > 0xF00) && 7333 ctx.exception != POWERPC_SYSCALL && 7334 ctx.exception != POWERPC_EXCP_TRAP && 7335 ctx.exception != POWERPC_EXCP_BRANCH)) { 7336 gen_exception_nip(ctxp, POWERPC_EXCP_TRACE, ctx.nip); 7337 } else if (unlikely(((ctx.nip & (TARGET_PAGE_SIZE - 1)) == 0) || 7338 (cs->singlestep_enabled) || 7339 singlestep || 7340 num_insns >= max_insns)) { 7341 /* if we reach a page boundary or are single stepping, stop 7342 * generation 7343 */ 7344 break; 7345 } 7346 if (tcg_check_temp_count()) { 7347 fprintf(stderr, "Opcode %02x %02x %02x %02x (%08x) leaked " 7348 "temporaries\n", opc1(ctx.opcode), opc2(ctx.opcode), 7349 opc3(ctx.opcode), opc4(ctx.opcode), ctx.opcode); 7350 exit(1); 7351 } 7352 } 7353 if (tb->cflags & CF_LAST_IO) 7354 gen_io_end(); 7355 if (ctx.exception == POWERPC_EXCP_NONE) { 7356 gen_goto_tb(&ctx, 0, ctx.nip); 7357 } else if (ctx.exception != POWERPC_EXCP_BRANCH) { 7358 if (unlikely(cs->singlestep_enabled)) { 7359 gen_debug_exception(ctxp); 7360 } 7361 /* Generate the return instruction */ 7362 tcg_gen_exit_tb(0); 7363 } 7364 gen_tb_end(tb, num_insns); 7365 7366 tb->size = ctx.nip - pc_start; 7367 tb->icount = num_insns; 7368 7369 #if defined(DEBUG_DISAS) 7370 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM) 7371 && 
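        /* honour any configured log address filter */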
        qemu_log_in_addr_range(pc_start)) {
        int flags;
        flags = env->bfd_mach;
        flags |= ctx.le_mode << 16;
        qemu_log_lock();
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(cs, pc_start, ctx.nip - pc_start, flags);
        qemu_log("\n");
        qemu_log_unlock();
    }
#endif
}

void restore_state_to_opc(CPUPPCState *env, TranslationBlock *tb,
                          target_ulong *data)
{
    env->nip = data[0];
}
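/* Note: tcg_gen_insn_start() above records only ctx.nip for each guest
 * instruction, so restoring state at a fault needs nothing beyond data[0].
 */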