1 /* 2 * PowerPC emulation for qemu: main translation routines. 3 * 4 * Copyright (c) 2003-2007 Jocelyn Mayer 5 * Copyright (C) 2011 Freescale Semiconductor, Inc. 6 * 7 * This library is free software; you can redistribute it and/or 8 * modify it under the terms of the GNU Lesser General Public 9 * License as published by the Free Software Foundation; either 10 * version 2 of the License, or (at your option) any later version. 11 * 12 * This library is distributed in the hope that it will be useful, 13 * but WITHOUT ANY WARRANTY; without even the implied warranty of 14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 15 * Lesser General Public License for more details. 16 * 17 * You should have received a copy of the GNU Lesser General Public 18 * License along with this library; if not, see <http://www.gnu.org/licenses/>. 19 */ 20 21 #include "qemu/osdep.h" 22 #include "cpu.h" 23 #include "internal.h" 24 #include "disas/disas.h" 25 #include "exec/exec-all.h" 26 #include "tcg-op.h" 27 #include "qemu/host-utils.h" 28 #include "exec/cpu_ldst.h" 29 30 #include "exec/helper-proto.h" 31 #include "exec/helper-gen.h" 32 33 #include "trace-tcg.h" 34 #include "exec/log.h" 35 36 37 #define CPU_SINGLE_STEP 0x1 38 #define CPU_BRANCH_STEP 0x2 39 #define GDBSTUB_SINGLE_STEP 0x4 40 41 /* Include definitions for instructions classes and implementations flags */ 42 //#define PPC_DEBUG_DISAS 43 //#define DO_PPC_STATISTICS 44 45 #ifdef PPC_DEBUG_DISAS 46 # define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__) 47 #else 48 # define LOG_DISAS(...) do { } while (0) 49 #endif 50 /*****************************************************************************/ 51 /* Code translation helpers */ 52 53 /* global register indexes */ 54 static char cpu_reg_names[10*3 + 22*4 /* GPR */ 55 + 10*4 + 22*5 /* SPE GPRh */ 56 + 10*4 + 22*5 /* FPR */ 57 + 2*(10*6 + 22*7) /* AVRh, AVRl */ 58 + 10*5 + 22*6 /* VSR */ 59 + 8*5 /* CRF */]; 60 static TCGv cpu_gpr[32]; 61 static TCGv cpu_gprh[32]; 62 static TCGv_i64 cpu_fpr[32]; 63 static TCGv_i64 cpu_avrh[32], cpu_avrl[32]; 64 static TCGv_i64 cpu_vsr[32]; 65 static TCGv_i32 cpu_crf[8]; 66 static TCGv cpu_nip; 67 static TCGv cpu_msr; 68 static TCGv cpu_ctr; 69 static TCGv cpu_lr; 70 #if defined(TARGET_PPC64) 71 static TCGv cpu_cfar; 72 #endif 73 static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca, cpu_ov32, cpu_ca32; 74 static TCGv cpu_reserve; 75 static TCGv cpu_reserve_val; 76 static TCGv cpu_fpscr; 77 static TCGv_i32 cpu_access_type; 78 79 #include "exec/gen-icount.h" 80 81 void ppc_translate_init(void) 82 { 83 int i; 84 char* p; 85 size_t cpu_reg_names_size; 86 87 p = cpu_reg_names; 88 cpu_reg_names_size = sizeof(cpu_reg_names); 89 90 for (i = 0; i < 8; i++) { 91 snprintf(p, cpu_reg_names_size, "crf%d", i); 92 cpu_crf[i] = tcg_global_mem_new_i32(cpu_env, 93 offsetof(CPUPPCState, crf[i]), p); 94 p += 5; 95 cpu_reg_names_size -= 5; 96 } 97 98 for (i = 0; i < 32; i++) { 99 snprintf(p, cpu_reg_names_size, "r%d", i); 100 cpu_gpr[i] = tcg_global_mem_new(cpu_env, 101 offsetof(CPUPPCState, gpr[i]), p); 102 p += (i < 10) ? 3 : 4; 103 cpu_reg_names_size -= (i < 10) ? 3 : 4; 104 snprintf(p, cpu_reg_names_size, "r%dH", i); 105 cpu_gprh[i] = tcg_global_mem_new(cpu_env, 106 offsetof(CPUPPCState, gprh[i]), p); 107 p += (i < 10) ? 4 : 5; 108 cpu_reg_names_size -= (i < 10) ? 4 : 5; 109 110 snprintf(p, cpu_reg_names_size, "fp%d", i); 111 cpu_fpr[i] = tcg_global_mem_new_i64(cpu_env, 112 offsetof(CPUPPCState, fpr[i]), p); 113 p += (i < 10) ? 
4 : 5; 114 cpu_reg_names_size -= (i < 10) ? 4 : 5; 115 116 snprintf(p, cpu_reg_names_size, "avr%dH", i); 117 #ifdef HOST_WORDS_BIGENDIAN 118 cpu_avrh[i] = tcg_global_mem_new_i64(cpu_env, 119 offsetof(CPUPPCState, avr[i].u64[0]), p); 120 #else 121 cpu_avrh[i] = tcg_global_mem_new_i64(cpu_env, 122 offsetof(CPUPPCState, avr[i].u64[1]), p); 123 #endif 124 p += (i < 10) ? 6 : 7; 125 cpu_reg_names_size -= (i < 10) ? 6 : 7; 126 127 snprintf(p, cpu_reg_names_size, "avr%dL", i); 128 #ifdef HOST_WORDS_BIGENDIAN 129 cpu_avrl[i] = tcg_global_mem_new_i64(cpu_env, 130 offsetof(CPUPPCState, avr[i].u64[1]), p); 131 #else 132 cpu_avrl[i] = tcg_global_mem_new_i64(cpu_env, 133 offsetof(CPUPPCState, avr[i].u64[0]), p); 134 #endif 135 p += (i < 10) ? 6 : 7; 136 cpu_reg_names_size -= (i < 10) ? 6 : 7; 137 snprintf(p, cpu_reg_names_size, "vsr%d", i); 138 cpu_vsr[i] = tcg_global_mem_new_i64(cpu_env, 139 offsetof(CPUPPCState, vsr[i]), p); 140 p += (i < 10) ? 5 : 6; 141 cpu_reg_names_size -= (i < 10) ? 5 : 6; 142 } 143 144 cpu_nip = tcg_global_mem_new(cpu_env, 145 offsetof(CPUPPCState, nip), "nip"); 146 147 cpu_msr = tcg_global_mem_new(cpu_env, 148 offsetof(CPUPPCState, msr), "msr"); 149 150 cpu_ctr = tcg_global_mem_new(cpu_env, 151 offsetof(CPUPPCState, ctr), "ctr"); 152 153 cpu_lr = tcg_global_mem_new(cpu_env, 154 offsetof(CPUPPCState, lr), "lr"); 155 156 #if defined(TARGET_PPC64) 157 cpu_cfar = tcg_global_mem_new(cpu_env, 158 offsetof(CPUPPCState, cfar), "cfar"); 159 #endif 160 161 cpu_xer = tcg_global_mem_new(cpu_env, 162 offsetof(CPUPPCState, xer), "xer"); 163 cpu_so = tcg_global_mem_new(cpu_env, 164 offsetof(CPUPPCState, so), "SO"); 165 cpu_ov = tcg_global_mem_new(cpu_env, 166 offsetof(CPUPPCState, ov), "OV"); 167 cpu_ca = tcg_global_mem_new(cpu_env, 168 offsetof(CPUPPCState, ca), "CA"); 169 cpu_ov32 = tcg_global_mem_new(cpu_env, 170 offsetof(CPUPPCState, ov32), "OV32"); 171 cpu_ca32 = tcg_global_mem_new(cpu_env, 172 offsetof(CPUPPCState, ca32), "CA32"); 173 174 cpu_reserve = tcg_global_mem_new(cpu_env, 175 offsetof(CPUPPCState, reserve_addr), 176 "reserve_addr"); 177 cpu_reserve_val = tcg_global_mem_new(cpu_env, 178 offsetof(CPUPPCState, reserve_val), 179 "reserve_val"); 180 181 cpu_fpscr = tcg_global_mem_new(cpu_env, 182 offsetof(CPUPPCState, fpscr), "fpscr"); 183 184 cpu_access_type = tcg_global_mem_new_i32(cpu_env, 185 offsetof(CPUPPCState, access_type), "access_type"); 186 } 187 188 /* internal defines */ 189 struct DisasContext { 190 struct TranslationBlock *tb; 191 target_ulong nip; 192 uint32_t opcode; 193 uint32_t exception; 194 /* Routine used to access memory */ 195 bool pr, hv, dr, le_mode; 196 bool lazy_tlb_flush; 197 bool need_access_type; 198 int mem_idx; 199 int access_type; 200 /* Translation flags */ 201 TCGMemOp default_tcg_memop_mask; 202 #if defined(TARGET_PPC64) 203 bool sf_mode; 204 bool has_cfar; 205 #endif 206 bool fpu_enabled; 207 bool altivec_enabled; 208 bool vsx_enabled; 209 bool spe_enabled; 210 bool tm_enabled; 211 bool gtse; 212 ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */ 213 int singlestep_enabled; 214 uint64_t insns_flags; 215 uint64_t insns_flags2; 216 }; 217 218 /* Return true iff byteswap is needed in a scalar memop */ 219 static inline bool need_byteswap(const DisasContext *ctx) 220 { 221 #if defined(TARGET_WORDS_BIGENDIAN) 222 return ctx->le_mode; 223 #else 224 return !ctx->le_mode; 225 #endif 226 } 227 228 /* True when active word size < size of target_long. 
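   In practice that is a 64-bit CPU running with MSR[SF] clear (32-bit mode),
   where carry, overflow and Rc=1 comparisons must be taken from bit 31 rather
   than bit 63; on a 32-bit target the macro below is a constant 0.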
*/ 229 #ifdef TARGET_PPC64 230 # define NARROW_MODE(C) (!(C)->sf_mode) 231 #else 232 # define NARROW_MODE(C) 0 233 #endif 234 235 struct opc_handler_t { 236 /* invalid bits for instruction 1 (Rc(opcode) == 0) */ 237 uint32_t inval1; 238 /* invalid bits for instruction 2 (Rc(opcode) == 1) */ 239 uint32_t inval2; 240 /* instruction type */ 241 uint64_t type; 242 /* extended instruction type */ 243 uint64_t type2; 244 /* handler */ 245 void (*handler)(DisasContext *ctx); 246 #if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU) 247 const char *oname; 248 #endif 249 #if defined(DO_PPC_STATISTICS) 250 uint64_t count; 251 #endif 252 }; 253 254 static inline void gen_set_access_type(DisasContext *ctx, int access_type) 255 { 256 if (ctx->need_access_type && ctx->access_type != access_type) { 257 tcg_gen_movi_i32(cpu_access_type, access_type); 258 ctx->access_type = access_type; 259 } 260 } 261 262 static inline void gen_update_nip(DisasContext *ctx, target_ulong nip) 263 { 264 if (NARROW_MODE(ctx)) { 265 nip = (uint32_t)nip; 266 } 267 tcg_gen_movi_tl(cpu_nip, nip); 268 } 269 270 static void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error) 271 { 272 TCGv_i32 t0, t1; 273 274 /* These are all synchronous exceptions, we set the PC back to 275 * the faulting instruction 276 */ 277 if (ctx->exception == POWERPC_EXCP_NONE) { 278 gen_update_nip(ctx, ctx->nip - 4); 279 } 280 t0 = tcg_const_i32(excp); 281 t1 = tcg_const_i32(error); 282 gen_helper_raise_exception_err(cpu_env, t0, t1); 283 tcg_temp_free_i32(t0); 284 tcg_temp_free_i32(t1); 285 ctx->exception = (excp); 286 } 287 288 static void gen_exception(DisasContext *ctx, uint32_t excp) 289 { 290 TCGv_i32 t0; 291 292 /* These are all synchronous exceptions, we set the PC back to 293 * the faulting instruction 294 */ 295 if (ctx->exception == POWERPC_EXCP_NONE) { 296 gen_update_nip(ctx, ctx->nip - 4); 297 } 298 t0 = tcg_const_i32(excp); 299 gen_helper_raise_exception(cpu_env, t0); 300 tcg_temp_free_i32(t0); 301 ctx->exception = (excp); 302 } 303 304 static void gen_exception_nip(DisasContext *ctx, uint32_t excp, 305 target_ulong nip) 306 { 307 TCGv_i32 t0; 308 309 gen_update_nip(ctx, nip); 310 t0 = tcg_const_i32(excp); 311 gen_helper_raise_exception(cpu_env, t0); 312 tcg_temp_free_i32(t0); 313 ctx->exception = (excp); 314 } 315 316 static void gen_debug_exception(DisasContext *ctx) 317 { 318 TCGv_i32 t0; 319 320 /* These are all synchronous exceptions, we set the PC back to 321 * the faulting instruction 322 */ 323 if ((ctx->exception != POWERPC_EXCP_BRANCH) && 324 (ctx->exception != POWERPC_EXCP_SYNC)) { 325 gen_update_nip(ctx, ctx->nip); 326 } 327 t0 = tcg_const_i32(EXCP_DEBUG); 328 gen_helper_raise_exception(cpu_env, t0); 329 tcg_temp_free_i32(t0); 330 } 331 332 static inline void gen_inval_exception(DisasContext *ctx, uint32_t error) 333 { 334 /* Will be converted to program check if needed */ 335 gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_INVAL | error); 336 } 337 338 static inline void gen_priv_exception(DisasContext *ctx, uint32_t error) 339 { 340 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_PRIV | error); 341 } 342 343 static inline void gen_hvpriv_exception(DisasContext *ctx, uint32_t error) 344 { 345 /* Will be converted to program check if needed */ 346 gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_PRIV | error); 347 } 348 349 /* Stop translation */ 350 static inline void gen_stop_exception(DisasContext *ctx) 351 { 352 gen_update_nip(ctx, ctx->nip); 353 ctx->exception = POWERPC_EXCP_STOP; 
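    /*
     * No exception helper is called here; the main translation loop later in
     * this file checks ctx->exception after each instruction and ends the TB
     * once it is no longer POWERPC_EXCP_NONE.
     */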
354 } 355 356 #ifndef CONFIG_USER_ONLY 357 /* No need to update nip here, as execution flow will change */ 358 static inline void gen_sync_exception(DisasContext *ctx) 359 { 360 ctx->exception = POWERPC_EXCP_SYNC; 361 } 362 #endif 363 364 #define GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \ 365 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE) 366 367 #define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2) \ 368 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2) 369 370 #define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type) \ 371 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE) 372 373 #define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2) \ 374 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2) 375 376 #define GEN_HANDLER_E_2(name, opc1, opc2, opc3, opc4, inval, type, type2) \ 377 GEN_OPCODE3(name, opc1, opc2, opc3, opc4, inval, type, type2) 378 379 #define GEN_HANDLER2_E_2(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) \ 380 GEN_OPCODE4(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) 381 382 typedef struct opcode_t { 383 unsigned char opc1, opc2, opc3, opc4; 384 #if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */ 385 unsigned char pad[4]; 386 #endif 387 opc_handler_t handler; 388 const char *oname; 389 } opcode_t; 390 391 /* Helpers for priv. check */ 392 #define GEN_PRIV \ 393 do { \ 394 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); return; \ 395 } while (0) 396 397 #if defined(CONFIG_USER_ONLY) 398 #define CHK_HV GEN_PRIV 399 #define CHK_SV GEN_PRIV 400 #define CHK_HVRM GEN_PRIV 401 #else 402 #define CHK_HV \ 403 do { \ 404 if (unlikely(ctx->pr || !ctx->hv)) { \ 405 GEN_PRIV; \ 406 } \ 407 } while (0) 408 #define CHK_SV \ 409 do { \ 410 if (unlikely(ctx->pr)) { \ 411 GEN_PRIV; \ 412 } \ 413 } while (0) 414 #define CHK_HVRM \ 415 do { \ 416 if (unlikely(ctx->pr || !ctx->hv || ctx->dr)) { \ 417 GEN_PRIV; \ 418 } \ 419 } while (0) 420 #endif 421 422 #define CHK_NONE 423 424 /*****************************************************************************/ 425 /* PowerPC instructions table */ 426 427 #if defined(DO_PPC_STATISTICS) 428 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \ 429 { \ 430 .opc1 = op1, \ 431 .opc2 = op2, \ 432 .opc3 = op3, \ 433 .opc4 = 0xff, \ 434 .handler = { \ 435 .inval1 = invl, \ 436 .type = _typ, \ 437 .type2 = _typ2, \ 438 .handler = &gen_##name, \ 439 .oname = stringify(name), \ 440 }, \ 441 .oname = stringify(name), \ 442 } 443 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \ 444 { \ 445 .opc1 = op1, \ 446 .opc2 = op2, \ 447 .opc3 = op3, \ 448 .opc4 = 0xff, \ 449 .handler = { \ 450 .inval1 = invl1, \ 451 .inval2 = invl2, \ 452 .type = _typ, \ 453 .type2 = _typ2, \ 454 .handler = &gen_##name, \ 455 .oname = stringify(name), \ 456 }, \ 457 .oname = stringify(name), \ 458 } 459 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \ 460 { \ 461 .opc1 = op1, \ 462 .opc2 = op2, \ 463 .opc3 = op3, \ 464 .opc4 = 0xff, \ 465 .handler = { \ 466 .inval1 = invl, \ 467 .type = _typ, \ 468 .type2 = _typ2, \ 469 .handler = &gen_##name, \ 470 .oname = onam, \ 471 }, \ 472 .oname = onam, \ 473 } 474 #define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2) \ 475 { \ 476 .opc1 = op1, \ 477 .opc2 = op2, \ 478 .opc3 = op3, \ 479 .opc4 = op4, \ 480 .handler = { \ 481 .inval1 = invl, \ 482 .type = _typ, \ 483 .type2 = _typ2, \ 484 .handler = &gen_##name, \ 485 .oname = stringify(name), \ 486 }, \ 487 .oname = stringify(name), 
\ 488 } 489 #define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2) \ 490 { \ 491 .opc1 = op1, \ 492 .opc2 = op2, \ 493 .opc3 = op3, \ 494 .opc4 = op4, \ 495 .handler = { \ 496 .inval1 = invl, \ 497 .type = _typ, \ 498 .type2 = _typ2, \ 499 .handler = &gen_##name, \ 500 .oname = onam, \ 501 }, \ 502 .oname = onam, \ 503 } 504 #else 505 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \ 506 { \ 507 .opc1 = op1, \ 508 .opc2 = op2, \ 509 .opc3 = op3, \ 510 .opc4 = 0xff, \ 511 .handler = { \ 512 .inval1 = invl, \ 513 .type = _typ, \ 514 .type2 = _typ2, \ 515 .handler = &gen_##name, \ 516 }, \ 517 .oname = stringify(name), \ 518 } 519 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \ 520 { \ 521 .opc1 = op1, \ 522 .opc2 = op2, \ 523 .opc3 = op3, \ 524 .opc4 = 0xff, \ 525 .handler = { \ 526 .inval1 = invl1, \ 527 .inval2 = invl2, \ 528 .type = _typ, \ 529 .type2 = _typ2, \ 530 .handler = &gen_##name, \ 531 }, \ 532 .oname = stringify(name), \ 533 } 534 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \ 535 { \ 536 .opc1 = op1, \ 537 .opc2 = op2, \ 538 .opc3 = op3, \ 539 .opc4 = 0xff, \ 540 .handler = { \ 541 .inval1 = invl, \ 542 .type = _typ, \ 543 .type2 = _typ2, \ 544 .handler = &gen_##name, \ 545 }, \ 546 .oname = onam, \ 547 } 548 #define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2) \ 549 { \ 550 .opc1 = op1, \ 551 .opc2 = op2, \ 552 .opc3 = op3, \ 553 .opc4 = op4, \ 554 .handler = { \ 555 .inval1 = invl, \ 556 .type = _typ, \ 557 .type2 = _typ2, \ 558 .handler = &gen_##name, \ 559 }, \ 560 .oname = stringify(name), \ 561 } 562 #define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2) \ 563 { \ 564 .opc1 = op1, \ 565 .opc2 = op2, \ 566 .opc3 = op3, \ 567 .opc4 = op4, \ 568 .handler = { \ 569 .inval1 = invl, \ 570 .type = _typ, \ 571 .type2 = _typ2, \ 572 .handler = &gen_##name, \ 573 }, \ 574 .oname = onam, \ 575 } 576 #endif 577 578 /* SPR load/store helpers */ 579 static inline void gen_load_spr(TCGv t, int reg) 580 { 581 tcg_gen_ld_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg])); 582 } 583 584 static inline void gen_store_spr(int reg, TCGv t) 585 { 586 tcg_gen_st_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg])); 587 } 588 589 /* Invalid instruction */ 590 static void gen_invalid(DisasContext *ctx) 591 { 592 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 593 } 594 595 static opc_handler_t invalid_handler = { 596 .inval1 = 0xFFFFFFFF, 597 .inval2 = 0xFFFFFFFF, 598 .type = PPC_NONE, 599 .type2 = PPC_NONE, 600 .handler = gen_invalid, 601 }; 602 603 /*** Integer comparison ***/ 604 605 static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf) 606 { 607 TCGv t0 = tcg_temp_new(); 608 TCGv_i32 t1 = tcg_temp_new_i32(); 609 610 tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so); 611 612 tcg_gen_setcond_tl((s ? TCG_COND_LT: TCG_COND_LTU), t0, arg0, arg1); 613 tcg_gen_trunc_tl_i32(t1, t0); 614 tcg_gen_shli_i32(t1, t1, CRF_LT_BIT); 615 tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t1); 616 617 tcg_gen_setcond_tl((s ? 
                       TCG_COND_GT: TCG_COND_GTU), t0, arg0, arg1);
    tcg_gen_trunc_tl_i32(t1, t0);
    tcg_gen_shli_i32(t1, t1, CRF_GT_BIT);
    tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t1);

    tcg_gen_setcond_tl(TCG_COND_EQ, t0, arg0, arg1);
    tcg_gen_trunc_tl_i32(t1, t0);
    tcg_gen_shli_i32(t1, t1, CRF_EQ_BIT);
    tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t1);

    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}

static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
{
    TCGv t0 = tcg_const_tl(arg1);
    gen_op_cmp(arg0, t0, s, crf);
    tcg_temp_free(t0);
}

static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
{
    TCGv t0, t1;
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    if (s) {
        tcg_gen_ext32s_tl(t0, arg0);
        tcg_gen_ext32s_tl(t1, arg1);
    } else {
        tcg_gen_ext32u_tl(t0, arg0);
        tcg_gen_ext32u_tl(t1, arg1);
    }
    gen_op_cmp(t0, t1, s, crf);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
}

static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
{
    TCGv t0 = tcg_const_tl(arg1);
    gen_op_cmp32(arg0, t0, s, crf);
    tcg_temp_free(t0);
}

static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
{
    if (NARROW_MODE(ctx)) {
        gen_op_cmpi32(reg, 0, 1, 0);
    } else {
        gen_op_cmpi(reg, 0, 1, 0);
    }
}

/* cmp */
static void gen_cmp(DisasContext *ctx)
{
    if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
        gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                   1, crfD(ctx->opcode));
    } else {
        gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                     1, crfD(ctx->opcode));
    }
}

/* cmpi */
static void gen_cmpi(DisasContext *ctx)
{
    if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
        gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
                    1, crfD(ctx->opcode));
    } else {
        gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
                      1, crfD(ctx->opcode));
    }
}

/* cmpl */
static void gen_cmpl(DisasContext *ctx)
{
    if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
        gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                   0, crfD(ctx->opcode));
    } else {
        gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                     0, crfD(ctx->opcode));
    }
}

/* cmpli */
static void gen_cmpli(DisasContext *ctx)
{
    if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
        gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
                    0, crfD(ctx->opcode));
    } else {
        gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
                      0, crfD(ctx->opcode));
    }
}

/* cmprb - range comparison: isupper, isalpha, islower */
static void gen_cmprb(DisasContext *ctx)
{
    TCGv_i32 src1 = tcg_temp_new_i32();
    TCGv_i32 src2 = tcg_temp_new_i32();
    TCGv_i32 src2lo = tcg_temp_new_i32();
    TCGv_i32 src2hi = tcg_temp_new_i32();
    TCGv_i32 crf = cpu_crf[crfD(ctx->opcode)];

    tcg_gen_trunc_tl_i32(src1, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_trunc_tl_i32(src2, cpu_gpr[rB(ctx->opcode)]);

    tcg_gen_andi_i32(src1, src1, 0xFF);
    tcg_gen_ext8u_i32(src2lo, src2);
    tcg_gen_shri_i32(src2, src2, 8);
    tcg_gen_ext8u_i32(src2hi, src2);

    tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
    tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
    tcg_gen_and_i32(crf, src2lo, src2hi);
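    /*
     * The checks above used bytes 0 (low bound) and 1 (high bound) of rB.
     * When the instruction's L field is set, a second range held in bytes
     * 2 and 3 of rB is tested as well and ORed in; e.g. a low word of
     * 0x5A417A61 in rB checks both 'a'..'z' and 'A'..'Z'.  The 0/1 result
     * finally lands in the GT bit of the target CR field.
     */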
740 if (ctx->opcode & 0x00200000) { 741 tcg_gen_shri_i32(src2, src2, 8); 742 tcg_gen_ext8u_i32(src2lo, src2); 743 tcg_gen_shri_i32(src2, src2, 8); 744 tcg_gen_ext8u_i32(src2hi, src2); 745 tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1); 746 tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi); 747 tcg_gen_and_i32(src2lo, src2lo, src2hi); 748 tcg_gen_or_i32(crf, crf, src2lo); 749 } 750 tcg_gen_shli_i32(crf, crf, CRF_GT_BIT); 751 tcg_temp_free_i32(src1); 752 tcg_temp_free_i32(src2); 753 tcg_temp_free_i32(src2lo); 754 tcg_temp_free_i32(src2hi); 755 } 756 757 #if defined(TARGET_PPC64) 758 /* cmpeqb */ 759 static void gen_cmpeqb(DisasContext *ctx) 760 { 761 gen_helper_cmpeqb(cpu_crf[crfD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 762 cpu_gpr[rB(ctx->opcode)]); 763 } 764 #endif 765 766 /* isel (PowerPC 2.03 specification) */ 767 static void gen_isel(DisasContext *ctx) 768 { 769 uint32_t bi = rC(ctx->opcode); 770 uint32_t mask = 0x08 >> (bi & 0x03); 771 TCGv t0 = tcg_temp_new(); 772 TCGv zr; 773 774 tcg_gen_extu_i32_tl(t0, cpu_crf[bi >> 2]); 775 tcg_gen_andi_tl(t0, t0, mask); 776 777 zr = tcg_const_tl(0); 778 tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rD(ctx->opcode)], t0, zr, 779 rA(ctx->opcode) ? cpu_gpr[rA(ctx->opcode)] : zr, 780 cpu_gpr[rB(ctx->opcode)]); 781 tcg_temp_free(zr); 782 tcg_temp_free(t0); 783 } 784 785 /* cmpb: PowerPC 2.05 specification */ 786 static void gen_cmpb(DisasContext *ctx) 787 { 788 gen_helper_cmpb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 789 cpu_gpr[rB(ctx->opcode)]); 790 } 791 792 /*** Integer arithmetic ***/ 793 794 static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0, 795 TCGv arg1, TCGv arg2, int sub) 796 { 797 TCGv t0 = tcg_temp_new(); 798 799 tcg_gen_xor_tl(cpu_ov, arg0, arg2); 800 tcg_gen_xor_tl(t0, arg1, arg2); 801 if (sub) { 802 tcg_gen_and_tl(cpu_ov, cpu_ov, t0); 803 } else { 804 tcg_gen_andc_tl(cpu_ov, cpu_ov, t0); 805 } 806 tcg_temp_free(t0); 807 if (NARROW_MODE(ctx)) { 808 tcg_gen_extract_tl(cpu_ov, cpu_ov, 31, 1); 809 if (is_isa300(ctx)) { 810 tcg_gen_mov_tl(cpu_ov32, cpu_ov); 811 } 812 } else { 813 if (is_isa300(ctx)) { 814 tcg_gen_extract_tl(cpu_ov32, cpu_ov, 31, 1); 815 } 816 tcg_gen_extract_tl(cpu_ov, cpu_ov, TARGET_LONG_BITS - 1, 1); 817 } 818 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 819 } 820 821 static inline void gen_op_arith_compute_ca32(DisasContext *ctx, 822 TCGv res, TCGv arg0, TCGv arg1, 823 int sub) 824 { 825 TCGv t0; 826 827 if (!is_isa300(ctx)) { 828 return; 829 } 830 831 t0 = tcg_temp_new(); 832 if (sub) { 833 tcg_gen_eqv_tl(t0, arg0, arg1); 834 } else { 835 tcg_gen_xor_tl(t0, arg0, arg1); 836 } 837 tcg_gen_xor_tl(t0, t0, res); 838 tcg_gen_extract_tl(cpu_ca32, t0, 32, 1); 839 tcg_temp_free(t0); 840 } 841 842 /* Common add function */ 843 static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1, 844 TCGv arg2, bool add_ca, bool compute_ca, 845 bool compute_ov, bool compute_rc0) 846 { 847 TCGv t0 = ret; 848 849 if (compute_ca || compute_ov) { 850 t0 = tcg_temp_new(); 851 } 852 853 if (compute_ca) { 854 if (NARROW_MODE(ctx)) { 855 /* Caution: a non-obvious corner case of the spec is that we 856 must produce the *entire* 64-bit addition, but produce the 857 carry into bit 32. 
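               At every bit position (sum ^ arg1 ^ arg2) holds the carry that
               went into that bit, so bit 32 of that value is the carry out of
               bit 31, which is the CA value 32-bit mode requires; the
               xor-and-extract sequence below recovers exactly that.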
*/ 858 TCGv t1 = tcg_temp_new(); 859 tcg_gen_xor_tl(t1, arg1, arg2); /* add without carry */ 860 tcg_gen_add_tl(t0, arg1, arg2); 861 if (add_ca) { 862 tcg_gen_add_tl(t0, t0, cpu_ca); 863 } 864 tcg_gen_xor_tl(cpu_ca, t0, t1); /* bits changed w/ carry */ 865 tcg_temp_free(t1); 866 tcg_gen_extract_tl(cpu_ca, cpu_ca, 32, 1); 867 if (is_isa300(ctx)) { 868 tcg_gen_mov_tl(cpu_ca32, cpu_ca); 869 } 870 } else { 871 TCGv zero = tcg_const_tl(0); 872 if (add_ca) { 873 tcg_gen_add2_tl(t0, cpu_ca, arg1, zero, cpu_ca, zero); 874 tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, arg2, zero); 875 } else { 876 tcg_gen_add2_tl(t0, cpu_ca, arg1, zero, arg2, zero); 877 } 878 gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, 0); 879 tcg_temp_free(zero); 880 } 881 } else { 882 tcg_gen_add_tl(t0, arg1, arg2); 883 if (add_ca) { 884 tcg_gen_add_tl(t0, t0, cpu_ca); 885 } 886 } 887 888 if (compute_ov) { 889 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0); 890 } 891 if (unlikely(compute_rc0)) { 892 gen_set_Rc0(ctx, t0); 893 } 894 895 if (t0 != ret) { 896 tcg_gen_mov_tl(ret, t0); 897 tcg_temp_free(t0); 898 } 899 } 900 /* Add functions with two operands */ 901 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \ 902 static void glue(gen_, name)(DisasContext *ctx) \ 903 { \ 904 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \ 905 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 906 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 907 } 908 /* Add functions with one operand and one immediate */ 909 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \ 910 add_ca, compute_ca, compute_ov) \ 911 static void glue(gen_, name)(DisasContext *ctx) \ 912 { \ 913 TCGv t0 = tcg_const_tl(const_val); \ 914 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \ 915 cpu_gpr[rA(ctx->opcode)], t0, \ 916 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 917 tcg_temp_free(t0); \ 918 } 919 920 /* add add. addo addo. */ 921 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0) 922 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1) 923 /* addc addc. addco addco. */ 924 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0) 925 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1) 926 /* adde adde. addeo addeo. */ 927 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0) 928 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1) 929 /* addme addme. addmeo addmeo. */ 930 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0) 931 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1) 932 /* addze addze. 
addzeo addzeo.*/ 933 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0) 934 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1) 935 /* addi */ 936 static void gen_addi(DisasContext *ctx) 937 { 938 target_long simm = SIMM(ctx->opcode); 939 940 if (rA(ctx->opcode) == 0) { 941 /* li case */ 942 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm); 943 } else { 944 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], 945 cpu_gpr[rA(ctx->opcode)], simm); 946 } 947 } 948 /* addic addic.*/ 949 static inline void gen_op_addic(DisasContext *ctx, bool compute_rc0) 950 { 951 TCGv c = tcg_const_tl(SIMM(ctx->opcode)); 952 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 953 c, 0, 1, 0, compute_rc0); 954 tcg_temp_free(c); 955 } 956 957 static void gen_addic(DisasContext *ctx) 958 { 959 gen_op_addic(ctx, 0); 960 } 961 962 static void gen_addic_(DisasContext *ctx) 963 { 964 gen_op_addic(ctx, 1); 965 } 966 967 /* addis */ 968 static void gen_addis(DisasContext *ctx) 969 { 970 target_long simm = SIMM(ctx->opcode); 971 972 if (rA(ctx->opcode) == 0) { 973 /* lis case */ 974 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm << 16); 975 } else { 976 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], 977 cpu_gpr[rA(ctx->opcode)], simm << 16); 978 } 979 } 980 981 /* addpcis */ 982 static void gen_addpcis(DisasContext *ctx) 983 { 984 target_long d = DX(ctx->opcode); 985 986 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], ctx->nip + (d << 16)); 987 } 988 989 static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1, 990 TCGv arg2, int sign, int compute_ov) 991 { 992 TCGv_i32 t0 = tcg_temp_new_i32(); 993 TCGv_i32 t1 = tcg_temp_new_i32(); 994 TCGv_i32 t2 = tcg_temp_new_i32(); 995 TCGv_i32 t3 = tcg_temp_new_i32(); 996 997 tcg_gen_trunc_tl_i32(t0, arg1); 998 tcg_gen_trunc_tl_i32(t1, arg2); 999 if (sign) { 1000 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN); 1001 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1); 1002 tcg_gen_and_i32(t2, t2, t3); 1003 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0); 1004 tcg_gen_or_i32(t2, t2, t3); 1005 tcg_gen_movi_i32(t3, 0); 1006 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1); 1007 tcg_gen_div_i32(t3, t0, t1); 1008 tcg_gen_extu_i32_tl(ret, t3); 1009 } else { 1010 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t1, 0); 1011 tcg_gen_movi_i32(t3, 0); 1012 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1); 1013 tcg_gen_divu_i32(t3, t0, t1); 1014 tcg_gen_extu_i32_tl(ret, t3); 1015 } 1016 if (compute_ov) { 1017 tcg_gen_extu_i32_tl(cpu_ov, t2); 1018 if (is_isa300(ctx)) { 1019 tcg_gen_extu_i32_tl(cpu_ov32, t2); 1020 } 1021 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 1022 } 1023 tcg_temp_free_i32(t0); 1024 tcg_temp_free_i32(t1); 1025 tcg_temp_free_i32(t2); 1026 tcg_temp_free_i32(t3); 1027 1028 if (unlikely(Rc(ctx->opcode) != 0)) 1029 gen_set_Rc0(ctx, ret); 1030 } 1031 /* Div functions */ 1032 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \ 1033 static void glue(gen_, name)(DisasContext *ctx) \ 1034 { \ 1035 gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)], \ 1036 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 1037 sign, compute_ov); \ 1038 } 1039 /* divwu divwu. divwuo divwuo. */ 1040 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0); 1041 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1); 1042 /* divw divw. divwo divwo. */ 1043 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0); 1044 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1); 1045 1046 /* div[wd]eu[o][.] 
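   The divide-extended forms use a dividend that is rA widened with a word
   (or doubleword) of zeroes, i.e. twice the divisor width, so they are left
   to C helpers rather than open-coded TCG; the compute_ov flag passed to the
   helper selects whether the 'o' variants update OV/OV32.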
*/ 1047 #define GEN_DIVE(name, hlpr, compute_ov) \ 1048 static void gen_##name(DisasContext *ctx) \ 1049 { \ 1050 TCGv_i32 t0 = tcg_const_i32(compute_ov); \ 1051 gen_helper_##hlpr(cpu_gpr[rD(ctx->opcode)], cpu_env, \ 1052 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); \ 1053 tcg_temp_free_i32(t0); \ 1054 if (unlikely(Rc(ctx->opcode) != 0)) { \ 1055 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); \ 1056 } \ 1057 } 1058 1059 GEN_DIVE(divweu, divweu, 0); 1060 GEN_DIVE(divweuo, divweu, 1); 1061 GEN_DIVE(divwe, divwe, 0); 1062 GEN_DIVE(divweo, divwe, 1); 1063 1064 #if defined(TARGET_PPC64) 1065 static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1, 1066 TCGv arg2, int sign, int compute_ov) 1067 { 1068 TCGv_i64 t0 = tcg_temp_new_i64(); 1069 TCGv_i64 t1 = tcg_temp_new_i64(); 1070 TCGv_i64 t2 = tcg_temp_new_i64(); 1071 TCGv_i64 t3 = tcg_temp_new_i64(); 1072 1073 tcg_gen_mov_i64(t0, arg1); 1074 tcg_gen_mov_i64(t1, arg2); 1075 if (sign) { 1076 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN); 1077 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1); 1078 tcg_gen_and_i64(t2, t2, t3); 1079 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0); 1080 tcg_gen_or_i64(t2, t2, t3); 1081 tcg_gen_movi_i64(t3, 0); 1082 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1); 1083 tcg_gen_div_i64(ret, t0, t1); 1084 } else { 1085 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t1, 0); 1086 tcg_gen_movi_i64(t3, 0); 1087 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1); 1088 tcg_gen_divu_i64(ret, t0, t1); 1089 } 1090 if (compute_ov) { 1091 tcg_gen_mov_tl(cpu_ov, t2); 1092 if (is_isa300(ctx)) { 1093 tcg_gen_mov_tl(cpu_ov32, t2); 1094 } 1095 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 1096 } 1097 tcg_temp_free_i64(t0); 1098 tcg_temp_free_i64(t1); 1099 tcg_temp_free_i64(t2); 1100 tcg_temp_free_i64(t3); 1101 1102 if (unlikely(Rc(ctx->opcode) != 0)) 1103 gen_set_Rc0(ctx, ret); 1104 } 1105 1106 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \ 1107 static void glue(gen_, name)(DisasContext *ctx) \ 1108 { \ 1109 gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)], \ 1110 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 1111 sign, compute_ov); \ 1112 } 1113 /* divdu divdu. divduo divduo. */ 1114 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0); 1115 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1); 1116 /* divd divd. divdo divdo. 
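   These go through gen_op_arith_divd above: division by zero, or
   INT64_MIN / -1 for the signed form, sets t2, the divisor is then patched
   to 1 so the host division cannot fault, and the 'o' variants copy t2 into
   OV/OV32 (the quotient itself is undefined by the architecture in those
   cases).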
*/ 1117 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0); 1118 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1); 1119 1120 GEN_DIVE(divdeu, divdeu, 0); 1121 GEN_DIVE(divdeuo, divdeu, 1); 1122 GEN_DIVE(divde, divde, 0); 1123 GEN_DIVE(divdeo, divde, 1); 1124 #endif 1125 1126 static inline void gen_op_arith_modw(DisasContext *ctx, TCGv ret, TCGv arg1, 1127 TCGv arg2, int sign) 1128 { 1129 TCGv_i32 t0 = tcg_temp_new_i32(); 1130 TCGv_i32 t1 = tcg_temp_new_i32(); 1131 1132 tcg_gen_trunc_tl_i32(t0, arg1); 1133 tcg_gen_trunc_tl_i32(t1, arg2); 1134 if (sign) { 1135 TCGv_i32 t2 = tcg_temp_new_i32(); 1136 TCGv_i32 t3 = tcg_temp_new_i32(); 1137 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN); 1138 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1); 1139 tcg_gen_and_i32(t2, t2, t3); 1140 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0); 1141 tcg_gen_or_i32(t2, t2, t3); 1142 tcg_gen_movi_i32(t3, 0); 1143 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1); 1144 tcg_gen_rem_i32(t3, t0, t1); 1145 tcg_gen_ext_i32_tl(ret, t3); 1146 tcg_temp_free_i32(t2); 1147 tcg_temp_free_i32(t3); 1148 } else { 1149 TCGv_i32 t2 = tcg_const_i32(1); 1150 TCGv_i32 t3 = tcg_const_i32(0); 1151 tcg_gen_movcond_i32(TCG_COND_EQ, t1, t1, t3, t2, t1); 1152 tcg_gen_remu_i32(t3, t0, t1); 1153 tcg_gen_extu_i32_tl(ret, t3); 1154 tcg_temp_free_i32(t2); 1155 tcg_temp_free_i32(t3); 1156 } 1157 tcg_temp_free_i32(t0); 1158 tcg_temp_free_i32(t1); 1159 } 1160 1161 #define GEN_INT_ARITH_MODW(name, opc3, sign) \ 1162 static void glue(gen_, name)(DisasContext *ctx) \ 1163 { \ 1164 gen_op_arith_modw(ctx, cpu_gpr[rD(ctx->opcode)], \ 1165 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 1166 sign); \ 1167 } 1168 1169 GEN_INT_ARITH_MODW(moduw, 0x08, 0); 1170 GEN_INT_ARITH_MODW(modsw, 0x18, 1); 1171 1172 #if defined(TARGET_PPC64) 1173 static inline void gen_op_arith_modd(DisasContext *ctx, TCGv ret, TCGv arg1, 1174 TCGv arg2, int sign) 1175 { 1176 TCGv_i64 t0 = tcg_temp_new_i64(); 1177 TCGv_i64 t1 = tcg_temp_new_i64(); 1178 1179 tcg_gen_mov_i64(t0, arg1); 1180 tcg_gen_mov_i64(t1, arg2); 1181 if (sign) { 1182 TCGv_i64 t2 = tcg_temp_new_i64(); 1183 TCGv_i64 t3 = tcg_temp_new_i64(); 1184 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN); 1185 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1); 1186 tcg_gen_and_i64(t2, t2, t3); 1187 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0); 1188 tcg_gen_or_i64(t2, t2, t3); 1189 tcg_gen_movi_i64(t3, 0); 1190 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1); 1191 tcg_gen_rem_i64(ret, t0, t1); 1192 tcg_temp_free_i64(t2); 1193 tcg_temp_free_i64(t3); 1194 } else { 1195 TCGv_i64 t2 = tcg_const_i64(1); 1196 TCGv_i64 t3 = tcg_const_i64(0); 1197 tcg_gen_movcond_i64(TCG_COND_EQ, t1, t1, t3, t2, t1); 1198 tcg_gen_remu_i64(ret, t0, t1); 1199 tcg_temp_free_i64(t2); 1200 tcg_temp_free_i64(t3); 1201 } 1202 tcg_temp_free_i64(t0); 1203 tcg_temp_free_i64(t1); 1204 } 1205 1206 #define GEN_INT_ARITH_MODD(name, opc3, sign) \ 1207 static void glue(gen_, name)(DisasContext *ctx) \ 1208 { \ 1209 gen_op_arith_modd(ctx, cpu_gpr[rD(ctx->opcode)], \ 1210 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 1211 sign); \ 1212 } 1213 1214 GEN_INT_ARITH_MODD(modud, 0x08, 0); 1215 GEN_INT_ARITH_MODD(modsd, 0x18, 1); 1216 #endif 1217 1218 /* mulhw mulhw. 
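   mulhw multiplies the low 32 bits of rA and rB as signed integers and
   returns the high 32 bits of the 64-bit product: muls2_i32 below produces
   both product halves and only the upper one (t1) is written back.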
*/ 1219 static void gen_mulhw(DisasContext *ctx) 1220 { 1221 TCGv_i32 t0 = tcg_temp_new_i32(); 1222 TCGv_i32 t1 = tcg_temp_new_i32(); 1223 1224 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); 1225 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); 1226 tcg_gen_muls2_i32(t0, t1, t0, t1); 1227 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1); 1228 tcg_temp_free_i32(t0); 1229 tcg_temp_free_i32(t1); 1230 if (unlikely(Rc(ctx->opcode) != 0)) 1231 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1232 } 1233 1234 /* mulhwu mulhwu. */ 1235 static void gen_mulhwu(DisasContext *ctx) 1236 { 1237 TCGv_i32 t0 = tcg_temp_new_i32(); 1238 TCGv_i32 t1 = tcg_temp_new_i32(); 1239 1240 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); 1241 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); 1242 tcg_gen_mulu2_i32(t0, t1, t0, t1); 1243 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1); 1244 tcg_temp_free_i32(t0); 1245 tcg_temp_free_i32(t1); 1246 if (unlikely(Rc(ctx->opcode) != 0)) 1247 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1248 } 1249 1250 /* mullw mullw. */ 1251 static void gen_mullw(DisasContext *ctx) 1252 { 1253 #if defined(TARGET_PPC64) 1254 TCGv_i64 t0, t1; 1255 t0 = tcg_temp_new_i64(); 1256 t1 = tcg_temp_new_i64(); 1257 tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]); 1258 tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]); 1259 tcg_gen_mul_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); 1260 tcg_temp_free(t0); 1261 tcg_temp_free(t1); 1262 #else 1263 tcg_gen_mul_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1264 cpu_gpr[rB(ctx->opcode)]); 1265 #endif 1266 if (unlikely(Rc(ctx->opcode) != 0)) 1267 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1268 } 1269 1270 /* mullwo mullwo. */ 1271 static void gen_mullwo(DisasContext *ctx) 1272 { 1273 TCGv_i32 t0 = tcg_temp_new_i32(); 1274 TCGv_i32 t1 = tcg_temp_new_i32(); 1275 1276 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); 1277 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); 1278 tcg_gen_muls2_i32(t0, t1, t0, t1); 1279 #if defined(TARGET_PPC64) 1280 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); 1281 #else 1282 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], t0); 1283 #endif 1284 1285 tcg_gen_sari_i32(t0, t0, 31); 1286 tcg_gen_setcond_i32(TCG_COND_NE, t0, t0, t1); 1287 tcg_gen_extu_i32_tl(cpu_ov, t0); 1288 if (is_isa300(ctx)) { 1289 tcg_gen_mov_tl(cpu_ov32, cpu_ov); 1290 } 1291 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 1292 1293 tcg_temp_free_i32(t0); 1294 tcg_temp_free_i32(t1); 1295 if (unlikely(Rc(ctx->opcode) != 0)) 1296 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1297 } 1298 1299 /* mulli */ 1300 static void gen_mulli(DisasContext *ctx) 1301 { 1302 tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1303 SIMM(ctx->opcode)); 1304 } 1305 1306 #if defined(TARGET_PPC64) 1307 /* mulhd mulhd. */ 1308 static void gen_mulhd(DisasContext *ctx) 1309 { 1310 TCGv lo = tcg_temp_new(); 1311 tcg_gen_muls2_tl(lo, cpu_gpr[rD(ctx->opcode)], 1312 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 1313 tcg_temp_free(lo); 1314 if (unlikely(Rc(ctx->opcode) != 0)) { 1315 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1316 } 1317 } 1318 1319 /* mulhdu mulhdu. */ 1320 static void gen_mulhdu(DisasContext *ctx) 1321 { 1322 TCGv lo = tcg_temp_new(); 1323 tcg_gen_mulu2_tl(lo, cpu_gpr[rD(ctx->opcode)], 1324 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 1325 tcg_temp_free(lo); 1326 if (unlikely(Rc(ctx->opcode) != 0)) { 1327 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1328 } 1329 } 1330 1331 /* mulld mulld. 
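   mulld keeps only the low 64 bits of the product; for mulldo just below,
   muls2_i64 produces the full 128-bit signed product and OV is set when the
   high half differs from the sign-extension of the low half.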
 */
static void gen_mulld(DisasContext *ctx)
{
    tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                   cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* mulldo mulldo. */
static void gen_mulldo(DisasContext *ctx)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_muls2_i64(t0, t1, cpu_gpr[rA(ctx->opcode)],
                      cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], t0);

    tcg_gen_sari_i64(t0, t0, 63);
    tcg_gen_setcond_i64(TCG_COND_NE, cpu_ov, t0, t1);
    if (is_isa300(ctx)) {
        tcg_gen_mov_tl(cpu_ov32, cpu_ov);
    }
    tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}
#endif

/* Common subf function */
static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, bool add_ca, bool compute_ca,
                                     bool compute_ov, bool compute_rc0)
{
    TCGv t0 = ret;

    if (compute_ca || compute_ov) {
        t0 = tcg_temp_new();
    }

    if (compute_ca) {
        /* dest = ~arg1 + arg2 [+ ca].  */
        if (NARROW_MODE(ctx)) {
            /* Caution: a non-obvious corner case of the spec is that we
               must produce the *entire* 64-bit addition, but produce the
               carry into bit 32. */
            TCGv inv1 = tcg_temp_new();
            TCGv t1 = tcg_temp_new();
            tcg_gen_not_tl(inv1, arg1);
            if (add_ca) {
                tcg_gen_add_tl(t0, arg2, cpu_ca);
            } else {
                tcg_gen_addi_tl(t0, arg2, 1);
            }
            tcg_gen_xor_tl(t1, arg2, inv1);         /* add without carry */
            tcg_gen_add_tl(t0, t0, inv1);
            tcg_temp_free(inv1);
            tcg_gen_xor_tl(cpu_ca, t0, t1);         /* bits changed w/ carry */
            tcg_temp_free(t1);
            tcg_gen_extract_tl(cpu_ca, cpu_ca, 32, 1);
            if (is_isa300(ctx)) {
                tcg_gen_mov_tl(cpu_ca32, cpu_ca);
            }
        } else if (add_ca) {
            TCGv zero, inv1 = tcg_temp_new();
            tcg_gen_not_tl(inv1, arg1);
            zero = tcg_const_tl(0);
            tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero);
            tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero);
            gen_op_arith_compute_ca32(ctx, t0, inv1, arg2, 0);
            tcg_temp_free(zero);
            tcg_temp_free(inv1);
        } else {
            tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1);
            tcg_gen_sub_tl(t0, arg2, arg1);
            gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, 1);
        }
    } else if (add_ca) {
        /* Since we're ignoring carry-out, we can simplify the
           standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1.
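           (In two's complement ~arg1 = -arg1 - 1, so ~arg1 + arg2 + ca =
           arg2 - arg1 + ca - 1, which is exactly the sub/add/subi sequence
           emitted below.)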
*/ 1417 tcg_gen_sub_tl(t0, arg2, arg1); 1418 tcg_gen_add_tl(t0, t0, cpu_ca); 1419 tcg_gen_subi_tl(t0, t0, 1); 1420 } else { 1421 tcg_gen_sub_tl(t0, arg2, arg1); 1422 } 1423 1424 if (compute_ov) { 1425 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1); 1426 } 1427 if (unlikely(compute_rc0)) { 1428 gen_set_Rc0(ctx, t0); 1429 } 1430 1431 if (t0 != ret) { 1432 tcg_gen_mov_tl(ret, t0); 1433 tcg_temp_free(t0); 1434 } 1435 } 1436 /* Sub functions with Two operands functions */ 1437 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \ 1438 static void glue(gen_, name)(DisasContext *ctx) \ 1439 { \ 1440 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \ 1441 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 1442 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 1443 } 1444 /* Sub functions with one operand and one immediate */ 1445 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \ 1446 add_ca, compute_ca, compute_ov) \ 1447 static void glue(gen_, name)(DisasContext *ctx) \ 1448 { \ 1449 TCGv t0 = tcg_const_tl(const_val); \ 1450 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \ 1451 cpu_gpr[rA(ctx->opcode)], t0, \ 1452 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 1453 tcg_temp_free(t0); \ 1454 } 1455 /* subf subf. subfo subfo. */ 1456 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0) 1457 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1) 1458 /* subfc subfc. subfco subfco. */ 1459 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0) 1460 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1) 1461 /* subfe subfe. subfeo subfo. */ 1462 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0) 1463 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1) 1464 /* subfme subfme. subfmeo subfmeo. */ 1465 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0) 1466 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1) 1467 /* subfze subfze. subfzeo subfzeo.*/ 1468 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0) 1469 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1) 1470 1471 /* subfic */ 1472 static void gen_subfic(DisasContext *ctx) 1473 { 1474 TCGv c = tcg_const_tl(SIMM(ctx->opcode)); 1475 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1476 c, 0, 1, 0, 0); 1477 tcg_temp_free(c); 1478 } 1479 1480 /* neg neg. nego nego. */ 1481 static inline void gen_op_arith_neg(DisasContext *ctx, bool compute_ov) 1482 { 1483 TCGv zero = tcg_const_tl(0); 1484 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1485 zero, 0, 0, compute_ov, Rc(ctx->opcode)); 1486 tcg_temp_free(zero); 1487 } 1488 1489 static void gen_neg(DisasContext *ctx) 1490 { 1491 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 1492 if (unlikely(Rc(ctx->opcode))) { 1493 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1494 } 1495 } 1496 1497 static void gen_nego(DisasContext *ctx) 1498 { 1499 gen_op_arith_neg(ctx, 1); 1500 } 1501 1502 /*** Integer logical ***/ 1503 #define GEN_LOGICAL2(name, tcg_op, opc, type) \ 1504 static void glue(gen_, name)(DisasContext *ctx) \ 1505 { \ 1506 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], \ 1507 cpu_gpr[rB(ctx->opcode)]); \ 1508 if (unlikely(Rc(ctx->opcode) != 0)) \ 1509 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \ 1510 } 1511 1512 #define GEN_LOGICAL1(name, tcg_op, opc, type) \ 1513 static void glue(gen_, name)(DisasContext *ctx) \ 1514 { \ 1515 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); \ 1516 if (unlikely(Rc(ctx->opcode) != 0)) \ 1517 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \ 1518 } 1519 1520 /* and & and. 
*/ 1521 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER); 1522 /* andc & andc. */ 1523 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER); 1524 1525 /* andi. */ 1526 static void gen_andi_(DisasContext *ctx) 1527 { 1528 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode)); 1529 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 1530 } 1531 1532 /* andis. */ 1533 static void gen_andis_(DisasContext *ctx) 1534 { 1535 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode) << 16); 1536 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 1537 } 1538 1539 /* cntlzw */ 1540 static void gen_cntlzw(DisasContext *ctx) 1541 { 1542 TCGv_i32 t = tcg_temp_new_i32(); 1543 1544 tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]); 1545 tcg_gen_clzi_i32(t, t, 32); 1546 tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t); 1547 tcg_temp_free_i32(t); 1548 1549 if (unlikely(Rc(ctx->opcode) != 0)) 1550 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 1551 } 1552 1553 /* cnttzw */ 1554 static void gen_cnttzw(DisasContext *ctx) 1555 { 1556 TCGv_i32 t = tcg_temp_new_i32(); 1557 1558 tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]); 1559 tcg_gen_ctzi_i32(t, t, 32); 1560 tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t); 1561 tcg_temp_free_i32(t); 1562 1563 if (unlikely(Rc(ctx->opcode) != 0)) { 1564 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 1565 } 1566 } 1567 1568 /* eqv & eqv. */ 1569 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER); 1570 /* extsb & extsb. */ 1571 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER); 1572 /* extsh & extsh. */ 1573 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER); 1574 /* nand & nand. */ 1575 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER); 1576 /* nor & nor. */ 1577 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER); 1578 1579 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) 1580 static void gen_pause(DisasContext *ctx) 1581 { 1582 TCGv_i32 t0 = tcg_const_i32(0); 1583 tcg_gen_st_i32(t0, cpu_env, 1584 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted)); 1585 tcg_temp_free_i32(t0); 1586 1587 /* Stop translation, this gives other CPUs a chance to run */ 1588 gen_exception_nip(ctx, EXCP_HLT, ctx->nip); 1589 } 1590 #endif /* defined(TARGET_PPC64) */ 1591 1592 /* or & or. */ 1593 static void gen_or(DisasContext *ctx) 1594 { 1595 int rs, ra, rb; 1596 1597 rs = rS(ctx->opcode); 1598 ra = rA(ctx->opcode); 1599 rb = rB(ctx->opcode); 1600 /* Optimisation for mr. 
ri case */
    if (rs != ra || rs != rb) {
        if (rs != rb)
            tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
        else
            tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);
        if (unlikely(Rc(ctx->opcode) != 0))
            gen_set_Rc0(ctx, cpu_gpr[ra]);
    } else if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rs]);
#if defined(TARGET_PPC64)
    } else if (rs != 0) { /* 0 is nop */
        int prio = 0;

        switch (rs) {
        case 1:
            /* Set process priority to low */
            prio = 2;
            break;
        case 6:
            /* Set process priority to medium-low */
            prio = 3;
            break;
        case 2:
            /* Set process priority to normal */
            prio = 4;
            break;
#if !defined(CONFIG_USER_ONLY)
        case 31:
            if (!ctx->pr) {
                /* Set process priority to very low */
                prio = 1;
            }
            break;
        case 5:
            if (!ctx->pr) {
                /* Set process priority to medium-high */
                prio = 5;
            }
            break;
        case 3:
            if (!ctx->pr) {
                /* Set process priority to high */
                prio = 6;
            }
            break;
        case 7:
            if (ctx->hv && !ctx->pr) {
                /* Set process priority to very high */
                prio = 7;
            }
            break;
#endif
        default:
            break;
        }
        if (prio) {
            TCGv t0 = tcg_temp_new();
            gen_load_spr(t0, SPR_PPR);
            tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
            tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
            gen_store_spr(SPR_PPR, t0);
            tcg_temp_free(t0);
        }
#if !defined(CONFIG_USER_ONLY)
        /* Pause out of TCG otherwise spin loops with smt_low eat too much
         * CPU and the kernel hangs.  This applies to all encodings other
         * than no-op, e.g., miso(rs=26), yield(27), mdoio(29), mdoom(30),
         * and all currently undefined.
         */
        gen_pause(ctx);
#endif
#endif
    }
}
/* orc & orc. */
GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);

/* xor & xor.
*/ 1679 static void gen_xor(DisasContext *ctx) 1680 { 1681 /* Optimisation for "set to zero" case */ 1682 if (rS(ctx->opcode) != rB(ctx->opcode)) 1683 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 1684 else 1685 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 1686 if (unlikely(Rc(ctx->opcode) != 0)) 1687 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 1688 } 1689 1690 /* ori */ 1691 static void gen_ori(DisasContext *ctx) 1692 { 1693 target_ulong uimm = UIMM(ctx->opcode); 1694 1695 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 1696 return; 1697 } 1698 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm); 1699 } 1700 1701 /* oris */ 1702 static void gen_oris(DisasContext *ctx) 1703 { 1704 target_ulong uimm = UIMM(ctx->opcode); 1705 1706 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 1707 /* NOP */ 1708 return; 1709 } 1710 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16); 1711 } 1712 1713 /* xori */ 1714 static void gen_xori(DisasContext *ctx) 1715 { 1716 target_ulong uimm = UIMM(ctx->opcode); 1717 1718 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 1719 /* NOP */ 1720 return; 1721 } 1722 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm); 1723 } 1724 1725 /* xoris */ 1726 static void gen_xoris(DisasContext *ctx) 1727 { 1728 target_ulong uimm = UIMM(ctx->opcode); 1729 1730 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 1731 /* NOP */ 1732 return; 1733 } 1734 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16); 1735 } 1736 1737 /* popcntb : PowerPC 2.03 specification */ 1738 static void gen_popcntb(DisasContext *ctx) 1739 { 1740 gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 1741 } 1742 1743 static void gen_popcntw(DisasContext *ctx) 1744 { 1745 #if defined(TARGET_PPC64) 1746 gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 1747 #else 1748 tcg_gen_ctpop_i32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 1749 #endif 1750 } 1751 1752 #if defined(TARGET_PPC64) 1753 /* popcntd: PowerPC 2.06 specification */ 1754 static void gen_popcntd(DisasContext *ctx) 1755 { 1756 tcg_gen_ctpop_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 1757 } 1758 #endif 1759 1760 /* prtyw: PowerPC 2.05 specification */ 1761 static void gen_prtyw(DisasContext *ctx) 1762 { 1763 TCGv ra = cpu_gpr[rA(ctx->opcode)]; 1764 TCGv rs = cpu_gpr[rS(ctx->opcode)]; 1765 TCGv t0 = tcg_temp_new(); 1766 tcg_gen_shri_tl(t0, rs, 16); 1767 tcg_gen_xor_tl(ra, rs, t0); 1768 tcg_gen_shri_tl(t0, ra, 8); 1769 tcg_gen_xor_tl(ra, ra, t0); 1770 tcg_gen_andi_tl(ra, ra, (target_ulong)0x100000001ULL); 1771 tcg_temp_free(t0); 1772 } 1773 1774 #if defined(TARGET_PPC64) 1775 /* prtyd: PowerPC 2.05 specification */ 1776 static void gen_prtyd(DisasContext *ctx) 1777 { 1778 TCGv ra = cpu_gpr[rA(ctx->opcode)]; 1779 TCGv rs = cpu_gpr[rS(ctx->opcode)]; 1780 TCGv t0 = tcg_temp_new(); 1781 tcg_gen_shri_tl(t0, rs, 32); 1782 tcg_gen_xor_tl(ra, rs, t0); 1783 tcg_gen_shri_tl(t0, ra, 16); 1784 tcg_gen_xor_tl(ra, ra, t0); 1785 tcg_gen_shri_tl(t0, ra, 8); 1786 tcg_gen_xor_tl(ra, ra, t0); 1787 tcg_gen_andi_tl(ra, ra, 1); 1788 tcg_temp_free(t0); 1789 } 1790 #endif 1791 1792 #if defined(TARGET_PPC64) 1793 /* bpermd */ 1794 static void gen_bpermd(DisasContext *ctx) 1795 { 1796 gen_helper_bpermd(cpu_gpr[rA(ctx->opcode)], 1797 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 1798 } 1799 #endif 1800 1801 #if defined(TARGET_PPC64) 1802 /* 
extsw & extsw. */ 1803 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B); 1804 1805 /* cntlzd */ 1806 static void gen_cntlzd(DisasContext *ctx) 1807 { 1808 tcg_gen_clzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64); 1809 if (unlikely(Rc(ctx->opcode) != 0)) 1810 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 1811 } 1812 1813 /* cnttzd */ 1814 static void gen_cnttzd(DisasContext *ctx) 1815 { 1816 tcg_gen_ctzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64); 1817 if (unlikely(Rc(ctx->opcode) != 0)) { 1818 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 1819 } 1820 } 1821 1822 /* darn */ 1823 static void gen_darn(DisasContext *ctx) 1824 { 1825 int l = L(ctx->opcode); 1826 1827 if (l == 0) { 1828 gen_helper_darn32(cpu_gpr[rD(ctx->opcode)]); 1829 } else if (l <= 2) { 1830 /* Return 64-bit random for both CRN and RRN */ 1831 gen_helper_darn64(cpu_gpr[rD(ctx->opcode)]); 1832 } else { 1833 tcg_gen_movi_i64(cpu_gpr[rD(ctx->opcode)], -1); 1834 } 1835 } 1836 #endif 1837 1838 /*** Integer rotate ***/ 1839 1840 /* rlwimi & rlwimi. */ 1841 static void gen_rlwimi(DisasContext *ctx) 1842 { 1843 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 1844 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 1845 uint32_t sh = SH(ctx->opcode); 1846 uint32_t mb = MB(ctx->opcode); 1847 uint32_t me = ME(ctx->opcode); 1848 1849 if (sh == (31-me) && mb <= me) { 1850 tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1); 1851 } else { 1852 target_ulong mask; 1853 TCGv t1; 1854 1855 #if defined(TARGET_PPC64) 1856 mb += 32; 1857 me += 32; 1858 #endif 1859 mask = MASK(mb, me); 1860 1861 t1 = tcg_temp_new(); 1862 if (mask <= 0xffffffffu) { 1863 TCGv_i32 t0 = tcg_temp_new_i32(); 1864 tcg_gen_trunc_tl_i32(t0, t_rs); 1865 tcg_gen_rotli_i32(t0, t0, sh); 1866 tcg_gen_extu_i32_tl(t1, t0); 1867 tcg_temp_free_i32(t0); 1868 } else { 1869 #if defined(TARGET_PPC64) 1870 tcg_gen_deposit_i64(t1, t_rs, t_rs, 32, 32); 1871 tcg_gen_rotli_i64(t1, t1, sh); 1872 #else 1873 g_assert_not_reached(); 1874 #endif 1875 } 1876 1877 tcg_gen_andi_tl(t1, t1, mask); 1878 tcg_gen_andi_tl(t_ra, t_ra, ~mask); 1879 tcg_gen_or_tl(t_ra, t_ra, t1); 1880 tcg_temp_free(t1); 1881 } 1882 if (unlikely(Rc(ctx->opcode) != 0)) { 1883 gen_set_Rc0(ctx, t_ra); 1884 } 1885 } 1886 1887 /* rlwinm & rlwinm. 
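   rlwinm rotates the low 32 bits of rS left by SH and ANDs the result with
   MASK(MB, ME).  gen_rlwinm below special-cases the patterns that reduce to
   a shift-into-zeros (deposit_z) or a plain bit-field extract, e.g.
   rlwinm rA,rS,24,24,31 becomes tcg_gen_extract_tl(rA, rS, 8, 8); everything
   else falls back to rotate-then-mask.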
*/ 1888 static void gen_rlwinm(DisasContext *ctx) 1889 { 1890 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 1891 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 1892 int sh = SH(ctx->opcode); 1893 int mb = MB(ctx->opcode); 1894 int me = ME(ctx->opcode); 1895 int len = me - mb + 1; 1896 int rsh = (32 - sh) & 31; 1897 1898 if (sh != 0 && len > 0 && me == (31 - sh)) { 1899 tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len); 1900 } else if (me == 31 && rsh + len <= 32) { 1901 tcg_gen_extract_tl(t_ra, t_rs, rsh, len); 1902 } else { 1903 target_ulong mask; 1904 #if defined(TARGET_PPC64) 1905 mb += 32; 1906 me += 32; 1907 #endif 1908 mask = MASK(mb, me); 1909 if (sh == 0) { 1910 tcg_gen_andi_tl(t_ra, t_rs, mask); 1911 } else if (mask <= 0xffffffffu) { 1912 TCGv_i32 t0 = tcg_temp_new_i32(); 1913 tcg_gen_trunc_tl_i32(t0, t_rs); 1914 tcg_gen_rotli_i32(t0, t0, sh); 1915 tcg_gen_andi_i32(t0, t0, mask); 1916 tcg_gen_extu_i32_tl(t_ra, t0); 1917 tcg_temp_free_i32(t0); 1918 } else { 1919 #if defined(TARGET_PPC64) 1920 tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32); 1921 tcg_gen_rotli_i64(t_ra, t_ra, sh); 1922 tcg_gen_andi_i64(t_ra, t_ra, mask); 1923 #else 1924 g_assert_not_reached(); 1925 #endif 1926 } 1927 } 1928 if (unlikely(Rc(ctx->opcode) != 0)) { 1929 gen_set_Rc0(ctx, t_ra); 1930 } 1931 } 1932 1933 /* rlwnm & rlwnm. */ 1934 static void gen_rlwnm(DisasContext *ctx) 1935 { 1936 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 1937 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 1938 TCGv t_rb = cpu_gpr[rB(ctx->opcode)]; 1939 uint32_t mb = MB(ctx->opcode); 1940 uint32_t me = ME(ctx->opcode); 1941 target_ulong mask; 1942 1943 #if defined(TARGET_PPC64) 1944 mb += 32; 1945 me += 32; 1946 #endif 1947 mask = MASK(mb, me); 1948 1949 if (mask <= 0xffffffffu) { 1950 TCGv_i32 t0 = tcg_temp_new_i32(); 1951 TCGv_i32 t1 = tcg_temp_new_i32(); 1952 tcg_gen_trunc_tl_i32(t0, t_rb); 1953 tcg_gen_trunc_tl_i32(t1, t_rs); 1954 tcg_gen_andi_i32(t0, t0, 0x1f); 1955 tcg_gen_rotl_i32(t1, t1, t0); 1956 tcg_gen_extu_i32_tl(t_ra, t1); 1957 tcg_temp_free_i32(t0); 1958 tcg_temp_free_i32(t1); 1959 } else { 1960 #if defined(TARGET_PPC64) 1961 TCGv_i64 t0 = tcg_temp_new_i64(); 1962 tcg_gen_andi_i64(t0, t_rb, 0x1f); 1963 tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32); 1964 tcg_gen_rotl_i64(t_ra, t_ra, t0); 1965 tcg_temp_free_i64(t0); 1966 #else 1967 g_assert_not_reached(); 1968 #endif 1969 } 1970 1971 tcg_gen_andi_tl(t_ra, t_ra, mask); 1972 1973 if (unlikely(Rc(ctx->opcode) != 0)) { 1974 gen_set_Rc0(ctx, t_ra); 1975 } 1976 } 1977 1978 #if defined(TARGET_PPC64) 1979 #define GEN_PPC64_R2(name, opc1, opc2) \ 1980 static void glue(gen_, name##0)(DisasContext *ctx) \ 1981 { \ 1982 gen_##name(ctx, 0); \ 1983 } \ 1984 \ 1985 static void glue(gen_, name##1)(DisasContext *ctx) \ 1986 { \ 1987 gen_##name(ctx, 1); \ 1988 } 1989 #define GEN_PPC64_R4(name, opc1, opc2) \ 1990 static void glue(gen_, name##0)(DisasContext *ctx) \ 1991 { \ 1992 gen_##name(ctx, 0, 0); \ 1993 } \ 1994 \ 1995 static void glue(gen_, name##1)(DisasContext *ctx) \ 1996 { \ 1997 gen_##name(ctx, 0, 1); \ 1998 } \ 1999 \ 2000 static void glue(gen_, name##2)(DisasContext *ctx) \ 2001 { \ 2002 gen_##name(ctx, 1, 0); \ 2003 } \ 2004 \ 2005 static void glue(gen_, name##3)(DisasContext *ctx) \ 2006 { \ 2007 gen_##name(ctx, 1, 1); \ 2008 } 2009 2010 static void gen_rldinm(DisasContext *ctx, int mb, int me, int sh) 2011 { 2012 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2013 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2014 int len = me - mb + 1; 2015 int rsh = (64 - sh) & 63; 2016 2017 if (sh != 0 && len > 0 && me == (63 - sh)) { 2018 
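        /*
         * me == 63 - sh means the mask ends exactly where the rotation would
         * start wrapping bits around, so rotate+mask degenerates to
         * depositing the low len bits of rS at offset sh in a zeroed
         * register.
         */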
tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len); 2019 } else if (me == 63 && rsh + len <= 64) { 2020 tcg_gen_extract_tl(t_ra, t_rs, rsh, len); 2021 } else { 2022 tcg_gen_rotli_tl(t_ra, t_rs, sh); 2023 tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me)); 2024 } 2025 if (unlikely(Rc(ctx->opcode) != 0)) { 2026 gen_set_Rc0(ctx, t_ra); 2027 } 2028 } 2029 2030 /* rldicl - rldicl. */ 2031 static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn) 2032 { 2033 uint32_t sh, mb; 2034 2035 sh = SH(ctx->opcode) | (shn << 5); 2036 mb = MB(ctx->opcode) | (mbn << 5); 2037 gen_rldinm(ctx, mb, 63, sh); 2038 } 2039 GEN_PPC64_R4(rldicl, 0x1E, 0x00); 2040 2041 /* rldicr - rldicr. */ 2042 static inline void gen_rldicr(DisasContext *ctx, int men, int shn) 2043 { 2044 uint32_t sh, me; 2045 2046 sh = SH(ctx->opcode) | (shn << 5); 2047 me = MB(ctx->opcode) | (men << 5); 2048 gen_rldinm(ctx, 0, me, sh); 2049 } 2050 GEN_PPC64_R4(rldicr, 0x1E, 0x02); 2051 2052 /* rldic - rldic. */ 2053 static inline void gen_rldic(DisasContext *ctx, int mbn, int shn) 2054 { 2055 uint32_t sh, mb; 2056 2057 sh = SH(ctx->opcode) | (shn << 5); 2058 mb = MB(ctx->opcode) | (mbn << 5); 2059 gen_rldinm(ctx, mb, 63 - sh, sh); 2060 } 2061 GEN_PPC64_R4(rldic, 0x1E, 0x04); 2062 2063 static void gen_rldnm(DisasContext *ctx, int mb, int me) 2064 { 2065 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2066 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2067 TCGv t_rb = cpu_gpr[rB(ctx->opcode)]; 2068 TCGv t0; 2069 2070 t0 = tcg_temp_new(); 2071 tcg_gen_andi_tl(t0, t_rb, 0x3f); 2072 tcg_gen_rotl_tl(t_ra, t_rs, t0); 2073 tcg_temp_free(t0); 2074 2075 tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me)); 2076 if (unlikely(Rc(ctx->opcode) != 0)) { 2077 gen_set_Rc0(ctx, t_ra); 2078 } 2079 } 2080 2081 /* rldcl - rldcl. */ 2082 static inline void gen_rldcl(DisasContext *ctx, int mbn) 2083 { 2084 uint32_t mb; 2085 2086 mb = MB(ctx->opcode) | (mbn << 5); 2087 gen_rldnm(ctx, mb, 63); 2088 } 2089 GEN_PPC64_R2(rldcl, 0x1E, 0x08); 2090 2091 /* rldcr - rldcr. */ 2092 static inline void gen_rldcr(DisasContext *ctx, int men) 2093 { 2094 uint32_t me; 2095 2096 me = MB(ctx->opcode) | (men << 5); 2097 gen_rldnm(ctx, 0, me); 2098 } 2099 GEN_PPC64_R2(rldcr, 0x1E, 0x09); 2100 2101 /* rldimi - rldimi. */ 2102 static void gen_rldimi(DisasContext *ctx, int mbn, int shn) 2103 { 2104 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2105 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2106 uint32_t sh = SH(ctx->opcode) | (shn << 5); 2107 uint32_t mb = MB(ctx->opcode) | (mbn << 5); 2108 uint32_t me = 63 - sh; 2109 2110 if (mb <= me) { 2111 tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1); 2112 } else { 2113 target_ulong mask = MASK(mb, me); 2114 TCGv t1 = tcg_temp_new(); 2115 2116 tcg_gen_rotli_tl(t1, t_rs, sh); 2117 tcg_gen_andi_tl(t1, t1, mask); 2118 tcg_gen_andi_tl(t_ra, t_ra, ~mask); 2119 tcg_gen_or_tl(t_ra, t_ra, t1); 2120 tcg_temp_free(t1); 2121 } 2122 if (unlikely(Rc(ctx->opcode) != 0)) { 2123 gen_set_Rc0(ctx, t_ra); 2124 } 2125 } 2126 GEN_PPC64_R4(rldimi, 0x1E, 0x06); 2127 #endif 2128 2129 /*** Integer shift ***/ 2130 2131 /* slw & slw. 
*/ 2132 static void gen_slw(DisasContext *ctx) 2133 { 2134 TCGv t0, t1; 2135 2136 t0 = tcg_temp_new(); 2137 /* AND rS with a mask that is 0 when rB >= 0x20 */ 2138 #if defined(TARGET_PPC64) 2139 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a); 2140 tcg_gen_sari_tl(t0, t0, 0x3f); 2141 #else 2142 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a); 2143 tcg_gen_sari_tl(t0, t0, 0x1f); 2144 #endif 2145 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 2146 t1 = tcg_temp_new(); 2147 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f); 2148 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 2149 tcg_temp_free(t1); 2150 tcg_temp_free(t0); 2151 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 2152 if (unlikely(Rc(ctx->opcode) != 0)) 2153 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2154 } 2155 2156 /* sraw & sraw. */ 2157 static void gen_sraw(DisasContext *ctx) 2158 { 2159 gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], cpu_env, 2160 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2161 if (unlikely(Rc(ctx->opcode) != 0)) 2162 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2163 } 2164 2165 /* srawi & srawi. */ 2166 static void gen_srawi(DisasContext *ctx) 2167 { 2168 int sh = SH(ctx->opcode); 2169 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 2170 TCGv src = cpu_gpr[rS(ctx->opcode)]; 2171 if (sh == 0) { 2172 tcg_gen_ext32s_tl(dst, src); 2173 tcg_gen_movi_tl(cpu_ca, 0); 2174 if (is_isa300(ctx)) { 2175 tcg_gen_movi_tl(cpu_ca32, 0); 2176 } 2177 } else { 2178 TCGv t0; 2179 tcg_gen_ext32s_tl(dst, src); 2180 tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1); 2181 t0 = tcg_temp_new(); 2182 tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1); 2183 tcg_gen_and_tl(cpu_ca, cpu_ca, t0); 2184 tcg_temp_free(t0); 2185 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0); 2186 if (is_isa300(ctx)) { 2187 tcg_gen_mov_tl(cpu_ca32, cpu_ca); 2188 } 2189 tcg_gen_sari_tl(dst, dst, sh); 2190 } 2191 if (unlikely(Rc(ctx->opcode) != 0)) { 2192 gen_set_Rc0(ctx, dst); 2193 } 2194 } 2195 2196 /* srw & srw. */ 2197 static void gen_srw(DisasContext *ctx) 2198 { 2199 TCGv t0, t1; 2200 2201 t0 = tcg_temp_new(); 2202 /* AND rS with a mask that is 0 when rB >= 0x20 */ 2203 #if defined(TARGET_PPC64) 2204 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a); 2205 tcg_gen_sari_tl(t0, t0, 0x3f); 2206 #else 2207 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a); 2208 tcg_gen_sari_tl(t0, t0, 0x1f); 2209 #endif 2210 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 2211 tcg_gen_ext32u_tl(t0, t0); 2212 t1 = tcg_temp_new(); 2213 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f); 2214 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 2215 tcg_temp_free(t1); 2216 tcg_temp_free(t0); 2217 if (unlikely(Rc(ctx->opcode) != 0)) 2218 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2219 } 2220 2221 #if defined(TARGET_PPC64) 2222 /* sld & sld. */ 2223 static void gen_sld(DisasContext *ctx) 2224 { 2225 TCGv t0, t1; 2226 2227 t0 = tcg_temp_new(); 2228 /* AND rS with a mask that is 0 when rB >= 0x40 */ 2229 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39); 2230 tcg_gen_sari_tl(t0, t0, 0x3f); 2231 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 2232 t1 = tcg_temp_new(); 2233 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f); 2234 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 2235 tcg_temp_free(t1); 2236 tcg_temp_free(t0); 2237 if (unlikely(Rc(ctx->opcode) != 0)) 2238 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2239 } 2240 2241 /* srad & srad. 
*/ 2242 static void gen_srad(DisasContext *ctx) 2243 { 2244 gen_helper_srad(cpu_gpr[rA(ctx->opcode)], cpu_env, 2245 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2246 if (unlikely(Rc(ctx->opcode) != 0)) 2247 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2248 } 2249 /* sradi & sradi. */ 2250 static inline void gen_sradi(DisasContext *ctx, int n) 2251 { 2252 int sh = SH(ctx->opcode) + (n << 5); 2253 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 2254 TCGv src = cpu_gpr[rS(ctx->opcode)]; 2255 if (sh == 0) { 2256 tcg_gen_mov_tl(dst, src); 2257 tcg_gen_movi_tl(cpu_ca, 0); 2258 if (is_isa300(ctx)) { 2259 tcg_gen_movi_tl(cpu_ca32, 0); 2260 } 2261 } else { 2262 TCGv t0; 2263 tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1); 2264 t0 = tcg_temp_new(); 2265 tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1); 2266 tcg_gen_and_tl(cpu_ca, cpu_ca, t0); 2267 tcg_temp_free(t0); 2268 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0); 2269 if (is_isa300(ctx)) { 2270 tcg_gen_mov_tl(cpu_ca32, cpu_ca); 2271 } 2272 tcg_gen_sari_tl(dst, src, sh); 2273 } 2274 if (unlikely(Rc(ctx->opcode) != 0)) { 2275 gen_set_Rc0(ctx, dst); 2276 } 2277 } 2278 2279 static void gen_sradi0(DisasContext *ctx) 2280 { 2281 gen_sradi(ctx, 0); 2282 } 2283 2284 static void gen_sradi1(DisasContext *ctx) 2285 { 2286 gen_sradi(ctx, 1); 2287 } 2288 2289 /* extswsli & extswsli. */ 2290 static inline void gen_extswsli(DisasContext *ctx, int n) 2291 { 2292 int sh = SH(ctx->opcode) + (n << 5); 2293 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 2294 TCGv src = cpu_gpr[rS(ctx->opcode)]; 2295 2296 tcg_gen_ext32s_tl(dst, src); 2297 tcg_gen_shli_tl(dst, dst, sh); 2298 if (unlikely(Rc(ctx->opcode) != 0)) { 2299 gen_set_Rc0(ctx, dst); 2300 } 2301 } 2302 2303 static void gen_extswsli0(DisasContext *ctx) 2304 { 2305 gen_extswsli(ctx, 0); 2306 } 2307 2308 static void gen_extswsli1(DisasContext *ctx) 2309 { 2310 gen_extswsli(ctx, 1); 2311 } 2312 2313 /* srd & srd. 
*/ 2314 static void gen_srd(DisasContext *ctx) 2315 { 2316 TCGv t0, t1; 2317 2318 t0 = tcg_temp_new(); 2319 /* AND rS with a mask that is 0 when rB >= 0x40 */ 2320 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39); 2321 tcg_gen_sari_tl(t0, t0, 0x3f); 2322 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 2323 t1 = tcg_temp_new(); 2324 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f); 2325 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 2326 tcg_temp_free(t1); 2327 tcg_temp_free(t0); 2328 if (unlikely(Rc(ctx->opcode) != 0)) 2329 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2330 } 2331 #endif 2332 2333 /*** Addressing modes ***/ 2334 /* Register indirect with immediate index : EA = (rA|0) + SIMM */ 2335 static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA, 2336 target_long maskl) 2337 { 2338 target_long simm = SIMM(ctx->opcode); 2339 2340 simm &= ~maskl; 2341 if (rA(ctx->opcode) == 0) { 2342 if (NARROW_MODE(ctx)) { 2343 simm = (uint32_t)simm; 2344 } 2345 tcg_gen_movi_tl(EA, simm); 2346 } else if (likely(simm != 0)) { 2347 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm); 2348 if (NARROW_MODE(ctx)) { 2349 tcg_gen_ext32u_tl(EA, EA); 2350 } 2351 } else { 2352 if (NARROW_MODE(ctx)) { 2353 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]); 2354 } else { 2355 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]); 2356 } 2357 } 2358 } 2359 2360 static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA) 2361 { 2362 if (rA(ctx->opcode) == 0) { 2363 if (NARROW_MODE(ctx)) { 2364 tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]); 2365 } else { 2366 tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]); 2367 } 2368 } else { 2369 tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2370 if (NARROW_MODE(ctx)) { 2371 tcg_gen_ext32u_tl(EA, EA); 2372 } 2373 } 2374 } 2375 2376 static inline void gen_addr_register(DisasContext *ctx, TCGv EA) 2377 { 2378 if (rA(ctx->opcode) == 0) { 2379 tcg_gen_movi_tl(EA, 0); 2380 } else if (NARROW_MODE(ctx)) { 2381 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]); 2382 } else { 2383 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]); 2384 } 2385 } 2386 2387 static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1, 2388 target_long val) 2389 { 2390 tcg_gen_addi_tl(ret, arg1, val); 2391 if (NARROW_MODE(ctx)) { 2392 tcg_gen_ext32u_tl(ret, ret); 2393 } 2394 } 2395 2396 static inline void gen_check_align(DisasContext *ctx, TCGv EA, int mask) 2397 { 2398 TCGLabel *l1 = gen_new_label(); 2399 TCGv t0 = tcg_temp_new(); 2400 TCGv_i32 t1, t2; 2401 tcg_gen_andi_tl(t0, EA, mask); 2402 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 2403 t1 = tcg_const_i32(POWERPC_EXCP_ALIGN); 2404 t2 = tcg_const_i32(ctx->opcode & 0x03FF0000); 2405 gen_update_nip(ctx, ctx->nip - 4); 2406 gen_helper_raise_exception_err(cpu_env, t1, t2); 2407 tcg_temp_free_i32(t1); 2408 tcg_temp_free_i32(t2); 2409 gen_set_label(l1); 2410 tcg_temp_free(t0); 2411 } 2412 2413 static inline void gen_align_no_le(DisasContext *ctx) 2414 { 2415 gen_exception_err(ctx, POWERPC_EXCP_ALIGN, 2416 (ctx->opcode & 0x03FF0000) | POWERPC_EXCP_ALIGN_LE); 2417 } 2418 2419 /*** Integer load ***/ 2420 #define DEF_MEMOP(op) ((op) | ctx->default_tcg_memop_mask) 2421 #define BSWAP_MEMOP(op) ((op) | (ctx->default_tcg_memop_mask ^ MO_BSWAP)) 2422 2423 #define GEN_QEMU_LOAD_TL(ldop, op) \ 2424 static void glue(gen_qemu_, ldop)(DisasContext *ctx, \ 2425 TCGv val, \ 2426 TCGv addr) \ 2427 { \ 2428 tcg_gen_qemu_ld_tl(val, addr, ctx->mem_idx, op); \ 2429 } 2430 2431 GEN_QEMU_LOAD_TL(ld8u, DEF_MEMOP(MO_UB)) 2432 
GEN_QEMU_LOAD_TL(ld16u, DEF_MEMOP(MO_UW)) 2433 GEN_QEMU_LOAD_TL(ld16s, DEF_MEMOP(MO_SW)) 2434 GEN_QEMU_LOAD_TL(ld32u, DEF_MEMOP(MO_UL)) 2435 GEN_QEMU_LOAD_TL(ld32s, DEF_MEMOP(MO_SL)) 2436 2437 GEN_QEMU_LOAD_TL(ld16ur, BSWAP_MEMOP(MO_UW)) 2438 GEN_QEMU_LOAD_TL(ld32ur, BSWAP_MEMOP(MO_UL)) 2439 2440 #define GEN_QEMU_LOAD_64(ldop, op) \ 2441 static void glue(gen_qemu_, glue(ldop, _i64))(DisasContext *ctx, \ 2442 TCGv_i64 val, \ 2443 TCGv addr) \ 2444 { \ 2445 tcg_gen_qemu_ld_i64(val, addr, ctx->mem_idx, op); \ 2446 } 2447 2448 GEN_QEMU_LOAD_64(ld8u, DEF_MEMOP(MO_UB)) 2449 GEN_QEMU_LOAD_64(ld16u, DEF_MEMOP(MO_UW)) 2450 GEN_QEMU_LOAD_64(ld32u, DEF_MEMOP(MO_UL)) 2451 GEN_QEMU_LOAD_64(ld32s, DEF_MEMOP(MO_SL)) 2452 GEN_QEMU_LOAD_64(ld64, DEF_MEMOP(MO_Q)) 2453 2454 #if defined(TARGET_PPC64) 2455 GEN_QEMU_LOAD_64(ld64ur, BSWAP_MEMOP(MO_Q)) 2456 #endif 2457 2458 #define GEN_QEMU_STORE_TL(stop, op) \ 2459 static void glue(gen_qemu_, stop)(DisasContext *ctx, \ 2460 TCGv val, \ 2461 TCGv addr) \ 2462 { \ 2463 tcg_gen_qemu_st_tl(val, addr, ctx->mem_idx, op); \ 2464 } 2465 2466 GEN_QEMU_STORE_TL(st8, DEF_MEMOP(MO_UB)) 2467 GEN_QEMU_STORE_TL(st16, DEF_MEMOP(MO_UW)) 2468 GEN_QEMU_STORE_TL(st32, DEF_MEMOP(MO_UL)) 2469 2470 GEN_QEMU_STORE_TL(st16r, BSWAP_MEMOP(MO_UW)) 2471 GEN_QEMU_STORE_TL(st32r, BSWAP_MEMOP(MO_UL)) 2472 2473 #define GEN_QEMU_STORE_64(stop, op) \ 2474 static void glue(gen_qemu_, glue(stop, _i64))(DisasContext *ctx, \ 2475 TCGv_i64 val, \ 2476 TCGv addr) \ 2477 { \ 2478 tcg_gen_qemu_st_i64(val, addr, ctx->mem_idx, op); \ 2479 } 2480 2481 GEN_QEMU_STORE_64(st8, DEF_MEMOP(MO_UB)) 2482 GEN_QEMU_STORE_64(st16, DEF_MEMOP(MO_UW)) 2483 GEN_QEMU_STORE_64(st32, DEF_MEMOP(MO_UL)) 2484 GEN_QEMU_STORE_64(st64, DEF_MEMOP(MO_Q)) 2485 2486 #if defined(TARGET_PPC64) 2487 GEN_QEMU_STORE_64(st64r, BSWAP_MEMOP(MO_Q)) 2488 #endif 2489 2490 #define GEN_LD(name, ldop, opc, type) \ 2491 static void glue(gen_, name)(DisasContext *ctx) \ 2492 { \ 2493 TCGv EA; \ 2494 gen_set_access_type(ctx, ACCESS_INT); \ 2495 EA = tcg_temp_new(); \ 2496 gen_addr_imm_index(ctx, EA, 0); \ 2497 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 2498 tcg_temp_free(EA); \ 2499 } 2500 2501 #define GEN_LDU(name, ldop, opc, type) \ 2502 static void glue(gen_, name##u)(DisasContext *ctx) \ 2503 { \ 2504 TCGv EA; \ 2505 if (unlikely(rA(ctx->opcode) == 0 || \ 2506 rA(ctx->opcode) == rD(ctx->opcode))) { \ 2507 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 2508 return; \ 2509 } \ 2510 gen_set_access_type(ctx, ACCESS_INT); \ 2511 EA = tcg_temp_new(); \ 2512 if (type == PPC_64B) \ 2513 gen_addr_imm_index(ctx, EA, 0x03); \ 2514 else \ 2515 gen_addr_imm_index(ctx, EA, 0); \ 2516 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 2517 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 2518 tcg_temp_free(EA); \ 2519 } 2520 2521 #define GEN_LDUX(name, ldop, opc2, opc3, type) \ 2522 static void glue(gen_, name##ux)(DisasContext *ctx) \ 2523 { \ 2524 TCGv EA; \ 2525 if (unlikely(rA(ctx->opcode) == 0 || \ 2526 rA(ctx->opcode) == rD(ctx->opcode))) { \ 2527 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 2528 return; \ 2529 } \ 2530 gen_set_access_type(ctx, ACCESS_INT); \ 2531 EA = tcg_temp_new(); \ 2532 gen_addr_reg_index(ctx, EA); \ 2533 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 2534 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 2535 tcg_temp_free(EA); \ 2536 } 2537 2538 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \ 2539 static void glue(gen_, name##x)(DisasContext *ctx) \ 2540 { \ 2541 TCGv EA; \ 2542 chk; \ 
2543 gen_set_access_type(ctx, ACCESS_INT); \ 2544 EA = tcg_temp_new(); \ 2545 gen_addr_reg_index(ctx, EA); \ 2546 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 2547 tcg_temp_free(EA); \ 2548 } 2549 2550 #define GEN_LDX(name, ldop, opc2, opc3, type) \ 2551 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_NONE) 2552 2553 #define GEN_LDX_HVRM(name, ldop, opc2, opc3, type) \ 2554 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_HVRM) 2555 2556 #define GEN_LDS(name, ldop, op, type) \ 2557 GEN_LD(name, ldop, op | 0x20, type); \ 2558 GEN_LDU(name, ldop, op | 0x21, type); \ 2559 GEN_LDUX(name, ldop, 0x17, op | 0x01, type); \ 2560 GEN_LDX(name, ldop, 0x17, op | 0x00, type) 2561 2562 /* lbz lbzu lbzux lbzx */ 2563 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER); 2564 /* lha lhau lhaux lhax */ 2565 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER); 2566 /* lhz lhzu lhzux lhzx */ 2567 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER); 2568 /* lwz lwzu lwzux lwzx */ 2569 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER); 2570 #if defined(TARGET_PPC64) 2571 /* lwaux */ 2572 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B); 2573 /* lwax */ 2574 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B); 2575 /* ldux */ 2576 GEN_LDUX(ld, ld64_i64, 0x15, 0x01, PPC_64B); 2577 /* ldx */ 2578 GEN_LDX(ld, ld64_i64, 0x15, 0x00, PPC_64B); 2579 2580 /* CI load/store variants */ 2581 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST) 2582 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x15, PPC_CILDST) 2583 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST) 2584 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST) 2585 2586 static void gen_ld(DisasContext *ctx) 2587 { 2588 TCGv EA; 2589 if (Rc(ctx->opcode)) { 2590 if (unlikely(rA(ctx->opcode) == 0 || 2591 rA(ctx->opcode) == rD(ctx->opcode))) { 2592 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2593 return; 2594 } 2595 } 2596 gen_set_access_type(ctx, ACCESS_INT); 2597 EA = tcg_temp_new(); 2598 gen_addr_imm_index(ctx, EA, 0x03); 2599 if (ctx->opcode & 0x02) { 2600 /* lwa (lwau is undefined) */ 2601 gen_qemu_ld32s(ctx, cpu_gpr[rD(ctx->opcode)], EA); 2602 } else { 2603 /* ld - ldu */ 2604 gen_qemu_ld64_i64(ctx, cpu_gpr[rD(ctx->opcode)], EA); 2605 } 2606 if (Rc(ctx->opcode)) 2607 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); 2608 tcg_temp_free(EA); 2609 } 2610 2611 /* lq */ 2612 static void gen_lq(DisasContext *ctx) 2613 { 2614 int ra, rd; 2615 TCGv EA; 2616 2617 /* lq is a legal user mode instruction starting in ISA 2.07 */ 2618 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2619 bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2620 2621 if (!legal_in_user_mode && ctx->pr) { 2622 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); 2623 return; 2624 } 2625 2626 if (!le_is_supported && ctx->le_mode) { 2627 gen_align_no_le(ctx); 2628 return; 2629 } 2630 ra = rA(ctx->opcode); 2631 rd = rD(ctx->opcode); 2632 if (unlikely((rd & 1) || rd == ra)) { 2633 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2634 return; 2635 } 2636 2637 gen_set_access_type(ctx, ACCESS_INT); 2638 EA = tcg_temp_new(); 2639 gen_addr_imm_index(ctx, EA, 0x0F); 2640 2641 /* We only need to swap high and low halves. gen_qemu_ld64_i64 does 2642 necessary 64-bit byteswap already. 
*/ 2643 if (unlikely(ctx->le_mode)) { 2644 gen_qemu_ld64_i64(ctx, cpu_gpr[rd + 1], EA); 2645 gen_addr_add(ctx, EA, EA, 8); 2646 gen_qemu_ld64_i64(ctx, cpu_gpr[rd], EA); 2647 } else { 2648 gen_qemu_ld64_i64(ctx, cpu_gpr[rd], EA); 2649 gen_addr_add(ctx, EA, EA, 8); 2650 gen_qemu_ld64_i64(ctx, cpu_gpr[rd + 1], EA); 2651 } 2652 tcg_temp_free(EA); 2653 } 2654 #endif 2655 2656 /*** Integer store ***/ 2657 #define GEN_ST(name, stop, opc, type) \ 2658 static void glue(gen_, name)(DisasContext *ctx) \ 2659 { \ 2660 TCGv EA; \ 2661 gen_set_access_type(ctx, ACCESS_INT); \ 2662 EA = tcg_temp_new(); \ 2663 gen_addr_imm_index(ctx, EA, 0); \ 2664 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2665 tcg_temp_free(EA); \ 2666 } 2667 2668 #define GEN_STU(name, stop, opc, type) \ 2669 static void glue(gen_, stop##u)(DisasContext *ctx) \ 2670 { \ 2671 TCGv EA; \ 2672 if (unlikely(rA(ctx->opcode) == 0)) { \ 2673 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 2674 return; \ 2675 } \ 2676 gen_set_access_type(ctx, ACCESS_INT); \ 2677 EA = tcg_temp_new(); \ 2678 if (type == PPC_64B) \ 2679 gen_addr_imm_index(ctx, EA, 0x03); \ 2680 else \ 2681 gen_addr_imm_index(ctx, EA, 0); \ 2682 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2683 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 2684 tcg_temp_free(EA); \ 2685 } 2686 2687 #define GEN_STUX(name, stop, opc2, opc3, type) \ 2688 static void glue(gen_, name##ux)(DisasContext *ctx) \ 2689 { \ 2690 TCGv EA; \ 2691 if (unlikely(rA(ctx->opcode) == 0)) { \ 2692 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 2693 return; \ 2694 } \ 2695 gen_set_access_type(ctx, ACCESS_INT); \ 2696 EA = tcg_temp_new(); \ 2697 gen_addr_reg_index(ctx, EA); \ 2698 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2699 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 2700 tcg_temp_free(EA); \ 2701 } 2702 2703 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \ 2704 static void glue(gen_, name##x)(DisasContext *ctx) \ 2705 { \ 2706 TCGv EA; \ 2707 chk; \ 2708 gen_set_access_type(ctx, ACCESS_INT); \ 2709 EA = tcg_temp_new(); \ 2710 gen_addr_reg_index(ctx, EA); \ 2711 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2712 tcg_temp_free(EA); \ 2713 } 2714 #define GEN_STX(name, stop, opc2, opc3, type) \ 2715 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_NONE) 2716 2717 #define GEN_STX_HVRM(name, stop, opc2, opc3, type) \ 2718 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_HVRM) 2719 2720 #define GEN_STS(name, stop, op, type) \ 2721 GEN_ST(name, stop, op | 0x20, type); \ 2722 GEN_STU(name, stop, op | 0x21, type); \ 2723 GEN_STUX(name, stop, 0x17, op | 0x01, type); \ 2724 GEN_STX(name, stop, 0x17, op | 0x00, type) 2725 2726 /* stb stbu stbux stbx */ 2727 GEN_STS(stb, st8, 0x06, PPC_INTEGER); 2728 /* sth sthu sthux sthx */ 2729 GEN_STS(sth, st16, 0x0C, PPC_INTEGER); 2730 /* stw stwu stwux stwx */ 2731 GEN_STS(stw, st32, 0x04, PPC_INTEGER); 2732 #if defined(TARGET_PPC64) 2733 GEN_STUX(std, st64_i64, 0x15, 0x05, PPC_64B); 2734 GEN_STX(std, st64_i64, 0x15, 0x04, PPC_64B); 2735 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST) 2736 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST) 2737 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST) 2738 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST) 2739 2740 static void gen_std(DisasContext *ctx) 2741 { 2742 int rs; 2743 TCGv EA; 2744 2745 rs = rS(ctx->opcode); 2746 if ((ctx->opcode & 0x3) == 0x2) { /* stq */ 2747 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2748 bool 
le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0;

        if (!(ctx->insns_flags & PPC_64BX)) {
            gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        }

        if (!legal_in_user_mode && ctx->pr) {
            gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC);
            return;
        }

        if (!le_is_supported && ctx->le_mode) {
            gen_align_no_le(ctx);
            return;
        }

        if (unlikely(rs & 1)) {
            gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
            return;
        }
        gen_set_access_type(ctx, ACCESS_INT);
        EA = tcg_temp_new();
        gen_addr_imm_index(ctx, EA, 0x03);

        /* We only need to swap high and low halves. gen_qemu_st64_i64 does
           the necessary 64-bit byteswap already. */
        if (unlikely(ctx->le_mode)) {
            gen_qemu_st64_i64(ctx, cpu_gpr[rs + 1], EA);
            gen_addr_add(ctx, EA, EA, 8);
            gen_qemu_st64_i64(ctx, cpu_gpr[rs], EA);
        } else {
            gen_qemu_st64_i64(ctx, cpu_gpr[rs], EA);
            gen_addr_add(ctx, EA, EA, 8);
            gen_qemu_st64_i64(ctx, cpu_gpr[rs + 1], EA);
        }
        tcg_temp_free(EA);
    } else {
        /* std / stdu */
        if (Rc(ctx->opcode)) {
            if (unlikely(rA(ctx->opcode) == 0)) {
                gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
                return;
            }
        }
        gen_set_access_type(ctx, ACCESS_INT);
        EA = tcg_temp_new();
        gen_addr_imm_index(ctx, EA, 0x03);
        gen_qemu_st64_i64(ctx, cpu_gpr[rs], EA);
        if (Rc(ctx->opcode))
            tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
        tcg_temp_free(EA);
    }
}
#endif
/*** Integer load and store with byte reverse ***/

/* lhbrx */
GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER);

/* lwbrx */
GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER);

#if defined(TARGET_PPC64)
/* ldbrx */
GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE);
/* stdbrx */
GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE);
#endif /* TARGET_PPC64 */

/* sthbrx */
GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER);
/* stwbrx */
GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER);

/*** Integer load and store multiple ***/

/* lmw */
static void gen_lmw(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1;

    if (ctx->le_mode) {
        gen_align_no_le(ctx);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    t0 = tcg_temp_new();
    t1 = tcg_const_i32(rD(ctx->opcode));
    gen_addr_imm_index(ctx, t0, 0);
    gen_helper_lmw(cpu_env, t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}

/* stmw */
static void gen_stmw(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1;

    if (ctx->le_mode) {
        gen_align_no_le(ctx);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    t0 = tcg_temp_new();
    t1 = tcg_const_i32(rS(ctx->opcode));
    gen_addr_imm_index(ctx, t0, 0);
    gen_helper_stmw(cpu_env, t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}

/*** Integer load and store strings ***/

/* lswi */
/* The PowerPC32 specification says we must generate an exception if
 * rA is in the range of registers to be loaded.
 * On the other hand, IBM says this is valid, but rA won't be loaded.
 * For now, I'll follow the spec...
2869 */ 2870 static void gen_lswi(DisasContext *ctx) 2871 { 2872 TCGv t0; 2873 TCGv_i32 t1, t2; 2874 int nb = NB(ctx->opcode); 2875 int start = rD(ctx->opcode); 2876 int ra = rA(ctx->opcode); 2877 int nr; 2878 2879 if (ctx->le_mode) { 2880 gen_align_no_le(ctx); 2881 return; 2882 } 2883 if (nb == 0) 2884 nb = 32; 2885 nr = DIV_ROUND_UP(nb, 4); 2886 if (unlikely(lsw_reg_in_range(start, nr, ra))) { 2887 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX); 2888 return; 2889 } 2890 gen_set_access_type(ctx, ACCESS_INT); 2891 t0 = tcg_temp_new(); 2892 gen_addr_register(ctx, t0); 2893 t1 = tcg_const_i32(nb); 2894 t2 = tcg_const_i32(start); 2895 gen_helper_lsw(cpu_env, t0, t1, t2); 2896 tcg_temp_free(t0); 2897 tcg_temp_free_i32(t1); 2898 tcg_temp_free_i32(t2); 2899 } 2900 2901 /* lswx */ 2902 static void gen_lswx(DisasContext *ctx) 2903 { 2904 TCGv t0; 2905 TCGv_i32 t1, t2, t3; 2906 2907 if (ctx->le_mode) { 2908 gen_align_no_le(ctx); 2909 return; 2910 } 2911 gen_set_access_type(ctx, ACCESS_INT); 2912 t0 = tcg_temp_new(); 2913 gen_addr_reg_index(ctx, t0); 2914 t1 = tcg_const_i32(rD(ctx->opcode)); 2915 t2 = tcg_const_i32(rA(ctx->opcode)); 2916 t3 = tcg_const_i32(rB(ctx->opcode)); 2917 gen_helper_lswx(cpu_env, t0, t1, t2, t3); 2918 tcg_temp_free(t0); 2919 tcg_temp_free_i32(t1); 2920 tcg_temp_free_i32(t2); 2921 tcg_temp_free_i32(t3); 2922 } 2923 2924 /* stswi */ 2925 static void gen_stswi(DisasContext *ctx) 2926 { 2927 TCGv t0; 2928 TCGv_i32 t1, t2; 2929 int nb = NB(ctx->opcode); 2930 2931 if (ctx->le_mode) { 2932 gen_align_no_le(ctx); 2933 return; 2934 } 2935 gen_set_access_type(ctx, ACCESS_INT); 2936 t0 = tcg_temp_new(); 2937 gen_addr_register(ctx, t0); 2938 if (nb == 0) 2939 nb = 32; 2940 t1 = tcg_const_i32(nb); 2941 t2 = tcg_const_i32(rS(ctx->opcode)); 2942 gen_helper_stsw(cpu_env, t0, t1, t2); 2943 tcg_temp_free(t0); 2944 tcg_temp_free_i32(t1); 2945 tcg_temp_free_i32(t2); 2946 } 2947 2948 /* stswx */ 2949 static void gen_stswx(DisasContext *ctx) 2950 { 2951 TCGv t0; 2952 TCGv_i32 t1, t2; 2953 2954 if (ctx->le_mode) { 2955 gen_align_no_le(ctx); 2956 return; 2957 } 2958 gen_set_access_type(ctx, ACCESS_INT); 2959 t0 = tcg_temp_new(); 2960 gen_addr_reg_index(ctx, t0); 2961 t1 = tcg_temp_new_i32(); 2962 tcg_gen_trunc_tl_i32(t1, cpu_xer); 2963 tcg_gen_andi_i32(t1, t1, 0x7F); 2964 t2 = tcg_const_i32(rS(ctx->opcode)); 2965 gen_helper_stsw(cpu_env, t0, t1, t2); 2966 tcg_temp_free(t0); 2967 tcg_temp_free_i32(t1); 2968 tcg_temp_free_i32(t2); 2969 } 2970 2971 /*** Memory synchronisation ***/ 2972 /* eieio */ 2973 static void gen_eieio(DisasContext *ctx) 2974 { 2975 tcg_gen_mb(TCG_MO_LD_ST | TCG_BAR_SC); 2976 } 2977 2978 #if !defined(CONFIG_USER_ONLY) 2979 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) 2980 { 2981 TCGv_i32 t; 2982 TCGLabel *l; 2983 2984 if (!ctx->lazy_tlb_flush) { 2985 return; 2986 } 2987 l = gen_new_label(); 2988 t = tcg_temp_new_i32(); 2989 tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, tlb_need_flush)); 2990 tcg_gen_brcondi_i32(TCG_COND_EQ, t, 0, l); 2991 if (global) { 2992 gen_helper_check_tlb_flush_global(cpu_env); 2993 } else { 2994 gen_helper_check_tlb_flush_local(cpu_env); 2995 } 2996 gen_set_label(l); 2997 tcg_temp_free_i32(t); 2998 } 2999 #else 3000 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) { } 3001 #endif 3002 3003 /* isync */ 3004 static void gen_isync(DisasContext *ctx) 3005 { 3006 /* 3007 * We need to check for a pending TLB flush. 
This can only happen in 3008 * kernel mode however so check MSR_PR 3009 */ 3010 if (!ctx->pr) { 3011 gen_check_tlb_flush(ctx, false); 3012 } 3013 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC); 3014 gen_stop_exception(ctx); 3015 } 3016 3017 #define MEMOP_GET_SIZE(x) (1 << ((x) & MO_SIZE)) 3018 3019 #define LARX(name, memop) \ 3020 static void gen_##name(DisasContext *ctx) \ 3021 { \ 3022 TCGv t0; \ 3023 TCGv gpr = cpu_gpr[rD(ctx->opcode)]; \ 3024 int len = MEMOP_GET_SIZE(memop); \ 3025 gen_set_access_type(ctx, ACCESS_RES); \ 3026 t0 = tcg_temp_local_new(); \ 3027 gen_addr_reg_index(ctx, t0); \ 3028 if ((len) > 1) { \ 3029 gen_check_align(ctx, t0, (len)-1); \ 3030 } \ 3031 tcg_gen_qemu_ld_tl(gpr, t0, ctx->mem_idx, memop); \ 3032 tcg_gen_mov_tl(cpu_reserve, t0); \ 3033 tcg_gen_mov_tl(cpu_reserve_val, gpr); \ 3034 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_LDAQ); \ 3035 tcg_temp_free(t0); \ 3036 } 3037 3038 /* lwarx */ 3039 LARX(lbarx, DEF_MEMOP(MO_UB)) 3040 LARX(lharx, DEF_MEMOP(MO_UW)) 3041 LARX(lwarx, DEF_MEMOP(MO_UL)) 3042 3043 #define LD_ATOMIC(name, memop, tp, op, eop) \ 3044 static void gen_##name(DisasContext *ctx) \ 3045 { \ 3046 int len = MEMOP_GET_SIZE(memop); \ 3047 uint32_t gpr_FC = FC(ctx->opcode); \ 3048 TCGv EA = tcg_temp_local_new(); \ 3049 TCGv_##tp t0, t1; \ 3050 \ 3051 gen_addr_register(ctx, EA); \ 3052 if (len > 1) { \ 3053 gen_check_align(ctx, EA, len - 1); \ 3054 } \ 3055 t0 = tcg_temp_new_##tp(); \ 3056 t1 = tcg_temp_new_##tp(); \ 3057 tcg_gen_##op(t0, cpu_gpr[rD(ctx->opcode) + 1]); \ 3058 \ 3059 switch (gpr_FC) { \ 3060 case 0: /* Fetch and add */ \ 3061 tcg_gen_atomic_fetch_add_##tp(t1, EA, t0, ctx->mem_idx, memop); \ 3062 break; \ 3063 case 1: /* Fetch and xor */ \ 3064 tcg_gen_atomic_fetch_xor_##tp(t1, EA, t0, ctx->mem_idx, memop); \ 3065 break; \ 3066 case 2: /* Fetch and or */ \ 3067 tcg_gen_atomic_fetch_or_##tp(t1, EA, t0, ctx->mem_idx, memop); \ 3068 break; \ 3069 case 3: /* Fetch and 'and' */ \ 3070 tcg_gen_atomic_fetch_and_##tp(t1, EA, t0, ctx->mem_idx, memop); \ 3071 break; \ 3072 case 8: /* Swap */ \ 3073 tcg_gen_atomic_xchg_##tp(t1, EA, t0, ctx->mem_idx, memop); \ 3074 break; \ 3075 case 4: /* Fetch and max unsigned */ \ 3076 case 5: /* Fetch and max signed */ \ 3077 case 6: /* Fetch and min unsigned */ \ 3078 case 7: /* Fetch and min signed */ \ 3079 case 16: /* compare and swap not equal */ \ 3080 case 24: /* Fetch and increment bounded */ \ 3081 case 25: /* Fetch and increment equal */ \ 3082 case 28: /* Fetch and decrement bounded */ \ 3083 gen_invalid(ctx); \ 3084 break; \ 3085 default: \ 3086 /* invoke data storage error handler */ \ 3087 gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL); \ 3088 } \ 3089 tcg_gen_##eop(cpu_gpr[rD(ctx->opcode)], t1); \ 3090 tcg_temp_free_##tp(t0); \ 3091 tcg_temp_free_##tp(t1); \ 3092 tcg_temp_free(EA); \ 3093 } 3094 3095 LD_ATOMIC(lwat, DEF_MEMOP(MO_UL), i32, trunc_tl_i32, extu_i32_tl) 3096 #if defined(TARGET_PPC64) 3097 LD_ATOMIC(ldat, DEF_MEMOP(MO_Q), i64, mov_i64, mov_i64) 3098 #endif 3099 3100 #define ST_ATOMIC(name, memop, tp, op) \ 3101 static void gen_##name(DisasContext *ctx) \ 3102 { \ 3103 int len = MEMOP_GET_SIZE(memop); \ 3104 uint32_t gpr_FC = FC(ctx->opcode); \ 3105 TCGv EA = tcg_temp_local_new(); \ 3106 TCGv_##tp t0, t1; \ 3107 \ 3108 gen_addr_register(ctx, EA); \ 3109 if (len > 1) { \ 3110 gen_check_align(ctx, EA, len - 1); \ 3111 } \ 3112 t0 = tcg_temp_new_##tp(); \ 3113 t1 = tcg_temp_new_##tp(); \ 3114 tcg_gen_##op(t0, cpu_gpr[rD(ctx->opcode) + 1]); \ 3115 \ 3116 switch (gpr_FC) { \ 3117 case 0: /* add and 
Store */ \ 3118 tcg_gen_atomic_add_fetch_##tp(t1, EA, t0, ctx->mem_idx, memop); \ 3119 break; \ 3120 case 1: /* xor and Store */ \ 3121 tcg_gen_atomic_xor_fetch_##tp(t1, EA, t0, ctx->mem_idx, memop); \ 3122 break; \ 3123 case 2: /* Or and Store */ \ 3124 tcg_gen_atomic_or_fetch_##tp(t1, EA, t0, ctx->mem_idx, memop); \ 3125 break; \ 3126 case 3: /* 'and' and Store */ \ 3127 tcg_gen_atomic_and_fetch_##tp(t1, EA, t0, ctx->mem_idx, memop); \ 3128 break; \ 3129 case 4: /* Store max unsigned */ \ 3130 case 5: /* Store max signed */ \ 3131 case 6: /* Store min unsigned */ \ 3132 case 7: /* Store min signed */ \ 3133 case 24: /* Store twin */ \ 3134 gen_invalid(ctx); \ 3135 break; \ 3136 default: \ 3137 /* invoke data storage error handler */ \ 3138 gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL); \ 3139 } \ 3140 tcg_temp_free_##tp(t0); \ 3141 tcg_temp_free_##tp(t1); \ 3142 tcg_temp_free(EA); \ 3143 } 3144 3145 ST_ATOMIC(stwat, DEF_MEMOP(MO_UL), i32, trunc_tl_i32) 3146 #if defined(TARGET_PPC64) 3147 ST_ATOMIC(stdat, DEF_MEMOP(MO_Q), i64, mov_i64) 3148 #endif 3149 3150 #if defined(CONFIG_USER_ONLY) 3151 static void gen_conditional_store(DisasContext *ctx, TCGv EA, 3152 int reg, int memop) 3153 { 3154 TCGv t0 = tcg_temp_new(); 3155 3156 tcg_gen_st_tl(EA, cpu_env, offsetof(CPUPPCState, reserve_ea)); 3157 tcg_gen_movi_tl(t0, (MEMOP_GET_SIZE(memop) << 5) | reg); 3158 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, reserve_info)); 3159 tcg_temp_free(t0); 3160 gen_exception_err(ctx, POWERPC_EXCP_STCX, 0); 3161 } 3162 #else 3163 static void gen_conditional_store(DisasContext *ctx, TCGv EA, 3164 int reg, int memop) 3165 { 3166 TCGLabel *l1 = gen_new_label(); 3167 TCGLabel *l2 = gen_new_label(); 3168 TCGv t0; 3169 3170 tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, l1); 3171 3172 t0 = tcg_temp_new(); 3173 tcg_gen_atomic_cmpxchg_tl(t0, cpu_reserve, cpu_reserve_val, 3174 cpu_gpr[reg], ctx->mem_idx, 3175 DEF_MEMOP(memop) | MO_ALIGN); 3176 tcg_gen_setcond_tl(TCG_COND_EQ, t0, t0, cpu_reserve_val); 3177 tcg_gen_shli_tl(t0, t0, CRF_EQ_BIT); 3178 tcg_gen_or_tl(t0, t0, cpu_so); 3179 tcg_gen_trunc_tl_i32(cpu_crf[0], t0); 3180 tcg_temp_free(t0); 3181 tcg_gen_br(l2); 3182 3183 gen_set_label(l1); 3184 3185 /* Address mismatch implies failure. But we still need to provide the 3186 memory barrier semantics of the instruction. */ 3187 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_STRL); 3188 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 3189 3190 gen_set_label(l2); 3191 tcg_gen_movi_tl(cpu_reserve, -1); 3192 } 3193 #endif 3194 3195 #define STCX(name, memop) \ 3196 static void gen_##name(DisasContext *ctx) \ 3197 { \ 3198 TCGv t0; \ 3199 int len = MEMOP_GET_SIZE(memop); \ 3200 gen_set_access_type(ctx, ACCESS_RES); \ 3201 t0 = tcg_temp_local_new(); \ 3202 gen_addr_reg_index(ctx, t0); \ 3203 if (len > 1) { \ 3204 gen_check_align(ctx, t0, (len) - 1); \ 3205 } \ 3206 gen_conditional_store(ctx, t0, rS(ctx->opcode), memop); \ 3207 tcg_temp_free(t0); \ 3208 } 3209 3210 STCX(stbcx_, DEF_MEMOP(MO_UB)) 3211 STCX(sthcx_, DEF_MEMOP(MO_UW)) 3212 STCX(stwcx_, DEF_MEMOP(MO_UL)) 3213 3214 #if defined(TARGET_PPC64) 3215 /* ldarx */ 3216 LARX(ldarx, DEF_MEMOP(MO_Q)) 3217 /* stdcx. 
 */
STCX(stdcx_, DEF_MEMOP(MO_Q))

/* lqarx */
static void gen_lqarx(DisasContext *ctx)
{
    TCGv EA;
    int rd = rD(ctx->opcode);
    TCGv gpr1, gpr2;

    if (unlikely((rd & 1) || (rd == rA(ctx->opcode)) ||
                 (rd == rB(ctx->opcode)))) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }

    gen_set_access_type(ctx, ACCESS_RES);
    EA = tcg_temp_local_new();
    gen_addr_reg_index(ctx, EA);
    gen_check_align(ctx, EA, 15);
    if (unlikely(ctx->le_mode)) {
        gpr1 = cpu_gpr[rd + 1];
        gpr2 = cpu_gpr[rd];
    } else {
        gpr1 = cpu_gpr[rd];
        gpr2 = cpu_gpr[rd + 1];
    }
    tcg_gen_qemu_ld_i64(gpr1, EA, ctx->mem_idx, DEF_MEMOP(MO_Q));
    tcg_gen_mov_tl(cpu_reserve, EA);
    gen_addr_add(ctx, EA, EA, 8);
    tcg_gen_qemu_ld_i64(gpr2, EA, ctx->mem_idx, DEF_MEMOP(MO_Q));

    tcg_gen_st_tl(gpr1, cpu_env, offsetof(CPUPPCState, reserve_val));
    tcg_gen_st_tl(gpr2, cpu_env, offsetof(CPUPPCState, reserve_val2));
    tcg_temp_free(EA);
}

/* stqcx. */
static void gen_stqcx_(DisasContext *ctx)
{
    TCGv EA;
    int reg = rS(ctx->opcode);
    int len = 16;
#if !defined(CONFIG_USER_ONLY)
    TCGLabel *l1;
    TCGv gpr1, gpr2;
#endif

    if (unlikely((rD(ctx->opcode) & 1))) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }
    gen_set_access_type(ctx, ACCESS_RES);
    EA = tcg_temp_local_new();
    gen_addr_reg_index(ctx, EA);
    if (len > 1) {
        gen_check_align(ctx, EA, (len) - 1);
    }

#if defined(CONFIG_USER_ONLY)
    gen_conditional_store(ctx, EA, reg, 16);
#else
    tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
    l1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, l1);
    tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ);

    if (unlikely(ctx->le_mode)) {
        gpr1 = cpu_gpr[reg + 1];
        gpr2 = cpu_gpr[reg];
    } else {
        gpr1 = cpu_gpr[reg];
        gpr2 = cpu_gpr[reg + 1];
    }
    tcg_gen_qemu_st_tl(gpr1, EA, ctx->mem_idx, DEF_MEMOP(MO_Q));
    gen_addr_add(ctx, EA, EA, 8);
    tcg_gen_qemu_st_tl(gpr2, EA, ctx->mem_idx, DEF_MEMOP(MO_Q));

    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_reserve, -1);
#endif
    tcg_temp_free(EA);
}

#endif /* defined(TARGET_PPC64) */

/* sync */
static void gen_sync(DisasContext *ctx)
{
    uint32_t l = (ctx->opcode >> 21) & 3;

    /*
     * We may need to check for a pending TLB flush.
     *
     * We do this on ptesync (l == 2) on ppc64 and any sync on ppc32.
     *
     * Additionally, this can only happen in kernel mode, so we
     * check MSR_PR as well.
3315 */ 3316 if (((l == 2) || !(ctx->insns_flags & PPC_64B)) && !ctx->pr) { 3317 gen_check_tlb_flush(ctx, true); 3318 } 3319 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC); 3320 } 3321 3322 /* wait */ 3323 static void gen_wait(DisasContext *ctx) 3324 { 3325 TCGv_i32 t0 = tcg_const_i32(1); 3326 tcg_gen_st_i32(t0, cpu_env, 3327 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted)); 3328 tcg_temp_free_i32(t0); 3329 /* Stop translation, as the CPU is supposed to sleep from now */ 3330 gen_exception_nip(ctx, EXCP_HLT, ctx->nip); 3331 } 3332 3333 #if defined(TARGET_PPC64) 3334 static void gen_doze(DisasContext *ctx) 3335 { 3336 #if defined(CONFIG_USER_ONLY) 3337 GEN_PRIV; 3338 #else 3339 TCGv_i32 t; 3340 3341 CHK_HV; 3342 t = tcg_const_i32(PPC_PM_DOZE); 3343 gen_helper_pminsn(cpu_env, t); 3344 tcg_temp_free_i32(t); 3345 gen_stop_exception(ctx); 3346 #endif /* defined(CONFIG_USER_ONLY) */ 3347 } 3348 3349 static void gen_nap(DisasContext *ctx) 3350 { 3351 #if defined(CONFIG_USER_ONLY) 3352 GEN_PRIV; 3353 #else 3354 TCGv_i32 t; 3355 3356 CHK_HV; 3357 t = tcg_const_i32(PPC_PM_NAP); 3358 gen_helper_pminsn(cpu_env, t); 3359 tcg_temp_free_i32(t); 3360 gen_stop_exception(ctx); 3361 #endif /* defined(CONFIG_USER_ONLY) */ 3362 } 3363 3364 static void gen_stop(DisasContext *ctx) 3365 { 3366 gen_nap(ctx); 3367 } 3368 3369 static void gen_sleep(DisasContext *ctx) 3370 { 3371 #if defined(CONFIG_USER_ONLY) 3372 GEN_PRIV; 3373 #else 3374 TCGv_i32 t; 3375 3376 CHK_HV; 3377 t = tcg_const_i32(PPC_PM_SLEEP); 3378 gen_helper_pminsn(cpu_env, t); 3379 tcg_temp_free_i32(t); 3380 gen_stop_exception(ctx); 3381 #endif /* defined(CONFIG_USER_ONLY) */ 3382 } 3383 3384 static void gen_rvwinkle(DisasContext *ctx) 3385 { 3386 #if defined(CONFIG_USER_ONLY) 3387 GEN_PRIV; 3388 #else 3389 TCGv_i32 t; 3390 3391 CHK_HV; 3392 t = tcg_const_i32(PPC_PM_RVWINKLE); 3393 gen_helper_pminsn(cpu_env, t); 3394 tcg_temp_free_i32(t); 3395 gen_stop_exception(ctx); 3396 #endif /* defined(CONFIG_USER_ONLY) */ 3397 } 3398 #endif /* #if defined(TARGET_PPC64) */ 3399 3400 static inline void gen_update_cfar(DisasContext *ctx, target_ulong nip) 3401 { 3402 #if defined(TARGET_PPC64) 3403 if (ctx->has_cfar) 3404 tcg_gen_movi_tl(cpu_cfar, nip); 3405 #endif 3406 } 3407 3408 static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest) 3409 { 3410 if (unlikely(ctx->singlestep_enabled)) { 3411 return false; 3412 } 3413 3414 #ifndef CONFIG_USER_ONLY 3415 return (ctx->tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK); 3416 #else 3417 return true; 3418 #endif 3419 } 3420 3421 /*** Branch ***/ 3422 static void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest) 3423 { 3424 if (NARROW_MODE(ctx)) { 3425 dest = (uint32_t) dest; 3426 } 3427 if (use_goto_tb(ctx, dest)) { 3428 tcg_gen_goto_tb(n); 3429 tcg_gen_movi_tl(cpu_nip, dest & ~3); 3430 tcg_gen_exit_tb((uintptr_t)ctx->tb + n); 3431 } else { 3432 tcg_gen_movi_tl(cpu_nip, dest & ~3); 3433 if (unlikely(ctx->singlestep_enabled)) { 3434 if ((ctx->singlestep_enabled & 3435 (CPU_BRANCH_STEP | CPU_SINGLE_STEP)) && 3436 (ctx->exception == POWERPC_EXCP_BRANCH || 3437 ctx->exception == POWERPC_EXCP_TRACE)) { 3438 gen_exception_nip(ctx, POWERPC_EXCP_TRACE, dest); 3439 } 3440 if (ctx->singlestep_enabled & GDBSTUB_SINGLE_STEP) { 3441 gen_debug_exception(ctx); 3442 } 3443 } 3444 tcg_gen_lookup_and_goto_ptr(); 3445 } 3446 } 3447 3448 static inline void gen_setlr(DisasContext *ctx, target_ulong nip) 3449 { 3450 if (NARROW_MODE(ctx)) { 3451 nip = (uint32_t)nip; 3452 } 3453 tcg_gen_movi_tl(cpu_lr, nip); 3454 } 3455 
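/*
 * Sign extension of the 26-bit branch displacement used by gen_b() below:
 * the field's sign bit sits at bit 25, so
 *
 *     li = (li ^ 0x02000000) - 0x02000000;
 *
 * flips the sign bit and then subtracts it back out, leaving non-negative
 * values unchanged while propagating the sign into the upper bits of
 * negative ones.  For example, a displacement of -4 is encoded as
 * 0x03FFFFFC:
 *
 *     (0x03FFFFFC ^ 0x02000000) - 0x02000000 = 0x01FFFFFC - 0x02000000
 *                                            = (target_ulong)-4
 *
 * Since ctx->nip has already been advanced past the branch instruction by
 * the time the handler runs (see the ctx->nip - 4 adjustments used with
 * gen_update_cfar above), the PC-relative target is ctx->nip - 4 + li.
 */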
3456 /* b ba bl bla */ 3457 static void gen_b(DisasContext *ctx) 3458 { 3459 target_ulong li, target; 3460 3461 ctx->exception = POWERPC_EXCP_BRANCH; 3462 /* sign extend LI */ 3463 li = LI(ctx->opcode); 3464 li = (li ^ 0x02000000) - 0x02000000; 3465 if (likely(AA(ctx->opcode) == 0)) { 3466 target = ctx->nip + li - 4; 3467 } else { 3468 target = li; 3469 } 3470 if (LK(ctx->opcode)) { 3471 gen_setlr(ctx, ctx->nip); 3472 } 3473 gen_update_cfar(ctx, ctx->nip - 4); 3474 gen_goto_tb(ctx, 0, target); 3475 } 3476 3477 #define BCOND_IM 0 3478 #define BCOND_LR 1 3479 #define BCOND_CTR 2 3480 #define BCOND_TAR 3 3481 3482 static void gen_bcond(DisasContext *ctx, int type) 3483 { 3484 uint32_t bo = BO(ctx->opcode); 3485 TCGLabel *l1; 3486 TCGv target; 3487 3488 ctx->exception = POWERPC_EXCP_BRANCH; 3489 if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) { 3490 target = tcg_temp_local_new(); 3491 if (type == BCOND_CTR) 3492 tcg_gen_mov_tl(target, cpu_ctr); 3493 else if (type == BCOND_TAR) 3494 gen_load_spr(target, SPR_TAR); 3495 else 3496 tcg_gen_mov_tl(target, cpu_lr); 3497 } else { 3498 target = NULL; 3499 } 3500 if (LK(ctx->opcode)) 3501 gen_setlr(ctx, ctx->nip); 3502 l1 = gen_new_label(); 3503 if ((bo & 0x4) == 0) { 3504 /* Decrement and test CTR */ 3505 TCGv temp = tcg_temp_new(); 3506 if (unlikely(type == BCOND_CTR)) { 3507 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3508 return; 3509 } 3510 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1); 3511 if (NARROW_MODE(ctx)) { 3512 tcg_gen_ext32u_tl(temp, cpu_ctr); 3513 } else { 3514 tcg_gen_mov_tl(temp, cpu_ctr); 3515 } 3516 if (bo & 0x2) { 3517 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1); 3518 } else { 3519 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1); 3520 } 3521 tcg_temp_free(temp); 3522 } 3523 if ((bo & 0x10) == 0) { 3524 /* Test CR */ 3525 uint32_t bi = BI(ctx->opcode); 3526 uint32_t mask = 0x08 >> (bi & 0x03); 3527 TCGv_i32 temp = tcg_temp_new_i32(); 3528 3529 if (bo & 0x8) { 3530 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask); 3531 tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1); 3532 } else { 3533 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask); 3534 tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1); 3535 } 3536 tcg_temp_free_i32(temp); 3537 } 3538 gen_update_cfar(ctx, ctx->nip - 4); 3539 if (type == BCOND_IM) { 3540 target_ulong li = (target_long)((int16_t)(BD(ctx->opcode))); 3541 if (likely(AA(ctx->opcode) == 0)) { 3542 gen_goto_tb(ctx, 0, ctx->nip + li - 4); 3543 } else { 3544 gen_goto_tb(ctx, 0, li); 3545 } 3546 } else { 3547 if (NARROW_MODE(ctx)) { 3548 tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3); 3549 } else { 3550 tcg_gen_andi_tl(cpu_nip, target, ~3); 3551 } 3552 tcg_gen_lookup_and_goto_ptr(); 3553 tcg_temp_free(target); 3554 } 3555 if ((bo & 0x14) != 0x14) { 3556 gen_set_label(l1); 3557 gen_goto_tb(ctx, 1, ctx->nip); 3558 } 3559 } 3560 3561 static void gen_bc(DisasContext *ctx) 3562 { 3563 gen_bcond(ctx, BCOND_IM); 3564 } 3565 3566 static void gen_bcctr(DisasContext *ctx) 3567 { 3568 gen_bcond(ctx, BCOND_CTR); 3569 } 3570 3571 static void gen_bclr(DisasContext *ctx) 3572 { 3573 gen_bcond(ctx, BCOND_LR); 3574 } 3575 3576 static void gen_bctar(DisasContext *ctx) 3577 { 3578 gen_bcond(ctx, BCOND_TAR); 3579 } 3580 3581 /*** Condition register logical ***/ 3582 #define GEN_CRLOGIC(name, tcg_op, opc) \ 3583 static void glue(gen_, name)(DisasContext *ctx) \ 3584 { \ 3585 uint8_t bitmask; \ 3586 int sh; \ 3587 TCGv_i32 t0, t1; \ 3588 sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03); \ 3589 t0 = tcg_temp_new_i32(); \ 3590 if (sh 
> 0) \ 3591 tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh); \ 3592 else if (sh < 0) \ 3593 tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh); \ 3594 else \ 3595 tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]); \ 3596 t1 = tcg_temp_new_i32(); \ 3597 sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03); \ 3598 if (sh > 0) \ 3599 tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh); \ 3600 else if (sh < 0) \ 3601 tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh); \ 3602 else \ 3603 tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]); \ 3604 tcg_op(t0, t0, t1); \ 3605 bitmask = 0x08 >> (crbD(ctx->opcode) & 0x03); \ 3606 tcg_gen_andi_i32(t0, t0, bitmask); \ 3607 tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask); \ 3608 tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1); \ 3609 tcg_temp_free_i32(t0); \ 3610 tcg_temp_free_i32(t1); \ 3611 } 3612 3613 /* crand */ 3614 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08); 3615 /* crandc */ 3616 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04); 3617 /* creqv */ 3618 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09); 3619 /* crnand */ 3620 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07); 3621 /* crnor */ 3622 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01); 3623 /* cror */ 3624 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E); 3625 /* crorc */ 3626 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D); 3627 /* crxor */ 3628 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06); 3629 3630 /* mcrf */ 3631 static void gen_mcrf(DisasContext *ctx) 3632 { 3633 tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]); 3634 } 3635 3636 /*** System linkage ***/ 3637 3638 /* rfi (supervisor only) */ 3639 static void gen_rfi(DisasContext *ctx) 3640 { 3641 #if defined(CONFIG_USER_ONLY) 3642 GEN_PRIV; 3643 #else 3644 /* This instruction doesn't exist anymore on 64-bit server 3645 * processors compliant with arch 2.x 3646 */ 3647 if (ctx->insns_flags & PPC_SEGMENT_64B) { 3648 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3649 return; 3650 } 3651 /* Restore CPU state */ 3652 CHK_SV; 3653 gen_update_cfar(ctx, ctx->nip - 4); 3654 gen_helper_rfi(cpu_env); 3655 gen_sync_exception(ctx); 3656 #endif 3657 } 3658 3659 #if defined(TARGET_PPC64) 3660 static void gen_rfid(DisasContext *ctx) 3661 { 3662 #if defined(CONFIG_USER_ONLY) 3663 GEN_PRIV; 3664 #else 3665 /* Restore CPU state */ 3666 CHK_SV; 3667 gen_update_cfar(ctx, ctx->nip - 4); 3668 gen_helper_rfid(cpu_env); 3669 gen_sync_exception(ctx); 3670 #endif 3671 } 3672 3673 static void gen_hrfid(DisasContext *ctx) 3674 { 3675 #if defined(CONFIG_USER_ONLY) 3676 GEN_PRIV; 3677 #else 3678 /* Restore CPU state */ 3679 CHK_HV; 3680 gen_helper_hrfid(cpu_env); 3681 gen_sync_exception(ctx); 3682 #endif 3683 } 3684 #endif 3685 3686 /* sc */ 3687 #if defined(CONFIG_USER_ONLY) 3688 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER 3689 #else 3690 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL 3691 #endif 3692 static void gen_sc(DisasContext *ctx) 3693 { 3694 uint32_t lev; 3695 3696 lev = (ctx->opcode >> 5) & 0x7F; 3697 gen_exception_err(ctx, POWERPC_SYSCALL, lev); 3698 } 3699 3700 /*** Trap ***/ 3701 3702 /* Check for unconditional traps (always or never) */ 3703 static bool check_unconditional_trap(DisasContext *ctx) 3704 { 3705 /* Trap never */ 3706 if (TO(ctx->opcode) == 0) { 3707 return true; 3708 } 3709 /* Trap always */ 3710 if (TO(ctx->opcode) == 31) { 3711 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_TRAP); 3712 return true; 3713 } 3714 return false; 3715 } 3716 3717 /* tw */ 3718 static void 
gen_tw(DisasContext *ctx) 3719 { 3720 TCGv_i32 t0; 3721 3722 if (check_unconditional_trap(ctx)) { 3723 return; 3724 } 3725 t0 = tcg_const_i32(TO(ctx->opcode)); 3726 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 3727 t0); 3728 tcg_temp_free_i32(t0); 3729 } 3730 3731 /* twi */ 3732 static void gen_twi(DisasContext *ctx) 3733 { 3734 TCGv t0; 3735 TCGv_i32 t1; 3736 3737 if (check_unconditional_trap(ctx)) { 3738 return; 3739 } 3740 t0 = tcg_const_tl(SIMM(ctx->opcode)); 3741 t1 = tcg_const_i32(TO(ctx->opcode)); 3742 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1); 3743 tcg_temp_free(t0); 3744 tcg_temp_free_i32(t1); 3745 } 3746 3747 #if defined(TARGET_PPC64) 3748 /* td */ 3749 static void gen_td(DisasContext *ctx) 3750 { 3751 TCGv_i32 t0; 3752 3753 if (check_unconditional_trap(ctx)) { 3754 return; 3755 } 3756 t0 = tcg_const_i32(TO(ctx->opcode)); 3757 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 3758 t0); 3759 tcg_temp_free_i32(t0); 3760 } 3761 3762 /* tdi */ 3763 static void gen_tdi(DisasContext *ctx) 3764 { 3765 TCGv t0; 3766 TCGv_i32 t1; 3767 3768 if (check_unconditional_trap(ctx)) { 3769 return; 3770 } 3771 t0 = tcg_const_tl(SIMM(ctx->opcode)); 3772 t1 = tcg_const_i32(TO(ctx->opcode)); 3773 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1); 3774 tcg_temp_free(t0); 3775 tcg_temp_free_i32(t1); 3776 } 3777 #endif 3778 3779 /*** Processor control ***/ 3780 3781 static void gen_read_xer(DisasContext *ctx, TCGv dst) 3782 { 3783 TCGv t0 = tcg_temp_new(); 3784 TCGv t1 = tcg_temp_new(); 3785 TCGv t2 = tcg_temp_new(); 3786 tcg_gen_mov_tl(dst, cpu_xer); 3787 tcg_gen_shli_tl(t0, cpu_so, XER_SO); 3788 tcg_gen_shli_tl(t1, cpu_ov, XER_OV); 3789 tcg_gen_shli_tl(t2, cpu_ca, XER_CA); 3790 tcg_gen_or_tl(t0, t0, t1); 3791 tcg_gen_or_tl(dst, dst, t2); 3792 tcg_gen_or_tl(dst, dst, t0); 3793 if (is_isa300(ctx)) { 3794 tcg_gen_shli_tl(t0, cpu_ov32, XER_OV32); 3795 tcg_gen_or_tl(dst, dst, t0); 3796 tcg_gen_shli_tl(t0, cpu_ca32, XER_CA32); 3797 tcg_gen_or_tl(dst, dst, t0); 3798 } 3799 tcg_temp_free(t0); 3800 tcg_temp_free(t1); 3801 tcg_temp_free(t2); 3802 } 3803 3804 static void gen_write_xer(TCGv src) 3805 { 3806 /* Write all flags, while reading back check for isa300 */ 3807 tcg_gen_andi_tl(cpu_xer, src, 3808 ~((1u << XER_SO) | 3809 (1u << XER_OV) | (1u << XER_OV32) | 3810 (1u << XER_CA) | (1u << XER_CA32))); 3811 tcg_gen_extract_tl(cpu_ov32, src, XER_OV32, 1); 3812 tcg_gen_extract_tl(cpu_ca32, src, XER_CA32, 1); 3813 tcg_gen_extract_tl(cpu_so, src, XER_SO, 1); 3814 tcg_gen_extract_tl(cpu_ov, src, XER_OV, 1); 3815 tcg_gen_extract_tl(cpu_ca, src, XER_CA, 1); 3816 } 3817 3818 /* mcrxr */ 3819 static void gen_mcrxr(DisasContext *ctx) 3820 { 3821 TCGv_i32 t0 = tcg_temp_new_i32(); 3822 TCGv_i32 t1 = tcg_temp_new_i32(); 3823 TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)]; 3824 3825 tcg_gen_trunc_tl_i32(t0, cpu_so); 3826 tcg_gen_trunc_tl_i32(t1, cpu_ov); 3827 tcg_gen_trunc_tl_i32(dst, cpu_ca); 3828 tcg_gen_shli_i32(t0, t0, 3); 3829 tcg_gen_shli_i32(t1, t1, 2); 3830 tcg_gen_shli_i32(dst, dst, 1); 3831 tcg_gen_or_i32(dst, dst, t0); 3832 tcg_gen_or_i32(dst, dst, t1); 3833 tcg_temp_free_i32(t0); 3834 tcg_temp_free_i32(t1); 3835 3836 tcg_gen_movi_tl(cpu_so, 0); 3837 tcg_gen_movi_tl(cpu_ov, 0); 3838 tcg_gen_movi_tl(cpu_ca, 0); 3839 } 3840 3841 #ifdef TARGET_PPC64 3842 /* mcrxrx */ 3843 static void gen_mcrxrx(DisasContext *ctx) 3844 { 3845 TCGv t0 = tcg_temp_new(); 3846 TCGv t1 = tcg_temp_new(); 3847 TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)]; 3848 3849 /* copy 
OV and OV32 */ 3850 tcg_gen_shli_tl(t0, cpu_ov, 1); 3851 tcg_gen_or_tl(t0, t0, cpu_ov32); 3852 tcg_gen_shli_tl(t0, t0, 2); 3853 /* copy CA and CA32 */ 3854 tcg_gen_shli_tl(t1, cpu_ca, 1); 3855 tcg_gen_or_tl(t1, t1, cpu_ca32); 3856 tcg_gen_or_tl(t0, t0, t1); 3857 tcg_gen_trunc_tl_i32(dst, t0); 3858 tcg_temp_free(t0); 3859 tcg_temp_free(t1); 3860 } 3861 #endif 3862 3863 /* mfcr mfocrf */ 3864 static void gen_mfcr(DisasContext *ctx) 3865 { 3866 uint32_t crm, crn; 3867 3868 if (likely(ctx->opcode & 0x00100000)) { 3869 crm = CRM(ctx->opcode); 3870 if (likely(crm && ((crm & (crm - 1)) == 0))) { 3871 crn = ctz32 (crm); 3872 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]); 3873 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], 3874 cpu_gpr[rD(ctx->opcode)], crn * 4); 3875 } 3876 } else { 3877 TCGv_i32 t0 = tcg_temp_new_i32(); 3878 tcg_gen_mov_i32(t0, cpu_crf[0]); 3879 tcg_gen_shli_i32(t0, t0, 4); 3880 tcg_gen_or_i32(t0, t0, cpu_crf[1]); 3881 tcg_gen_shli_i32(t0, t0, 4); 3882 tcg_gen_or_i32(t0, t0, cpu_crf[2]); 3883 tcg_gen_shli_i32(t0, t0, 4); 3884 tcg_gen_or_i32(t0, t0, cpu_crf[3]); 3885 tcg_gen_shli_i32(t0, t0, 4); 3886 tcg_gen_or_i32(t0, t0, cpu_crf[4]); 3887 tcg_gen_shli_i32(t0, t0, 4); 3888 tcg_gen_or_i32(t0, t0, cpu_crf[5]); 3889 tcg_gen_shli_i32(t0, t0, 4); 3890 tcg_gen_or_i32(t0, t0, cpu_crf[6]); 3891 tcg_gen_shli_i32(t0, t0, 4); 3892 tcg_gen_or_i32(t0, t0, cpu_crf[7]); 3893 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0); 3894 tcg_temp_free_i32(t0); 3895 } 3896 } 3897 3898 /* mfmsr */ 3899 static void gen_mfmsr(DisasContext *ctx) 3900 { 3901 CHK_SV; 3902 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr); 3903 } 3904 3905 static void spr_noaccess(DisasContext *ctx, int gprn, int sprn) 3906 { 3907 #if 0 3908 sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5); 3909 printf("ERROR: try to access SPR %d !\n", sprn); 3910 #endif 3911 } 3912 #define SPR_NOACCESS (&spr_noaccess) 3913 3914 /* mfspr */ 3915 static inline void gen_op_mfspr(DisasContext *ctx) 3916 { 3917 void (*read_cb)(DisasContext *ctx, int gprn, int sprn); 3918 uint32_t sprn = SPR(ctx->opcode); 3919 3920 #if defined(CONFIG_USER_ONLY) 3921 read_cb = ctx->spr_cb[sprn].uea_read; 3922 #else 3923 if (ctx->pr) { 3924 read_cb = ctx->spr_cb[sprn].uea_read; 3925 } else if (ctx->hv) { 3926 read_cb = ctx->spr_cb[sprn].hea_read; 3927 } else { 3928 read_cb = ctx->spr_cb[sprn].oea_read; 3929 } 3930 #endif 3931 if (likely(read_cb != NULL)) { 3932 if (likely(read_cb != SPR_NOACCESS)) { 3933 (*read_cb)(ctx, rD(ctx->opcode), sprn); 3934 } else { 3935 /* Privilege exception */ 3936 /* This is a hack to avoid warnings when running Linux: 3937 * this OS breaks the PowerPC virtualisation model, 3938 * allowing userland application to read the PVR 3939 */ 3940 if (sprn != SPR_PVR) { 3941 fprintf(stderr, "Trying to read privileged spr %d (0x%03x) at " 3942 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4); 3943 if (qemu_log_separate()) { 3944 qemu_log("Trying to read privileged spr %d (0x%03x) at " 3945 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4); 3946 } 3947 } 3948 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG); 3949 } 3950 } else { 3951 /* ISA 2.07 defines these as no-ops */ 3952 if ((ctx->insns_flags2 & PPC2_ISA207S) && 3953 (sprn >= 808 && sprn <= 811)) { 3954 /* This is a nop */ 3955 return; 3956 } 3957 /* Not defined */ 3958 fprintf(stderr, "Trying to read invalid spr %d (0x%03x) at " 3959 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4); 3960 if (qemu_log_separate()) { 3961 qemu_log("Trying to read invalid spr %d (0x%03x) at " 3962 
TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4); 3963 } 3964 3965 /* The behaviour depends on MSR:PR and SPR# bit 0x10, 3966 * it can generate a priv, a hv emu or a no-op 3967 */ 3968 if (sprn & 0x10) { 3969 if (ctx->pr) { 3970 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 3971 } 3972 } else { 3973 if (ctx->pr || sprn == 0 || sprn == 4 || sprn == 5 || sprn == 6) { 3974 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 3975 } 3976 } 3977 } 3978 } 3979 3980 static void gen_mfspr(DisasContext *ctx) 3981 { 3982 gen_op_mfspr(ctx); 3983 } 3984 3985 /* mftb */ 3986 static void gen_mftb(DisasContext *ctx) 3987 { 3988 gen_op_mfspr(ctx); 3989 } 3990 3991 /* mtcrf mtocrf*/ 3992 static void gen_mtcrf(DisasContext *ctx) 3993 { 3994 uint32_t crm, crn; 3995 3996 crm = CRM(ctx->opcode); 3997 if (likely((ctx->opcode & 0x00100000))) { 3998 if (crm && ((crm & (crm - 1)) == 0)) { 3999 TCGv_i32 temp = tcg_temp_new_i32(); 4000 crn = ctz32 (crm); 4001 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]); 4002 tcg_gen_shri_i32(temp, temp, crn * 4); 4003 tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf); 4004 tcg_temp_free_i32(temp); 4005 } 4006 } else { 4007 TCGv_i32 temp = tcg_temp_new_i32(); 4008 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]); 4009 for (crn = 0 ; crn < 8 ; crn++) { 4010 if (crm & (1 << crn)) { 4011 tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4); 4012 tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf); 4013 } 4014 } 4015 tcg_temp_free_i32(temp); 4016 } 4017 } 4018 4019 /* mtmsr */ 4020 #if defined(TARGET_PPC64) 4021 static void gen_mtmsrd(DisasContext *ctx) 4022 { 4023 CHK_SV; 4024 4025 #if !defined(CONFIG_USER_ONLY) 4026 if (ctx->opcode & 0x00010000) { 4027 /* Special form that does not need any synchronisation */ 4028 TCGv t0 = tcg_temp_new(); 4029 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE)); 4030 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE))); 4031 tcg_gen_or_tl(cpu_msr, cpu_msr, t0); 4032 tcg_temp_free(t0); 4033 } else { 4034 /* XXX: we need to update nip before the store 4035 * if we enter power saving mode, we will exit the loop 4036 * directly from ppc_store_msr 4037 */ 4038 gen_update_nip(ctx, ctx->nip); 4039 gen_helper_store_msr(cpu_env, cpu_gpr[rS(ctx->opcode)]); 4040 /* Must stop the translation as machine state (may have) changed */ 4041 /* Note that mtmsr is not always defined as context-synchronizing */ 4042 gen_stop_exception(ctx); 4043 } 4044 #endif /* !defined(CONFIG_USER_ONLY) */ 4045 } 4046 #endif /* defined(TARGET_PPC64) */ 4047 4048 static void gen_mtmsr(DisasContext *ctx) 4049 { 4050 CHK_SV; 4051 4052 #if !defined(CONFIG_USER_ONLY) 4053 if (ctx->opcode & 0x00010000) { 4054 /* Special form that does not need any synchronisation */ 4055 TCGv t0 = tcg_temp_new(); 4056 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE)); 4057 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE))); 4058 tcg_gen_or_tl(cpu_msr, cpu_msr, t0); 4059 tcg_temp_free(t0); 4060 } else { 4061 TCGv msr = tcg_temp_new(); 4062 4063 /* XXX: we need to update nip before the store 4064 * if we enter power saving mode, we will exit the loop 4065 * directly from ppc_store_msr 4066 */ 4067 gen_update_nip(ctx, ctx->nip); 4068 #if defined(TARGET_PPC64) 4069 tcg_gen_deposit_tl(msr, cpu_msr, cpu_gpr[rS(ctx->opcode)], 0, 32); 4070 #else 4071 tcg_gen_mov_tl(msr, cpu_gpr[rS(ctx->opcode)]); 4072 #endif 4073 gen_helper_store_msr(cpu_env, msr); 4074 tcg_temp_free(msr); 4075 /* Must stop the 
translation as machine state (may have) changed */ 4076 /* Note that mtmsr is not always defined as context-synchronizing */ 4077 gen_stop_exception(ctx); 4078 } 4079 #endif 4080 } 4081 4082 /* mtspr */ 4083 static void gen_mtspr(DisasContext *ctx) 4084 { 4085 void (*write_cb)(DisasContext *ctx, int sprn, int gprn); 4086 uint32_t sprn = SPR(ctx->opcode); 4087 4088 #if defined(CONFIG_USER_ONLY) 4089 write_cb = ctx->spr_cb[sprn].uea_write; 4090 #else 4091 if (ctx->pr) { 4092 write_cb = ctx->spr_cb[sprn].uea_write; 4093 } else if (ctx->hv) { 4094 write_cb = ctx->spr_cb[sprn].hea_write; 4095 } else { 4096 write_cb = ctx->spr_cb[sprn].oea_write; 4097 } 4098 #endif 4099 if (likely(write_cb != NULL)) { 4100 if (likely(write_cb != SPR_NOACCESS)) { 4101 (*write_cb)(ctx, sprn, rS(ctx->opcode)); 4102 } else { 4103 /* Privilege exception */ 4104 fprintf(stderr, "Trying to write privileged spr %d (0x%03x) at " 4105 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4); 4106 if (qemu_log_separate()) { 4107 qemu_log("Trying to write privileged spr %d (0x%03x) at " 4108 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4); 4109 } 4110 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG); 4111 } 4112 } else { 4113 /* ISA 2.07 defines these as no-ops */ 4114 if ((ctx->insns_flags2 & PPC2_ISA207S) && 4115 (sprn >= 808 && sprn <= 811)) { 4116 /* This is a nop */ 4117 return; 4118 } 4119 4120 /* Not defined */ 4121 if (qemu_log_separate()) { 4122 qemu_log("Trying to write invalid spr %d (0x%03x) at " 4123 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4); 4124 } 4125 fprintf(stderr, "Trying to write invalid spr %d (0x%03x) at " 4126 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4); 4127 4128 4129 /* The behaviour depends on MSR:PR and SPR# bit 0x10, 4130 * it can generate a priv, a hv emu or a no-op 4131 */ 4132 if (sprn & 0x10) { 4133 if (ctx->pr) { 4134 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 4135 } 4136 } else { 4137 if (ctx->pr || sprn == 0) { 4138 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 4139 } 4140 } 4141 } 4142 } 4143 4144 #if defined(TARGET_PPC64) 4145 /* setb */ 4146 static void gen_setb(DisasContext *ctx) 4147 { 4148 TCGv_i32 t0 = tcg_temp_new_i32(); 4149 TCGv_i32 t8 = tcg_temp_new_i32(); 4150 TCGv_i32 tm1 = tcg_temp_new_i32(); 4151 int crf = crfS(ctx->opcode); 4152 4153 tcg_gen_setcondi_i32(TCG_COND_GEU, t0, cpu_crf[crf], 4); 4154 tcg_gen_movi_i32(t8, 8); 4155 tcg_gen_movi_i32(tm1, -1); 4156 tcg_gen_movcond_i32(TCG_COND_GEU, t0, cpu_crf[crf], t8, tm1, t0); 4157 tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0); 4158 4159 tcg_temp_free_i32(t0); 4160 tcg_temp_free_i32(t8); 4161 tcg_temp_free_i32(tm1); 4162 } 4163 #endif 4164 4165 /*** Cache management ***/ 4166 4167 /* dcbf */ 4168 static void gen_dcbf(DisasContext *ctx) 4169 { 4170 /* XXX: specification says this is treated as a load by the MMU */ 4171 TCGv t0; 4172 gen_set_access_type(ctx, ACCESS_CACHE); 4173 t0 = tcg_temp_new(); 4174 gen_addr_reg_index(ctx, t0); 4175 gen_qemu_ld8u(ctx, t0, t0); 4176 tcg_temp_free(t0); 4177 } 4178 4179 /* dcbi (Supervisor only) */ 4180 static void gen_dcbi(DisasContext *ctx) 4181 { 4182 #if defined(CONFIG_USER_ONLY) 4183 GEN_PRIV; 4184 #else 4185 TCGv EA, val; 4186 4187 CHK_SV; 4188 EA = tcg_temp_new(); 4189 gen_set_access_type(ctx, ACCESS_CACHE); 4190 gen_addr_reg_index(ctx, EA); 4191 val = tcg_temp_new(); 4192 /* XXX: specification says this should be treated as a store by the MMU */ 4193 gen_qemu_ld8u(ctx, val, EA); 4194 gen_qemu_st8(ctx, val, EA); 4195 tcg_temp_free(val); 4196 tcg_temp_free(EA); 4197 #endif /* 
defined(CONFIG_USER_ONLY) */
}

/* dcbst */
static void gen_dcbst(DisasContext *ctx)
{
    /* XXX: specification says this is treated as a load by the MMU */
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_CACHE);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_qemu_ld8u(ctx, t0, t0);
    tcg_temp_free(t0);
}

/* dcbt */
static void gen_dcbt(DisasContext *ctx)
{
    /* interpreted as no-op */
    /* XXX: specification says this is treated as a load by the MMU
     * but does not generate any exception
     */
}

/* dcbtst */
static void gen_dcbtst(DisasContext *ctx)
{
    /* interpreted as no-op */
    /* XXX: specification says this is treated as a load by the MMU
     * but does not generate any exception
     */
}

/* dcbtls */
static void gen_dcbtls(DisasContext *ctx)
{
    /* Always fails locking the cache */
    TCGv t0 = tcg_temp_new();
    gen_load_spr(t0, SPR_Exxx_L1CSR0);
    tcg_gen_ori_tl(t0, t0, L1CSR0_CUL);
    gen_store_spr(SPR_Exxx_L1CSR0, t0);
    tcg_temp_free(t0);
}

/* dcbz */
static void gen_dcbz(DisasContext *ctx)
{
    TCGv tcgv_addr;
    TCGv_i32 tcgv_op;

    gen_set_access_type(ctx, ACCESS_CACHE);
    tcgv_addr = tcg_temp_new();
    tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000);
    gen_addr_reg_index(ctx, tcgv_addr);
    gen_helper_dcbz(cpu_env, tcgv_addr, tcgv_op);
    tcg_temp_free(tcgv_addr);
    tcg_temp_free_i32(tcgv_op);
}

/* dst / dstt */
static void gen_dst(DisasContext *ctx)
{
    if (rA(ctx->opcode) == 0) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
    } else {
        /* interpreted as no-op */
    }
}

/* dstst / dststt */
static void gen_dstst(DisasContext *ctx)
{
    if (rA(ctx->opcode) == 0) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
    } else {
        /* interpreted as no-op */
    }
}

/* dss / dssall */
static void gen_dss(DisasContext *ctx)
{
    /* interpreted as no-op */
}

/* icbi */
static void gen_icbi(DisasContext *ctx)
{
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_CACHE);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_icbi(cpu_env, t0);
    tcg_temp_free(t0);
}

/* Optional: */
/* dcba */
static void gen_dcba(DisasContext *ctx)
{
    /* interpreted as no-op */
    /* XXX: specification says this is treated as a store by the MMU
     * but does not generate any exception
     */
}

/*** Segment register manipulation ***/
/* Supervisor only: */

/* mfsr */
static void gen_mfsr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_const_tl(SR(ctx->opcode));
    gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mfsrin */
static void gen_mfsrin(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
    gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mtsr */
static void gen_mtsr(DisasContext *ctx)
{
#if
defined(CONFIG_USER_ONLY) 4342 GEN_PRIV; 4343 #else 4344 TCGv t0; 4345 4346 CHK_SV; 4347 t0 = tcg_const_tl(SR(ctx->opcode)); 4348 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 4349 tcg_temp_free(t0); 4350 #endif /* defined(CONFIG_USER_ONLY) */ 4351 } 4352 4353 /* mtsrin */ 4354 static void gen_mtsrin(DisasContext *ctx) 4355 { 4356 #if defined(CONFIG_USER_ONLY) 4357 GEN_PRIV; 4358 #else 4359 TCGv t0; 4360 CHK_SV; 4361 4362 t0 = tcg_temp_new(); 4363 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 4364 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rD(ctx->opcode)]); 4365 tcg_temp_free(t0); 4366 #endif /* defined(CONFIG_USER_ONLY) */ 4367 } 4368 4369 #if defined(TARGET_PPC64) 4370 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */ 4371 4372 /* mfsr */ 4373 static void gen_mfsr_64b(DisasContext *ctx) 4374 { 4375 #if defined(CONFIG_USER_ONLY) 4376 GEN_PRIV; 4377 #else 4378 TCGv t0; 4379 4380 CHK_SV; 4381 t0 = tcg_const_tl(SR(ctx->opcode)); 4382 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 4383 tcg_temp_free(t0); 4384 #endif /* defined(CONFIG_USER_ONLY) */ 4385 } 4386 4387 /* mfsrin */ 4388 static void gen_mfsrin_64b(DisasContext *ctx) 4389 { 4390 #if defined(CONFIG_USER_ONLY) 4391 GEN_PRIV; 4392 #else 4393 TCGv t0; 4394 4395 CHK_SV; 4396 t0 = tcg_temp_new(); 4397 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 4398 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 4399 tcg_temp_free(t0); 4400 #endif /* defined(CONFIG_USER_ONLY) */ 4401 } 4402 4403 /* mtsr */ 4404 static void gen_mtsr_64b(DisasContext *ctx) 4405 { 4406 #if defined(CONFIG_USER_ONLY) 4407 GEN_PRIV; 4408 #else 4409 TCGv t0; 4410 4411 CHK_SV; 4412 t0 = tcg_const_tl(SR(ctx->opcode)); 4413 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 4414 tcg_temp_free(t0); 4415 #endif /* defined(CONFIG_USER_ONLY) */ 4416 } 4417 4418 /* mtsrin */ 4419 static void gen_mtsrin_64b(DisasContext *ctx) 4420 { 4421 #if defined(CONFIG_USER_ONLY) 4422 GEN_PRIV; 4423 #else 4424 TCGv t0; 4425 4426 CHK_SV; 4427 t0 = tcg_temp_new(); 4428 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 4429 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 4430 tcg_temp_free(t0); 4431 #endif /* defined(CONFIG_USER_ONLY) */ 4432 } 4433 4434 /* slbmte */ 4435 static void gen_slbmte(DisasContext *ctx) 4436 { 4437 #if defined(CONFIG_USER_ONLY) 4438 GEN_PRIV; 4439 #else 4440 CHK_SV; 4441 4442 gen_helper_store_slb(cpu_env, cpu_gpr[rB(ctx->opcode)], 4443 cpu_gpr[rS(ctx->opcode)]); 4444 #endif /* defined(CONFIG_USER_ONLY) */ 4445 } 4446 4447 static void gen_slbmfee(DisasContext *ctx) 4448 { 4449 #if defined(CONFIG_USER_ONLY) 4450 GEN_PRIV; 4451 #else 4452 CHK_SV; 4453 4454 gen_helper_load_slb_esid(cpu_gpr[rS(ctx->opcode)], cpu_env, 4455 cpu_gpr[rB(ctx->opcode)]); 4456 #endif /* defined(CONFIG_USER_ONLY) */ 4457 } 4458 4459 static void gen_slbmfev(DisasContext *ctx) 4460 { 4461 #if defined(CONFIG_USER_ONLY) 4462 GEN_PRIV; 4463 #else 4464 CHK_SV; 4465 4466 gen_helper_load_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env, 4467 cpu_gpr[rB(ctx->opcode)]); 4468 #endif /* defined(CONFIG_USER_ONLY) */ 4469 } 4470 4471 static void gen_slbfee_(DisasContext *ctx) 4472 { 4473 #if defined(CONFIG_USER_ONLY) 4474 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); 4475 #else 4476 TCGLabel *l1, *l2; 4477 4478 if (unlikely(ctx->pr)) { 4479 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); 4480 return; 4481 } 4482 gen_helper_find_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env, 4483 cpu_gpr[rB(ctx->opcode)]); 4484 l1 
= gen_new_label();
    l2 = gen_new_label();
    tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rS(ctx->opcode)], -1, l1);
    tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_gpr[rS(ctx->opcode)], 0);
    gen_set_label(l2);
#endif
}
#endif /* defined(TARGET_PPC64) */

/*** Lookaside buffer management ***/
/* Optional & supervisor only: */

/* tlbia */
static void gen_tlbia(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_HV;

    gen_helper_tlbia(cpu_env);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbiel */
static void gen_tlbiel(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbie */
static void gen_tlbie(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv_i32 t1;

    if (ctx->gtse) {
        CHK_SV; /* If gtse is set then tlbie is supervisor privileged */
    } else {
        CHK_HV; /* Else hypervisor privileged */
    }

    if (NARROW_MODE(ctx)) {
        TCGv t0 = tcg_temp_new();
        tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
        gen_helper_tlbie(cpu_env, t0);
        tcg_temp_free(t0);
    } else {
        gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
    }
    t1 = tcg_temp_new_i32();
    tcg_gen_ld_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
    tcg_gen_ori_i32(t1, t1, TLB_NEED_GLOBAL_FLUSH);
    tcg_gen_st_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
    tcg_temp_free_i32(t1);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbsync */
static void gen_tlbsync(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_HV;

    /* BookS does both ptesync and tlbsync, so tlbsync is a no-op for server */
    if (ctx->insns_flags & PPC_BOOKE) {
        gen_check_tlb_flush(ctx, true);
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

#if defined(TARGET_PPC64)
/* slbia */
static void gen_slbia(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    gen_helper_slbia(cpu_env);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* slbie */
static void gen_slbie(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    gen_helper_slbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* slbieg */
static void gen_slbieg(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    gen_helper_slbieg(cpu_env, cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* slbsync */
static void gen_slbsync(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_check_tlb_flush(ctx, true);
#endif /* defined(CONFIG_USER_ONLY) */
}

#endif /* defined(TARGET_PPC64) */

/*** External control ***/
/* Optional: */

/* eciwx */
static void gen_eciwx(DisasContext *ctx)
{
    TCGv t0;
    /* Should check EAR[E] !
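     * External access is architecturally permitted only when EAR[E] is set;
     * the EAR[RID] field selects the device targeted by eciwx/ecowx.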
*/ 4627 gen_set_access_type(ctx, ACCESS_EXT); 4628 t0 = tcg_temp_new(); 4629 gen_addr_reg_index(ctx, t0); 4630 gen_check_align(ctx, t0, 0x03); 4631 gen_qemu_ld32u(ctx, cpu_gpr[rD(ctx->opcode)], t0); 4632 tcg_temp_free(t0); 4633 } 4634 4635 /* ecowx */ 4636 static void gen_ecowx(DisasContext *ctx) 4637 { 4638 TCGv t0; 4639 /* Should check EAR[E] ! */ 4640 gen_set_access_type(ctx, ACCESS_EXT); 4641 t0 = tcg_temp_new(); 4642 gen_addr_reg_index(ctx, t0); 4643 gen_check_align(ctx, t0, 0x03); 4644 gen_qemu_st32(ctx, cpu_gpr[rD(ctx->opcode)], t0); 4645 tcg_temp_free(t0); 4646 } 4647 4648 /* PowerPC 601 specific instructions */ 4649 4650 /* abs - abs. */ 4651 static void gen_abs(DisasContext *ctx) 4652 { 4653 TCGLabel *l1 = gen_new_label(); 4654 TCGLabel *l2 = gen_new_label(); 4655 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l1); 4656 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4657 tcg_gen_br(l2); 4658 gen_set_label(l1); 4659 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4660 gen_set_label(l2); 4661 if (unlikely(Rc(ctx->opcode) != 0)) 4662 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4663 } 4664 4665 /* abso - abso. */ 4666 static void gen_abso(DisasContext *ctx) 4667 { 4668 TCGLabel *l1 = gen_new_label(); 4669 TCGLabel *l2 = gen_new_label(); 4670 TCGLabel *l3 = gen_new_label(); 4671 /* Start with XER OV disabled, the most likely case */ 4672 tcg_gen_movi_tl(cpu_ov, 0); 4673 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l2); 4674 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[rA(ctx->opcode)], 0x80000000, l1); 4675 tcg_gen_movi_tl(cpu_ov, 1); 4676 tcg_gen_movi_tl(cpu_so, 1); 4677 tcg_gen_br(l2); 4678 gen_set_label(l1); 4679 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4680 tcg_gen_br(l3); 4681 gen_set_label(l2); 4682 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4683 gen_set_label(l3); 4684 if (unlikely(Rc(ctx->opcode) != 0)) 4685 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4686 } 4687 4688 /* clcs */ 4689 static void gen_clcs(DisasContext *ctx) 4690 { 4691 TCGv_i32 t0 = tcg_const_i32(rA(ctx->opcode)); 4692 gen_helper_clcs(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 4693 tcg_temp_free_i32(t0); 4694 /* Rc=1 sets CR0 to an undefined state */ 4695 } 4696 4697 /* div - div. */ 4698 static void gen_div(DisasContext *ctx) 4699 { 4700 gen_helper_div(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 4701 cpu_gpr[rB(ctx->opcode)]); 4702 if (unlikely(Rc(ctx->opcode) != 0)) 4703 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4704 } 4705 4706 /* divo - divo. */ 4707 static void gen_divo(DisasContext *ctx) 4708 { 4709 gen_helper_divo(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 4710 cpu_gpr[rB(ctx->opcode)]); 4711 if (unlikely(Rc(ctx->opcode) != 0)) 4712 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4713 } 4714 4715 /* divs - divs. */ 4716 static void gen_divs(DisasContext *ctx) 4717 { 4718 gen_helper_divs(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 4719 cpu_gpr[rB(ctx->opcode)]); 4720 if (unlikely(Rc(ctx->opcode) != 0)) 4721 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4722 } 4723 4724 /* divso - divso. */ 4725 static void gen_divso(DisasContext *ctx) 4726 { 4727 gen_helper_divso(cpu_gpr[rD(ctx->opcode)], cpu_env, 4728 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 4729 if (unlikely(Rc(ctx->opcode) != 0)) 4730 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4731 } 4732 4733 /* doz - doz. 
*/ 4734 static void gen_doz(DisasContext *ctx) 4735 { 4736 TCGLabel *l1 = gen_new_label(); 4737 TCGLabel *l2 = gen_new_label(); 4738 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1); 4739 tcg_gen_sub_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4740 tcg_gen_br(l2); 4741 gen_set_label(l1); 4742 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 4743 gen_set_label(l2); 4744 if (unlikely(Rc(ctx->opcode) != 0)) 4745 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4746 } 4747 4748 /* dozo - dozo. */ 4749 static void gen_dozo(DisasContext *ctx) 4750 { 4751 TCGLabel *l1 = gen_new_label(); 4752 TCGLabel *l2 = gen_new_label(); 4753 TCGv t0 = tcg_temp_new(); 4754 TCGv t1 = tcg_temp_new(); 4755 TCGv t2 = tcg_temp_new(); 4756 /* Start with XER OV disabled, the most likely case */ 4757 tcg_gen_movi_tl(cpu_ov, 0); 4758 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1); 4759 tcg_gen_sub_tl(t0, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4760 tcg_gen_xor_tl(t1, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4761 tcg_gen_xor_tl(t2, cpu_gpr[rA(ctx->opcode)], t0); 4762 tcg_gen_andc_tl(t1, t1, t2); 4763 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0); 4764 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2); 4765 tcg_gen_movi_tl(cpu_ov, 1); 4766 tcg_gen_movi_tl(cpu_so, 1); 4767 tcg_gen_br(l2); 4768 gen_set_label(l1); 4769 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 4770 gen_set_label(l2); 4771 tcg_temp_free(t0); 4772 tcg_temp_free(t1); 4773 tcg_temp_free(t2); 4774 if (unlikely(Rc(ctx->opcode) != 0)) 4775 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4776 } 4777 4778 /* dozi */ 4779 static void gen_dozi(DisasContext *ctx) 4780 { 4781 target_long simm = SIMM(ctx->opcode); 4782 TCGLabel *l1 = gen_new_label(); 4783 TCGLabel *l2 = gen_new_label(); 4784 tcg_gen_brcondi_tl(TCG_COND_LT, cpu_gpr[rA(ctx->opcode)], simm, l1); 4785 tcg_gen_subfi_tl(cpu_gpr[rD(ctx->opcode)], simm, cpu_gpr[rA(ctx->opcode)]); 4786 tcg_gen_br(l2); 4787 gen_set_label(l1); 4788 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 4789 gen_set_label(l2); 4790 if (unlikely(Rc(ctx->opcode) != 0)) 4791 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4792 } 4793 4794 /* lscbx - lscbx. */ 4795 static void gen_lscbx(DisasContext *ctx) 4796 { 4797 TCGv t0 = tcg_temp_new(); 4798 TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode)); 4799 TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode)); 4800 TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode)); 4801 4802 gen_addr_reg_index(ctx, t0); 4803 gen_helper_lscbx(t0, cpu_env, t0, t1, t2, t3); 4804 tcg_temp_free_i32(t1); 4805 tcg_temp_free_i32(t2); 4806 tcg_temp_free_i32(t3); 4807 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F); 4808 tcg_gen_or_tl(cpu_xer, cpu_xer, t0); 4809 if (unlikely(Rc(ctx->opcode) != 0)) 4810 gen_set_Rc0(ctx, t0); 4811 tcg_temp_free(t0); 4812 } 4813 4814 /* maskg - maskg. 
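   POWER mask generate: rA receives a mask of ones whose start and stop bit
   positions are taken from the low five bits of rS and rB.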
*/ 4815 static void gen_maskg(DisasContext *ctx) 4816 { 4817 TCGLabel *l1 = gen_new_label(); 4818 TCGv t0 = tcg_temp_new(); 4819 TCGv t1 = tcg_temp_new(); 4820 TCGv t2 = tcg_temp_new(); 4821 TCGv t3 = tcg_temp_new(); 4822 tcg_gen_movi_tl(t3, 0xFFFFFFFF); 4823 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 4824 tcg_gen_andi_tl(t1, cpu_gpr[rS(ctx->opcode)], 0x1F); 4825 tcg_gen_addi_tl(t2, t0, 1); 4826 tcg_gen_shr_tl(t2, t3, t2); 4827 tcg_gen_shr_tl(t3, t3, t1); 4828 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], t2, t3); 4829 tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1); 4830 tcg_gen_neg_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4831 gen_set_label(l1); 4832 tcg_temp_free(t0); 4833 tcg_temp_free(t1); 4834 tcg_temp_free(t2); 4835 tcg_temp_free(t3); 4836 if (unlikely(Rc(ctx->opcode) != 0)) 4837 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 4838 } 4839 4840 /* maskir - maskir. */ 4841 static void gen_maskir(DisasContext *ctx) 4842 { 4843 TCGv t0 = tcg_temp_new(); 4844 TCGv t1 = tcg_temp_new(); 4845 tcg_gen_and_tl(t0, cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 4846 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 4847 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 4848 tcg_temp_free(t0); 4849 tcg_temp_free(t1); 4850 if (unlikely(Rc(ctx->opcode) != 0)) 4851 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 4852 } 4853 4854 /* mul - mul. */ 4855 static void gen_mul(DisasContext *ctx) 4856 { 4857 TCGv_i64 t0 = tcg_temp_new_i64(); 4858 TCGv_i64 t1 = tcg_temp_new_i64(); 4859 TCGv t2 = tcg_temp_new(); 4860 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]); 4861 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]); 4862 tcg_gen_mul_i64(t0, t0, t1); 4863 tcg_gen_trunc_i64_tl(t2, t0); 4864 gen_store_spr(SPR_MQ, t2); 4865 tcg_gen_shri_i64(t1, t0, 32); 4866 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1); 4867 tcg_temp_free_i64(t0); 4868 tcg_temp_free_i64(t1); 4869 tcg_temp_free(t2); 4870 if (unlikely(Rc(ctx->opcode) != 0)) 4871 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4872 } 4873 4874 /* mulo - mulo. */ 4875 static void gen_mulo(DisasContext *ctx) 4876 { 4877 TCGLabel *l1 = gen_new_label(); 4878 TCGv_i64 t0 = tcg_temp_new_i64(); 4879 TCGv_i64 t1 = tcg_temp_new_i64(); 4880 TCGv t2 = tcg_temp_new(); 4881 /* Start with XER OV disabled, the most likely case */ 4882 tcg_gen_movi_tl(cpu_ov, 0); 4883 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]); 4884 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]); 4885 tcg_gen_mul_i64(t0, t0, t1); 4886 tcg_gen_trunc_i64_tl(t2, t0); 4887 gen_store_spr(SPR_MQ, t2); 4888 tcg_gen_shri_i64(t1, t0, 32); 4889 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1); 4890 tcg_gen_ext32s_i64(t1, t0); 4891 tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1); 4892 tcg_gen_movi_tl(cpu_ov, 1); 4893 tcg_gen_movi_tl(cpu_so, 1); 4894 gen_set_label(l1); 4895 tcg_temp_free_i64(t0); 4896 tcg_temp_free_i64(t1); 4897 tcg_temp_free(t2); 4898 if (unlikely(Rc(ctx->opcode) != 0)) 4899 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4900 } 4901 4902 /* nabs - nabs. 
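   Negative absolute value: rD = -|rA|, which can never overflow.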
*/ 4903 static void gen_nabs(DisasContext *ctx) 4904 { 4905 TCGLabel *l1 = gen_new_label(); 4906 TCGLabel *l2 = gen_new_label(); 4907 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1); 4908 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4909 tcg_gen_br(l2); 4910 gen_set_label(l1); 4911 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4912 gen_set_label(l2); 4913 if (unlikely(Rc(ctx->opcode) != 0)) 4914 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4915 } 4916 4917 /* nabso - nabso. */ 4918 static void gen_nabso(DisasContext *ctx) 4919 { 4920 TCGLabel *l1 = gen_new_label(); 4921 TCGLabel *l2 = gen_new_label(); 4922 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1); 4923 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4924 tcg_gen_br(l2); 4925 gen_set_label(l1); 4926 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4927 gen_set_label(l2); 4928 /* nabs never overflows */ 4929 tcg_gen_movi_tl(cpu_ov, 0); 4930 if (unlikely(Rc(ctx->opcode) != 0)) 4931 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4932 } 4933 4934 /* rlmi - rlmi. */ 4935 static void gen_rlmi(DisasContext *ctx) 4936 { 4937 uint32_t mb = MB(ctx->opcode); 4938 uint32_t me = ME(ctx->opcode); 4939 TCGv t0 = tcg_temp_new(); 4940 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 4941 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 4942 tcg_gen_andi_tl(t0, t0, MASK(mb, me)); 4943 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~MASK(mb, me)); 4944 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], t0); 4945 tcg_temp_free(t0); 4946 if (unlikely(Rc(ctx->opcode) != 0)) 4947 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 4948 } 4949 4950 /* rrib - rrib. */ 4951 static void gen_rrib(DisasContext *ctx) 4952 { 4953 TCGv t0 = tcg_temp_new(); 4954 TCGv t1 = tcg_temp_new(); 4955 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 4956 tcg_gen_movi_tl(t1, 0x80000000); 4957 tcg_gen_shr_tl(t1, t1, t0); 4958 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 4959 tcg_gen_and_tl(t0, t0, t1); 4960 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], t1); 4961 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 4962 tcg_temp_free(t0); 4963 tcg_temp_free(t1); 4964 if (unlikely(Rc(ctx->opcode) != 0)) 4965 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 4966 } 4967 4968 /* sle - sle. */ 4969 static void gen_sle(DisasContext *ctx) 4970 { 4971 TCGv t0 = tcg_temp_new(); 4972 TCGv t1 = tcg_temp_new(); 4973 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 4974 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 4975 tcg_gen_subfi_tl(t1, 32, t1); 4976 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 4977 tcg_gen_or_tl(t1, t0, t1); 4978 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 4979 gen_store_spr(SPR_MQ, t1); 4980 tcg_temp_free(t0); 4981 tcg_temp_free(t1); 4982 if (unlikely(Rc(ctx->opcode) != 0)) 4983 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 4984 } 4985 4986 /* sleq - sleq. 
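   Shift left extended with MQ: rS is rotated left by the amount in rB, MQ
   receives the rotated value, and rA merges the rotated value with the
   previous MQ under the shift mask.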
*/ 4987 static void gen_sleq(DisasContext *ctx) 4988 { 4989 TCGv t0 = tcg_temp_new(); 4990 TCGv t1 = tcg_temp_new(); 4991 TCGv t2 = tcg_temp_new(); 4992 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 4993 tcg_gen_movi_tl(t2, 0xFFFFFFFF); 4994 tcg_gen_shl_tl(t2, t2, t0); 4995 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 4996 gen_load_spr(t1, SPR_MQ); 4997 gen_store_spr(SPR_MQ, t0); 4998 tcg_gen_and_tl(t0, t0, t2); 4999 tcg_gen_andc_tl(t1, t1, t2); 5000 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5001 tcg_temp_free(t0); 5002 tcg_temp_free(t1); 5003 tcg_temp_free(t2); 5004 if (unlikely(Rc(ctx->opcode) != 0)) 5005 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5006 } 5007 5008 /* sliq - sliq. */ 5009 static void gen_sliq(DisasContext *ctx) 5010 { 5011 int sh = SH(ctx->opcode); 5012 TCGv t0 = tcg_temp_new(); 5013 TCGv t1 = tcg_temp_new(); 5014 tcg_gen_shli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5015 tcg_gen_shri_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 5016 tcg_gen_or_tl(t1, t0, t1); 5017 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5018 gen_store_spr(SPR_MQ, t1); 5019 tcg_temp_free(t0); 5020 tcg_temp_free(t1); 5021 if (unlikely(Rc(ctx->opcode) != 0)) 5022 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5023 } 5024 5025 /* slliq - slliq. */ 5026 static void gen_slliq(DisasContext *ctx) 5027 { 5028 int sh = SH(ctx->opcode); 5029 TCGv t0 = tcg_temp_new(); 5030 TCGv t1 = tcg_temp_new(); 5031 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5032 gen_load_spr(t1, SPR_MQ); 5033 gen_store_spr(SPR_MQ, t0); 5034 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU << sh)); 5035 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU << sh)); 5036 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5037 tcg_temp_free(t0); 5038 tcg_temp_free(t1); 5039 if (unlikely(Rc(ctx->opcode) != 0)) 5040 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5041 } 5042 5043 /* sllq - sllq. */ 5044 static void gen_sllq(DisasContext *ctx) 5045 { 5046 TCGLabel *l1 = gen_new_label(); 5047 TCGLabel *l2 = gen_new_label(); 5048 TCGv t0 = tcg_temp_local_new(); 5049 TCGv t1 = tcg_temp_local_new(); 5050 TCGv t2 = tcg_temp_local_new(); 5051 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 5052 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 5053 tcg_gen_shl_tl(t1, t1, t2); 5054 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 5055 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 5056 gen_load_spr(t0, SPR_MQ); 5057 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5058 tcg_gen_br(l2); 5059 gen_set_label(l1); 5060 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 5061 gen_load_spr(t2, SPR_MQ); 5062 tcg_gen_andc_tl(t1, t2, t1); 5063 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5064 gen_set_label(l2); 5065 tcg_temp_free(t0); 5066 tcg_temp_free(t1); 5067 tcg_temp_free(t2); 5068 if (unlikely(Rc(ctx->opcode) != 0)) 5069 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5070 } 5071 5072 /* slq - slq. 
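   Shift left with MQ: MQ always receives rS rotated left by the amount in rB;
   rA receives the shifted value, or zero when bit 0x20 of rB is set.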
*/ 5073 static void gen_slq(DisasContext *ctx) 5074 { 5075 TCGLabel *l1 = gen_new_label(); 5076 TCGv t0 = tcg_temp_new(); 5077 TCGv t1 = tcg_temp_new(); 5078 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5079 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5080 tcg_gen_subfi_tl(t1, 32, t1); 5081 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 5082 tcg_gen_or_tl(t1, t0, t1); 5083 gen_store_spr(SPR_MQ, t1); 5084 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20); 5085 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5086 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1); 5087 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 5088 gen_set_label(l1); 5089 tcg_temp_free(t0); 5090 tcg_temp_free(t1); 5091 if (unlikely(Rc(ctx->opcode) != 0)) 5092 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5093 } 5094 5095 /* sraiq - sraiq. */ 5096 static void gen_sraiq(DisasContext *ctx) 5097 { 5098 int sh = SH(ctx->opcode); 5099 TCGLabel *l1 = gen_new_label(); 5100 TCGv t0 = tcg_temp_new(); 5101 TCGv t1 = tcg_temp_new(); 5102 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5103 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 5104 tcg_gen_or_tl(t0, t0, t1); 5105 gen_store_spr(SPR_MQ, t0); 5106 tcg_gen_movi_tl(cpu_ca, 0); 5107 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1); 5108 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1); 5109 tcg_gen_movi_tl(cpu_ca, 1); 5110 gen_set_label(l1); 5111 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh); 5112 tcg_temp_free(t0); 5113 tcg_temp_free(t1); 5114 if (unlikely(Rc(ctx->opcode) != 0)) 5115 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5116 } 5117 5118 /* sraq - sraq. */ 5119 static void gen_sraq(DisasContext *ctx) 5120 { 5121 TCGLabel *l1 = gen_new_label(); 5122 TCGLabel *l2 = gen_new_label(); 5123 TCGv t0 = tcg_temp_new(); 5124 TCGv t1 = tcg_temp_local_new(); 5125 TCGv t2 = tcg_temp_local_new(); 5126 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 5127 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 5128 tcg_gen_sar_tl(t1, cpu_gpr[rS(ctx->opcode)], t2); 5129 tcg_gen_subfi_tl(t2, 32, t2); 5130 tcg_gen_shl_tl(t2, cpu_gpr[rS(ctx->opcode)], t2); 5131 tcg_gen_or_tl(t0, t0, t2); 5132 gen_store_spr(SPR_MQ, t0); 5133 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 5134 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l1); 5135 tcg_gen_mov_tl(t2, cpu_gpr[rS(ctx->opcode)]); 5136 tcg_gen_sari_tl(t1, cpu_gpr[rS(ctx->opcode)], 31); 5137 gen_set_label(l1); 5138 tcg_temp_free(t0); 5139 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t1); 5140 tcg_gen_movi_tl(cpu_ca, 0); 5141 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2); 5142 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l2); 5143 tcg_gen_movi_tl(cpu_ca, 1); 5144 gen_set_label(l2); 5145 tcg_temp_free(t1); 5146 tcg_temp_free(t2); 5147 if (unlikely(Rc(ctx->opcode) != 0)) 5148 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5149 } 5150 5151 /* sre - sre. */ 5152 static void gen_sre(DisasContext *ctx) 5153 { 5154 TCGv t0 = tcg_temp_new(); 5155 TCGv t1 = tcg_temp_new(); 5156 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5157 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5158 tcg_gen_subfi_tl(t1, 32, t1); 5159 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 5160 tcg_gen_or_tl(t1, t0, t1); 5161 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5162 gen_store_spr(SPR_MQ, t1); 5163 tcg_temp_free(t0); 5164 tcg_temp_free(t1); 5165 if (unlikely(Rc(ctx->opcode) != 0)) 5166 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5167 } 5168 5169 /* srea - srea. 
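   Shift right extended algebraic: rA receives rS shifted right algebraically
   by the amount in rB, while MQ receives rS rotated right by the same amount.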
*/ 5170 static void gen_srea(DisasContext *ctx) 5171 { 5172 TCGv t0 = tcg_temp_new(); 5173 TCGv t1 = tcg_temp_new(); 5174 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5175 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5176 gen_store_spr(SPR_MQ, t0); 5177 tcg_gen_sar_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t1); 5178 tcg_temp_free(t0); 5179 tcg_temp_free(t1); 5180 if (unlikely(Rc(ctx->opcode) != 0)) 5181 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5182 } 5183 5184 /* sreq */ 5185 static void gen_sreq(DisasContext *ctx) 5186 { 5187 TCGv t0 = tcg_temp_new(); 5188 TCGv t1 = tcg_temp_new(); 5189 TCGv t2 = tcg_temp_new(); 5190 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5191 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 5192 tcg_gen_shr_tl(t1, t1, t0); 5193 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 5194 gen_load_spr(t2, SPR_MQ); 5195 gen_store_spr(SPR_MQ, t0); 5196 tcg_gen_and_tl(t0, t0, t1); 5197 tcg_gen_andc_tl(t2, t2, t1); 5198 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t2); 5199 tcg_temp_free(t0); 5200 tcg_temp_free(t1); 5201 tcg_temp_free(t2); 5202 if (unlikely(Rc(ctx->opcode) != 0)) 5203 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5204 } 5205 5206 /* sriq */ 5207 static void gen_sriq(DisasContext *ctx) 5208 { 5209 int sh = SH(ctx->opcode); 5210 TCGv t0 = tcg_temp_new(); 5211 TCGv t1 = tcg_temp_new(); 5212 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5213 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 5214 tcg_gen_or_tl(t1, t0, t1); 5215 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5216 gen_store_spr(SPR_MQ, t1); 5217 tcg_temp_free(t0); 5218 tcg_temp_free(t1); 5219 if (unlikely(Rc(ctx->opcode) != 0)) 5220 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5221 } 5222 5223 /* srliq */ 5224 static void gen_srliq(DisasContext *ctx) 5225 { 5226 int sh = SH(ctx->opcode); 5227 TCGv t0 = tcg_temp_new(); 5228 TCGv t1 = tcg_temp_new(); 5229 tcg_gen_rotri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5230 gen_load_spr(t1, SPR_MQ); 5231 gen_store_spr(SPR_MQ, t0); 5232 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU >> sh)); 5233 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU >> sh)); 5234 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5235 tcg_temp_free(t0); 5236 tcg_temp_free(t1); 5237 if (unlikely(Rc(ctx->opcode) != 0)) 5238 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5239 } 5240 5241 /* srlq */ 5242 static void gen_srlq(DisasContext *ctx) 5243 { 5244 TCGLabel *l1 = gen_new_label(); 5245 TCGLabel *l2 = gen_new_label(); 5246 TCGv t0 = tcg_temp_local_new(); 5247 TCGv t1 = tcg_temp_local_new(); 5248 TCGv t2 = tcg_temp_local_new(); 5249 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 5250 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 5251 tcg_gen_shr_tl(t2, t1, t2); 5252 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 5253 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 5254 gen_load_spr(t0, SPR_MQ); 5255 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t2); 5256 tcg_gen_br(l2); 5257 gen_set_label(l1); 5258 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 5259 tcg_gen_and_tl(t0, t0, t2); 5260 gen_load_spr(t1, SPR_MQ); 5261 tcg_gen_andc_tl(t1, t1, t2); 5262 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5263 gen_set_label(l2); 5264 tcg_temp_free(t0); 5265 tcg_temp_free(t1); 5266 tcg_temp_free(t2); 5267 if (unlikely(Rc(ctx->opcode) != 0)) 5268 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5269 } 5270 5271 /* srq */ 5272 static void gen_srq(DisasContext *ctx) 5273 { 5274 TCGLabel *l1 = gen_new_label(); 5275 TCGv t0 = tcg_temp_new(); 5276 TCGv t1 = tcg_temp_new(); 5277 tcg_gen_andi_tl(t1, 
cpu_gpr[rB(ctx->opcode)], 0x1F); 5278 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5279 tcg_gen_subfi_tl(t1, 32, t1); 5280 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 5281 tcg_gen_or_tl(t1, t0, t1); 5282 gen_store_spr(SPR_MQ, t1); 5283 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20); 5284 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5285 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 5286 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 5287 gen_set_label(l1); 5288 tcg_temp_free(t0); 5289 tcg_temp_free(t1); 5290 if (unlikely(Rc(ctx->opcode) != 0)) 5291 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5292 } 5293 5294 /* PowerPC 602 specific instructions */ 5295 5296 /* dsa */ 5297 static void gen_dsa(DisasContext *ctx) 5298 { 5299 /* XXX: TODO */ 5300 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5301 } 5302 5303 /* esa */ 5304 static void gen_esa(DisasContext *ctx) 5305 { 5306 /* XXX: TODO */ 5307 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5308 } 5309 5310 /* mfrom */ 5311 static void gen_mfrom(DisasContext *ctx) 5312 { 5313 #if defined(CONFIG_USER_ONLY) 5314 GEN_PRIV; 5315 #else 5316 CHK_SV; 5317 gen_helper_602_mfrom(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5318 #endif /* defined(CONFIG_USER_ONLY) */ 5319 } 5320 5321 /* 602 - 603 - G2 TLB management */ 5322 5323 /* tlbld */ 5324 static void gen_tlbld_6xx(DisasContext *ctx) 5325 { 5326 #if defined(CONFIG_USER_ONLY) 5327 GEN_PRIV; 5328 #else 5329 CHK_SV; 5330 gen_helper_6xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5331 #endif /* defined(CONFIG_USER_ONLY) */ 5332 } 5333 5334 /* tlbli */ 5335 static void gen_tlbli_6xx(DisasContext *ctx) 5336 { 5337 #if defined(CONFIG_USER_ONLY) 5338 GEN_PRIV; 5339 #else 5340 CHK_SV; 5341 gen_helper_6xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5342 #endif /* defined(CONFIG_USER_ONLY) */ 5343 } 5344 5345 /* 74xx TLB management */ 5346 5347 /* tlbld */ 5348 static void gen_tlbld_74xx(DisasContext *ctx) 5349 { 5350 #if defined(CONFIG_USER_ONLY) 5351 GEN_PRIV; 5352 #else 5353 CHK_SV; 5354 gen_helper_74xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5355 #endif /* defined(CONFIG_USER_ONLY) */ 5356 } 5357 5358 /* tlbli */ 5359 static void gen_tlbli_74xx(DisasContext *ctx) 5360 { 5361 #if defined(CONFIG_USER_ONLY) 5362 GEN_PRIV; 5363 #else 5364 CHK_SV; 5365 gen_helper_74xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5366 #endif /* defined(CONFIG_USER_ONLY) */ 5367 } 5368 5369 /* POWER instructions not in PowerPC 601 */ 5370 5371 /* clf */ 5372 static void gen_clf(DisasContext *ctx) 5373 { 5374 /* Cache line flush: implemented as no-op */ 5375 } 5376 5377 /* cli */ 5378 static void gen_cli(DisasContext *ctx) 5379 { 5380 #if defined(CONFIG_USER_ONLY) 5381 GEN_PRIV; 5382 #else 5383 /* Cache line invalidate: privileged and treated as no-op */ 5384 CHK_SV; 5385 #endif /* defined(CONFIG_USER_ONLY) */ 5386 } 5387 5388 /* dclst */ 5389 static void gen_dclst(DisasContext *ctx) 5390 { 5391 /* Data cache line store: treated as no-op */ 5392 } 5393 5394 static void gen_mfsri(DisasContext *ctx) 5395 { 5396 #if defined(CONFIG_USER_ONLY) 5397 GEN_PRIV; 5398 #else 5399 int ra = rA(ctx->opcode); 5400 int rd = rD(ctx->opcode); 5401 TCGv t0; 5402 5403 CHK_SV; 5404 t0 = tcg_temp_new(); 5405 gen_addr_reg_index(ctx, t0); 5406 tcg_gen_extract_tl(t0, t0, 28, 4); 5407 gen_helper_load_sr(cpu_gpr[rd], cpu_env, t0); 5408 tcg_temp_free(t0); 5409 if (ra != 0 && ra != rd) 5410 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rd]); 5411 #endif /* defined(CONFIG_USER_ONLY) */ 5412 } 5413 5414 static void gen_rac(DisasContext *ctx) 5415 
{ 5416 #if defined(CONFIG_USER_ONLY) 5417 GEN_PRIV; 5418 #else 5419 TCGv t0; 5420 5421 CHK_SV; 5422 t0 = tcg_temp_new(); 5423 gen_addr_reg_index(ctx, t0); 5424 gen_helper_rac(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5425 tcg_temp_free(t0); 5426 #endif /* defined(CONFIG_USER_ONLY) */ 5427 } 5428 5429 static void gen_rfsvc(DisasContext *ctx) 5430 { 5431 #if defined(CONFIG_USER_ONLY) 5432 GEN_PRIV; 5433 #else 5434 CHK_SV; 5435 5436 gen_helper_rfsvc(cpu_env); 5437 gen_sync_exception(ctx); 5438 #endif /* defined(CONFIG_USER_ONLY) */ 5439 } 5440 5441 /* svc is not implemented for now */ 5442 5443 /* BookE specific instructions */ 5444 5445 /* XXX: not implemented on 440 ? */ 5446 static void gen_mfapidi(DisasContext *ctx) 5447 { 5448 /* XXX: TODO */ 5449 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5450 } 5451 5452 /* XXX: not implemented on 440 ? */ 5453 static void gen_tlbiva(DisasContext *ctx) 5454 { 5455 #if defined(CONFIG_USER_ONLY) 5456 GEN_PRIV; 5457 #else 5458 TCGv t0; 5459 5460 CHK_SV; 5461 t0 = tcg_temp_new(); 5462 gen_addr_reg_index(ctx, t0); 5463 gen_helper_tlbiva(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5464 tcg_temp_free(t0); 5465 #endif /* defined(CONFIG_USER_ONLY) */ 5466 } 5467 5468 /* All 405 MAC instructions are translated here */ 5469 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3, 5470 int ra, int rb, int rt, int Rc) 5471 { 5472 TCGv t0, t1; 5473 5474 t0 = tcg_temp_local_new(); 5475 t1 = tcg_temp_local_new(); 5476 5477 switch (opc3 & 0x0D) { 5478 case 0x05: 5479 /* macchw - macchw. - macchwo - macchwo. */ 5480 /* macchws - macchws. - macchwso - macchwso. */ 5481 /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */ 5482 /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */ 5483 /* mulchw - mulchw. */ 5484 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]); 5485 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16); 5486 tcg_gen_ext16s_tl(t1, t1); 5487 break; 5488 case 0x04: 5489 /* macchwu - macchwu. - macchwuo - macchwuo. */ 5490 /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */ 5491 /* mulchwu - mulchwu. */ 5492 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]); 5493 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16); 5494 tcg_gen_ext16u_tl(t1, t1); 5495 break; 5496 case 0x01: 5497 /* machhw - machhw. - machhwo - machhwo. */ 5498 /* machhws - machhws. - machhwso - machhwso. */ 5499 /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */ 5500 /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */ 5501 /* mulhhw - mulhhw. */ 5502 tcg_gen_sari_tl(t0, cpu_gpr[ra], 16); 5503 tcg_gen_ext16s_tl(t0, t0); 5504 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16); 5505 tcg_gen_ext16s_tl(t1, t1); 5506 break; 5507 case 0x00: 5508 /* machhwu - machhwu. - machhwuo - machhwuo. */ 5509 /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */ 5510 /* mulhhwu - mulhhwu. */ 5511 tcg_gen_shri_tl(t0, cpu_gpr[ra], 16); 5512 tcg_gen_ext16u_tl(t0, t0); 5513 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16); 5514 tcg_gen_ext16u_tl(t1, t1); 5515 break; 5516 case 0x0D: 5517 /* maclhw - maclhw. - maclhwo - maclhwo. */ 5518 /* maclhws - maclhws. - maclhwso - maclhwso. */ 5519 /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */ 5520 /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */ 5521 /* mullhw - mullhw. */ 5522 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]); 5523 tcg_gen_ext16s_tl(t1, cpu_gpr[rb]); 5524 break; 5525 case 0x0C: 5526 /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */ 5527 /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */ 5528 /* mullhwu - mullhwu. 
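           Operands are the low halfwords of ra and rb, zero-extended (unsigned).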
*/ 5529 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]); 5530 tcg_gen_ext16u_tl(t1, cpu_gpr[rb]); 5531 break; 5532 } 5533 if (opc2 & 0x04) { 5534 /* (n)multiply-and-accumulate (0x0C / 0x0E) */ 5535 tcg_gen_mul_tl(t1, t0, t1); 5536 if (opc2 & 0x02) { 5537 /* nmultiply-and-accumulate (0x0E) */ 5538 tcg_gen_sub_tl(t0, cpu_gpr[rt], t1); 5539 } else { 5540 /* multiply-and-accumulate (0x0C) */ 5541 tcg_gen_add_tl(t0, cpu_gpr[rt], t1); 5542 } 5543 5544 if (opc3 & 0x12) { 5545 /* Check overflow and/or saturate */ 5546 TCGLabel *l1 = gen_new_label(); 5547 5548 if (opc3 & 0x10) { 5549 /* Start with XER OV disabled, the most likely case */ 5550 tcg_gen_movi_tl(cpu_ov, 0); 5551 } 5552 if (opc3 & 0x01) { 5553 /* Signed */ 5554 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1); 5555 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1); 5556 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0); 5557 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1); 5558 if (opc3 & 0x02) { 5559 /* Saturate */ 5560 tcg_gen_sari_tl(t0, cpu_gpr[rt], 31); 5561 tcg_gen_xori_tl(t0, t0, 0x7fffffff); 5562 } 5563 } else { 5564 /* Unsigned */ 5565 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1); 5566 if (opc3 & 0x02) { 5567 /* Saturate */ 5568 tcg_gen_movi_tl(t0, UINT32_MAX); 5569 } 5570 } 5571 if (opc3 & 0x10) { 5572 /* Check overflow */ 5573 tcg_gen_movi_tl(cpu_ov, 1); 5574 tcg_gen_movi_tl(cpu_so, 1); 5575 } 5576 gen_set_label(l1); 5577 tcg_gen_mov_tl(cpu_gpr[rt], t0); 5578 } 5579 } else { 5580 tcg_gen_mul_tl(cpu_gpr[rt], t0, t1); 5581 } 5582 tcg_temp_free(t0); 5583 tcg_temp_free(t1); 5584 if (unlikely(Rc) != 0) { 5585 /* Update Rc0 */ 5586 gen_set_Rc0(ctx, cpu_gpr[rt]); 5587 } 5588 } 5589 5590 #define GEN_MAC_HANDLER(name, opc2, opc3) \ 5591 static void glue(gen_, name)(DisasContext *ctx) \ 5592 { \ 5593 gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode), \ 5594 rD(ctx->opcode), Rc(ctx->opcode)); \ 5595 } 5596 5597 /* macchw - macchw. */ 5598 GEN_MAC_HANDLER(macchw, 0x0C, 0x05); 5599 /* macchwo - macchwo. */ 5600 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15); 5601 /* macchws - macchws. */ 5602 GEN_MAC_HANDLER(macchws, 0x0C, 0x07); 5603 /* macchwso - macchwso. */ 5604 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17); 5605 /* macchwsu - macchwsu. */ 5606 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06); 5607 /* macchwsuo - macchwsuo. */ 5608 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16); 5609 /* macchwu - macchwu. */ 5610 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04); 5611 /* macchwuo - macchwuo. */ 5612 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14); 5613 /* machhw - machhw. */ 5614 GEN_MAC_HANDLER(machhw, 0x0C, 0x01); 5615 /* machhwo - machhwo. */ 5616 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11); 5617 /* machhws - machhws. */ 5618 GEN_MAC_HANDLER(machhws, 0x0C, 0x03); 5619 /* machhwso - machhwso. */ 5620 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13); 5621 /* machhwsu - machhwsu. */ 5622 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02); 5623 /* machhwsuo - machhwsuo. */ 5624 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12); 5625 /* machhwu - machhwu. */ 5626 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00); 5627 /* machhwuo - machhwuo. */ 5628 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10); 5629 /* maclhw - maclhw. */ 5630 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D); 5631 /* maclhwo - maclhwo. */ 5632 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D); 5633 /* maclhws - maclhws. */ 5634 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F); 5635 /* maclhwso - maclhwso. */ 5636 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F); 5637 /* maclhwu - maclhwu. */ 5638 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C); 5639 /* maclhwuo - maclhwuo. */ 5640 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C); 5641 /* maclhwsu - maclhwsu. 
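   Multiply accumulate low halfword, saturating, unsigned: opc3 0x0E selects
   the unsigned low-halfword operands plus saturation.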
*/ 5642 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E); 5643 /* maclhwsuo - maclhwsuo. */ 5644 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E); 5645 /* nmacchw - nmacchw. */ 5646 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05); 5647 /* nmacchwo - nmacchwo. */ 5648 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15); 5649 /* nmacchws - nmacchws. */ 5650 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07); 5651 /* nmacchwso - nmacchwso. */ 5652 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17); 5653 /* nmachhw - nmachhw. */ 5654 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01); 5655 /* nmachhwo - nmachhwo. */ 5656 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11); 5657 /* nmachhws - nmachhws. */ 5658 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03); 5659 /* nmachhwso - nmachhwso. */ 5660 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13); 5661 /* nmaclhw - nmaclhw. */ 5662 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D); 5663 /* nmaclhwo - nmaclhwo. */ 5664 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D); 5665 /* nmaclhws - nmaclhws. */ 5666 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F); 5667 /* nmaclhwso - nmaclhwso. */ 5668 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F); 5669 5670 /* mulchw - mulchw. */ 5671 GEN_MAC_HANDLER(mulchw, 0x08, 0x05); 5672 /* mulchwu - mulchwu. */ 5673 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04); 5674 /* mulhhw - mulhhw. */ 5675 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01); 5676 /* mulhhwu - mulhhwu. */ 5677 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00); 5678 /* mullhw - mullhw. */ 5679 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D); 5680 /* mullhwu - mullhwu. */ 5681 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C); 5682 5683 /* mfdcr */ 5684 static void gen_mfdcr(DisasContext *ctx) 5685 { 5686 #if defined(CONFIG_USER_ONLY) 5687 GEN_PRIV; 5688 #else 5689 TCGv dcrn; 5690 5691 CHK_SV; 5692 dcrn = tcg_const_tl(SPR(ctx->opcode)); 5693 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, dcrn); 5694 tcg_temp_free(dcrn); 5695 #endif /* defined(CONFIG_USER_ONLY) */ 5696 } 5697 5698 /* mtdcr */ 5699 static void gen_mtdcr(DisasContext *ctx) 5700 { 5701 #if defined(CONFIG_USER_ONLY) 5702 GEN_PRIV; 5703 #else 5704 TCGv dcrn; 5705 5706 CHK_SV; 5707 dcrn = tcg_const_tl(SPR(ctx->opcode)); 5708 gen_helper_store_dcr(cpu_env, dcrn, cpu_gpr[rS(ctx->opcode)]); 5709 tcg_temp_free(dcrn); 5710 #endif /* defined(CONFIG_USER_ONLY) */ 5711 } 5712 5713 /* mfdcrx */ 5714 /* XXX: not implemented on 440 ? */ 5715 static void gen_mfdcrx(DisasContext *ctx) 5716 { 5717 #if defined(CONFIG_USER_ONLY) 5718 GEN_PRIV; 5719 #else 5720 CHK_SV; 5721 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, 5722 cpu_gpr[rA(ctx->opcode)]); 5723 /* Note: Rc update flag set leads to undefined state of Rc0 */ 5724 #endif /* defined(CONFIG_USER_ONLY) */ 5725 } 5726 5727 /* mtdcrx */ 5728 /* XXX: not implemented on 440 ? 
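   mtdcrx takes the DCR number from rA and stores the contents of rS into that DCR.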
*/
static void gen_mtdcrx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
                         cpu_gpr[rS(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mfdcrux (PPC 460) : user-mode access to DCR */
static void gen_mfdcrux(DisasContext *ctx)
{
    gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
                        cpu_gpr[rA(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
}

/* mtdcrux (PPC 460) : user-mode access to DCR */
static void gen_mtdcrux(DisasContext *ctx)
{
    gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
                         cpu_gpr[rS(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
}

/* dccci */
static void gen_dccci(DisasContext *ctx)
{
    CHK_SV;
    /* interpreted as no-op */
}

/* dcread */
static void gen_dcread(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv EA, val;

    CHK_SV;
    gen_set_access_type(ctx, ACCESS_CACHE);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    val = tcg_temp_new();
    gen_qemu_ld32u(ctx, val, EA);
    tcg_temp_free(val);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
    tcg_temp_free(EA);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* icbt */
static void gen_icbt_40x(DisasContext *ctx)
{
    /* interpreted as no-op */
    /* XXX: specification says this is treated as a load by the MMU
     * but does not generate any exception
     */
}

/* iccci */
static void gen_iccci(DisasContext *ctx)
{
    CHK_SV;
    /* interpreted as no-op */
}

/* icread */
static void gen_icread(DisasContext *ctx)
{
    CHK_SV;
    /* interpreted as no-op */
}

/* rfci (supervisor only) */
static void gen_rfci_40x(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* Restore CPU state */
    gen_helper_40x_rfci(cpu_env);
    gen_sync_exception(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}

static void gen_rfci(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* Restore CPU state */
    gen_helper_rfci(cpu_env);
    gen_sync_exception(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* BookE specific */

/* XXX: not implemented on 440 ? */
static void gen_rfdi(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* Restore CPU state */
    gen_helper_rfdi(cpu_env);
    gen_sync_exception(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* XXX: not implemented on 440 ?
*/ 5848 static void gen_rfmci(DisasContext *ctx) 5849 { 5850 #if defined(CONFIG_USER_ONLY) 5851 GEN_PRIV; 5852 #else 5853 CHK_SV; 5854 /* Restore CPU state */ 5855 gen_helper_rfmci(cpu_env); 5856 gen_sync_exception(ctx); 5857 #endif /* defined(CONFIG_USER_ONLY) */ 5858 } 5859 5860 /* TLB management - PowerPC 405 implementation */ 5861 5862 /* tlbre */ 5863 static void gen_tlbre_40x(DisasContext *ctx) 5864 { 5865 #if defined(CONFIG_USER_ONLY) 5866 GEN_PRIV; 5867 #else 5868 CHK_SV; 5869 switch (rB(ctx->opcode)) { 5870 case 0: 5871 gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_env, 5872 cpu_gpr[rA(ctx->opcode)]); 5873 break; 5874 case 1: 5875 gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_env, 5876 cpu_gpr[rA(ctx->opcode)]); 5877 break; 5878 default: 5879 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5880 break; 5881 } 5882 #endif /* defined(CONFIG_USER_ONLY) */ 5883 } 5884 5885 /* tlbsx - tlbsx. */ 5886 static void gen_tlbsx_40x(DisasContext *ctx) 5887 { 5888 #if defined(CONFIG_USER_ONLY) 5889 GEN_PRIV; 5890 #else 5891 TCGv t0; 5892 5893 CHK_SV; 5894 t0 = tcg_temp_new(); 5895 gen_addr_reg_index(ctx, t0); 5896 gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5897 tcg_temp_free(t0); 5898 if (Rc(ctx->opcode)) { 5899 TCGLabel *l1 = gen_new_label(); 5900 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 5901 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1); 5902 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02); 5903 gen_set_label(l1); 5904 } 5905 #endif /* defined(CONFIG_USER_ONLY) */ 5906 } 5907 5908 /* tlbwe */ 5909 static void gen_tlbwe_40x(DisasContext *ctx) 5910 { 5911 #if defined(CONFIG_USER_ONLY) 5912 GEN_PRIV; 5913 #else 5914 CHK_SV; 5915 5916 switch (rB(ctx->opcode)) { 5917 case 0: 5918 gen_helper_4xx_tlbwe_hi(cpu_env, cpu_gpr[rA(ctx->opcode)], 5919 cpu_gpr[rS(ctx->opcode)]); 5920 break; 5921 case 1: 5922 gen_helper_4xx_tlbwe_lo(cpu_env, cpu_gpr[rA(ctx->opcode)], 5923 cpu_gpr[rS(ctx->opcode)]); 5924 break; 5925 default: 5926 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5927 break; 5928 } 5929 #endif /* defined(CONFIG_USER_ONLY) */ 5930 } 5931 5932 /* TLB management - PowerPC 440 implementation */ 5933 5934 /* tlbre */ 5935 static void gen_tlbre_440(DisasContext *ctx) 5936 { 5937 #if defined(CONFIG_USER_ONLY) 5938 GEN_PRIV; 5939 #else 5940 CHK_SV; 5941 5942 switch (rB(ctx->opcode)) { 5943 case 0: 5944 case 1: 5945 case 2: 5946 { 5947 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode)); 5948 gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], cpu_env, 5949 t0, cpu_gpr[rA(ctx->opcode)]); 5950 tcg_temp_free_i32(t0); 5951 } 5952 break; 5953 default: 5954 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5955 break; 5956 } 5957 #endif /* defined(CONFIG_USER_ONLY) */ 5958 } 5959 5960 /* tlbsx - tlbsx. 
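   Searches the 440 TLB for the computed effective address; when Rc is set,
   CR0[EQ] reports whether a matching entry was found.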
*/ 5961 static void gen_tlbsx_440(DisasContext *ctx) 5962 { 5963 #if defined(CONFIG_USER_ONLY) 5964 GEN_PRIV; 5965 #else 5966 TCGv t0; 5967 5968 CHK_SV; 5969 t0 = tcg_temp_new(); 5970 gen_addr_reg_index(ctx, t0); 5971 gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5972 tcg_temp_free(t0); 5973 if (Rc(ctx->opcode)) { 5974 TCGLabel *l1 = gen_new_label(); 5975 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 5976 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1); 5977 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02); 5978 gen_set_label(l1); 5979 } 5980 #endif /* defined(CONFIG_USER_ONLY) */ 5981 } 5982 5983 /* tlbwe */ 5984 static void gen_tlbwe_440(DisasContext *ctx) 5985 { 5986 #if defined(CONFIG_USER_ONLY) 5987 GEN_PRIV; 5988 #else 5989 CHK_SV; 5990 switch (rB(ctx->opcode)) { 5991 case 0: 5992 case 1: 5993 case 2: 5994 { 5995 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode)); 5996 gen_helper_440_tlbwe(cpu_env, t0, cpu_gpr[rA(ctx->opcode)], 5997 cpu_gpr[rS(ctx->opcode)]); 5998 tcg_temp_free_i32(t0); 5999 } 6000 break; 6001 default: 6002 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6003 break; 6004 } 6005 #endif /* defined(CONFIG_USER_ONLY) */ 6006 } 6007 6008 /* TLB management - PowerPC BookE 2.06 implementation */ 6009 6010 /* tlbre */ 6011 static void gen_tlbre_booke206(DisasContext *ctx) 6012 { 6013 #if defined(CONFIG_USER_ONLY) 6014 GEN_PRIV; 6015 #else 6016 CHK_SV; 6017 gen_helper_booke206_tlbre(cpu_env); 6018 #endif /* defined(CONFIG_USER_ONLY) */ 6019 } 6020 6021 /* tlbsx - tlbsx. */ 6022 static void gen_tlbsx_booke206(DisasContext *ctx) 6023 { 6024 #if defined(CONFIG_USER_ONLY) 6025 GEN_PRIV; 6026 #else 6027 TCGv t0; 6028 6029 CHK_SV; 6030 if (rA(ctx->opcode)) { 6031 t0 = tcg_temp_new(); 6032 tcg_gen_mov_tl(t0, cpu_gpr[rD(ctx->opcode)]); 6033 } else { 6034 t0 = tcg_const_tl(0); 6035 } 6036 6037 tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]); 6038 gen_helper_booke206_tlbsx(cpu_env, t0); 6039 tcg_temp_free(t0); 6040 #endif /* defined(CONFIG_USER_ONLY) */ 6041 } 6042 6043 /* tlbwe */ 6044 static void gen_tlbwe_booke206(DisasContext *ctx) 6045 { 6046 #if defined(CONFIG_USER_ONLY) 6047 GEN_PRIV; 6048 #else 6049 CHK_SV; 6050 gen_helper_booke206_tlbwe(cpu_env); 6051 #endif /* defined(CONFIG_USER_ONLY) */ 6052 } 6053 6054 static void gen_tlbivax_booke206(DisasContext *ctx) 6055 { 6056 #if defined(CONFIG_USER_ONLY) 6057 GEN_PRIV; 6058 #else 6059 TCGv t0; 6060 6061 CHK_SV; 6062 t0 = tcg_temp_new(); 6063 gen_addr_reg_index(ctx, t0); 6064 gen_helper_booke206_tlbivax(cpu_env, t0); 6065 tcg_temp_free(t0); 6066 #endif /* defined(CONFIG_USER_ONLY) */ 6067 } 6068 6069 static void gen_tlbilx_booke206(DisasContext *ctx) 6070 { 6071 #if defined(CONFIG_USER_ONLY) 6072 GEN_PRIV; 6073 #else 6074 TCGv t0; 6075 6076 CHK_SV; 6077 t0 = tcg_temp_new(); 6078 gen_addr_reg_index(ctx, t0); 6079 6080 switch((ctx->opcode >> 21) & 0x3) { 6081 case 0: 6082 gen_helper_booke206_tlbilx0(cpu_env, t0); 6083 break; 6084 case 1: 6085 gen_helper_booke206_tlbilx1(cpu_env, t0); 6086 break; 6087 case 3: 6088 gen_helper_booke206_tlbilx3(cpu_env, t0); 6089 break; 6090 default: 6091 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6092 break; 6093 } 6094 6095 tcg_temp_free(t0); 6096 #endif /* defined(CONFIG_USER_ONLY) */ 6097 } 6098 6099 6100 /* wrtee */ 6101 static void gen_wrtee(DisasContext *ctx) 6102 { 6103 #if defined(CONFIG_USER_ONLY) 6104 GEN_PRIV; 6105 #else 6106 TCGv t0; 6107 6108 CHK_SV; 6109 t0 = tcg_temp_new(); 6110 tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE)); 6111 
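    /* Clear MSR[EE], then OR in the EE bit copied from the source GPR above. */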
tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE)); 6112 tcg_gen_or_tl(cpu_msr, cpu_msr, t0); 6113 tcg_temp_free(t0); 6114 /* Stop translation to have a chance to raise an exception 6115 * if we just set msr_ee to 1 6116 */ 6117 gen_stop_exception(ctx); 6118 #endif /* defined(CONFIG_USER_ONLY) */ 6119 } 6120 6121 /* wrteei */ 6122 static void gen_wrteei(DisasContext *ctx) 6123 { 6124 #if defined(CONFIG_USER_ONLY) 6125 GEN_PRIV; 6126 #else 6127 CHK_SV; 6128 if (ctx->opcode & 0x00008000) { 6129 tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE)); 6130 /* Stop translation to have a chance to raise an exception */ 6131 gen_stop_exception(ctx); 6132 } else { 6133 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE)); 6134 } 6135 #endif /* defined(CONFIG_USER_ONLY) */ 6136 } 6137 6138 /* PowerPC 440 specific instructions */ 6139 6140 /* dlmzb */ 6141 static void gen_dlmzb(DisasContext *ctx) 6142 { 6143 TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode)); 6144 gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_env, 6145 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); 6146 tcg_temp_free_i32(t0); 6147 } 6148 6149 /* mbar replaces eieio on 440 */ 6150 static void gen_mbar(DisasContext *ctx) 6151 { 6152 /* interpreted as no-op */ 6153 } 6154 6155 /* msync replaces sync on 440 */ 6156 static void gen_msync_4xx(DisasContext *ctx) 6157 { 6158 /* interpreted as no-op */ 6159 } 6160 6161 /* icbt */ 6162 static void gen_icbt_440(DisasContext *ctx) 6163 { 6164 /* interpreted as no-op */ 6165 /* XXX: specification say this is treated as a load by the MMU 6166 * but does not generate any exception 6167 */ 6168 } 6169 6170 /* Embedded.Processor Control */ 6171 6172 static void gen_msgclr(DisasContext *ctx) 6173 { 6174 #if defined(CONFIG_USER_ONLY) 6175 GEN_PRIV; 6176 #else 6177 CHK_SV; 6178 gen_helper_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]); 6179 #endif /* defined(CONFIG_USER_ONLY) */ 6180 } 6181 6182 static void gen_msgsnd(DisasContext *ctx) 6183 { 6184 #if defined(CONFIG_USER_ONLY) 6185 GEN_PRIV; 6186 #else 6187 CHK_SV; 6188 gen_helper_msgsnd(cpu_gpr[rB(ctx->opcode)]); 6189 #endif /* defined(CONFIG_USER_ONLY) */ 6190 } 6191 6192 6193 #if defined(TARGET_PPC64) 6194 static void gen_maddld(DisasContext *ctx) 6195 { 6196 TCGv_i64 t1 = tcg_temp_new_i64(); 6197 6198 tcg_gen_mul_i64(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 6199 tcg_gen_add_i64(cpu_gpr[rD(ctx->opcode)], t1, cpu_gpr[rC(ctx->opcode)]); 6200 tcg_temp_free_i64(t1); 6201 } 6202 6203 /* maddhd maddhdu */ 6204 static void gen_maddhd_maddhdu(DisasContext *ctx) 6205 { 6206 TCGv_i64 lo = tcg_temp_new_i64(); 6207 TCGv_i64 hi = tcg_temp_new_i64(); 6208 TCGv_i64 t1 = tcg_temp_new_i64(); 6209 6210 if (Rc(ctx->opcode)) { 6211 tcg_gen_mulu2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)], 6212 cpu_gpr[rB(ctx->opcode)]); 6213 tcg_gen_movi_i64(t1, 0); 6214 } else { 6215 tcg_gen_muls2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)], 6216 cpu_gpr[rB(ctx->opcode)]); 6217 tcg_gen_sari_i64(t1, cpu_gpr[rC(ctx->opcode)], 63); 6218 } 6219 tcg_gen_add2_i64(t1, cpu_gpr[rD(ctx->opcode)], lo, hi, 6220 cpu_gpr[rC(ctx->opcode)], t1); 6221 tcg_temp_free_i64(lo); 6222 tcg_temp_free_i64(hi); 6223 tcg_temp_free_i64(t1); 6224 } 6225 #endif /* defined(TARGET_PPC64) */ 6226 6227 static void gen_tbegin(DisasContext *ctx) 6228 { 6229 if (unlikely(!ctx->tm_enabled)) { 6230 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); 6231 return; 6232 } 6233 gen_helper_tbegin(cpu_env); 6234 } 6235 6236 #define GEN_TM_NOOP(name) \ 6237 static inline void gen_##name(DisasContext *ctx) \ 6238 { \ 6239 if 
(unlikely(!ctx->tm_enabled)) { \ 6240 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \ 6241 return; \ 6242 } \ 6243 /* Because tbegin always fails in QEMU, these user \ 6244 * space instructions all have a simple implementation: \ 6245 * \ 6246 * CR[0] = 0b0 || MSR[TS] || 0b0 \ 6247 * = 0b0 || 0b00 || 0b0 \ 6248 */ \ 6249 tcg_gen_movi_i32(cpu_crf[0], 0); \ 6250 } 6251 6252 GEN_TM_NOOP(tend); 6253 GEN_TM_NOOP(tabort); 6254 GEN_TM_NOOP(tabortwc); 6255 GEN_TM_NOOP(tabortwci); 6256 GEN_TM_NOOP(tabortdc); 6257 GEN_TM_NOOP(tabortdci); 6258 GEN_TM_NOOP(tsr); 6259 static inline void gen_cp_abort(DisasContext *ctx) 6260 { 6261 // Do Nothing 6262 } 6263 6264 #define GEN_CP_PASTE_NOOP(name) \ 6265 static inline void gen_##name(DisasContext *ctx) \ 6266 { \ 6267 /* Generate invalid exception until \ 6268 * we have an implementation of the copy \ 6269 * paste facility \ 6270 */ \ 6271 gen_invalid(ctx); \ 6272 } 6273 6274 GEN_CP_PASTE_NOOP(copy) 6275 GEN_CP_PASTE_NOOP(paste) 6276 6277 static void gen_tcheck(DisasContext *ctx) 6278 { 6279 if (unlikely(!ctx->tm_enabled)) { 6280 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); 6281 return; 6282 } 6283 /* Because tbegin always fails, the tcheck implementation 6284 * is simple: 6285 * 6286 * CR[CRF] = TDOOMED || MSR[TS] || 0b0 6287 * = 0b1 || 0b00 || 0b0 6288 */ 6289 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0x8); 6290 } 6291 6292 #if defined(CONFIG_USER_ONLY) 6293 #define GEN_TM_PRIV_NOOP(name) \ 6294 static inline void gen_##name(DisasContext *ctx) \ 6295 { \ 6296 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); \ 6297 } 6298 6299 #else 6300 6301 #define GEN_TM_PRIV_NOOP(name) \ 6302 static inline void gen_##name(DisasContext *ctx) \ 6303 { \ 6304 CHK_SV; \ 6305 if (unlikely(!ctx->tm_enabled)) { \ 6306 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \ 6307 return; \ 6308 } \ 6309 /* Because tbegin always fails, the implementation is \ 6310 * simple: \ 6311 * \ 6312 * CR[0] = 0b0 || MSR[TS] || 0b0 \ 6313 * = 0b0 || 0b00 | 0b0 \ 6314 */ \ 6315 tcg_gen_movi_i32(cpu_crf[0], 0); \ 6316 } 6317 6318 #endif 6319 6320 GEN_TM_PRIV_NOOP(treclaim); 6321 GEN_TM_PRIV_NOOP(trechkpt); 6322 6323 #include "translate/fp-impl.inc.c" 6324 6325 #include "translate/vmx-impl.inc.c" 6326 6327 #include "translate/vsx-impl.inc.c" 6328 6329 #include "translate/dfp-impl.inc.c" 6330 6331 #include "translate/spe-impl.inc.c" 6332 6333 /* Handles lfdp, lxsd, lxssp */ 6334 static void gen_dform39(DisasContext *ctx) 6335 { 6336 switch (ctx->opcode & 0x3) { 6337 case 0: /* lfdp */ 6338 if (ctx->insns_flags2 & PPC2_ISA205) { 6339 return gen_lfdp(ctx); 6340 } 6341 break; 6342 case 2: /* lxsd */ 6343 if (ctx->insns_flags2 & PPC2_ISA300) { 6344 return gen_lxsd(ctx); 6345 } 6346 break; 6347 case 3: /* lxssp */ 6348 if (ctx->insns_flags2 & PPC2_ISA300) { 6349 return gen_lxssp(ctx); 6350 } 6351 break; 6352 } 6353 return gen_invalid(ctx); 6354 } 6355 6356 /* handles stfdp, lxv, stxsd, stxssp lxvx */ 6357 static void gen_dform3D(DisasContext *ctx) 6358 { 6359 if ((ctx->opcode & 3) == 1) { /* DQ-FORM */ 6360 switch (ctx->opcode & 0x7) { 6361 case 1: /* lxv */ 6362 if (ctx->insns_flags2 & PPC2_ISA300) { 6363 return gen_lxv(ctx); 6364 } 6365 break; 6366 case 5: /* stxv */ 6367 if (ctx->insns_flags2 & PPC2_ISA300) { 6368 return gen_stxv(ctx); 6369 } 6370 break; 6371 } 6372 } else { /* DS-FORM */ 6373 switch (ctx->opcode & 0x3) { 6374 case 0: /* stfdp */ 6375 if (ctx->insns_flags2 & PPC2_ISA205) { 6376 return gen_stfdp(ctx); 6377 } 6378 break; 6379 case 2: /* stxsd */ 6380 if 
(ctx->insns_flags2 & PPC2_ISA300) { 6381 return gen_stxsd(ctx); 6382 } 6383 break; 6384 case 3: /* stxssp */ 6385 if (ctx->insns_flags2 & PPC2_ISA300) { 6386 return gen_stxssp(ctx); 6387 } 6388 break; 6389 } 6390 } 6391 return gen_invalid(ctx); 6392 } 6393 6394 static opcode_t opcodes[] = { 6395 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE), 6396 GEN_HANDLER(cmp, 0x1F, 0x00, 0x00, 0x00400000, PPC_INTEGER), 6397 GEN_HANDLER(cmpi, 0x0B, 0xFF, 0xFF, 0x00400000, PPC_INTEGER), 6398 GEN_HANDLER(cmpl, 0x1F, 0x00, 0x01, 0x00400001, PPC_INTEGER), 6399 GEN_HANDLER(cmpli, 0x0A, 0xFF, 0xFF, 0x00400000, PPC_INTEGER), 6400 #if defined(TARGET_PPC64) 6401 GEN_HANDLER_E(cmpeqb, 0x1F, 0x00, 0x07, 0x00600000, PPC_NONE, PPC2_ISA300), 6402 #endif 6403 GEN_HANDLER_E(cmpb, 0x1F, 0x1C, 0x0F, 0x00000001, PPC_NONE, PPC2_ISA205), 6404 GEN_HANDLER_E(cmprb, 0x1F, 0x00, 0x06, 0x00400001, PPC_NONE, PPC2_ISA300), 6405 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL), 6406 GEN_HANDLER(addi, 0x0E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6407 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6408 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6409 GEN_HANDLER(addis, 0x0F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6410 GEN_HANDLER_E(addpcis, 0x13, 0x2, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300), 6411 GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER), 6412 GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER), 6413 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER), 6414 GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER), 6415 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6416 #if defined(TARGET_PPC64) 6417 GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B), 6418 #endif 6419 GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER), 6420 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER), 6421 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6422 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6423 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6424 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER), 6425 GEN_HANDLER_E(cnttzw, 0x1F, 0x1A, 0x10, 0x00000000, PPC_NONE, PPC2_ISA300), 6426 GEN_HANDLER_E(copy, 0x1F, 0x06, 0x18, 0x03C00001, PPC_NONE, PPC2_ISA300), 6427 GEN_HANDLER_E(cp_abort, 0x1F, 0x06, 0x1A, 0x03FFF801, PPC_NONE, PPC2_ISA300), 6428 GEN_HANDLER_E(paste, 0x1F, 0x06, 0x1C, 0x03C00000, PPC_NONE, PPC2_ISA300), 6429 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER), 6430 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER), 6431 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6432 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6433 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6434 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6435 GEN_HANDLER(popcntb, 0x1F, 0x1A, 0x03, 0x0000F801, PPC_POPCNTB), 6436 GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD), 6437 GEN_HANDLER_E(prtyw, 0x1F, 0x1A, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA205), 6438 #if defined(TARGET_PPC64) 6439 GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD), 6440 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B), 6441 GEN_HANDLER_E(cnttzd, 0x1F, 0x1A, 0x11, 0x00000000, PPC_NONE, PPC2_ISA300), 6442 GEN_HANDLER_E(darn, 0x1F, 0x13, 0x17, 0x001CF801, PPC_NONE, PPC2_ISA300), 6443 GEN_HANDLER_E(prtyd, 0x1F, 0x1A, 0x05, 0x0000F801, PPC_NONE, 
PPC2_ISA205), 6444 GEN_HANDLER_E(bpermd, 0x1F, 0x1C, 0x07, 0x00000001, PPC_NONE, PPC2_PERM_ISA206), 6445 #endif 6446 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6447 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6448 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6449 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER), 6450 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER), 6451 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER), 6452 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER), 6453 #if defined(TARGET_PPC64) 6454 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B), 6455 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B), 6456 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B), 6457 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B), 6458 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B), 6459 GEN_HANDLER2_E(extswsli0, "extswsli", 0x1F, 0x1A, 0x1B, 0x00000000, 6460 PPC_NONE, PPC2_ISA300), 6461 GEN_HANDLER2_E(extswsli1, "extswsli", 0x1F, 0x1B, 0x1B, 0x00000000, 6462 PPC_NONE, PPC2_ISA300), 6463 #endif 6464 #if defined(TARGET_PPC64) 6465 GEN_HANDLER(ld, 0x3A, 0xFF, 0xFF, 0x00000000, PPC_64B), 6466 GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX), 6467 GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B), 6468 #endif 6469 /* handles lfdp, lxsd, lxssp */ 6470 GEN_HANDLER_E(dform39, 0x39, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205), 6471 /* handles stfdp, lxv, stxsd, stxssp, stxv */ 6472 GEN_HANDLER_E(dform3D, 0x3D, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205), 6473 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6474 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6475 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING), 6476 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING), 6477 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING), 6478 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING), 6479 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x03FFF801, PPC_MEM_EIEIO), 6480 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM), 6481 GEN_HANDLER_E(lbarx, 0x1F, 0x14, 0x01, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6482 GEN_HANDLER_E(lharx, 0x1F, 0x14, 0x03, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6483 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES), 6484 GEN_HANDLER_E(lwat, 0x1F, 0x06, 0x12, 0x00000001, PPC_NONE, PPC2_ISA300), 6485 GEN_HANDLER_E(stwat, 0x1F, 0x06, 0x16, 0x00000001, PPC_NONE, PPC2_ISA300), 6486 GEN_HANDLER_E(stbcx_, 0x1F, 0x16, 0x15, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6487 GEN_HANDLER_E(sthcx_, 0x1F, 0x16, 0x16, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6488 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES), 6489 #if defined(TARGET_PPC64) 6490 GEN_HANDLER_E(ldat, 0x1F, 0x06, 0x13, 0x00000001, PPC_NONE, PPC2_ISA300), 6491 GEN_HANDLER_E(stdat, 0x1F, 0x06, 0x17, 0x00000001, PPC_NONE, PPC2_ISA300), 6492 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B), 6493 GEN_HANDLER_E(lqarx, 0x1F, 0x14, 0x08, 0, PPC_NONE, PPC2_LSQ_ISA207), 6494 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B), 6495 GEN_HANDLER_E(stqcx_, 0x1F, 0x16, 0x05, 0, PPC_NONE, PPC2_LSQ_ISA207), 6496 #endif 6497 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC), 6498 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT), 6499 GEN_HANDLER_E(wait, 0x1F, 0x1E, 0x00, 0x039FF801, PPC_NONE, PPC2_ISA300), 6500 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, 
PPC_FLOW), 6501 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 6502 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW), 6503 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW), 6504 GEN_HANDLER_E(bctar, 0x13, 0x10, 0x11, 0x0000E000, PPC_NONE, PPC2_BCTAR_ISA207), 6505 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER), 6506 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW), 6507 #if defined(TARGET_PPC64) 6508 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B), 6509 GEN_HANDLER_E(stop, 0x13, 0x12, 0x0b, 0x03FFF801, PPC_NONE, PPC2_ISA300), 6510 GEN_HANDLER_E(doze, 0x13, 0x12, 0x0c, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6511 GEN_HANDLER_E(nap, 0x13, 0x12, 0x0d, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6512 GEN_HANDLER_E(sleep, 0x13, 0x12, 0x0e, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6513 GEN_HANDLER_E(rvwinkle, 0x13, 0x12, 0x0f, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6514 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H), 6515 #endif 6516 GEN_HANDLER(sc, 0x11, 0xFF, 0xFF, 0x03FFF01D, PPC_FLOW), 6517 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW), 6518 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 6519 #if defined(TARGET_PPC64) 6520 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B), 6521 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B), 6522 #endif 6523 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC), 6524 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC), 6525 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC), 6526 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC), 6527 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB), 6528 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC), 6529 #if defined(TARGET_PPC64) 6530 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B), 6531 GEN_HANDLER_E(setb, 0x1F, 0x00, 0x04, 0x0003F801, PPC_NONE, PPC2_ISA300), 6532 GEN_HANDLER_E(mcrxrx, 0x1F, 0x00, 0x12, 0x007FF801, PPC_NONE, PPC2_ISA300), 6533 #endif 6534 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001EF801, PPC_MISC), 6535 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000000, PPC_MISC), 6536 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE), 6537 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE), 6538 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE), 6539 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x00000001, PPC_CACHE), 6540 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x00000001, PPC_CACHE), 6541 GEN_HANDLER_E(dcbtls, 0x1F, 0x06, 0x05, 0x02000001, PPC_BOOKE, PPC2_BOOKE206), 6542 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ), 6543 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC), 6544 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x02000001, PPC_ALTIVEC), 6545 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC), 6546 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI), 6547 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA), 6548 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT), 6549 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT), 6550 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT), 6551 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT), 6552 #if defined(TARGET_PPC64) 6553 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B), 6554 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001, 6555 PPC_SEGMENT_64B), 6556 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B), 6557 GEN_HANDLER2(mtsrin_64b, 
"mtsrin", 0x1F, 0x12, 0x07, 0x001F0001, 6558 PPC_SEGMENT_64B), 6559 GEN_HANDLER2(slbmte, "slbmte", 0x1F, 0x12, 0x0C, 0x001F0001, PPC_SEGMENT_64B), 6560 GEN_HANDLER2(slbmfee, "slbmfee", 0x1F, 0x13, 0x1C, 0x001F0001, PPC_SEGMENT_64B), 6561 GEN_HANDLER2(slbmfev, "slbmfev", 0x1F, 0x13, 0x1A, 0x001F0001, PPC_SEGMENT_64B), 6562 GEN_HANDLER2(slbfee_, "slbfee.", 0x1F, 0x13, 0x1E, 0x001F0000, PPC_SEGMENT_64B), 6563 #endif 6564 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA), 6565 /* XXX Those instructions will need to be handled differently for 6566 * different ISA versions */ 6567 GEN_HANDLER(tlbiel, 0x1F, 0x12, 0x08, 0x001F0001, PPC_MEM_TLBIE), 6568 GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x001F0001, PPC_MEM_TLBIE), 6569 GEN_HANDLER_E(tlbiel, 0x1F, 0x12, 0x08, 0x00100001, PPC_NONE, PPC2_ISA300), 6570 GEN_HANDLER_E(tlbie, 0x1F, 0x12, 0x09, 0x00100001, PPC_NONE, PPC2_ISA300), 6571 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC), 6572 #if defined(TARGET_PPC64) 6573 GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x031FFC01, PPC_SLBI), 6574 GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI), 6575 GEN_HANDLER_E(slbieg, 0x1F, 0x12, 0x0E, 0x001F0001, PPC_NONE, PPC2_ISA300), 6576 GEN_HANDLER_E(slbsync, 0x1F, 0x12, 0x0A, 0x03FFF801, PPC_NONE, PPC2_ISA300), 6577 #endif 6578 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN), 6579 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN), 6580 GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR), 6581 GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR), 6582 GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR), 6583 GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR), 6584 GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR), 6585 GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR), 6586 GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR), 6587 GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR), 6588 GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR), 6589 GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR), 6590 GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR), 6591 GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR), 6592 GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR), 6593 GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR), 6594 GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR), 6595 GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR), 6596 GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR), 6597 GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR), 6598 GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR), 6599 GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR), 6600 GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR), 6601 GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR), 6602 GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR), 6603 GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR), 6604 GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR), 6605 GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR), 6606 GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR), 6607 GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR), 6608 GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR), 6609 GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR), 6610 GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR), 6611 
GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR), 6612 GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR), 6613 GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR), 6614 GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC), 6615 GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC), 6616 GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC), 6617 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB), 6618 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB), 6619 GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB), 6620 GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB), 6621 GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER), 6622 GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER), 6623 GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER), 6624 GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER), 6625 GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER), 6626 GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER), 6627 GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 6628 GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 6629 GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2), 6630 GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2), 6631 GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 6632 GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 6633 GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2), 6634 GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2), 6635 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI), 6636 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA), 6637 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR), 6638 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR), 6639 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX), 6640 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX), 6641 GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX), 6642 GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX), 6643 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON), 6644 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON), 6645 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT), 6646 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON), 6647 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON), 6648 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP), 6649 GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206), 6650 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI), 6651 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI), 6652 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB), 6653 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB), 6654 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB), 6655 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE), 6656 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE), 6657 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE), 6658 GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, 6659 PPC_NONE, PPC2_BOOKE206), 6660 GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, 6661 PPC_NONE, PPC2_BOOKE206), 6662 
GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, 6663 PPC_NONE, PPC2_BOOKE206), 6664 GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001, 6665 PPC_NONE, PPC2_BOOKE206), 6666 GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001, 6667 PPC_NONE, PPC2_BOOKE206), 6668 GEN_HANDLER2_E(msgsnd, "msgsnd", 0x1F, 0x0E, 0x06, 0x03ff0001, 6669 PPC_NONE, PPC2_PRCNTL), 6670 GEN_HANDLER2_E(msgclr, "msgclr", 0x1F, 0x0E, 0x07, 0x03ff0001, 6671 PPC_NONE, PPC2_PRCNTL), 6672 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE), 6673 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE), 6674 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC), 6675 GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801, 6676 PPC_BOOKE, PPC2_BOOKE206), 6677 GEN_HANDLER(msync_4xx, 0x1F, 0x16, 0x12, 0x03FFF801, PPC_BOOKE), 6678 GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001, 6679 PPC_BOOKE, PPC2_BOOKE206), 6680 GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC), 6681 GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC), 6682 GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC), 6683 GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC), 6684 GEN_HANDLER(vmladduhm, 0x04, 0x11, 0xFF, 0x00000000, PPC_ALTIVEC), 6685 #if defined(TARGET_PPC64) 6686 GEN_HANDLER_E(maddhd_maddhdu, 0x04, 0x18, 0xFF, 0x00000000, PPC_NONE, 6687 PPC2_ISA300), 6688 GEN_HANDLER_E(maddld, 0x04, 0x19, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300), 6689 #endif 6690 6691 #undef GEN_INT_ARITH_ADD 6692 #undef GEN_INT_ARITH_ADD_CONST 6693 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \ 6694 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER), 6695 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \ 6696 add_ca, compute_ca, compute_ov) \ 6697 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER), 6698 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0) 6699 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1) 6700 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0) 6701 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1) 6702 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0) 6703 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1) 6704 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0) 6705 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1) 6706 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0) 6707 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1) 6708 6709 #undef GEN_INT_ARITH_DIVW 6710 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \ 6711 GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER) 6712 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0), 6713 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1), 6714 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0), 6715 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1), 6716 GEN_HANDLER_E(divwe, 0x1F, 0x0B, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206), 6717 GEN_HANDLER_E(divweo, 0x1F, 0x0B, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206), 6718 GEN_HANDLER_E(divweu, 0x1F, 0x0B, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206), 6719 GEN_HANDLER_E(divweuo, 0x1F, 0x0B, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206), 6720 GEN_HANDLER_E(modsw, 0x1F, 0x0B, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300), 6721 GEN_HANDLER_E(moduw, 0x1F, 0x0B, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300), 6722 6723 #if defined(TARGET_PPC64) 6724 #undef GEN_INT_ARITH_DIVD 6725 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \ 6726 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B) 6727 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0), 6728 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1), 6729 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0), 6730 
GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1), 6731 6732 GEN_HANDLER_E(divdeu, 0x1F, 0x09, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206), 6733 GEN_HANDLER_E(divdeuo, 0x1F, 0x09, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206), 6734 GEN_HANDLER_E(divde, 0x1F, 0x09, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206), 6735 GEN_HANDLER_E(divdeo, 0x1F, 0x09, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206), 6736 GEN_HANDLER_E(modsd, 0x1F, 0x09, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300), 6737 GEN_HANDLER_E(modud, 0x1F, 0x09, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300), 6738 6739 #undef GEN_INT_ARITH_MUL_HELPER 6740 #define GEN_INT_ARITH_MUL_HELPER(name, opc3) \ 6741 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B) 6742 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00), 6743 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02), 6744 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17), 6745 #endif 6746 6747 #undef GEN_INT_ARITH_SUBF 6748 #undef GEN_INT_ARITH_SUBF_CONST 6749 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \ 6750 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER), 6751 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \ 6752 add_ca, compute_ca, compute_ov) \ 6753 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER), 6754 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0) 6755 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1) 6756 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0) 6757 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1) 6758 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0) 6759 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1) 6760 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0) 6761 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1) 6762 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0) 6763 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1) 6764 6765 #undef GEN_LOGICAL1 6766 #undef GEN_LOGICAL2 6767 #define GEN_LOGICAL2(name, tcg_op, opc, type) \ 6768 GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type) 6769 #define GEN_LOGICAL1(name, tcg_op, opc, type) \ 6770 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type) 6771 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER), 6772 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER), 6773 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER), 6774 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER), 6775 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER), 6776 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER), 6777 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER), 6778 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER), 6779 #if defined(TARGET_PPC64) 6780 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B), 6781 #endif 6782 6783 #if defined(TARGET_PPC64) 6784 #undef GEN_PPC64_R2 6785 #undef GEN_PPC64_R4 6786 #define GEN_PPC64_R2(name, opc1, opc2) \ 6787 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\ 6788 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \ 6789 PPC_64B) 6790 #define GEN_PPC64_R4(name, opc1, opc2) \ 6791 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\ 6792 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \ 6793 PPC_64B), \ 6794 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \ 6795 PPC_64B), \ 6796 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \ 6797 PPC_64B) 6798 GEN_PPC64_R4(rldicl, 0x1E, 0x00), 6799 GEN_PPC64_R4(rldicr, 0x1E, 0x02), 6800 GEN_PPC64_R4(rldic, 0x1E, 0x04), 6801 GEN_PPC64_R2(rldcl, 0x1E, 0x08), 6802 GEN_PPC64_R2(rldcr, 0x1E, 0x09), 6803 GEN_PPC64_R4(rldimi, 0x1E, 0x06), 6804 
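/* Note: GEN_PPC64_R2/R4 above register one implementation under two or four
 * neighbouring opc2 values (opc2, | 0x01, | 0x10, | 0x11).  The variants
 * appear to differ only in operand bits of these rotate encodings (the split
 * sh/mb fields) that overlap the field decoded as opc2, so they all have to
 * reach the same handler.
 */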
#endif 6805 6806 #undef GEN_LD 6807 #undef GEN_LDU 6808 #undef GEN_LDUX 6809 #undef GEN_LDX_E 6810 #undef GEN_LDS 6811 #define GEN_LD(name, ldop, opc, type) \ 6812 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type), 6813 #define GEN_LDU(name, ldop, opc, type) \ 6814 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type), 6815 #define GEN_LDUX(name, ldop, opc2, opc3, type) \ 6816 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type), 6817 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \ 6818 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2), 6819 #define GEN_LDS(name, ldop, op, type) \ 6820 GEN_LD(name, ldop, op | 0x20, type) \ 6821 GEN_LDU(name, ldop, op | 0x21, type) \ 6822 GEN_LDUX(name, ldop, 0x17, op | 0x01, type) \ 6823 GEN_LDX(name, ldop, 0x17, op | 0x00, type) 6824 6825 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER) 6826 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER) 6827 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER) 6828 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER) 6829 #if defined(TARGET_PPC64) 6830 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B) 6831 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B) 6832 GEN_LDUX(ld, ld64_i64, 0x15, 0x01, PPC_64B) 6833 GEN_LDX(ld, ld64_i64, 0x15, 0x00, PPC_64B) 6834 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE) 6835 6836 /* HV/P7 and later only */ 6837 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST) 6838 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x18, PPC_CILDST) 6839 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST) 6840 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST) 6841 #endif 6842 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER) 6843 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER) 6844 6845 #undef GEN_ST 6846 #undef GEN_STU 6847 #undef GEN_STUX 6848 #undef GEN_STX_E 6849 #undef GEN_STS 6850 #define GEN_ST(name, stop, opc, type) \ 6851 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type), 6852 #define GEN_STU(name, stop, opc, type) \ 6853 GEN_HANDLER(stop##u, opc, 0xFF, 0xFF, 0x00000000, type), 6854 #define GEN_STUX(name, stop, opc2, opc3, type) \ 6855 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type), 6856 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \ 6857 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2), 6858 #define GEN_STS(name, stop, op, type) \ 6859 GEN_ST(name, stop, op | 0x20, type) \ 6860 GEN_STU(name, stop, op | 0x21, type) \ 6861 GEN_STUX(name, stop, 0x17, op | 0x01, type) \ 6862 GEN_STX(name, stop, 0x17, op | 0x00, type) 6863 6864 GEN_STS(stb, st8, 0x06, PPC_INTEGER) 6865 GEN_STS(sth, st16, 0x0C, PPC_INTEGER) 6866 GEN_STS(stw, st32, 0x04, PPC_INTEGER) 6867 #if defined(TARGET_PPC64) 6868 GEN_STUX(std, st64_i64, 0x15, 0x05, PPC_64B) 6869 GEN_STX(std, st64_i64, 0x15, 0x04, PPC_64B) 6870 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE) 6871 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST) 6872 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST) 6873 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST) 6874 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST) 6875 #endif 6876 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER) 6877 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER) 6878 6879 #undef GEN_CRLOGIC 6880 #define GEN_CRLOGIC(name, tcg_op, opc) \ 6881 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER) 6882 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08), 6883 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04), 6884 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09), 6885 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07), 6886 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01), 6887 
GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E), 6888 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D), 6889 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06), 6890 6891 #undef GEN_MAC_HANDLER 6892 #define GEN_MAC_HANDLER(name, opc2, opc3) \ 6893 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC) 6894 GEN_MAC_HANDLER(macchw, 0x0C, 0x05), 6895 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15), 6896 GEN_MAC_HANDLER(macchws, 0x0C, 0x07), 6897 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17), 6898 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06), 6899 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16), 6900 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04), 6901 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14), 6902 GEN_MAC_HANDLER(machhw, 0x0C, 0x01), 6903 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11), 6904 GEN_MAC_HANDLER(machhws, 0x0C, 0x03), 6905 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13), 6906 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02), 6907 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12), 6908 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00), 6909 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10), 6910 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D), 6911 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D), 6912 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F), 6913 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F), 6914 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C), 6915 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C), 6916 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E), 6917 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E), 6918 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05), 6919 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15), 6920 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07), 6921 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17), 6922 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01), 6923 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11), 6924 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03), 6925 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13), 6926 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D), 6927 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D), 6928 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F), 6929 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F), 6930 GEN_MAC_HANDLER(mulchw, 0x08, 0x05), 6931 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04), 6932 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01), 6933 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00), 6934 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D), 6935 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C), 6936 6937 GEN_HANDLER2_E(tbegin, "tbegin", 0x1F, 0x0E, 0x14, 0x01DFF800, \ 6938 PPC_NONE, PPC2_TM), 6939 GEN_HANDLER2_E(tend, "tend", 0x1F, 0x0E, 0x15, 0x01FFF800, \ 6940 PPC_NONE, PPC2_TM), 6941 GEN_HANDLER2_E(tabort, "tabort", 0x1F, 0x0E, 0x1C, 0x03E0F800, \ 6942 PPC_NONE, PPC2_TM), 6943 GEN_HANDLER2_E(tabortwc, "tabortwc", 0x1F, 0x0E, 0x18, 0x00000000, \ 6944 PPC_NONE, PPC2_TM), 6945 GEN_HANDLER2_E(tabortwci, "tabortwci", 0x1F, 0x0E, 0x1A, 0x00000000, \ 6946 PPC_NONE, PPC2_TM), 6947 GEN_HANDLER2_E(tabortdc, "tabortdc", 0x1F, 0x0E, 0x19, 0x00000000, \ 6948 PPC_NONE, PPC2_TM), 6949 GEN_HANDLER2_E(tabortdci, "tabortdci", 0x1F, 0x0E, 0x1B, 0x00000000, \ 6950 PPC_NONE, PPC2_TM), 6951 GEN_HANDLER2_E(tsr, "tsr", 0x1F, 0x0E, 0x17, 0x03DFF800, \ 6952 PPC_NONE, PPC2_TM), 6953 GEN_HANDLER2_E(tcheck, "tcheck", 0x1F, 0x0E, 0x16, 0x007FF800, \ 6954 PPC_NONE, PPC2_TM), 6955 GEN_HANDLER2_E(treclaim, "treclaim", 0x1F, 0x0E, 0x1D, 0x03E0F800, \ 6956 PPC_NONE, PPC2_TM), 6957 GEN_HANDLER2_E(trechkpt, "trechkpt", 0x1F, 0x0E, 0x1F, 0x03FFF800, \ 6958 PPC_NONE, PPC2_TM), 6959 6960 #include "translate/fp-ops.inc.c" 6961 6962 #include "translate/vmx-ops.inc.c" 6963 6964 #include "translate/vsx-ops.inc.c" 6965 6966 #include "translate/dfp-ops.inc.c" 6967 6968 #include "translate/spe-ops.inc.c" 6969 }; 6970 6971 #include "helper_regs.h" 6972 #include "translate_init.c" 6973 6974 
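/*
 * For reference, each opcode_t in the opcodes[] table above is keyed by the
 * fields the decode loop in gen_intermediate_code() extracts with opc1(),
 * opc2(), opc3() (and opc4() for the deeper tables).  For example, the entry
 *
 *     GEN_HANDLER(cmp, 0x1F, 0x00, 0x00, 0x00400000, PPC_INTEGER)
 *
 * is selected for primary opcode 31 (0x1F) with both extended-opcode fields
 * equal to 0, and its inval mask (0x00400000) flags instruction bits that
 * must be zero: if any of them is set in the fetched opcode, the loop raises
 * an invalid-instruction exception via gen_inval_exception() instead of
 * calling the handler.
 */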
/*****************************************************************************/ 6975 /* Misc PowerPC helpers */ 6976 void ppc_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf, 6977 int flags) 6978 { 6979 #define RGPL 4 6980 #define RFPL 4 6981 6982 PowerPCCPU *cpu = POWERPC_CPU(cs); 6983 CPUPPCState *env = &cpu->env; 6984 int i; 6985 6986 cpu_fprintf(f, "NIP " TARGET_FMT_lx " LR " TARGET_FMT_lx " CTR " 6987 TARGET_FMT_lx " XER " TARGET_FMT_lx " CPU#%d\n", 6988 env->nip, env->lr, env->ctr, cpu_read_xer(env), 6989 cs->cpu_index); 6990 cpu_fprintf(f, "MSR " TARGET_FMT_lx " HID0 " TARGET_FMT_lx " HF " 6991 TARGET_FMT_lx " iidx %d didx %d\n", 6992 env->msr, env->spr[SPR_HID0], 6993 env->hflags, env->immu_idx, env->dmmu_idx); 6994 #if !defined(NO_TIMER_DUMP) 6995 cpu_fprintf(f, "TB %08" PRIu32 " %08" PRIu64 6996 #if !defined(CONFIG_USER_ONLY) 6997 " DECR %08" PRIu32 6998 #endif 6999 "\n", 7000 cpu_ppc_load_tbu(env), cpu_ppc_load_tbl(env) 7001 #if !defined(CONFIG_USER_ONLY) 7002 , cpu_ppc_load_decr(env) 7003 #endif 7004 ); 7005 #endif 7006 for (i = 0; i < 32; i++) { 7007 if ((i & (RGPL - 1)) == 0) 7008 cpu_fprintf(f, "GPR%02d", i); 7009 cpu_fprintf(f, " %016" PRIx64, ppc_dump_gpr(env, i)); 7010 if ((i & (RGPL - 1)) == (RGPL - 1)) 7011 cpu_fprintf(f, "\n"); 7012 } 7013 cpu_fprintf(f, "CR "); 7014 for (i = 0; i < 8; i++) 7015 cpu_fprintf(f, "%01x", env->crf[i]); 7016 cpu_fprintf(f, " ["); 7017 for (i = 0; i < 8; i++) { 7018 char a = '-'; 7019 if (env->crf[i] & 0x08) 7020 a = 'L'; 7021 else if (env->crf[i] & 0x04) 7022 a = 'G'; 7023 else if (env->crf[i] & 0x02) 7024 a = 'E'; 7025 cpu_fprintf(f, " %c%c", a, env->crf[i] & 0x01 ? 'O' : ' '); 7026 } 7027 cpu_fprintf(f, " ] RES " TARGET_FMT_lx "\n", 7028 env->reserve_addr); 7029 for (i = 0; i < 32; i++) { 7030 if ((i & (RFPL - 1)) == 0) 7031 cpu_fprintf(f, "FPR%02d", i); 7032 cpu_fprintf(f, " %016" PRIx64, *((uint64_t *)&env->fpr[i])); 7033 if ((i & (RFPL - 1)) == (RFPL - 1)) 7034 cpu_fprintf(f, "\n"); 7035 } 7036 cpu_fprintf(f, "FPSCR " TARGET_FMT_lx "\n", env->fpscr); 7037 #if !defined(CONFIG_USER_ONLY) 7038 cpu_fprintf(f, " SRR0 " TARGET_FMT_lx " SRR1 " TARGET_FMT_lx 7039 " PVR " TARGET_FMT_lx " VRSAVE " TARGET_FMT_lx "\n", 7040 env->spr[SPR_SRR0], env->spr[SPR_SRR1], 7041 env->spr[SPR_PVR], env->spr[SPR_VRSAVE]); 7042 7043 cpu_fprintf(f, "SPRG0 " TARGET_FMT_lx " SPRG1 " TARGET_FMT_lx 7044 " SPRG2 " TARGET_FMT_lx " SPRG3 " TARGET_FMT_lx "\n", 7045 env->spr[SPR_SPRG0], env->spr[SPR_SPRG1], 7046 env->spr[SPR_SPRG2], env->spr[SPR_SPRG3]); 7047 7048 cpu_fprintf(f, "SPRG4 " TARGET_FMT_lx " SPRG5 " TARGET_FMT_lx 7049 " SPRG6 " TARGET_FMT_lx " SPRG7 " TARGET_FMT_lx "\n", 7050 env->spr[SPR_SPRG4], env->spr[SPR_SPRG5], 7051 env->spr[SPR_SPRG6], env->spr[SPR_SPRG7]); 7052 7053 #if defined(TARGET_PPC64) 7054 if (env->excp_model == POWERPC_EXCP_POWER7 || 7055 env->excp_model == POWERPC_EXCP_POWER8) { 7056 cpu_fprintf(f, "HSRR0 " TARGET_FMT_lx " HSRR1 " TARGET_FMT_lx "\n", 7057 env->spr[SPR_HSRR0], env->spr[SPR_HSRR1]); 7058 } 7059 #endif 7060 if (env->excp_model == POWERPC_EXCP_BOOKE) { 7061 cpu_fprintf(f, "CSRR0 " TARGET_FMT_lx " CSRR1 " TARGET_FMT_lx 7062 " MCSRR0 " TARGET_FMT_lx " MCSRR1 " TARGET_FMT_lx "\n", 7063 env->spr[SPR_BOOKE_CSRR0], env->spr[SPR_BOOKE_CSRR1], 7064 env->spr[SPR_BOOKE_MCSRR0], env->spr[SPR_BOOKE_MCSRR1]); 7065 7066 cpu_fprintf(f, " TCR " TARGET_FMT_lx " TSR " TARGET_FMT_lx 7067 " ESR " TARGET_FMT_lx " DEAR " TARGET_FMT_lx "\n", 7068 env->spr[SPR_BOOKE_TCR], env->spr[SPR_BOOKE_TSR], 7069 env->spr[SPR_BOOKE_ESR], 
env->spr[SPR_BOOKE_DEAR]); 7070 7071 cpu_fprintf(f, " PIR " TARGET_FMT_lx " DECAR " TARGET_FMT_lx 7072 " IVPR " TARGET_FMT_lx " EPCR " TARGET_FMT_lx "\n", 7073 env->spr[SPR_BOOKE_PIR], env->spr[SPR_BOOKE_DECAR], 7074 env->spr[SPR_BOOKE_IVPR], env->spr[SPR_BOOKE_EPCR]); 7075 7076 cpu_fprintf(f, " MCSR " TARGET_FMT_lx " SPRG8 " TARGET_FMT_lx 7077 " EPR " TARGET_FMT_lx "\n", 7078 env->spr[SPR_BOOKE_MCSR], env->spr[SPR_BOOKE_SPRG8], 7079 env->spr[SPR_BOOKE_EPR]); 7080 7081 /* FSL-specific */ 7082 cpu_fprintf(f, " MCAR " TARGET_FMT_lx " PID1 " TARGET_FMT_lx 7083 " PID2 " TARGET_FMT_lx " SVR " TARGET_FMT_lx "\n", 7084 env->spr[SPR_Exxx_MCAR], env->spr[SPR_BOOKE_PID1], 7085 env->spr[SPR_BOOKE_PID2], env->spr[SPR_E500_SVR]); 7086 7087 /* 7088 * IVORs are left out as they are large and do not change often -- 7089 * they can be read with "p $ivor0", "p $ivor1", etc. 7090 */ 7091 } 7092 7093 #if defined(TARGET_PPC64) 7094 if (env->flags & POWERPC_FLAG_CFAR) { 7095 cpu_fprintf(f, " CFAR " TARGET_FMT_lx"\n", env->cfar); 7096 } 7097 #endif 7098 7099 if (env->spr_cb[SPR_LPCR].name) 7100 cpu_fprintf(f, " LPCR " TARGET_FMT_lx "\n", env->spr[SPR_LPCR]); 7101 7102 switch (POWERPC_MMU_VER(env->mmu_model)) { 7103 case POWERPC_MMU_32B: 7104 case POWERPC_MMU_601: 7105 case POWERPC_MMU_SOFT_6xx: 7106 case POWERPC_MMU_SOFT_74xx: 7107 #if defined(TARGET_PPC64) 7108 case POWERPC_MMU_VER_64B: 7109 case POWERPC_MMU_VER_2_03: 7110 case POWERPC_MMU_VER_2_06: 7111 case POWERPC_MMU_VER_2_07: 7112 case POWERPC_MMU_VER_3_00: 7113 #endif 7114 if (env->spr_cb[SPR_SDR1].name) { /* SDR1 Exists */ 7115 cpu_fprintf(f, " SDR1 " TARGET_FMT_lx " ", env->spr[SPR_SDR1]); 7116 } 7117 cpu_fprintf(f, " DAR " TARGET_FMT_lx " DSISR " TARGET_FMT_lx "\n", 7118 env->spr[SPR_DAR], env->spr[SPR_DSISR]); 7119 break; 7120 case POWERPC_MMU_BOOKE206: 7121 cpu_fprintf(f, " MAS0 " TARGET_FMT_lx " MAS1 " TARGET_FMT_lx 7122 " MAS2 " TARGET_FMT_lx " MAS3 " TARGET_FMT_lx "\n", 7123 env->spr[SPR_BOOKE_MAS0], env->spr[SPR_BOOKE_MAS1], 7124 env->spr[SPR_BOOKE_MAS2], env->spr[SPR_BOOKE_MAS3]); 7125 7126 cpu_fprintf(f, " MAS4 " TARGET_FMT_lx " MAS6 " TARGET_FMT_lx 7127 " MAS7 " TARGET_FMT_lx " PID " TARGET_FMT_lx "\n", 7128 env->spr[SPR_BOOKE_MAS4], env->spr[SPR_BOOKE_MAS6], 7129 env->spr[SPR_BOOKE_MAS7], env->spr[SPR_BOOKE_PID]); 7130 7131 cpu_fprintf(f, "MMUCFG " TARGET_FMT_lx " TLB0CFG " TARGET_FMT_lx 7132 " TLB1CFG " TARGET_FMT_lx "\n", 7133 env->spr[SPR_MMUCFG], env->spr[SPR_BOOKE_TLB0CFG], 7134 env->spr[SPR_BOOKE_TLB1CFG]); 7135 break; 7136 default: 7137 break; 7138 } 7139 #endif 7140 7141 #undef RGPL 7142 #undef RFPL 7143 } 7144 7145 void ppc_cpu_dump_statistics(CPUState *cs, FILE*f, 7146 fprintf_function cpu_fprintf, int flags) 7147 { 7148 #if defined(DO_PPC_STATISTICS) 7149 PowerPCCPU *cpu = POWERPC_CPU(cs); 7150 opc_handler_t **t1, **t2, **t3, *handler; 7151 int op1, op2, op3; 7152 7153 t1 = cpu->env.opcodes; 7154 for (op1 = 0; op1 < 64; op1++) { 7155 handler = t1[op1]; 7156 if (is_indirect_opcode(handler)) { 7157 t2 = ind_table(handler); 7158 for (op2 = 0; op2 < 32; op2++) { 7159 handler = t2[op2]; 7160 if (is_indirect_opcode(handler)) { 7161 t3 = ind_table(handler); 7162 for (op3 = 0; op3 < 32; op3++) { 7163 handler = t3[op3]; 7164 if (handler->count == 0) 7165 continue; 7166 cpu_fprintf(f, "%02x %02x %02x (%02x %04d) %16s: " 7167 "%016" PRIx64 " %" PRId64 "\n", 7168 op1, op2, op3, op1, (op3 << 5) | op2, 7169 handler->oname, 7170 handler->count, handler->count); 7171 } 7172 } else { 7173 if (handler->count == 0) 7174 continue; 7175 cpu_fprintf(f, 
"%02x %02x (%02x %04d) %16s: " 7176 "%016" PRIx64 " %" PRId64 "\n", 7177 op1, op2, op1, op2, handler->oname, 7178 handler->count, handler->count); 7179 } 7180 } 7181 } else { 7182 if (handler->count == 0) 7183 continue; 7184 cpu_fprintf(f, "%02x (%02x ) %16s: %016" PRIx64 7185 " %" PRId64 "\n", 7186 op1, op1, handler->oname, 7187 handler->count, handler->count); 7188 } 7189 } 7190 #endif 7191 } 7192 7193 /*****************************************************************************/ 7194 void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb) 7195 { 7196 CPUPPCState *env = cs->env_ptr; 7197 DisasContext ctx, *ctxp = &ctx; 7198 opc_handler_t **table, *handler; 7199 target_ulong pc_start; 7200 int num_insns; 7201 int max_insns; 7202 7203 pc_start = tb->pc; 7204 ctx.nip = pc_start; 7205 ctx.tb = tb; 7206 ctx.exception = POWERPC_EXCP_NONE; 7207 ctx.spr_cb = env->spr_cb; 7208 ctx.pr = msr_pr; 7209 ctx.mem_idx = env->dmmu_idx; 7210 ctx.dr = msr_dr; 7211 #if !defined(CONFIG_USER_ONLY) 7212 ctx.hv = msr_hv || !env->has_hv_mode; 7213 #endif 7214 ctx.insns_flags = env->insns_flags; 7215 ctx.insns_flags2 = env->insns_flags2; 7216 ctx.access_type = -1; 7217 ctx.need_access_type = !(env->mmu_model & POWERPC_MMU_64B); 7218 ctx.le_mode = !!(env->hflags & (1 << MSR_LE)); 7219 ctx.default_tcg_memop_mask = ctx.le_mode ? MO_LE : MO_BE; 7220 #if defined(TARGET_PPC64) 7221 ctx.sf_mode = msr_is_64bit(env, env->msr); 7222 ctx.has_cfar = !!(env->flags & POWERPC_FLAG_CFAR); 7223 #endif 7224 if (env->mmu_model == POWERPC_MMU_32B || 7225 env->mmu_model == POWERPC_MMU_601 || 7226 (env->mmu_model & POWERPC_MMU_64B)) 7227 ctx.lazy_tlb_flush = true; 7228 7229 ctx.fpu_enabled = !!msr_fp; 7230 if ((env->flags & POWERPC_FLAG_SPE) && msr_spe) 7231 ctx.spe_enabled = !!msr_spe; 7232 else 7233 ctx.spe_enabled = false; 7234 if ((env->flags & POWERPC_FLAG_VRE) && msr_vr) 7235 ctx.altivec_enabled = !!msr_vr; 7236 else 7237 ctx.altivec_enabled = false; 7238 if ((env->flags & POWERPC_FLAG_VSX) && msr_vsx) { 7239 ctx.vsx_enabled = !!msr_vsx; 7240 } else { 7241 ctx.vsx_enabled = false; 7242 } 7243 #if defined(TARGET_PPC64) 7244 if ((env->flags & POWERPC_FLAG_TM) && msr_tm) { 7245 ctx.tm_enabled = !!msr_tm; 7246 } else { 7247 ctx.tm_enabled = false; 7248 } 7249 #endif 7250 ctx.gtse = !!(env->spr[SPR_LPCR] & LPCR_GTSE); 7251 if ((env->flags & POWERPC_FLAG_SE) && msr_se) 7252 ctx.singlestep_enabled = CPU_SINGLE_STEP; 7253 else 7254 ctx.singlestep_enabled = 0; 7255 if ((env->flags & POWERPC_FLAG_BE) && msr_be) 7256 ctx.singlestep_enabled |= CPU_BRANCH_STEP; 7257 if (unlikely(cs->singlestep_enabled)) { 7258 ctx.singlestep_enabled |= GDBSTUB_SINGLE_STEP; 7259 } 7260 #if defined (DO_SINGLE_STEP) && 0 7261 /* Single step trace mode */ 7262 msr_se = 1; 7263 #endif 7264 num_insns = 0; 7265 max_insns = tb_cflags(tb) & CF_COUNT_MASK; 7266 if (max_insns == 0) { 7267 max_insns = CF_COUNT_MASK; 7268 } 7269 if (max_insns > TCG_MAX_INSNS) { 7270 max_insns = TCG_MAX_INSNS; 7271 } 7272 7273 gen_tb_start(tb); 7274 tcg_clear_temp_count(); 7275 /* Set env in case of segfault during code fetch */ 7276 while (ctx.exception == POWERPC_EXCP_NONE && !tcg_op_buf_full()) { 7277 tcg_gen_insn_start(ctx.nip); 7278 num_insns++; 7279 7280 if (unlikely(cpu_breakpoint_test(cs, ctx.nip, BP_ANY))) { 7281 gen_debug_exception(ctxp); 7282 /* The address covered by the breakpoint must be included in 7283 [tb->pc, tb->pc + tb->size) in order to for it to be 7284 properly cleared -- thus we increment the PC here so that 7285 the logic setting tb->size below does 
the right thing. */ 7286 ctx.nip += 4; 7287 break; 7288 } 7289 7290 LOG_DISAS("----------------\n"); 7291 LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n", 7292 ctx.nip, ctx.mem_idx, (int)msr_ir); 7293 if (num_insns == max_insns && (tb_cflags(tb) & CF_LAST_IO)) 7294 gen_io_start(); 7295 if (unlikely(need_byteswap(&ctx))) { 7296 ctx.opcode = bswap32(cpu_ldl_code(env, ctx.nip)); 7297 } else { 7298 ctx.opcode = cpu_ldl_code(env, ctx.nip); 7299 } 7300 LOG_DISAS("translate opcode %08x (%02x %02x %02x %02x) (%s)\n", 7301 ctx.opcode, opc1(ctx.opcode), opc2(ctx.opcode), 7302 opc3(ctx.opcode), opc4(ctx.opcode), 7303 ctx.le_mode ? "little" : "big"); 7304 ctx.nip += 4; 7305 table = env->opcodes; 7306 handler = table[opc1(ctx.opcode)]; 7307 if (is_indirect_opcode(handler)) { 7308 table = ind_table(handler); 7309 handler = table[opc2(ctx.opcode)]; 7310 if (is_indirect_opcode(handler)) { 7311 table = ind_table(handler); 7312 handler = table[opc3(ctx.opcode)]; 7313 if (is_indirect_opcode(handler)) { 7314 table = ind_table(handler); 7315 handler = table[opc4(ctx.opcode)]; 7316 } 7317 } 7318 } 7319 /* Is opcode *REALLY* valid ? */ 7320 if (unlikely(handler->handler == &gen_invalid)) { 7321 qemu_log_mask(LOG_GUEST_ERROR, "invalid/unsupported opcode: " 7322 "%02x - %02x - %02x - %02x (%08x) " 7323 TARGET_FMT_lx " %d\n", 7324 opc1(ctx.opcode), opc2(ctx.opcode), 7325 opc3(ctx.opcode), opc4(ctx.opcode), 7326 ctx.opcode, ctx.nip - 4, (int)msr_ir); 7327 } else { 7328 uint32_t inval; 7329 7330 if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE) && Rc(ctx.opcode))) { 7331 inval = handler->inval2; 7332 } else { 7333 inval = handler->inval1; 7334 } 7335 7336 if (unlikely((ctx.opcode & inval) != 0)) { 7337 qemu_log_mask(LOG_GUEST_ERROR, "invalid bits: %08x for opcode: " 7338 "%02x - %02x - %02x - %02x (%08x) " 7339 TARGET_FMT_lx "\n", ctx.opcode & inval, 7340 opc1(ctx.opcode), opc2(ctx.opcode), 7341 opc3(ctx.opcode), opc4(ctx.opcode), 7342 ctx.opcode, ctx.nip - 4); 7343 gen_inval_exception(ctxp, POWERPC_EXCP_INVAL_INVAL); 7344 break; 7345 } 7346 } 7347 (*(handler->handler))(&ctx); 7348 #if defined(DO_PPC_STATISTICS) 7349 handler->count++; 7350 #endif 7351 /* Check trace mode exceptions */ 7352 if (unlikely(ctx.singlestep_enabled & CPU_SINGLE_STEP && 7353 (ctx.nip <= 0x100 || ctx.nip > 0xF00) && 7354 ctx.exception != POWERPC_SYSCALL && 7355 ctx.exception != POWERPC_EXCP_TRAP && 7356 ctx.exception != POWERPC_EXCP_BRANCH)) { 7357 gen_exception_nip(ctxp, POWERPC_EXCP_TRACE, ctx.nip); 7358 } else if (unlikely(((ctx.nip & (TARGET_PAGE_SIZE - 1)) == 0) || 7359 (cs->singlestep_enabled) || 7360 singlestep || 7361 num_insns >= max_insns)) { 7362 /* if we reach a page boundary or are single stepping, stop 7363 * generation 7364 */ 7365 break; 7366 } 7367 if (tcg_check_temp_count()) { 7368 fprintf(stderr, "Opcode %02x %02x %02x %02x (%08x) leaked " 7369 "temporaries\n", opc1(ctx.opcode), opc2(ctx.opcode), 7370 opc3(ctx.opcode), opc4(ctx.opcode), ctx.opcode); 7371 exit(1); 7372 } 7373 } 7374 if (tb_cflags(tb) & CF_LAST_IO) 7375 gen_io_end(); 7376 if (ctx.exception == POWERPC_EXCP_NONE) { 7377 gen_goto_tb(&ctx, 0, ctx.nip); 7378 } else if (ctx.exception != POWERPC_EXCP_BRANCH) { 7379 if (unlikely(cs->singlestep_enabled)) { 7380 gen_debug_exception(ctxp); 7381 } 7382 /* Generate the return instruction */ 7383 tcg_gen_exit_tb(0); 7384 } 7385 gen_tb_end(tb, num_insns); 7386 7387 tb->size = ctx.nip - pc_start; 7388 tb->icount = num_insns; 7389 7390 #if defined(DEBUG_DISAS) 7391 if 
(qemu_loglevel_mask(CPU_LOG_TB_IN_ASM) 7392 && qemu_log_in_addr_range(pc_start)) { 7393 qemu_log_lock(); 7394 qemu_log("IN: %s\n", lookup_symbol(pc_start)); 7395 log_target_disas(cs, pc_start, ctx.nip - pc_start); 7396 qemu_log("\n"); 7397 qemu_log_unlock(); 7398 } 7399 #endif 7400 } 7401 7402 void restore_state_to_opc(CPUPPCState *env, TranslationBlock *tb, 7403 target_ulong *data) 7404 { 7405 env->nip = data[0]; 7406 } 7407
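/* Note on restore_state_to_opc() above: data[0] is the value recorded for
 * each instruction by tcg_gen_insn_start(ctx.nip) in gen_intermediate_code(),
 * so unwinding a TB simply restores the guest nip.
 */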