/*
 *  PowerPC emulation for qemu: main translation routines.
 *
 *  Copyright (c) 2003-2007 Jocelyn Mayer
 *  Copyright (C) 2011 Freescale Semiconductor, Inc.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */

#include "qemu/osdep.h"
#include "cpu.h"
#include "internal.h"
#include "disas/disas.h"
#include "exec/exec-all.h"
#include "tcg-op.h"
#include "qemu/host-utils.h"
#include "exec/cpu_ldst.h"

#include "exec/helper-proto.h"
#include "exec/helper-gen.h"

#include "trace-tcg.h"
#include "exec/translator.h"
#include "exec/log.h"


/* Single-stepping flags used by the translation loop. */
#define CPU_SINGLE_STEP 0x1
#define CPU_BRANCH_STEP 0x2
#define GDBSTUB_SINGLE_STEP 0x4

/* Include definitions for instructions classes and implementations flags */
//#define PPC_DEBUG_DISAS
//#define DO_PPC_STATISTICS

#ifdef PPC_DEBUG_DISAS
#  define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
#else
#  define LOG_DISAS(...) do { } while (0)
#endif
/*****************************************************************************/
/* Code translation helpers                                                  */

/* global register indexes */
/* Backing storage for the register-name strings passed to the
 * tcg_global_mem_new* calls below; sized exactly for the strings
 * written by ppc_translate_init() (1- vs 2-digit register numbers). */
static char cpu_reg_names[10*3 + 22*4 /* GPR */
    + 10*4 + 22*5 /* SPE GPRh */
    + 10*4 + 22*5 /* FPR */
    + 2*(10*6 + 22*7) /* AVRh, AVRl */
    + 10*5 + 22*6 /* VSR */
    + 8*5 /* CRF */];
static TCGv cpu_gpr[32];
static TCGv cpu_gprh[32];
static TCGv_i64 cpu_fpr[32];
static TCGv_i64 cpu_avrh[32], cpu_avrl[32];
static TCGv_i64 cpu_vsr[32];
static TCGv_i32 cpu_crf[8];
static TCGv cpu_nip;
static TCGv cpu_msr;
static TCGv cpu_ctr;
static TCGv cpu_lr;
#if defined(TARGET_PPC64)
static TCGv cpu_cfar;
#endif
static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca, cpu_ov32, cpu_ca32;
static TCGv cpu_reserve;
static TCGv cpu_reserve_val;
static TCGv cpu_fpscr;
static TCGv_i32 cpu_access_type;

#include "exec/gen-icount.h"

/* Create the TCG globals that mirror the CPUPPCState fields used by the
 * translator.  Register names are formatted into cpu_reg_names; p/
 * cpu_reg_names_size track the remaining space in that buffer, advancing
 * by the exact length (including NUL) of each string just written. */
void ppc_translate_init(void)
{
    int i;
    char* p;
    size_t cpu_reg_names_size;

    p = cpu_reg_names;
    cpu_reg_names_size = sizeof(cpu_reg_names);

    for (i = 0; i < 8; i++) {
        snprintf(p, cpu_reg_names_size, "crf%d", i);
        cpu_crf[i] = tcg_global_mem_new_i32(cpu_env,
                                            offsetof(CPUPPCState, crf[i]), p);
        p += 5;
        cpu_reg_names_size -= 5;
    }

    for (i = 0; i < 32; i++) {
        snprintf(p, cpu_reg_names_size, "r%d", i);
        cpu_gpr[i] = tcg_global_mem_new(cpu_env,
                                        offsetof(CPUPPCState, gpr[i]), p);
        /* "r9" is 3 bytes with NUL, "r31" is 4 — hence the (i < 10) split. */
        p += (i < 10) ? 3 : 4;
        cpu_reg_names_size -= (i < 10) ? 3 : 4;
        snprintf(p, cpu_reg_names_size, "r%dH", i);
        cpu_gprh[i] = tcg_global_mem_new(cpu_env,
                                         offsetof(CPUPPCState, gprh[i]), p);
        p += (i < 10) ? 4 : 5;
        cpu_reg_names_size -= (i < 10) ? 4 : 5;

        snprintf(p, cpu_reg_names_size, "fp%d", i);
        cpu_fpr[i] = tcg_global_mem_new_i64(cpu_env,
                                            offsetof(CPUPPCState, fpr[i]), p);
        p += (i < 10) ? 4 : 5;
        cpu_reg_names_size -= (i < 10) ? 4 : 5;

        /* AVR halves: which u64 is "high" depends on host endianness. */
        snprintf(p, cpu_reg_names_size, "avr%dH", i);
#ifdef HOST_WORDS_BIGENDIAN
        cpu_avrh[i] = tcg_global_mem_new_i64(cpu_env,
                                             offsetof(CPUPPCState, avr[i].u64[0]), p);
#else
        cpu_avrh[i] = tcg_global_mem_new_i64(cpu_env,
                                             offsetof(CPUPPCState, avr[i].u64[1]), p);
#endif
        p += (i < 10) ? 6 : 7;
        cpu_reg_names_size -= (i < 10) ? 6 : 7;

        snprintf(p, cpu_reg_names_size, "avr%dL", i);
#ifdef HOST_WORDS_BIGENDIAN
        cpu_avrl[i] = tcg_global_mem_new_i64(cpu_env,
                                             offsetof(CPUPPCState, avr[i].u64[1]), p);
#else
        cpu_avrl[i] = tcg_global_mem_new_i64(cpu_env,
                                             offsetof(CPUPPCState, avr[i].u64[0]), p);
#endif
        p += (i < 10) ? 6 : 7;
        cpu_reg_names_size -= (i < 10) ? 6 : 7;
        snprintf(p, cpu_reg_names_size, "vsr%d", i);
        cpu_vsr[i] = tcg_global_mem_new_i64(cpu_env,
                                            offsetof(CPUPPCState, vsr[i]), p);
        p += (i < 10) ? 5 : 6;
        cpu_reg_names_size -= (i < 10) ? 5 : 6;
    }

    cpu_nip = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, nip), "nip");

    cpu_msr = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, msr), "msr");

    cpu_ctr = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, ctr), "ctr");

    cpu_lr = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, lr), "lr");

#if defined(TARGET_PPC64)
    cpu_cfar = tcg_global_mem_new(cpu_env,
                                  offsetof(CPUPPCState, cfar), "cfar");
#endif

    cpu_xer = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, xer), "xer");
    cpu_so = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, so), "SO");
    cpu_ov = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, ov), "OV");
    cpu_ca = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, ca), "CA");
    cpu_ov32 = tcg_global_mem_new(cpu_env,
                                  offsetof(CPUPPCState, ov32), "OV32");
    cpu_ca32 = tcg_global_mem_new(cpu_env,
                                  offsetof(CPUPPCState, ca32), "CA32");

    cpu_reserve = tcg_global_mem_new(cpu_env,
                                     offsetof(CPUPPCState, reserve_addr),
                                     "reserve_addr");
    cpu_reserve_val = tcg_global_mem_new(cpu_env,
                                         offsetof(CPUPPCState, reserve_val),
                                         "reserve_val");

    cpu_fpscr = tcg_global_mem_new(cpu_env,
                                   offsetof(CPUPPCState, fpscr), "fpscr");

    cpu_access_type = tcg_global_mem_new_i32(cpu_env,
                                             offsetof(CPUPPCState, access_type),
                                             "access_type");
}

/* internal defines */
/* Per-translation-block state threaded through every gen_* emitter. */
struct DisasContext {
    DisasContextBase base;
    uint32_t opcode;
    uint32_t exception;
    /* Routine used to access memory */
    bool pr, hv, dr, le_mode;
    bool lazy_tlb_flush;
    bool need_access_type;
    int mem_idx;
    int access_type;
    /* Translation flags */
    TCGMemOp default_tcg_memop_mask;
#if defined(TARGET_PPC64)
    bool sf_mode;
    bool has_cfar;
#endif
    bool fpu_enabled;
    bool altivec_enabled;
    bool vsx_enabled;
    bool spe_enabled;
    bool tm_enabled;
    bool gtse;
    ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
    int singlestep_enabled;
    uint64_t insns_flags;
    uint64_t insns_flags2;
};

/* Return true iff byteswap is needed in a scalar memop */
static inline bool need_byteswap(const DisasContext *ctx)
{
    /* Swap when guest endianness (le_mode) differs from target default. */
#if defined(TARGET_WORDS_BIGENDIAN)
    return ctx->le_mode;
#else
    return !ctx->le_mode;
#endif
}

/* True when active word size < size of target_long.
*/
#ifdef TARGET_PPC64
#  define NARROW_MODE(C)  (!(C)->sf_mode)
#else
#  define NARROW_MODE(C)  0
#endif

/* Static description of one opcode handler: legal-bit masks, the
 * instruction-class flags it is gated on, and the emitter callback. */
struct opc_handler_t {
    /* invalid bits for instruction 1 (Rc(opcode) == 0) */
    uint32_t inval1;
    /* invalid bits for instruction 2 (Rc(opcode) == 1) */
    uint32_t inval2;
    /* instruction type */
    uint64_t type;
    /* extended instruction type */
    uint64_t type2;
    /* handler */
    void (*handler)(DisasContext *ctx);
#if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU)
    const char *oname;
#endif
#if defined(DO_PPC_STATISTICS)
    uint64_t count;
#endif
};

/* Update env->access_type, but only when it actually changes. */
static inline void gen_set_access_type(DisasContext *ctx, int access_type)
{
    if (ctx->need_access_type && ctx->access_type != access_type) {
        tcg_gen_movi_i32(cpu_access_type, access_type);
        ctx->access_type = access_type;
    }
}

/* Store nip into the CPU state, truncated to 32 bits in narrow mode. */
static inline void gen_update_nip(DisasContext *ctx, target_ulong nip)
{
    if (NARROW_MODE(ctx)) {
        nip = (uint32_t)nip;
    }
    tcg_gen_movi_tl(cpu_nip, nip);
}

static void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error)
{
    TCGv_i32 t0, t1;

    /* These are all synchronous exceptions, we set the PC back to
     * the faulting instruction
     */
    if (ctx->exception == POWERPC_EXCP_NONE) {
        gen_update_nip(ctx, ctx->base.pc_next - 4);
    }
    t0 = tcg_const_i32(excp);
    t1 = tcg_const_i32(error);
    gen_helper_raise_exception_err(cpu_env, t0, t1);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    ctx->exception = (excp);
}

static void gen_exception(DisasContext *ctx, uint32_t excp)
{
    TCGv_i32 t0;

    /* These are all synchronous exceptions, we set the PC back to
     * the faulting instruction
     */
    if (ctx->exception == POWERPC_EXCP_NONE) {
        gen_update_nip(ctx, ctx->base.pc_next - 4);
    }
    t0 = tcg_const_i32(excp);
    gen_helper_raise_exception(cpu_env, t0);
    tcg_temp_free_i32(t0);
    ctx->exception = (excp);
}

/* Raise excp with the PC explicitly set to nip (not the current insn). */
static void gen_exception_nip(DisasContext *ctx, uint32_t excp,
                              target_ulong nip)
{
    TCGv_i32 t0;

    gen_update_nip(ctx, nip);
    t0 = tcg_const_i32(excp);
    gen_helper_raise_exception(cpu_env, t0);
    tcg_temp_free_i32(t0);
    ctx->exception = (excp);
}

static void gen_debug_exception(DisasContext *ctx)
{
    TCGv_i32 t0;

    /* These are all synchronous exceptions, we set the PC back to
     * the faulting instruction
     */
    if ((ctx->exception != POWERPC_EXCP_BRANCH) &&
        (ctx->exception != POWERPC_EXCP_SYNC)) {
        gen_update_nip(ctx, ctx->base.pc_next);
    }
    t0 = tcg_const_i32(EXCP_DEBUG);
    gen_helper_raise_exception(cpu_env, t0);
    tcg_temp_free_i32(t0);
}

static inline void gen_inval_exception(DisasContext *ctx, uint32_t error)
{
    /* Will be converted to program check if needed */
    gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_INVAL | error);
}

static inline void gen_priv_exception(DisasContext *ctx, uint32_t error)
{
    gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_PRIV | error);
}

static inline void gen_hvpriv_exception(DisasContext *ctx, uint32_t error)
{
    /* Will be converted to program check if needed */
    gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_PRIV | error);
}

/* Stop translation */
static inline void gen_stop_exception(DisasContext *ctx)
{
    gen_update_nip(ctx, ctx->base.pc_next);
    ctx->exception = POWERPC_EXCP_STOP;
}

#ifndef CONFIG_USER_ONLY
/* No need to update nip here, as execution flow will change */
static inline void gen_sync_exception(DisasContext *ctx)
{
    ctx->exception = POWERPC_EXCP_SYNC;
}
#endif

#define GEN_HANDLER(name, opc1, opc2, opc3, inval, type)                      \
GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE)

#define GEN_HANDLER_E(name, \
opc1, opc2, opc3, inval, type, type2)          \
GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2)

#define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type)               \
GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE)

#define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2)      \
GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2)

#define GEN_HANDLER_E_2(name, opc1, opc2, opc3, opc4, inval, type, type2)     \
GEN_OPCODE3(name, opc1, opc2, opc3, opc4, inval, type, type2)

#define GEN_HANDLER2_E_2(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) \
GEN_OPCODE4(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2)

/* One entry of the opcode tables: the (up to four) opcode fields plus
 * the handler descriptor and a printable name. */
typedef struct opcode_t {
    unsigned char opc1, opc2, opc3, opc4;
#if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
    unsigned char pad[4];
#endif
    opc_handler_t handler;
    const char *oname;
} opcode_t;

/* Helpers for priv. check */
/* Raise a privileged-opcode program check and abort the emitter. */
#define GEN_PRIV                                                 \
    do {                                                         \
        gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); return;  \
    } while (0)

#if defined(CONFIG_USER_ONLY)
#define CHK_HV GEN_PRIV
#define CHK_SV GEN_PRIV
#define CHK_HVRM GEN_PRIV
#else
/* Hypervisor state required. */
#define CHK_HV                                   \
    do {                                         \
        if (unlikely(ctx->pr || !ctx->hv)) {     \
            GEN_PRIV;                            \
        }                                        \
    } while (0)
/* Supervisor (non-problem) state required. */
#define CHK_SV                    \
    do {                          \
        if (unlikely(ctx->pr)) {  \
            GEN_PRIV;             \
        }                         \
    } while (0)
/* Hypervisor real mode required (HV and data relocation off). */
#define CHK_HVRM                                            \
    do {                                                    \
        if (unlikely(ctx->pr || !ctx->hv || ctx->dr)) {     \
            GEN_PRIV;                                       \
        }                                                   \
    } while (0)
#endif

#define CHK_NONE

/*****************************************************************************/
/* PowerPC instructions table                                                */

#if defined(DO_PPC_STATISTICS)
#define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2)                    \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = 0xff,                                                             \
    .handler = {                                                              \
        .inval1  = invl,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
        .oname = stringify(name),                                             \
    },                                                                        \
    .oname = stringify(name),                                                 \
}
#define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2)       \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = 0xff,                                                             \
    .handler = {                                                              \
        .inval1  = invl1,                                                     \
        .inval2  = invl2,                                                     \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
        .oname = stringify(name),                                             \
    },                                                                        \
    .oname = stringify(name),                                                 \
}
#define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2)             \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = 0xff,                                                             \
    .handler = {                                                              \
        .inval1  = invl,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
        .oname = onam,                                                        \
    },                                                                        \
    .oname = onam,                                                            \
}
#define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2)              \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = op4,                                                              \
    .handler = {                                                              \
        .inval1  = invl,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
        .oname = stringify(name),                                             \
    },                                                                        \
    .oname = stringify(name),                                                 \
}
#define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2)        \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = op4,                                                              \
    .handler = {                                                              \
        .inval1  = invl,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
        .oname = onam,                                                        \
    },                                                                        \
    .oname = onam,                                                            \
}
#else
#define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2)                    \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = 0xff,                                                             \
    .handler = {                                                              \
        .inval1  = invl,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = stringify(name),                                                 \
}
#define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2)       \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = 0xff,                                                             \
    .handler = {                                                              \
        .inval1  = invl1,                                                     \
        .inval2  = invl2,                                                     \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = stringify(name),                                                 \
}
#define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2)             \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = 0xff,                                                             \
    .handler = {                                                              \
        .inval1  = invl,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = onam,                                                            \
}
#define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2)              \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = op4,                                                              \
    .handler = {                                                              \
        .inval1  = invl,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = stringify(name),                                                 \
}
#define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2)        \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = op4,                                                              \
    .handler = {                                                              \
        .inval1  = invl,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = onam,                                                            \
}
#endif

/* SPR load/store helpers */
static inline void gen_load_spr(TCGv t, int reg)
{
    tcg_gen_ld_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
}

static inline void gen_store_spr(int reg, TCGv t)
{
    tcg_gen_st_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
}

/* Invalid instruction */
static void gen_invalid(DisasContext *ctx)
{
    gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
}

/* Catch-all table entry: every opcode bit is "invalid". */
static opc_handler_t invalid_handler = {
    .inval1  = 0xFFFFFFFF,
    .inval2  = 0xFFFFFFFF,
    .type    = PPC_NONE,
    .type2   = PPC_NONE,
    .handler = gen_invalid,
};

/*** Integer comparison ***/

/* Set CR field crf from comparing arg0 with arg1 ((un)signed per s),
 * merging in the current SO bit. */
static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv_i32 t = tcg_temp_new_i32();

    tcg_gen_movi_tl(t0, CRF_EQ);
tcg_gen_movi_tl(t1, CRF_LT);
    /* Select LT/GT/EQ via two conditional moves, then OR in SO. */
    tcg_gen_movcond_tl((s ? TCG_COND_LT : TCG_COND_LTU), t0, arg0, arg1, t1, t0);
    tcg_gen_movi_tl(t1, CRF_GT);
    tcg_gen_movcond_tl((s ? TCG_COND_GT : TCG_COND_GTU), t0, arg0, arg1, t1, t0);

    tcg_gen_trunc_tl_i32(t, t0);
    tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so);
    tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free_i32(t);
}

/* Compare against an immediate. */
static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
{
    TCGv t0 = tcg_const_tl(arg1);
    gen_op_cmp(arg0, t0, s, crf);
    tcg_temp_free(t0);
}

/* 32-bit compare: extend both operands per s, then do a full compare. */
static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
{
    TCGv t0, t1;
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    if (s) {
        tcg_gen_ext32s_tl(t0, arg0);
        tcg_gen_ext32s_tl(t1, arg1);
    } else {
        tcg_gen_ext32u_tl(t0, arg0);
        tcg_gen_ext32u_tl(t1, arg1);
    }
    gen_op_cmp(t0, t1, s, crf);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
}

static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
{
    TCGv t0 = tcg_const_tl(arg1);
    gen_op_cmp32(arg0, t0, s, crf);
    tcg_temp_free(t0);
}

/* Record register vs 0 into CR0 (the Rc=1 side effect). */
static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
{
    if (NARROW_MODE(ctx)) {
        gen_op_cmpi32(reg, 0, 1, 0);
    } else {
        gen_op_cmpi(reg, 0, 1, 0);
    }
}

/* cmp */
static void gen_cmp(DisasContext *ctx)
{
    /* L bit selects 64-bit compare, only valid with 64-bit insns. */
    if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
        gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                   1, crfD(ctx->opcode));
    } else {
        gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                     1, crfD(ctx->opcode));
    }
}

/* cmpi */
static void gen_cmpi(DisasContext *ctx)
{
    if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
        gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
                    1, crfD(ctx->opcode));
    } else {
        gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
                      1, crfD(ctx->opcode));
    }
}

/* cmpl */
static void gen_cmpl(DisasContext *ctx)
{
    if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
        gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                   0, crfD(ctx->opcode));
    } else {
        gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                     0, crfD(ctx->opcode));
    }
}

/* cmpli */
static void gen_cmpli(DisasContext *ctx)
{
    if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
        gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
                    0, crfD(ctx->opcode));
    } else {
        gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
                      0, crfD(ctx->opcode));
    }
}

/* cmprb - range comparison: isupper, isalpha, islower */
static void gen_cmprb(DisasContext *ctx)
{
    TCGv_i32 src1 = tcg_temp_new_i32();
    TCGv_i32 src2 = tcg_temp_new_i32();
    TCGv_i32 src2lo = tcg_temp_new_i32();
    TCGv_i32 src2hi = tcg_temp_new_i32();
    TCGv_i32 crf = cpu_crf[crfD(ctx->opcode)];

    tcg_gen_trunc_tl_i32(src1, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_trunc_tl_i32(src2, cpu_gpr[rB(ctx->opcode)]);

    /* Test src1's low byte against the range in src2's low two bytes. */
    tcg_gen_andi_i32(src1, src1, 0xFF);
    tcg_gen_ext8u_i32(src2lo, src2);
    tcg_gen_shri_i32(src2, src2, 8);
    tcg_gen_ext8u_i32(src2hi, src2);

    tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
    tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
    tcg_gen_and_i32(crf, src2lo, src2hi);

    if (ctx->opcode & 0x00200000) {
        /* L=1: also test against the second range (bytes 2-3 of src2). */
        tcg_gen_shri_i32(src2, src2, 8);
        tcg_gen_ext8u_i32(src2lo, src2);
        tcg_gen_shri_i32(src2, src2, 8);
        tcg_gen_ext8u_i32(src2hi, src2);
        tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
        tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
        tcg_gen_and_i32(src2lo, src2lo, src2hi);
        tcg_gen_or_i32(crf, crf, src2lo);
    }
    /* The boolean result lands in the GT bit of the CR field. */
    tcg_gen_shli_i32(crf, crf, CRF_GT_BIT);
    tcg_temp_free_i32(src1);
    tcg_temp_free_i32(src2);
    tcg_temp_free_i32(src2lo);
    tcg_temp_free_i32(src2hi);
}

#if defined(TARGET_PPC64)
/* cmpeqb */
static void gen_cmpeqb(DisasContext *ctx)
{
    gen_helper_cmpeqb(cpu_crf[crfD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                      cpu_gpr[rB(ctx->opcode)]);
}
#endif

/* isel (PowerPC 2.03 specification) */
static void gen_isel(DisasContext *ctx)
{
    uint32_t bi = rC(ctx->opcode);
    uint32_t mask = 0x08 >> (bi & 0x03);
    TCGv t0 = tcg_temp_new();
    TCGv zr;

    tcg_gen_extu_i32_tl(t0, cpu_crf[bi >> 2]);
    tcg_gen_andi_tl(t0, t0, mask);

    zr = tcg_const_tl(0);
    /* rA == 0 means operand (rA|0) reads as zero. */
    tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rD(ctx->opcode)], t0, zr,
                       rA(ctx->opcode) ? cpu_gpr[rA(ctx->opcode)] : zr,
                       cpu_gpr[rB(ctx->opcode)]);
    tcg_temp_free(zr);
    tcg_temp_free(t0);
}

/* cmpb: PowerPC 2.05 specification */
static void gen_cmpb(DisasContext *ctx)
{
    gen_helper_cmpb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                    cpu_gpr[rB(ctx->opcode)]);
}

/*** Integer arithmetic ***/

/* Compute OV (and OV32 on ISA 3.00) for an add (sub=0) or subtract
 * (sub=1): overflow iff operand signs allow it and the result sign
 * differs.  Also accumulates into SO. */
static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0,
                                           TCGv arg1, TCGv arg2, int sub)
{
    TCGv t0 = tcg_temp_new();

    tcg_gen_xor_tl(cpu_ov, arg0, arg2);
    tcg_gen_xor_tl(t0, arg1, arg2);
    if (sub) {
        tcg_gen_and_tl(cpu_ov, cpu_ov, t0);
    } else {
        tcg_gen_andc_tl(cpu_ov, cpu_ov, t0);
    }
    tcg_temp_free(t0);
    if (NARROW_MODE(ctx)) {
        tcg_gen_extract_tl(cpu_ov, cpu_ov, 31, 1);
        if (is_isa300(ctx)) {
            tcg_gen_mov_tl(cpu_ov32, cpu_ov);
        }
    } else {
        if (is_isa300(ctx)) {
            tcg_gen_extract_tl(cpu_ov32, cpu_ov, 31, 1);
        }
        tcg_gen_extract_tl(cpu_ov, cpu_ov, TARGET_LONG_BITS - 1, 1);
    }
    tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
}

static inline void gen_op_arith_compute_ca32(DisasContext *ctx,
                                             TCGv res, TCGv
arg0, TCGv arg1, 818 int sub) 819 { 820 TCGv t0; 821 822 if (!is_isa300(ctx)) { 823 return; 824 } 825 826 t0 = tcg_temp_new(); 827 if (sub) { 828 tcg_gen_eqv_tl(t0, arg0, arg1); 829 } else { 830 tcg_gen_xor_tl(t0, arg0, arg1); 831 } 832 tcg_gen_xor_tl(t0, t0, res); 833 tcg_gen_extract_tl(cpu_ca32, t0, 32, 1); 834 tcg_temp_free(t0); 835 } 836 837 /* Common add function */ 838 static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1, 839 TCGv arg2, bool add_ca, bool compute_ca, 840 bool compute_ov, bool compute_rc0) 841 { 842 TCGv t0 = ret; 843 844 if (compute_ca || compute_ov) { 845 t0 = tcg_temp_new(); 846 } 847 848 if (compute_ca) { 849 if (NARROW_MODE(ctx)) { 850 /* Caution: a non-obvious corner case of the spec is that we 851 must produce the *entire* 64-bit addition, but produce the 852 carry into bit 32. */ 853 TCGv t1 = tcg_temp_new(); 854 tcg_gen_xor_tl(t1, arg1, arg2); /* add without carry */ 855 tcg_gen_add_tl(t0, arg1, arg2); 856 if (add_ca) { 857 tcg_gen_add_tl(t0, t0, cpu_ca); 858 } 859 tcg_gen_xor_tl(cpu_ca, t0, t1); /* bits changed w/ carry */ 860 tcg_temp_free(t1); 861 tcg_gen_extract_tl(cpu_ca, cpu_ca, 32, 1); 862 if (is_isa300(ctx)) { 863 tcg_gen_mov_tl(cpu_ca32, cpu_ca); 864 } 865 } else { 866 TCGv zero = tcg_const_tl(0); 867 if (add_ca) { 868 tcg_gen_add2_tl(t0, cpu_ca, arg1, zero, cpu_ca, zero); 869 tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, arg2, zero); 870 } else { 871 tcg_gen_add2_tl(t0, cpu_ca, arg1, zero, arg2, zero); 872 } 873 gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, 0); 874 tcg_temp_free(zero); 875 } 876 } else { 877 tcg_gen_add_tl(t0, arg1, arg2); 878 if (add_ca) { 879 tcg_gen_add_tl(t0, t0, cpu_ca); 880 } 881 } 882 883 if (compute_ov) { 884 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0); 885 } 886 if (unlikely(compute_rc0)) { 887 gen_set_Rc0(ctx, t0); 888 } 889 890 if (t0 != ret) { 891 tcg_gen_mov_tl(ret, t0); 892 tcg_temp_free(t0); 893 } 894 } 895 /* Add functions with two operands */ 896 #define 
GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \ 897 static void glue(gen_, name)(DisasContext *ctx) \ 898 { \ 899 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \ 900 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 901 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 902 } 903 /* Add functions with one operand and one immediate */ 904 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \ 905 add_ca, compute_ca, compute_ov) \ 906 static void glue(gen_, name)(DisasContext *ctx) \ 907 { \ 908 TCGv t0 = tcg_const_tl(const_val); \ 909 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \ 910 cpu_gpr[rA(ctx->opcode)], t0, \ 911 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 912 tcg_temp_free(t0); \ 913 } 914 915 /* add add. addo addo. */ 916 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0) 917 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1) 918 /* addc addc. addco addco. */ 919 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0) 920 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1) 921 /* adde adde. addeo addeo. */ 922 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0) 923 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1) 924 /* addme addme. addmeo addmeo. */ 925 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0) 926 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1) 927 /* addze addze. 
addzeo addzeo.*/ 928 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0) 929 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1) 930 /* addi */ 931 static void gen_addi(DisasContext *ctx) 932 { 933 target_long simm = SIMM(ctx->opcode); 934 935 if (rA(ctx->opcode) == 0) { 936 /* li case */ 937 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm); 938 } else { 939 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], 940 cpu_gpr[rA(ctx->opcode)], simm); 941 } 942 } 943 /* addic addic.*/ 944 static inline void gen_op_addic(DisasContext *ctx, bool compute_rc0) 945 { 946 TCGv c = tcg_const_tl(SIMM(ctx->opcode)); 947 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 948 c, 0, 1, 0, compute_rc0); 949 tcg_temp_free(c); 950 } 951 952 static void gen_addic(DisasContext *ctx) 953 { 954 gen_op_addic(ctx, 0); 955 } 956 957 static void gen_addic_(DisasContext *ctx) 958 { 959 gen_op_addic(ctx, 1); 960 } 961 962 /* addis */ 963 static void gen_addis(DisasContext *ctx) 964 { 965 target_long simm = SIMM(ctx->opcode); 966 967 if (rA(ctx->opcode) == 0) { 968 /* lis case */ 969 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm << 16); 970 } else { 971 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], 972 cpu_gpr[rA(ctx->opcode)], simm << 16); 973 } 974 } 975 976 /* addpcis */ 977 static void gen_addpcis(DisasContext *ctx) 978 { 979 target_long d = DX(ctx->opcode); 980 981 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], ctx->base.pc_next + (d << 16)); 982 } 983 984 static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1, 985 TCGv arg2, int sign, int compute_ov) 986 { 987 TCGv_i32 t0 = tcg_temp_new_i32(); 988 TCGv_i32 t1 = tcg_temp_new_i32(); 989 TCGv_i32 t2 = tcg_temp_new_i32(); 990 TCGv_i32 t3 = tcg_temp_new_i32(); 991 992 tcg_gen_trunc_tl_i32(t0, arg1); 993 tcg_gen_trunc_tl_i32(t1, arg2); 994 if (sign) { 995 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN); 996 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1); 997 tcg_gen_and_i32(t2, t2, t3); 998 tcg_gen_setcondi_i32(TCG_COND_EQ, 
t3, t1, 0); 999 tcg_gen_or_i32(t2, t2, t3); 1000 tcg_gen_movi_i32(t3, 0); 1001 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1); 1002 tcg_gen_div_i32(t3, t0, t1); 1003 tcg_gen_extu_i32_tl(ret, t3); 1004 } else { 1005 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t1, 0); 1006 tcg_gen_movi_i32(t3, 0); 1007 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1); 1008 tcg_gen_divu_i32(t3, t0, t1); 1009 tcg_gen_extu_i32_tl(ret, t3); 1010 } 1011 if (compute_ov) { 1012 tcg_gen_extu_i32_tl(cpu_ov, t2); 1013 if (is_isa300(ctx)) { 1014 tcg_gen_extu_i32_tl(cpu_ov32, t2); 1015 } 1016 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 1017 } 1018 tcg_temp_free_i32(t0); 1019 tcg_temp_free_i32(t1); 1020 tcg_temp_free_i32(t2); 1021 tcg_temp_free_i32(t3); 1022 1023 if (unlikely(Rc(ctx->opcode) != 0)) 1024 gen_set_Rc0(ctx, ret); 1025 } 1026 /* Div functions */ 1027 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \ 1028 static void glue(gen_, name)(DisasContext *ctx) \ 1029 { \ 1030 gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)], \ 1031 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 1032 sign, compute_ov); \ 1033 } 1034 /* divwu divwu. divwuo divwuo. */ 1035 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0); 1036 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1); 1037 /* divw divw. divwo divwo. */ 1038 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0); 1039 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1); 1040 1041 /* div[wd]eu[o][.] 
 */

/*
 * Extended divide (divwe[u][o.] / divde[u][o.]): the operand checking is
 * complex enough that the division itself is done in a helper.  This
 * macro only marshals the GPR operands plus the compute_ov flag into the
 * helper call, and updates CR0 afterwards for the Rc=1 forms.
 */
#define GEN_DIVE(name, hlpr, compute_ov)                                      \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 t0 = tcg_const_i32(compute_ov);                                  \
    gen_helper_##hlpr(cpu_gpr[rD(ctx->opcode)], cpu_env,                      \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); \
    tcg_temp_free_i32(t0);                                                    \
    if (unlikely(Rc(ctx->opcode) != 0)) {                                     \
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);                           \
    }                                                                         \
}

GEN_DIVE(divweu, divweu, 0);
GEN_DIVE(divweuo, divweu, 1);
GEN_DIVE(divwe, divwe, 0);
GEN_DIVE(divweo, divwe, 1);

#if defined(TARGET_PPC64)
/*
 * Common inline codegen for the 64-bit divides (divd[u] and the o./Rc
 * forms).  The TCG div/divu ops have undefined behaviour (and may trap
 * on the host) for division by zero and, in the signed case, for
 * INT64_MIN / -1.  Those operand combinations are therefore detected up
 * front (t2 != 0 when the inputs are invalid) and the divisor is
 * replaced with a safe value via movcond before dividing.  The
 * architecture leaves the quotient undefined for those inputs, so the
 * patched division may produce anything; only OV/OV32/SO must be
 * correct, and they are taken from t2.
 */
static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign, int compute_ov)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv_i64 t2 = tcg_temp_new_i64();
    TCGv_i64 t3 = tcg_temp_new_i64();

    tcg_gen_mov_i64(t0, arg1);
    tcg_gen_mov_i64(t1, arg2);
    if (sign) {
        /* t2 = (arg1 == INT64_MIN && arg2 == -1) || arg2 == 0 */
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i64(t2, t2, t3);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i64(t2, t2, t3);
        tcg_gen_movi_i64(t3, 0);
        /* if invalid, divide by t2 (== 1) instead to avoid a host trap */
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_div_i64(ret, t0, t1);
    } else {
        /* t2 = (arg2 == 0); same divisor patching as above */
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t1, 0);
        tcg_gen_movi_i64(t3, 0);
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_divu_i64(ret, t0, t1);
    }
    if (compute_ov) {
        tcg_gen_mov_tl(cpu_ov, t2);
        if (is_isa300(ctx)) {
            tcg_gen_mov_tl(cpu_ov32, t2);
        }
        tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
    tcg_temp_free_i64(t3);

    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, ret);
}

/* Instantiate one 64-bit divide translator around gen_op_arith_divd. */
#define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov)                      \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      sign, compute_ov);                                      \
}
/* divdu divdu. divduo divduo. */
GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
/* divd divd. divdo divdo. */
GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0);
GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1);

GEN_DIVE(divdeu, divdeu, 0);
GEN_DIVE(divdeuo, divdeu, 1);
GEN_DIVE(divde, divde, 0);
GEN_DIVE(divdeo, divde, 1);
#endif

/*
 * Common codegen for the ISA 3.0 32-bit modulo instructions
 * (moduw/modsw).  As with the divides, divisor values that would make
 * the host rem op undefined (zero, and -1 against INT_MIN for the
 * signed case) are replaced with a safe divisor first; the architected
 * result is undefined for those inputs, so no flag fixup is needed
 * (modulo does not set OV/CA).
 */
static inline void gen_op_arith_modw(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg1);
    tcg_gen_trunc_tl_i32(t1, arg2);
    if (sign) {
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_i32 t3 = tcg_temp_new_i32();
        /* t2 = (arg1 == INT_MIN && arg2 == -1) || arg2 == 0 */
        tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i32(t2, t2, t3);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i32(t2, t2, t3);
        tcg_gen_movi_i32(t3, 0);
        /* patch an invalid divisor to t2 (== 1) to avoid a host trap */
        tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_rem_i32(t3, t0, t1);
        tcg_gen_ext_i32_tl(ret, t3);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        TCGv_i32 t2 = tcg_const_i32(1);
        TCGv_i32 t3 = tcg_const_i32(0);
        /* divisor == 0 is patched to 1 to avoid a host trap */
        tcg_gen_movcond_i32(TCG_COND_EQ, t1, t1, t3, t2, t1);
        tcg_gen_remu_i32(t3, t0, t1);
        tcg_gen_extu_i32_tl(ret, t3);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    }
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
}

/* Instantiate one 32-bit modulo translator around gen_op_arith_modw. */
#define GEN_INT_ARITH_MODW(name, opc3, sign)                                  \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_modw(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      sign);                                                  \
}

GEN_INT_ARITH_MODW(moduw, 0x08, 0);
GEN_INT_ARITH_MODW(modsw, 0x18, 1);

#if defined(TARGET_PPC64)
/*
 * 64-bit modulo (modud/modsd, ISA 3.0).  Same invalid-divisor patching
 * scheme as gen_op_arith_modw, done directly on 64-bit temporaries.
 */
static inline void gen_op_arith_modd(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_mov_i64(t0, arg1);
    tcg_gen_mov_i64(t1, arg2);
    if (sign) {
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i64 t3 = tcg_temp_new_i64();
        /* t2 = (arg1 == INT64_MIN && arg2 == -1) || arg2 == 0 */
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i64(t2, t2, t3);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i64(t2, t2, t3);
        tcg_gen_movi_i64(t3, 0);
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_rem_i64(ret, t0, t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        TCGv_i64 t2 = tcg_const_i64(1);
        TCGv_i64 t3 = tcg_const_i64(0);
        tcg_gen_movcond_i64(TCG_COND_EQ, t1, t1, t3, t2, t1);
        tcg_gen_remu_i64(ret, t0, t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

/* Instantiate one 64-bit modulo translator around gen_op_arith_modd. */
#define GEN_INT_ARITH_MODD(name, opc3, sign)                                  \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_modd(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      sign);                                                  \
}

GEN_INT_ARITH_MODD(modud, 0x08, 0);
GEN_INT_ARITH_MODD(modsd, 0x18, 1);
#endif

/* mulhw mulhw.
*/ 1214 static void gen_mulhw(DisasContext *ctx) 1215 { 1216 TCGv_i32 t0 = tcg_temp_new_i32(); 1217 TCGv_i32 t1 = tcg_temp_new_i32(); 1218 1219 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); 1220 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); 1221 tcg_gen_muls2_i32(t0, t1, t0, t1); 1222 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1); 1223 tcg_temp_free_i32(t0); 1224 tcg_temp_free_i32(t1); 1225 if (unlikely(Rc(ctx->opcode) != 0)) 1226 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1227 } 1228 1229 /* mulhwu mulhwu. */ 1230 static void gen_mulhwu(DisasContext *ctx) 1231 { 1232 TCGv_i32 t0 = tcg_temp_new_i32(); 1233 TCGv_i32 t1 = tcg_temp_new_i32(); 1234 1235 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); 1236 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); 1237 tcg_gen_mulu2_i32(t0, t1, t0, t1); 1238 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1); 1239 tcg_temp_free_i32(t0); 1240 tcg_temp_free_i32(t1); 1241 if (unlikely(Rc(ctx->opcode) != 0)) 1242 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1243 } 1244 1245 /* mullw mullw. */ 1246 static void gen_mullw(DisasContext *ctx) 1247 { 1248 #if defined(TARGET_PPC64) 1249 TCGv_i64 t0, t1; 1250 t0 = tcg_temp_new_i64(); 1251 t1 = tcg_temp_new_i64(); 1252 tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]); 1253 tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]); 1254 tcg_gen_mul_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); 1255 tcg_temp_free(t0); 1256 tcg_temp_free(t1); 1257 #else 1258 tcg_gen_mul_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1259 cpu_gpr[rB(ctx->opcode)]); 1260 #endif 1261 if (unlikely(Rc(ctx->opcode) != 0)) 1262 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1263 } 1264 1265 /* mullwo mullwo. 
*/ 1266 static void gen_mullwo(DisasContext *ctx) 1267 { 1268 TCGv_i32 t0 = tcg_temp_new_i32(); 1269 TCGv_i32 t1 = tcg_temp_new_i32(); 1270 1271 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); 1272 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); 1273 tcg_gen_muls2_i32(t0, t1, t0, t1); 1274 #if defined(TARGET_PPC64) 1275 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); 1276 #else 1277 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], t0); 1278 #endif 1279 1280 tcg_gen_sari_i32(t0, t0, 31); 1281 tcg_gen_setcond_i32(TCG_COND_NE, t0, t0, t1); 1282 tcg_gen_extu_i32_tl(cpu_ov, t0); 1283 if (is_isa300(ctx)) { 1284 tcg_gen_mov_tl(cpu_ov32, cpu_ov); 1285 } 1286 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 1287 1288 tcg_temp_free_i32(t0); 1289 tcg_temp_free_i32(t1); 1290 if (unlikely(Rc(ctx->opcode) != 0)) 1291 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1292 } 1293 1294 /* mulli */ 1295 static void gen_mulli(DisasContext *ctx) 1296 { 1297 tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1298 SIMM(ctx->opcode)); 1299 } 1300 1301 #if defined(TARGET_PPC64) 1302 /* mulhd mulhd. */ 1303 static void gen_mulhd(DisasContext *ctx) 1304 { 1305 TCGv lo = tcg_temp_new(); 1306 tcg_gen_muls2_tl(lo, cpu_gpr[rD(ctx->opcode)], 1307 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 1308 tcg_temp_free(lo); 1309 if (unlikely(Rc(ctx->opcode) != 0)) { 1310 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1311 } 1312 } 1313 1314 /* mulhdu mulhdu. */ 1315 static void gen_mulhdu(DisasContext *ctx) 1316 { 1317 TCGv lo = tcg_temp_new(); 1318 tcg_gen_mulu2_tl(lo, cpu_gpr[rD(ctx->opcode)], 1319 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 1320 tcg_temp_free(lo); 1321 if (unlikely(Rc(ctx->opcode) != 0)) { 1322 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1323 } 1324 } 1325 1326 /* mulld mulld. 
 */
/* mulld mulld. : rD = low 64 bits of rA * rB */
static void gen_mulld(DisasContext *ctx)
{
    tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                   cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/*
 * mulldo mulldo. : mulld with OV set when the signed 128-bit product
 * does not fit in 64 bits, i.e. when the high half is not the
 * sign-extension of the low half.
 */
static void gen_mulldo(DisasContext *ctx)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_muls2_i64(t0, t1, cpu_gpr[rA(ctx->opcode)],
                      cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], t0);

    /* OV = (high half != sign bits of low half) */
    tcg_gen_sari_i64(t0, t0, 63);
    tcg_gen_setcond_i64(TCG_COND_NE, cpu_ov, t0, t1);
    if (is_isa300(ctx)) {
        tcg_gen_mov_tl(cpu_ov32, cpu_ov);
    }
    tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}
#endif

/*
 * Common subf function: ret = ~arg1 + arg2 [+ CA], i.e. arg2 - arg1
 * with optional carry-in, covering subf/subfc/subfe/subfme/subfze and
 * their o./Rc forms plus subfic and neg.
 *
 * add_ca      - use the incoming CA as carry-in (subfe family)
 * compute_ca  - produce CA (and CA32 on ISA 3.0)
 * compute_ov  - produce OV/OV32/SO
 * compute_rc0 - update CR0 (Rc bit)
 *
 * The emission order matters: flag values are derived from
 * intermediate results before they are overwritten, and t0 shadows ret
 * whenever flags are computed so that ret may alias arg1/arg2.
 */
static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, bool add_ca, bool compute_ca,
                                     bool compute_ov, bool compute_rc0)
{
    TCGv t0 = ret;

    if (compute_ca || compute_ov) {
        /* work in a temp: ret may alias an input still needed for flags */
        t0 = tcg_temp_new();
    }

    if (compute_ca) {
        /* dest = ~arg1 + arg2 [+ ca].  */
        if (NARROW_MODE(ctx)) {
            /* Caution: a non-obvious corner case of the spec is that we
               must produce the *entire* 64-bit addition, but produce the
               carry into bit 32.  */
            TCGv inv1 = tcg_temp_new();
            TCGv t1 = tcg_temp_new();
            tcg_gen_not_tl(inv1, arg1);
            if (add_ca) {
                tcg_gen_add_tl(t0, arg2, cpu_ca);
            } else {
                tcg_gen_addi_tl(t0, arg2, 1);
            }
            tcg_gen_xor_tl(t1, arg2, inv1);         /* add without carry */
            tcg_gen_add_tl(t0, t0, inv1);
            tcg_temp_free(inv1);
            tcg_gen_xor_tl(cpu_ca, t0, t1);         /* bits changed w/ carry */
            tcg_temp_free(t1);
            tcg_gen_extract_tl(cpu_ca, cpu_ca, 32, 1);
            if (is_isa300(ctx)) {
                tcg_gen_mov_tl(cpu_ca32, cpu_ca);
            }
        } else if (add_ca) {
            /* full-width add with carry tracked via double-word add2 */
            TCGv zero, inv1 = tcg_temp_new();
            tcg_gen_not_tl(inv1, arg1);
            zero = tcg_const_tl(0);
            tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero);
            tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero);
            gen_op_arith_compute_ca32(ctx, t0, inv1, arg2, 0);
            tcg_temp_free(zero);
            tcg_temp_free(inv1);
        } else {
            /* no carry-in: CA is simply (arg2 >= arg1), unsigned */
            tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1);
            tcg_gen_sub_tl(t0, arg2, arg1);
            gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, 1);
        }
    } else if (add_ca) {
        /* Since we're ignoring carry-out, we can simplify the
           standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1.  */
        tcg_gen_sub_tl(t0, arg2, arg1);
        tcg_gen_add_tl(t0, t0, cpu_ca);
        tcg_gen_subi_tl(t0, t0, 1);
    } else {
        tcg_gen_sub_tl(t0, arg2, arg1);
    }

    if (compute_ov) {
        gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
    }
    if (unlikely(compute_rc0)) {
        gen_set_Rc0(ctx, t0);
    }

    if (t0 != ret) {
        tcg_gen_mov_tl(ret, t0);
        tcg_temp_free(t0);
    }
}
/* Sub functions with Two operands functions */
#define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov)        \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      add_ca, compute_ca, compute_ov, Rc(ctx->opcode));       \
}
/* Sub functions with one operand and one immediate */
#define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val,                       \
                                 add_ca, compute_ca, compute_ov)              \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    TCGv t0 = tcg_const_tl(const_val);                                        \
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], t0,                           \
                      add_ca, compute_ca, compute_ov, Rc(ctx->opcode));       \
    tcg_temp_free(t0);                                                        \
}
/* subf subf. subfo subfo. */
GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
/* subfc subfc. subfco subfco. */
GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
/* subfe subfe. subfeo subfo. */
GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
/* subfme subfme. subfmeo subfmeo. */
GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
/* subfze subfze.
   subfzeo subfzeo.*/
GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)

/* subfic : rD = SIMM - rA, always sets CA, never OV/CR0 */
static void gen_subfic(DisasContext *ctx)
{
    TCGv c = tcg_const_tl(SIMM(ctx->opcode));
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                      c, 0, 1, 0, 0);
    tcg_temp_free(c);
}

/*
 * neg neg. nego nego. : rD = 0 - rA, expressed via the common subf
 * path so the OV computation is shared.
 */
static inline void gen_op_arith_neg(DisasContext *ctx, bool compute_ov)
{
    TCGv zero = tcg_const_tl(0);
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                      zero, 0, 0, compute_ov, Rc(ctx->opcode));
    tcg_temp_free(zero);
}

/* neg/neg. : no overflow needed, so the direct TCG negate suffices */
static void gen_neg(DisasContext *ctx)
{
    tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}

static void gen_nego(DisasContext *ctx)
{
    gen_op_arith_neg(ctx, 1);
}

/*** Integer logical ***/
/* Two-source logical op: rA = rS <op> rB, optional CR0 update. */
#define GEN_LOGICAL2(name, tcg_op, opc, type)                                 \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],                \
           cpu_gpr[rB(ctx->opcode)]);                                         \
    if (unlikely(Rc(ctx->opcode) != 0))                                       \
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);                           \
}

/* One-source logical op: rA = <op>(rS), optional CR0 update. */
#define GEN_LOGICAL1(name, tcg_op, opc, type)                                 \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);               \
    if (unlikely(Rc(ctx->opcode) != 0))                                       \
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);                           \
}

/* and & and. */
GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER);
/* andc & andc. */
GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER);

/* andi. : always updates CR0 (the mnemonic has no non-Rc form) */
static void gen_andi_(DisasContext *ctx)
{
    tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode));
    gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* andis. : as andi. but with the immediate shifted into the upper half */
static void gen_andis_(DisasContext *ctx)
{
    tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode) << 16);
    gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* cntlzw : count leading zeros of the low 32 bits; clzi's second
 * argument (32) is the architected result for a zero input */
static void gen_cntlzw(DisasContext *ctx)
{
    TCGv_i32 t = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_clzi_i32(t, t, 32);
    tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t);
    tcg_temp_free_i32(t);

    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* cnttzw : count trailing zeros of the low 32 bits (ISA 3.0) */
static void gen_cnttzw(DisasContext *ctx)
{
    TCGv_i32 t = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_ctzi_i32(t, t, 32);
    tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t);
    tcg_temp_free_i32(t);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* eqv & eqv. */
GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER);
/* extsb & extsb. */
GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER);
/* extsh & extsh. */
GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER);
/* nand & nand. */
GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER);
/* nor & nor.
 */
GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER);

#if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
/*
 * Give other vCPUs a chance to run: store to CPUState::halted (reached
 * through the negative offset from env back to the CPU struct) and
 * leave the TB with EXCP_HLT.
 */
static void gen_pause(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_const_i32(0);
    tcg_gen_st_i32(t0, cpu_env,
                   -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
    tcg_temp_free_i32(t0);

    /* Stop translation, this gives other CPUs a chance to run */
    gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
}
#endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */

/*
 * or & or. : also decodes the "or rx,rx,rx" forms, which architecturally
 * are no-ops but serve as SMT priority hints: the register number
 * selects a priority which is written into SPR_PPR (privileged
 * priorities only when !pr / hv allows).
 */
static void gen_or(DisasContext *ctx)
{
    int rs, ra, rb;

    rs = rS(ctx->opcode);
    ra = rA(ctx->opcode);
    rb = rB(ctx->opcode);
    /* Optimisation for mr. ri case */
    if (rs != ra || rs != rb) {
        if (rs != rb)
            tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
        else
            tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);    /* plain mr */
        if (unlikely(Rc(ctx->opcode) != 0))
            gen_set_Rc0(ctx, cpu_gpr[ra]);
    } else if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rs]);
#if defined(TARGET_PPC64)
    } else if (rs != 0) { /* 0 is nop */
        int prio = 0;

        switch (rs) {
        case 1:
            /* Set process priority to low */
            prio = 2;
            break;
        case 6:
            /* Set process priority to medium-low */
            prio = 3;
            break;
        case 2:
            /* Set process priority to normal */
            prio = 4;
            break;
#if !defined(CONFIG_USER_ONLY)
        case 31:
            if (!ctx->pr) {
                /* Set process priority to very low */
                prio = 1;
            }
            break;
        case 5:
            if (!ctx->pr) {
                /* Set process priority to medium-high */
                prio = 5;
            }
            break;
        case 3:
            if (!ctx->pr) {
                /* Set process priority to high */
                prio = 6;
            }
            break;
        case 7:
            if (ctx->hv && !ctx->pr) {
                /* Set process priority to very high */
                prio = 7;
            }
            break;
#endif
        default:
            break;
        }
        if (prio) {
            /* insert the priority into PPR bits 11:13 (bit 50 upward) */
            TCGv t0 = tcg_temp_new();
            gen_load_spr(t0, SPR_PPR);
            tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
            tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
            gen_store_spr(SPR_PPR, t0);
            tcg_temp_free(t0);
        }
#if !defined(CONFIG_USER_ONLY)
        /* Pause out of TCG otherwise spin loops with smt_low eat too much
         * CPU and the kernel hangs. This applies to all encodings other
         * than no-op, e.g., miso(rs=26), yield(27), mdoio(29), mdoom(30),
         * and all currently undefined.
         */
        gen_pause(ctx);
#endif
#endif
    }
}
/* orc & orc. */
GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);

/* xor & xor. */
static void gen_xor(DisasContext *ctx)
{
    /* Optimisation for "set to zero" case */
    if (rS(ctx->opcode) != rB(ctx->opcode))
        tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    else
        tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* ori */
static void gen_ori(DisasContext *ctx)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        /* NOP (this is the canonical "ori 0,0,0" no-op encoding) */
        return;
    }
    tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
}

/* oris */
static void gen_oris(DisasContext *ctx)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        /* NOP */
        return;
    }
    tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16);
}

/* xori */
static void gen_xori(DisasContext *ctx)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        /* NOP */
        return;
    }
    tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
}

/*
   xoris */
static void gen_xoris(DisasContext *ctx)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        /* NOP */
        return;
    }
    tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16);
}

/* popcntb : PowerPC 2.03 specification - per-byte popcount, in helper */
static void gen_popcntb(DisasContext *ctx)
{
    gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
}

/* popcntw : per-word popcount; on 32-bit targets a single word suffices */
static void gen_popcntw(DisasContext *ctx)
{
#if defined(TARGET_PPC64)
    gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
#else
    tcg_gen_ctpop_i32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
#endif
}

#if defined(TARGET_PPC64)
/* popcntd: PowerPC 2.06 specification */
static void gen_popcntd(DisasContext *ctx)
{
    tcg_gen_ctpop_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
}
#endif

/*
 * prtyw: PowerPC 2.05 specification.  XOR-folds each 32-bit word down
 * to its parity bit (bit 0 of each word); the final mask keeps one
 * parity bit per word.
 */
static void gen_prtyw(DisasContext *ctx)
{
    TCGv ra = cpu_gpr[rA(ctx->opcode)];
    TCGv rs = cpu_gpr[rS(ctx->opcode)];
    TCGv t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, rs, 16);
    tcg_gen_xor_tl(ra, rs, t0);
    tcg_gen_shri_tl(t0, ra, 8);
    tcg_gen_xor_tl(ra, ra, t0);
    /* on 32-bit targets this constant truncates to 1 */
    tcg_gen_andi_tl(ra, ra, (target_ulong)0x100000001ULL);
    tcg_temp_free(t0);
}

#if defined(TARGET_PPC64)
/* prtyd: PowerPC 2.05 specification - parity of the whole doubleword */
static void gen_prtyd(DisasContext *ctx)
{
    TCGv ra = cpu_gpr[rA(ctx->opcode)];
    TCGv rs = cpu_gpr[rS(ctx->opcode)];
    TCGv t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, rs, 32);
    tcg_gen_xor_tl(ra, rs, t0);
    tcg_gen_shri_tl(t0, ra, 16);
    tcg_gen_xor_tl(ra, ra, t0);
    tcg_gen_shri_tl(t0, ra, 8);
    tcg_gen_xor_tl(ra, ra, t0);
    tcg_gen_andi_tl(ra, ra, 1);
    tcg_temp_free(t0);
}
#endif

#if defined(TARGET_PPC64)
/* bpermd : bit permute doubleword, done in a helper */
static void gen_bpermd(DisasContext *ctx)
{
    gen_helper_bpermd(cpu_gpr[rA(ctx->opcode)],
                      cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
}
#endif

#if defined(TARGET_PPC64)
/* extsw & extsw. */
GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B);

/* cntlzd : 64-bit count leading zeros; clzi's 64 is the zero-input result */
static void gen_cntlzd(DisasContext *ctx)
{
    tcg_gen_clzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* cnttzd : 64-bit count trailing zeros (ISA 3.0) */
static void gen_cnttzd(DisasContext *ctx)
{
    tcg_gen_ctzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}

/*
 * darn : deliver a random number (ISA 3.0).  L=0 requests a 32-bit
 * conditioned value, L=1/2 a 64-bit value; other L values are invalid
 * and architecturally return all-ones.
 */
static void gen_darn(DisasContext *ctx)
{
    int l = L(ctx->opcode);

    if (l == 0) {
        gen_helper_darn32(cpu_gpr[rD(ctx->opcode)]);
    } else if (l <= 2) {
        /* Return 64-bit random for both CRN and RRN */
        gen_helper_darn64(cpu_gpr[rD(ctx->opcode)]);
    } else {
        tcg_gen_movi_i64(cpu_gpr[rD(ctx->opcode)], -1);
    }
}
#endif

/*** Integer rotate ***/

/* rlwimi & rlwimi.
 */
/*
 * rlwimi & rlwimi. : rotate rS left by sh, then insert the rotated
 * value into rA under the mask MASK(mb, me).
 *
 * Fast path: when sh == 31 - me and mb <= me the operation is exactly
 * a deposit of (me - mb + 1) bits at position sh.  Otherwise the 32-bit
 * rotation is materialised - on a 64-bit target by first replicating
 * the low word into both halves when the mask crosses bit 32 - and
 * merged with rA under the mask.
 */
static void gen_rlwimi(DisasContext *ctx)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    uint32_t sh = SH(ctx->opcode);
    uint32_t mb = MB(ctx->opcode);
    uint32_t me = ME(ctx->opcode);

    if (sh == (31-me) && mb <= me) {
        tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
    } else {
        target_ulong mask;
        TCGv t1;

#if defined(TARGET_PPC64)
        /* the 32-bit mask fields live in the upper word positions of
           the 64-bit MASK() helper */
        mb += 32;
        me += 32;
#endif
        mask = MASK(mb, me);

        t1 = tcg_temp_new();
        if (mask <= 0xffffffffu) {
            /* mask stays within the low word: a 32-bit rotate suffices */
            TCGv_i32 t0 = tcg_temp_new_i32();
            tcg_gen_trunc_tl_i32(t0, t_rs);
            tcg_gen_rotli_i32(t0, t0, sh);
            tcg_gen_extu_i32_tl(t1, t0);
            tcg_temp_free_i32(t0);
        } else {
#if defined(TARGET_PPC64)
            /* replicate the word so a 64-bit rotate models the 32-bit one */
            tcg_gen_deposit_i64(t1, t_rs, t_rs, 32, 32);
            tcg_gen_rotli_i64(t1, t1, sh);
#else
            g_assert_not_reached();
#endif
        }

        /* merge the rotated bits into rA under the mask */
        tcg_gen_andi_tl(t1, t1, mask);
        tcg_gen_andi_tl(t_ra, t_ra, ~mask);
        tcg_gen_or_tl(t_ra, t_ra, t1);
        tcg_temp_free(t1);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}

/* rlwinm & rlwinm.
 */
/*
 * rlwinm & rlwinm. : rotate rS left by sh, AND with MASK(mb, me),
 * store in rA.
 *
 * Two fast paths: a left-justified mask after the rotate is exactly a
 * zero-extending deposit, and a right-aligned mask is exactly an
 * extract of len bits starting at the complementary right shift.
 * The general case rotates (replicating the word on 64-bit targets
 * when the mask crosses bit 32) and masks.
 */
static void gen_rlwinm(DisasContext *ctx)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    int sh = SH(ctx->opcode);
    int mb = MB(ctx->opcode);
    int me = ME(ctx->opcode);
    int len = me - mb + 1;
    int rsh = (32 - sh) & 31;

    if (sh != 0 && len > 0 && me == (31 - sh)) {
        /* mask exactly covers the bits shifted in from the right */
        tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
    } else if (me == 31 && rsh + len <= 32) {
        /* right-aligned mask: rotate+mask == bitfield extract */
        tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
    } else {
        target_ulong mask;
#if defined(TARGET_PPC64)
        mb += 32;
        me += 32;
#endif
        mask = MASK(mb, me);
        if (sh == 0) {
            tcg_gen_andi_tl(t_ra, t_rs, mask);
        } else if (mask <= 0xffffffffu) {
            TCGv_i32 t0 = tcg_temp_new_i32();
            tcg_gen_trunc_tl_i32(t0, t_rs);
            tcg_gen_rotli_i32(t0, t0, sh);
            tcg_gen_andi_i32(t0, t0, mask);
            tcg_gen_extu_i32_tl(t_ra, t0);
            tcg_temp_free_i32(t0);
        } else {
#if defined(TARGET_PPC64)
            /* replicate the word so a 64-bit rotate models the 32-bit one */
            tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
            tcg_gen_rotli_i64(t_ra, t_ra, sh);
            tcg_gen_andi_i64(t_ra, t_ra, mask);
#else
            g_assert_not_reached();
#endif
        }
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}

/* rlwnm & rlwnm.
 */
/*
 * rlwnm & rlwnm. : like rlwinm but the rotate count comes from the low
 * five bits of rB.
 */
static void gen_rlwnm(DisasContext *ctx)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
    uint32_t mb = MB(ctx->opcode);
    uint32_t me = ME(ctx->opcode);
    target_ulong mask;

#if defined(TARGET_PPC64)
    mb += 32;
    me += 32;
#endif
    mask = MASK(mb, me);

    if (mask <= 0xffffffffu) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(t0, t_rb);
        tcg_gen_trunc_tl_i32(t1, t_rs);
        tcg_gen_andi_i32(t0, t0, 0x1f);
        tcg_gen_rotl_i32(t1, t1, t0);
        tcg_gen_extu_i32_tl(t_ra, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else {
#if defined(TARGET_PPC64)
        /* mask crosses bit 32: replicate the word and rotate 64-bit */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_andi_i64(t0, t_rb, 0x1f);
        tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
        tcg_gen_rotl_i64(t_ra, t_ra, t0);
        tcg_temp_free_i64(t0);
#else
        g_assert_not_reached();
#endif
    }

    tcg_gen_andi_tl(t_ra, t_ra, mask);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}

#if defined(TARGET_PPC64)
/*
 * The 64-bit rotates split their 6-bit sh/mb/me fields across the
 * opcode; these macros instantiate one translator per value of the
 * extra high bit(s), which the decode tables dispatch on.
 */
#define GEN_PPC64_R2(name, opc1, opc2)                                        \
static void glue(gen_, name##0)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 0);                                                       \
}                                                                             \
                                                                              \
static void glue(gen_, name##1)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 1);                                                       \
}
#define GEN_PPC64_R4(name, opc1, opc2)                                        \
static void glue(gen_, name##0)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 0, 0);                                                    \
}                                                                             \
                                                                              \
static void glue(gen_, name##1)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 0, 1);                                                    \
}                                                                             \
                                                                              \
static void glue(gen_, name##2)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 1, 0);                                                    \
}                                                                             \
                                                                              \
static void glue(gen_, name##3)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 1, 1);                                                    \
}

/*
 * Common 64-bit rotate-immediate-and-mask: rA = rotl64(rS, sh) & MASK(mb, me).
 * Uses the same deposit/extract fast paths as gen_rlwinm.
 */
static void gen_rldinm(DisasContext *ctx, int mb, int me, int sh)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    int len = me - mb + 1;
    int rsh = (64 - sh) & 63;

    if (sh != 0 && len > 0 && me == (63 - sh)) {
        tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
    } else if (me == 63 && rsh + len <= 64) {
        tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
    } else {
        tcg_gen_rotli_tl(t_ra, t_rs, sh);
        tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}

/* rldicl - rldicl. : clear left - mask runs from mb to bit 63 */
static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn)
{
    uint32_t sh, mb;

    sh = SH(ctx->opcode) | (shn << 5);
    mb = MB(ctx->opcode) | (mbn << 5);
    gen_rldinm(ctx, mb, 63, sh);
}
GEN_PPC64_R4(rldicl, 0x1E, 0x00);

/* rldicr - rldicr. : clear right - mask runs from bit 0 to me.
 * Note the me field occupies the same opcode bits as rldicl's mb
 * field, hence the MB() accessor here. */
static inline void gen_rldicr(DisasContext *ctx, int men, int shn)
{
    uint32_t sh, me;

    sh = SH(ctx->opcode) | (shn << 5);
    me = MB(ctx->opcode) | (men << 5);
    gen_rldinm(ctx, 0, me, sh);
}
GEN_PPC64_R4(rldicr, 0x1E, 0x02);

/* rldic - rldic. : clear - mask from mb to 63 - sh */
static inline void gen_rldic(DisasContext *ctx, int mbn, int shn)
{
    uint32_t sh, mb;

    sh = SH(ctx->opcode) | (shn << 5);
    mb = MB(ctx->opcode) | (mbn << 5);
    gen_rldinm(ctx, mb, 63 - sh, sh);
}
GEN_PPC64_R4(rldic, 0x1E, 0x04);

/*
 * Common 64-bit rotate-by-register-and-mask: the rotate count is the
 * low six bits of rB.
 */
static void gen_rldnm(DisasContext *ctx, int mb, int me)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
    TCGv t0;

    t0 = tcg_temp_new();
    tcg_gen_andi_tl(t0, t_rb, 0x3f);
    tcg_gen_rotl_tl(t_ra, t_rs, t0);
    tcg_temp_free(t0);

    tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}

/* rldcl - rldcl. : clear left, variable rotate */
static inline void gen_rldcl(DisasContext *ctx, int mbn)
{
    uint32_t mb;

    mb = MB(ctx->opcode) | (mbn << 5);
    gen_rldnm(ctx, mb, 63);
}
GEN_PPC64_R2(rldcl, 0x1E, 0x08);

/* rldcr - rldcr. : clear right, variable rotate (me read via MB(),
 * same field position as rldcl's mb - see gen_rldicr) */
static inline void gen_rldcr(DisasContext *ctx, int men)
{
    uint32_t me;

    me = MB(ctx->opcode) | (men << 5);
    gen_rldnm(ctx, 0, me);
}
GEN_PPC64_R2(rldcr, 0x1E, 0x09);

/* rldimi - rldimi.
 */
/*
 * rldimi - rldimi. : rotate rS left by sh and insert into rA under
 * MASK(mb, 63 - sh).  When mb <= me this is a plain deposit; otherwise
 * the mask wraps around and the rotate/mask/merge is done explicitly.
 */
static void gen_rldimi(DisasContext *ctx, int mbn, int shn)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    uint32_t sh = SH(ctx->opcode) | (shn << 5);
    uint32_t mb = MB(ctx->opcode) | (mbn << 5);
    uint32_t me = 63 - sh;

    if (mb <= me) {
        tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
    } else {
        target_ulong mask = MASK(mb, me);
        TCGv t1 = tcg_temp_new();

        tcg_gen_rotli_tl(t1, t_rs, sh);
        tcg_gen_andi_tl(t1, t1, mask);
        tcg_gen_andi_tl(t_ra, t_ra, ~mask);
        tcg_gen_or_tl(t_ra, t_ra, t1);
        tcg_temp_free(t1);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}
GEN_PPC64_R4(rldimi, 0x1E, 0x06);
#endif

/*** Integer shift ***/

/*
 * slw & slw. : shift left word.  Shift counts of 32..63 must produce
 * zero; this is handled branchlessly by shifting rB so that its bit 5
 * lands in the sign bit, arithmetic-shifting to replicate it across
 * the word, and AND-NOTing that mask against rS before the real shift.
 */
static void gen_slw(DisasContext *ctx)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    /* AND rS with a mask that is 0 when rB >= 0x20 */
#if defined(TARGET_PPC64)
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
    tcg_gen_sari_tl(t0, t0, 0x3f);
#else
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
    tcg_gen_sari_tl(t0, t0, 0x1f);
#endif
    tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
    tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* sraw & sraw. : shift right algebraic word; CA handling lives in the
 * helper */
static void gen_sraw(DisasContext *ctx)
{
    gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], cpu_env,
                    cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/*
 * srawi & srawi. : shift right algebraic word immediate.  CA is set
 * when the source is negative and any 1-bits are shifted out, i.e.
 * when (dst & ((1 << sh) - 1)) != 0 and the sign bit is set.
 */
static void gen_srawi(DisasContext *ctx)
{
    int sh = SH(ctx->opcode);
    TCGv dst = cpu_gpr[rA(ctx->opcode)];
    TCGv src = cpu_gpr[rS(ctx->opcode)];
    if (sh == 0) {
        /* no shift: just sign-extend, CA is always clear */
        tcg_gen_ext32s_tl(dst, src);
        tcg_gen_movi_tl(cpu_ca, 0);
        if (is_isa300(ctx)) {
            tcg_gen_movi_tl(cpu_ca32, 0);
        }
    } else {
        TCGv t0;
        tcg_gen_ext32s_tl(dst, src);
        tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1);
        t0 = tcg_temp_new();
        tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1);
        tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
        tcg_temp_free(t0);
        tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
        if (is_isa300(ctx)) {
            tcg_gen_mov_tl(cpu_ca32, cpu_ca);
        }
        tcg_gen_sari_tl(dst, dst, sh);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, dst);
    }
}

/* srw & srw. : shift right word, same >= 32 masking trick as gen_slw */
static void gen_srw(DisasContext *ctx)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    /* AND rS with a mask that is 0 when rB >= 0x20 */
#if defined(TARGET_PPC64)
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
    tcg_gen_sari_tl(t0, t0, 0x3f);
#else
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
    tcg_gen_sari_tl(t0, t0, 0x1f);
#endif
    tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    tcg_gen_ext32u_tl(t0, t0);
    t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
    tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

#if defined(TARGET_PPC64)
/* sld & sld.
*/ 2218 static void gen_sld(DisasContext *ctx) 2219 { 2220 TCGv t0, t1; 2221 2222 t0 = tcg_temp_new(); 2223 /* AND rS with a mask that is 0 when rB >= 0x40 */ 2224 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39); 2225 tcg_gen_sari_tl(t0, t0, 0x3f); 2226 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 2227 t1 = tcg_temp_new(); 2228 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f); 2229 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 2230 tcg_temp_free(t1); 2231 tcg_temp_free(t0); 2232 if (unlikely(Rc(ctx->opcode) != 0)) 2233 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2234 } 2235 2236 /* srad & srad. */ 2237 static void gen_srad(DisasContext *ctx) 2238 { 2239 gen_helper_srad(cpu_gpr[rA(ctx->opcode)], cpu_env, 2240 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2241 if (unlikely(Rc(ctx->opcode) != 0)) 2242 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2243 } 2244 /* sradi & sradi. */ 2245 static inline void gen_sradi(DisasContext *ctx, int n) 2246 { 2247 int sh = SH(ctx->opcode) + (n << 5); 2248 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 2249 TCGv src = cpu_gpr[rS(ctx->opcode)]; 2250 if (sh == 0) { 2251 tcg_gen_mov_tl(dst, src); 2252 tcg_gen_movi_tl(cpu_ca, 0); 2253 if (is_isa300(ctx)) { 2254 tcg_gen_movi_tl(cpu_ca32, 0); 2255 } 2256 } else { 2257 TCGv t0; 2258 tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1); 2259 t0 = tcg_temp_new(); 2260 tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1); 2261 tcg_gen_and_tl(cpu_ca, cpu_ca, t0); 2262 tcg_temp_free(t0); 2263 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0); 2264 if (is_isa300(ctx)) { 2265 tcg_gen_mov_tl(cpu_ca32, cpu_ca); 2266 } 2267 tcg_gen_sari_tl(dst, src, sh); 2268 } 2269 if (unlikely(Rc(ctx->opcode) != 0)) { 2270 gen_set_Rc0(ctx, dst); 2271 } 2272 } 2273 2274 static void gen_sradi0(DisasContext *ctx) 2275 { 2276 gen_sradi(ctx, 0); 2277 } 2278 2279 static void gen_sradi1(DisasContext *ctx) 2280 { 2281 gen_sradi(ctx, 1); 2282 } 2283 2284 /* extswsli & extswsli. 
 */
/*
 * Extend Sign Word and Shift Left Immediate:
 * rA = ((int64_t)(int32_t)rS) << sh.  No carry bits are affected.
 */
static inline void gen_extswsli(DisasContext *ctx, int n)
{
    int sh = SH(ctx->opcode) + (n << 5);
    TCGv dst = cpu_gpr[rA(ctx->opcode)];
    TCGv src = cpu_gpr[rS(ctx->opcode)];

    tcg_gen_ext32s_tl(dst, src);
    tcg_gen_shli_tl(dst, dst, sh);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, dst);
    }
}

/* Decoder stubs: bit 'n' of the shift amount is encoded in the opcode.  */
static void gen_extswsli0(DisasContext *ctx)
{
    gen_extswsli(ctx, 0);
}

static void gen_extswsli1(DisasContext *ctx)
{
    gen_extswsli(ctx, 1);
}

/* srd & srd. */
/*
 * Shift Right Doubleword: rA = rS >> (rB & 0x3f), or 0 when the 7-bit
 * shift count in rB is >= 64.  Mirrors gen_sld with a right shift.
 */
static void gen_srd(DisasContext *ctx)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    /* AND rS with a mask that is 0 when rB >= 0x40 */
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
    tcg_gen_sari_tl(t0, t0, 0x3f);
    tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
    tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
#endif

/*** Addressing modes ***/
/* Register indirect with immediate index : EA = (rA|0) + SIMM */
/*
 * 'maskl' clears low bits of the displacement for DS/DQ-form opcodes
 * that reuse them as opcode bits.  In narrow (32-bit) mode the EA is
 * truncated to 32 bits.
 */
static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA,
                                      target_long maskl)
{
    target_long simm = SIMM(ctx->opcode);

    simm &= ~maskl;
    if (rA(ctx->opcode) == 0) {
        /* rA == 0 means a zero base, not GPR0.  */
        if (NARROW_MODE(ctx)) {
            simm = (uint32_t)simm;
        }
        tcg_gen_movi_tl(EA, simm);
    } else if (likely(simm != 0)) {
        tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
        if (NARROW_MODE(ctx)) {
            tcg_gen_ext32u_tl(EA, EA);
        }
    } else {
        if (NARROW_MODE(ctx)) {
            tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
        } else {
            tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
        }
    }
}

static inline void gen_addr_reg_index(DisasContext
*ctx, TCGv EA) 2356 { 2357 if (rA(ctx->opcode) == 0) { 2358 if (NARROW_MODE(ctx)) { 2359 tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]); 2360 } else { 2361 tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]); 2362 } 2363 } else { 2364 tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2365 if (NARROW_MODE(ctx)) { 2366 tcg_gen_ext32u_tl(EA, EA); 2367 } 2368 } 2369 } 2370 2371 static inline void gen_addr_register(DisasContext *ctx, TCGv EA) 2372 { 2373 if (rA(ctx->opcode) == 0) { 2374 tcg_gen_movi_tl(EA, 0); 2375 } else if (NARROW_MODE(ctx)) { 2376 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]); 2377 } else { 2378 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]); 2379 } 2380 } 2381 2382 static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1, 2383 target_long val) 2384 { 2385 tcg_gen_addi_tl(ret, arg1, val); 2386 if (NARROW_MODE(ctx)) { 2387 tcg_gen_ext32u_tl(ret, ret); 2388 } 2389 } 2390 2391 static inline void gen_align_no_le(DisasContext *ctx) 2392 { 2393 gen_exception_err(ctx, POWERPC_EXCP_ALIGN, 2394 (ctx->opcode & 0x03FF0000) | POWERPC_EXCP_ALIGN_LE); 2395 } 2396 2397 /*** Integer load ***/ 2398 #define DEF_MEMOP(op) ((op) | ctx->default_tcg_memop_mask) 2399 #define BSWAP_MEMOP(op) ((op) | (ctx->default_tcg_memop_mask ^ MO_BSWAP)) 2400 2401 #define GEN_QEMU_LOAD_TL(ldop, op) \ 2402 static void glue(gen_qemu_, ldop)(DisasContext *ctx, \ 2403 TCGv val, \ 2404 TCGv addr) \ 2405 { \ 2406 tcg_gen_qemu_ld_tl(val, addr, ctx->mem_idx, op); \ 2407 } 2408 2409 GEN_QEMU_LOAD_TL(ld8u, DEF_MEMOP(MO_UB)) 2410 GEN_QEMU_LOAD_TL(ld16u, DEF_MEMOP(MO_UW)) 2411 GEN_QEMU_LOAD_TL(ld16s, DEF_MEMOP(MO_SW)) 2412 GEN_QEMU_LOAD_TL(ld32u, DEF_MEMOP(MO_UL)) 2413 GEN_QEMU_LOAD_TL(ld32s, DEF_MEMOP(MO_SL)) 2414 2415 GEN_QEMU_LOAD_TL(ld16ur, BSWAP_MEMOP(MO_UW)) 2416 GEN_QEMU_LOAD_TL(ld32ur, BSWAP_MEMOP(MO_UL)) 2417 2418 #define GEN_QEMU_LOAD_64(ldop, op) \ 2419 static void glue(gen_qemu_, glue(ldop, _i64))(DisasContext *ctx, \ 2420 TCGv_i64 val, \ 2421 TCGv addr) \ 
2422 { \ 2423 tcg_gen_qemu_ld_i64(val, addr, ctx->mem_idx, op); \ 2424 } 2425 2426 GEN_QEMU_LOAD_64(ld8u, DEF_MEMOP(MO_UB)) 2427 GEN_QEMU_LOAD_64(ld16u, DEF_MEMOP(MO_UW)) 2428 GEN_QEMU_LOAD_64(ld32u, DEF_MEMOP(MO_UL)) 2429 GEN_QEMU_LOAD_64(ld32s, DEF_MEMOP(MO_SL)) 2430 GEN_QEMU_LOAD_64(ld64, DEF_MEMOP(MO_Q)) 2431 2432 #if defined(TARGET_PPC64) 2433 GEN_QEMU_LOAD_64(ld64ur, BSWAP_MEMOP(MO_Q)) 2434 #endif 2435 2436 #define GEN_QEMU_STORE_TL(stop, op) \ 2437 static void glue(gen_qemu_, stop)(DisasContext *ctx, \ 2438 TCGv val, \ 2439 TCGv addr) \ 2440 { \ 2441 tcg_gen_qemu_st_tl(val, addr, ctx->mem_idx, op); \ 2442 } 2443 2444 GEN_QEMU_STORE_TL(st8, DEF_MEMOP(MO_UB)) 2445 GEN_QEMU_STORE_TL(st16, DEF_MEMOP(MO_UW)) 2446 GEN_QEMU_STORE_TL(st32, DEF_MEMOP(MO_UL)) 2447 2448 GEN_QEMU_STORE_TL(st16r, BSWAP_MEMOP(MO_UW)) 2449 GEN_QEMU_STORE_TL(st32r, BSWAP_MEMOP(MO_UL)) 2450 2451 #define GEN_QEMU_STORE_64(stop, op) \ 2452 static void glue(gen_qemu_, glue(stop, _i64))(DisasContext *ctx, \ 2453 TCGv_i64 val, \ 2454 TCGv addr) \ 2455 { \ 2456 tcg_gen_qemu_st_i64(val, addr, ctx->mem_idx, op); \ 2457 } 2458 2459 GEN_QEMU_STORE_64(st8, DEF_MEMOP(MO_UB)) 2460 GEN_QEMU_STORE_64(st16, DEF_MEMOP(MO_UW)) 2461 GEN_QEMU_STORE_64(st32, DEF_MEMOP(MO_UL)) 2462 GEN_QEMU_STORE_64(st64, DEF_MEMOP(MO_Q)) 2463 2464 #if defined(TARGET_PPC64) 2465 GEN_QEMU_STORE_64(st64r, BSWAP_MEMOP(MO_Q)) 2466 #endif 2467 2468 #define GEN_LD(name, ldop, opc, type) \ 2469 static void glue(gen_, name)(DisasContext *ctx) \ 2470 { \ 2471 TCGv EA; \ 2472 gen_set_access_type(ctx, ACCESS_INT); \ 2473 EA = tcg_temp_new(); \ 2474 gen_addr_imm_index(ctx, EA, 0); \ 2475 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 2476 tcg_temp_free(EA); \ 2477 } 2478 2479 #define GEN_LDU(name, ldop, opc, type) \ 2480 static void glue(gen_, name##u)(DisasContext *ctx) \ 2481 { \ 2482 TCGv EA; \ 2483 if (unlikely(rA(ctx->opcode) == 0 || \ 2484 rA(ctx->opcode) == rD(ctx->opcode))) { \ 2485 gen_inval_exception(ctx, 
POWERPC_EXCP_INVAL_INVAL); \ 2486 return; \ 2487 } \ 2488 gen_set_access_type(ctx, ACCESS_INT); \ 2489 EA = tcg_temp_new(); \ 2490 if (type == PPC_64B) \ 2491 gen_addr_imm_index(ctx, EA, 0x03); \ 2492 else \ 2493 gen_addr_imm_index(ctx, EA, 0); \ 2494 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 2495 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 2496 tcg_temp_free(EA); \ 2497 } 2498 2499 #define GEN_LDUX(name, ldop, opc2, opc3, type) \ 2500 static void glue(gen_, name##ux)(DisasContext *ctx) \ 2501 { \ 2502 TCGv EA; \ 2503 if (unlikely(rA(ctx->opcode) == 0 || \ 2504 rA(ctx->opcode) == rD(ctx->opcode))) { \ 2505 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 2506 return; \ 2507 } \ 2508 gen_set_access_type(ctx, ACCESS_INT); \ 2509 EA = tcg_temp_new(); \ 2510 gen_addr_reg_index(ctx, EA); \ 2511 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 2512 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 2513 tcg_temp_free(EA); \ 2514 } 2515 2516 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \ 2517 static void glue(gen_, name##x)(DisasContext *ctx) \ 2518 { \ 2519 TCGv EA; \ 2520 chk; \ 2521 gen_set_access_type(ctx, ACCESS_INT); \ 2522 EA = tcg_temp_new(); \ 2523 gen_addr_reg_index(ctx, EA); \ 2524 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 2525 tcg_temp_free(EA); \ 2526 } 2527 2528 #define GEN_LDX(name, ldop, opc2, opc3, type) \ 2529 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_NONE) 2530 2531 #define GEN_LDX_HVRM(name, ldop, opc2, opc3, type) \ 2532 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_HVRM) 2533 2534 #define GEN_LDS(name, ldop, op, type) \ 2535 GEN_LD(name, ldop, op | 0x20, type); \ 2536 GEN_LDU(name, ldop, op | 0x21, type); \ 2537 GEN_LDUX(name, ldop, 0x17, op | 0x01, type); \ 2538 GEN_LDX(name, ldop, 0x17, op | 0x00, type) 2539 2540 /* lbz lbzu lbzux lbzx */ 2541 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER); 2542 /* lha lhau lhaux lhax */ 2543 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER); 2544 /* lhz lhzu lhzux 
lhzx */ 2545 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER); 2546 /* lwz lwzu lwzux lwzx */ 2547 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER); 2548 #if defined(TARGET_PPC64) 2549 /* lwaux */ 2550 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B); 2551 /* lwax */ 2552 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B); 2553 /* ldux */ 2554 GEN_LDUX(ld, ld64_i64, 0x15, 0x01, PPC_64B); 2555 /* ldx */ 2556 GEN_LDX(ld, ld64_i64, 0x15, 0x00, PPC_64B); 2557 2558 /* CI load/store variants */ 2559 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST) 2560 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x15, PPC_CILDST) 2561 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST) 2562 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST) 2563 2564 static void gen_ld(DisasContext *ctx) 2565 { 2566 TCGv EA; 2567 if (Rc(ctx->opcode)) { 2568 if (unlikely(rA(ctx->opcode) == 0 || 2569 rA(ctx->opcode) == rD(ctx->opcode))) { 2570 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2571 return; 2572 } 2573 } 2574 gen_set_access_type(ctx, ACCESS_INT); 2575 EA = tcg_temp_new(); 2576 gen_addr_imm_index(ctx, EA, 0x03); 2577 if (ctx->opcode & 0x02) { 2578 /* lwa (lwau is undefined) */ 2579 gen_qemu_ld32s(ctx, cpu_gpr[rD(ctx->opcode)], EA); 2580 } else { 2581 /* ld - ldu */ 2582 gen_qemu_ld64_i64(ctx, cpu_gpr[rD(ctx->opcode)], EA); 2583 } 2584 if (Rc(ctx->opcode)) 2585 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); 2586 tcg_temp_free(EA); 2587 } 2588 2589 /* lq */ 2590 static void gen_lq(DisasContext *ctx) 2591 { 2592 int ra, rd; 2593 TCGv EA, hi, lo; 2594 2595 /* lq is a legal user mode instruction starting in ISA 2.07 */ 2596 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2597 bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2598 2599 if (!legal_in_user_mode && ctx->pr) { 2600 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); 2601 return; 2602 } 2603 2604 if (!le_is_supported && ctx->le_mode) { 2605 gen_align_no_le(ctx); 2606 return; 2607 } 2608 ra = rA(ctx->opcode); 2609 rd = rD(ctx->opcode); 2610 if 
(unlikely((rd & 1) || rd == ra)) { 2611 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2612 return; 2613 } 2614 2615 gen_set_access_type(ctx, ACCESS_INT); 2616 EA = tcg_temp_new(); 2617 gen_addr_imm_index(ctx, EA, 0x0F); 2618 2619 /* Note that the low part is always in RD+1, even in LE mode. */ 2620 lo = cpu_gpr[rd + 1]; 2621 hi = cpu_gpr[rd]; 2622 2623 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 2624 #ifdef CONFIG_ATOMIC128 2625 TCGv_i32 oi = tcg_temp_new_i32(); 2626 if (ctx->le_mode) { 2627 tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ, ctx->mem_idx)); 2628 gen_helper_lq_le_parallel(lo, cpu_env, EA, oi); 2629 } else { 2630 tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ, ctx->mem_idx)); 2631 gen_helper_lq_be_parallel(lo, cpu_env, EA, oi); 2632 } 2633 tcg_temp_free_i32(oi); 2634 tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh)); 2635 #else 2636 /* Restart with exclusive lock. */ 2637 gen_helper_exit_atomic(cpu_env); 2638 ctx->base.is_jmp = DISAS_NORETURN; 2639 #endif 2640 } else if (ctx->le_mode) { 2641 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEQ); 2642 gen_addr_add(ctx, EA, EA, 8); 2643 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEQ); 2644 } else { 2645 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEQ); 2646 gen_addr_add(ctx, EA, EA, 8); 2647 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEQ); 2648 } 2649 tcg_temp_free(EA); 2650 } 2651 #endif 2652 2653 /*** Integer store ***/ 2654 #define GEN_ST(name, stop, opc, type) \ 2655 static void glue(gen_, name)(DisasContext *ctx) \ 2656 { \ 2657 TCGv EA; \ 2658 gen_set_access_type(ctx, ACCESS_INT); \ 2659 EA = tcg_temp_new(); \ 2660 gen_addr_imm_index(ctx, EA, 0); \ 2661 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2662 tcg_temp_free(EA); \ 2663 } 2664 2665 #define GEN_STU(name, stop, opc, type) \ 2666 static void glue(gen_, stop##u)(DisasContext *ctx) \ 2667 { \ 2668 TCGv EA; \ 2669 if (unlikely(rA(ctx->opcode) == 0)) { \ 2670 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 2671 return; \ 2672 
} \ 2673 gen_set_access_type(ctx, ACCESS_INT); \ 2674 EA = tcg_temp_new(); \ 2675 if (type == PPC_64B) \ 2676 gen_addr_imm_index(ctx, EA, 0x03); \ 2677 else \ 2678 gen_addr_imm_index(ctx, EA, 0); \ 2679 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2680 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 2681 tcg_temp_free(EA); \ 2682 } 2683 2684 #define GEN_STUX(name, stop, opc2, opc3, type) \ 2685 static void glue(gen_, name##ux)(DisasContext *ctx) \ 2686 { \ 2687 TCGv EA; \ 2688 if (unlikely(rA(ctx->opcode) == 0)) { \ 2689 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 2690 return; \ 2691 } \ 2692 gen_set_access_type(ctx, ACCESS_INT); \ 2693 EA = tcg_temp_new(); \ 2694 gen_addr_reg_index(ctx, EA); \ 2695 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2696 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 2697 tcg_temp_free(EA); \ 2698 } 2699 2700 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \ 2701 static void glue(gen_, name##x)(DisasContext *ctx) \ 2702 { \ 2703 TCGv EA; \ 2704 chk; \ 2705 gen_set_access_type(ctx, ACCESS_INT); \ 2706 EA = tcg_temp_new(); \ 2707 gen_addr_reg_index(ctx, EA); \ 2708 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2709 tcg_temp_free(EA); \ 2710 } 2711 #define GEN_STX(name, stop, opc2, opc3, type) \ 2712 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_NONE) 2713 2714 #define GEN_STX_HVRM(name, stop, opc2, opc3, type) \ 2715 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_HVRM) 2716 2717 #define GEN_STS(name, stop, op, type) \ 2718 GEN_ST(name, stop, op | 0x20, type); \ 2719 GEN_STU(name, stop, op | 0x21, type); \ 2720 GEN_STUX(name, stop, 0x17, op | 0x01, type); \ 2721 GEN_STX(name, stop, 0x17, op | 0x00, type) 2722 2723 /* stb stbu stbux stbx */ 2724 GEN_STS(stb, st8, 0x06, PPC_INTEGER); 2725 /* sth sthu sthux sthx */ 2726 GEN_STS(sth, st16, 0x0C, PPC_INTEGER); 2727 /* stw stwu stwux stwx */ 2728 GEN_STS(stw, st32, 0x04, PPC_INTEGER); 2729 #if defined(TARGET_PPC64) 2730 GEN_STUX(std, 
st64_i64, 0x15, 0x05, PPC_64B); 2731 GEN_STX(std, st64_i64, 0x15, 0x04, PPC_64B); 2732 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST) 2733 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST) 2734 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST) 2735 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST) 2736 2737 static void gen_std(DisasContext *ctx) 2738 { 2739 int rs; 2740 TCGv EA; 2741 2742 rs = rS(ctx->opcode); 2743 if ((ctx->opcode & 0x3) == 0x2) { /* stq */ 2744 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2745 bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2746 TCGv hi, lo; 2747 2748 if (!(ctx->insns_flags & PPC_64BX)) { 2749 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2750 } 2751 2752 if (!legal_in_user_mode && ctx->pr) { 2753 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); 2754 return; 2755 } 2756 2757 if (!le_is_supported && ctx->le_mode) { 2758 gen_align_no_le(ctx); 2759 return; 2760 } 2761 2762 if (unlikely(rs & 1)) { 2763 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2764 return; 2765 } 2766 gen_set_access_type(ctx, ACCESS_INT); 2767 EA = tcg_temp_new(); 2768 gen_addr_imm_index(ctx, EA, 0x03); 2769 2770 /* Note that the low part is always in RS+1, even in LE mode. */ 2771 lo = cpu_gpr[rs + 1]; 2772 hi = cpu_gpr[rs]; 2773 2774 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 2775 #ifdef CONFIG_ATOMIC128 2776 TCGv_i32 oi = tcg_temp_new_i32(); 2777 if (ctx->le_mode) { 2778 tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ, ctx->mem_idx)); 2779 gen_helper_stq_le_parallel(cpu_env, EA, lo, hi, oi); 2780 } else { 2781 tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ, ctx->mem_idx)); 2782 gen_helper_stq_be_parallel(cpu_env, EA, lo, hi, oi); 2783 } 2784 tcg_temp_free_i32(oi); 2785 #else 2786 /* Restart with exclusive lock. 
*/ 2787 gen_helper_exit_atomic(cpu_env); 2788 ctx->base.is_jmp = DISAS_NORETURN; 2789 #endif 2790 } else if (ctx->le_mode) { 2791 tcg_gen_qemu_st_i64(lo, EA, ctx->mem_idx, MO_LEQ); 2792 gen_addr_add(ctx, EA, EA, 8); 2793 tcg_gen_qemu_st_i64(hi, EA, ctx->mem_idx, MO_LEQ); 2794 } else { 2795 tcg_gen_qemu_st_i64(hi, EA, ctx->mem_idx, MO_BEQ); 2796 gen_addr_add(ctx, EA, EA, 8); 2797 tcg_gen_qemu_st_i64(lo, EA, ctx->mem_idx, MO_BEQ); 2798 } 2799 tcg_temp_free(EA); 2800 } else { 2801 /* std / stdu */ 2802 if (Rc(ctx->opcode)) { 2803 if (unlikely(rA(ctx->opcode) == 0)) { 2804 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2805 return; 2806 } 2807 } 2808 gen_set_access_type(ctx, ACCESS_INT); 2809 EA = tcg_temp_new(); 2810 gen_addr_imm_index(ctx, EA, 0x03); 2811 gen_qemu_st64_i64(ctx, cpu_gpr[rs], EA); 2812 if (Rc(ctx->opcode)) 2813 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); 2814 tcg_temp_free(EA); 2815 } 2816 } 2817 #endif 2818 /*** Integer load and store with byte reverse ***/ 2819 2820 /* lhbrx */ 2821 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER); 2822 2823 /* lwbrx */ 2824 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER); 2825 2826 #if defined(TARGET_PPC64) 2827 /* ldbrx */ 2828 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE); 2829 /* stdbrx */ 2830 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE); 2831 #endif /* TARGET_PPC64 */ 2832 2833 /* sthbrx */ 2834 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER); 2835 /* stwbrx */ 2836 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER); 2837 2838 /*** Integer load and store multiple ***/ 2839 2840 /* lmw */ 2841 static void gen_lmw(DisasContext *ctx) 2842 { 2843 TCGv t0; 2844 TCGv_i32 t1; 2845 2846 if (ctx->le_mode) { 2847 gen_align_no_le(ctx); 2848 return; 2849 } 2850 gen_set_access_type(ctx, ACCESS_INT); 2851 t0 = tcg_temp_new(); 2852 t1 = tcg_const_i32(rD(ctx->opcode)); 2853 gen_addr_imm_index(ctx, t0, 0); 2854 gen_helper_lmw(cpu_env, t0, t1); 2855 tcg_temp_free(t0); 2856 
tcg_temp_free_i32(t1); 2857 } 2858 2859 /* stmw */ 2860 static void gen_stmw(DisasContext *ctx) 2861 { 2862 TCGv t0; 2863 TCGv_i32 t1; 2864 2865 if (ctx->le_mode) { 2866 gen_align_no_le(ctx); 2867 return; 2868 } 2869 gen_set_access_type(ctx, ACCESS_INT); 2870 t0 = tcg_temp_new(); 2871 t1 = tcg_const_i32(rS(ctx->opcode)); 2872 gen_addr_imm_index(ctx, t0, 0); 2873 gen_helper_stmw(cpu_env, t0, t1); 2874 tcg_temp_free(t0); 2875 tcg_temp_free_i32(t1); 2876 } 2877 2878 /*** Integer load and store strings ***/ 2879 2880 /* lswi */ 2881 /* PowerPC32 specification says we must generate an exception if 2882 * rA is in the range of registers to be loaded. 2883 * In an other hand, IBM says this is valid, but rA won't be loaded. 2884 * For now, I'll follow the spec... 2885 */ 2886 static void gen_lswi(DisasContext *ctx) 2887 { 2888 TCGv t0; 2889 TCGv_i32 t1, t2; 2890 int nb = NB(ctx->opcode); 2891 int start = rD(ctx->opcode); 2892 int ra = rA(ctx->opcode); 2893 int nr; 2894 2895 if (ctx->le_mode) { 2896 gen_align_no_le(ctx); 2897 return; 2898 } 2899 if (nb == 0) 2900 nb = 32; 2901 nr = DIV_ROUND_UP(nb, 4); 2902 if (unlikely(lsw_reg_in_range(start, nr, ra))) { 2903 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX); 2904 return; 2905 } 2906 gen_set_access_type(ctx, ACCESS_INT); 2907 t0 = tcg_temp_new(); 2908 gen_addr_register(ctx, t0); 2909 t1 = tcg_const_i32(nb); 2910 t2 = tcg_const_i32(start); 2911 gen_helper_lsw(cpu_env, t0, t1, t2); 2912 tcg_temp_free(t0); 2913 tcg_temp_free_i32(t1); 2914 tcg_temp_free_i32(t2); 2915 } 2916 2917 /* lswx */ 2918 static void gen_lswx(DisasContext *ctx) 2919 { 2920 TCGv t0; 2921 TCGv_i32 t1, t2, t3; 2922 2923 if (ctx->le_mode) { 2924 gen_align_no_le(ctx); 2925 return; 2926 } 2927 gen_set_access_type(ctx, ACCESS_INT); 2928 t0 = tcg_temp_new(); 2929 gen_addr_reg_index(ctx, t0); 2930 t1 = tcg_const_i32(rD(ctx->opcode)); 2931 t2 = tcg_const_i32(rA(ctx->opcode)); 2932 t3 = tcg_const_i32(rB(ctx->opcode)); 2933 gen_helper_lswx(cpu_env, t0, t1, t2, t3); 
2934 tcg_temp_free(t0); 2935 tcg_temp_free_i32(t1); 2936 tcg_temp_free_i32(t2); 2937 tcg_temp_free_i32(t3); 2938 } 2939 2940 /* stswi */ 2941 static void gen_stswi(DisasContext *ctx) 2942 { 2943 TCGv t0; 2944 TCGv_i32 t1, t2; 2945 int nb = NB(ctx->opcode); 2946 2947 if (ctx->le_mode) { 2948 gen_align_no_le(ctx); 2949 return; 2950 } 2951 gen_set_access_type(ctx, ACCESS_INT); 2952 t0 = tcg_temp_new(); 2953 gen_addr_register(ctx, t0); 2954 if (nb == 0) 2955 nb = 32; 2956 t1 = tcg_const_i32(nb); 2957 t2 = tcg_const_i32(rS(ctx->opcode)); 2958 gen_helper_stsw(cpu_env, t0, t1, t2); 2959 tcg_temp_free(t0); 2960 tcg_temp_free_i32(t1); 2961 tcg_temp_free_i32(t2); 2962 } 2963 2964 /* stswx */ 2965 static void gen_stswx(DisasContext *ctx) 2966 { 2967 TCGv t0; 2968 TCGv_i32 t1, t2; 2969 2970 if (ctx->le_mode) { 2971 gen_align_no_le(ctx); 2972 return; 2973 } 2974 gen_set_access_type(ctx, ACCESS_INT); 2975 t0 = tcg_temp_new(); 2976 gen_addr_reg_index(ctx, t0); 2977 t1 = tcg_temp_new_i32(); 2978 tcg_gen_trunc_tl_i32(t1, cpu_xer); 2979 tcg_gen_andi_i32(t1, t1, 0x7F); 2980 t2 = tcg_const_i32(rS(ctx->opcode)); 2981 gen_helper_stsw(cpu_env, t0, t1, t2); 2982 tcg_temp_free(t0); 2983 tcg_temp_free_i32(t1); 2984 tcg_temp_free_i32(t2); 2985 } 2986 2987 /*** Memory synchronisation ***/ 2988 /* eieio */ 2989 static void gen_eieio(DisasContext *ctx) 2990 { 2991 TCGBar bar = TCG_MO_LD_ST; 2992 2993 /* 2994 * POWER9 has a eieio instruction variant using bit 6 as a hint to 2995 * tell the CPU it is a store-forwarding barrier. 2996 */ 2997 if (ctx->opcode & 0x2000000) { 2998 /* 2999 * ISA says that "Reserved fields in instructions are ignored 3000 * by the processor". So ignore the bit 6 on non-POWER9 CPU but 3001 * as this is not an instruction software should be using, 3002 * complain to the user. 
         */
        if (!(ctx->insns_flags2 & PPC2_ISA300)) {
            qemu_log_mask(LOG_GUEST_ERROR, "invalid eieio using bit 6 at @"
                          TARGET_FMT_lx "\n", ctx->base.pc_next - 4);
        } else {
            /* POWER9 store-forwarding-barrier form of eieio.  */
            bar = TCG_MO_ST_LD;
        }
    }

    tcg_gen_mb(bar | TCG_BAR_SC);
}

#if !defined(CONFIG_USER_ONLY)
/*
 * Emit code that calls the TLB-flush helper (global or CPU-local) only
 * when env->tlb_need_flush is non-zero.  A no-op for MMU models that do
 * not use lazy TLB flushing.
 */
static inline void gen_check_tlb_flush(DisasContext *ctx, bool global)
{
    TCGv_i32 t;
    TCGLabel *l;

    if (!ctx->lazy_tlb_flush) {
        return;
    }
    l = gen_new_label();
    t = tcg_temp_new_i32();
    tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
    /* Skip the helper call when no flush is pending.  */
    tcg_gen_brcondi_i32(TCG_COND_EQ, t, 0, l);
    if (global) {
        gen_helper_check_tlb_flush_global(cpu_env);
    } else {
        gen_helper_check_tlb_flush_local(cpu_env);
    }
    gen_set_label(l);
    tcg_temp_free_i32(t);
}
#else
/* User-mode emulation has no TLB to flush.  */
static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) { }
#endif

/* isync */
static void gen_isync(DisasContext *ctx)
{
    /*
     * We need to check for a pending TLB flush.
This can only happen in 3045 * kernel mode however so check MSR_PR 3046 */ 3047 if (!ctx->pr) { 3048 gen_check_tlb_flush(ctx, false); 3049 } 3050 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC); 3051 gen_stop_exception(ctx); 3052 } 3053 3054 #define MEMOP_GET_SIZE(x) (1 << ((x) & MO_SIZE)) 3055 3056 static void gen_load_locked(DisasContext *ctx, TCGMemOp memop) 3057 { 3058 TCGv gpr = cpu_gpr[rD(ctx->opcode)]; 3059 TCGv t0 = tcg_temp_new(); 3060 3061 gen_set_access_type(ctx, ACCESS_RES); 3062 gen_addr_reg_index(ctx, t0); 3063 tcg_gen_qemu_ld_tl(gpr, t0, ctx->mem_idx, memop | MO_ALIGN); 3064 tcg_gen_mov_tl(cpu_reserve, t0); 3065 tcg_gen_mov_tl(cpu_reserve_val, gpr); 3066 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_LDAQ); 3067 tcg_temp_free(t0); 3068 } 3069 3070 #define LARX(name, memop) \ 3071 static void gen_##name(DisasContext *ctx) \ 3072 { \ 3073 gen_load_locked(ctx, memop); \ 3074 } 3075 3076 /* lwarx */ 3077 LARX(lbarx, DEF_MEMOP(MO_UB)) 3078 LARX(lharx, DEF_MEMOP(MO_UW)) 3079 LARX(lwarx, DEF_MEMOP(MO_UL)) 3080 3081 static void gen_fetch_inc_conditional(DisasContext *ctx, TCGMemOp memop, 3082 TCGv EA, TCGCond cond, int addend) 3083 { 3084 TCGv t = tcg_temp_new(); 3085 TCGv t2 = tcg_temp_new(); 3086 TCGv u = tcg_temp_new(); 3087 3088 tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop); 3089 tcg_gen_addi_tl(t2, EA, MEMOP_GET_SIZE(memop)); 3090 tcg_gen_qemu_ld_tl(t2, t2, ctx->mem_idx, memop); 3091 tcg_gen_addi_tl(u, t, addend); 3092 3093 /* E.g. for fetch and increment bounded... */ 3094 /* mem(EA,s) = (t != t2 ? u = t + 1 : t) */ 3095 tcg_gen_movcond_tl(cond, u, t, t2, u, t); 3096 tcg_gen_qemu_st_tl(u, EA, ctx->mem_idx, memop); 3097 3098 /* RT = (t != t2 ? 
t : u = 1<<(s*8-1)) */ 3099 tcg_gen_movi_tl(u, 1 << (MEMOP_GET_SIZE(memop) * 8 - 1)); 3100 tcg_gen_movcond_tl(cond, cpu_gpr[rD(ctx->opcode)], t, t2, t, u); 3101 3102 tcg_temp_free(t); 3103 tcg_temp_free(t2); 3104 tcg_temp_free(u); 3105 } 3106 3107 static void gen_ld_atomic(DisasContext *ctx, TCGMemOp memop) 3108 { 3109 uint32_t gpr_FC = FC(ctx->opcode); 3110 TCGv EA = tcg_temp_new(); 3111 int rt = rD(ctx->opcode); 3112 bool need_serial; 3113 TCGv src, dst; 3114 3115 gen_addr_register(ctx, EA); 3116 dst = cpu_gpr[rt]; 3117 src = cpu_gpr[(rt + 1) & 31]; 3118 3119 need_serial = false; 3120 memop |= MO_ALIGN; 3121 switch (gpr_FC) { 3122 case 0: /* Fetch and add */ 3123 tcg_gen_atomic_fetch_add_tl(dst, EA, src, ctx->mem_idx, memop); 3124 break; 3125 case 1: /* Fetch and xor */ 3126 tcg_gen_atomic_fetch_xor_tl(dst, EA, src, ctx->mem_idx, memop); 3127 break; 3128 case 2: /* Fetch and or */ 3129 tcg_gen_atomic_fetch_or_tl(dst, EA, src, ctx->mem_idx, memop); 3130 break; 3131 case 3: /* Fetch and 'and' */ 3132 tcg_gen_atomic_fetch_and_tl(dst, EA, src, ctx->mem_idx, memop); 3133 break; 3134 case 4: /* Fetch and max unsigned */ 3135 tcg_gen_atomic_fetch_umax_tl(dst, EA, src, ctx->mem_idx, memop); 3136 break; 3137 case 5: /* Fetch and max signed */ 3138 tcg_gen_atomic_fetch_smax_tl(dst, EA, src, ctx->mem_idx, memop); 3139 break; 3140 case 6: /* Fetch and min unsigned */ 3141 tcg_gen_atomic_fetch_umin_tl(dst, EA, src, ctx->mem_idx, memop); 3142 break; 3143 case 7: /* Fetch and min signed */ 3144 tcg_gen_atomic_fetch_smin_tl(dst, EA, src, ctx->mem_idx, memop); 3145 break; 3146 case 8: /* Swap */ 3147 tcg_gen_atomic_xchg_tl(dst, EA, src, ctx->mem_idx, memop); 3148 break; 3149 3150 case 16: /* Compare and swap not equal */ 3151 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3152 need_serial = true; 3153 } else { 3154 TCGv t0 = tcg_temp_new(); 3155 TCGv t1 = tcg_temp_new(); 3156 3157 tcg_gen_qemu_ld_tl(t0, EA, ctx->mem_idx, memop); 3158 if ((memop & MO_SIZE) == MO_64 || 
TARGET_LONG_BITS == 32) { 3159 tcg_gen_mov_tl(t1, src); 3160 } else { 3161 tcg_gen_ext32u_tl(t1, src); 3162 } 3163 tcg_gen_movcond_tl(TCG_COND_NE, t1, t0, t1, 3164 cpu_gpr[(rt + 2) & 31], t0); 3165 tcg_gen_qemu_st_tl(t1, EA, ctx->mem_idx, memop); 3166 tcg_gen_mov_tl(dst, t0); 3167 3168 tcg_temp_free(t0); 3169 tcg_temp_free(t1); 3170 } 3171 break; 3172 3173 case 24: /* Fetch and increment bounded */ 3174 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3175 need_serial = true; 3176 } else { 3177 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, 1); 3178 } 3179 break; 3180 case 25: /* Fetch and increment equal */ 3181 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3182 need_serial = true; 3183 } else { 3184 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_EQ, 1); 3185 } 3186 break; 3187 case 28: /* Fetch and decrement bounded */ 3188 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3189 need_serial = true; 3190 } else { 3191 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, -1); 3192 } 3193 break; 3194 3195 default: 3196 /* invoke data storage error handler */ 3197 gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL); 3198 } 3199 tcg_temp_free(EA); 3200 3201 if (need_serial) { 3202 /* Restart with exclusive lock. 
*/ 3203 gen_helper_exit_atomic(cpu_env); 3204 ctx->base.is_jmp = DISAS_NORETURN; 3205 } 3206 } 3207 3208 static void gen_lwat(DisasContext *ctx) 3209 { 3210 gen_ld_atomic(ctx, DEF_MEMOP(MO_UL)); 3211 } 3212 3213 #ifdef TARGET_PPC64 3214 static void gen_ldat(DisasContext *ctx) 3215 { 3216 gen_ld_atomic(ctx, DEF_MEMOP(MO_Q)); 3217 } 3218 #endif 3219 3220 static void gen_st_atomic(DisasContext *ctx, TCGMemOp memop) 3221 { 3222 uint32_t gpr_FC = FC(ctx->opcode); 3223 TCGv EA = tcg_temp_new(); 3224 TCGv src, discard; 3225 3226 gen_addr_register(ctx, EA); 3227 src = cpu_gpr[rD(ctx->opcode)]; 3228 discard = tcg_temp_new(); 3229 3230 memop |= MO_ALIGN; 3231 switch (gpr_FC) { 3232 case 0: /* add and Store */ 3233 tcg_gen_atomic_add_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3234 break; 3235 case 1: /* xor and Store */ 3236 tcg_gen_atomic_xor_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3237 break; 3238 case 2: /* Or and Store */ 3239 tcg_gen_atomic_or_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3240 break; 3241 case 3: /* 'and' and Store */ 3242 tcg_gen_atomic_and_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3243 break; 3244 case 4: /* Store max unsigned */ 3245 tcg_gen_atomic_umax_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3246 break; 3247 case 5: /* Store max signed */ 3248 tcg_gen_atomic_smax_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3249 break; 3250 case 6: /* Store min unsigned */ 3251 tcg_gen_atomic_umin_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3252 break; 3253 case 7: /* Store min signed */ 3254 tcg_gen_atomic_smin_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3255 break; 3256 case 24: /* Store twin */ 3257 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3258 /* Restart with exclusive lock. 
*/ 3259 gen_helper_exit_atomic(cpu_env); 3260 ctx->base.is_jmp = DISAS_NORETURN; 3261 } else { 3262 TCGv t = tcg_temp_new(); 3263 TCGv t2 = tcg_temp_new(); 3264 TCGv s = tcg_temp_new(); 3265 TCGv s2 = tcg_temp_new(); 3266 TCGv ea_plus_s = tcg_temp_new(); 3267 3268 tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop); 3269 tcg_gen_addi_tl(ea_plus_s, EA, MEMOP_GET_SIZE(memop)); 3270 tcg_gen_qemu_ld_tl(t2, ea_plus_s, ctx->mem_idx, memop); 3271 tcg_gen_movcond_tl(TCG_COND_EQ, s, t, t2, src, t); 3272 tcg_gen_movcond_tl(TCG_COND_EQ, s2, t, t2, src, t2); 3273 tcg_gen_qemu_st_tl(s, EA, ctx->mem_idx, memop); 3274 tcg_gen_qemu_st_tl(s2, ea_plus_s, ctx->mem_idx, memop); 3275 3276 tcg_temp_free(ea_plus_s); 3277 tcg_temp_free(s2); 3278 tcg_temp_free(s); 3279 tcg_temp_free(t2); 3280 tcg_temp_free(t); 3281 } 3282 break; 3283 default: 3284 /* invoke data storage error handler */ 3285 gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL); 3286 } 3287 tcg_temp_free(discard); 3288 tcg_temp_free(EA); 3289 } 3290 3291 static void gen_stwat(DisasContext *ctx) 3292 { 3293 gen_st_atomic(ctx, DEF_MEMOP(MO_UL)); 3294 } 3295 3296 #ifdef TARGET_PPC64 3297 static void gen_stdat(DisasContext *ctx) 3298 { 3299 gen_st_atomic(ctx, DEF_MEMOP(MO_Q)); 3300 } 3301 #endif 3302 3303 static void gen_conditional_store(DisasContext *ctx, TCGMemOp memop) 3304 { 3305 TCGLabel *l1 = gen_new_label(); 3306 TCGLabel *l2 = gen_new_label(); 3307 TCGv t0 = tcg_temp_new(); 3308 int reg = rS(ctx->opcode); 3309 3310 gen_set_access_type(ctx, ACCESS_RES); 3311 gen_addr_reg_index(ctx, t0); 3312 tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1); 3313 tcg_temp_free(t0); 3314 3315 t0 = tcg_temp_new(); 3316 tcg_gen_atomic_cmpxchg_tl(t0, cpu_reserve, cpu_reserve_val, 3317 cpu_gpr[reg], ctx->mem_idx, 3318 DEF_MEMOP(memop) | MO_ALIGN); 3319 tcg_gen_setcond_tl(TCG_COND_EQ, t0, t0, cpu_reserve_val); 3320 tcg_gen_shli_tl(t0, t0, CRF_EQ_BIT); 3321 tcg_gen_or_tl(t0, t0, cpu_so); 3322 tcg_gen_trunc_tl_i32(cpu_crf[0], t0); 3323 
tcg_temp_free(t0); 3324 tcg_gen_br(l2); 3325 3326 gen_set_label(l1); 3327 3328 /* Address mismatch implies failure. But we still need to provide the 3329 memory barrier semantics of the instruction. */ 3330 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_STRL); 3331 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 3332 3333 gen_set_label(l2); 3334 tcg_gen_movi_tl(cpu_reserve, -1); 3335 } 3336 3337 #define STCX(name, memop) \ 3338 static void gen_##name(DisasContext *ctx) \ 3339 { \ 3340 gen_conditional_store(ctx, memop); \ 3341 } 3342 3343 STCX(stbcx_, DEF_MEMOP(MO_UB)) 3344 STCX(sthcx_, DEF_MEMOP(MO_UW)) 3345 STCX(stwcx_, DEF_MEMOP(MO_UL)) 3346 3347 #if defined(TARGET_PPC64) 3348 /* ldarx */ 3349 LARX(ldarx, DEF_MEMOP(MO_Q)) 3350 /* stdcx. */ 3351 STCX(stdcx_, DEF_MEMOP(MO_Q)) 3352 3353 /* lqarx */ 3354 static void gen_lqarx(DisasContext *ctx) 3355 { 3356 int rd = rD(ctx->opcode); 3357 TCGv EA, hi, lo; 3358 3359 if (unlikely((rd & 1) || (rd == rA(ctx->opcode)) || 3360 (rd == rB(ctx->opcode)))) { 3361 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3362 return; 3363 } 3364 3365 gen_set_access_type(ctx, ACCESS_RES); 3366 EA = tcg_temp_new(); 3367 gen_addr_reg_index(ctx, EA); 3368 3369 /* Note that the low part is always in RD+1, even in LE mode. */ 3370 lo = cpu_gpr[rd + 1]; 3371 hi = cpu_gpr[rd]; 3372 3373 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3374 #ifdef CONFIG_ATOMIC128 3375 TCGv_i32 oi = tcg_temp_new_i32(); 3376 if (ctx->le_mode) { 3377 tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ | MO_ALIGN_16, 3378 ctx->mem_idx)); 3379 gen_helper_lq_le_parallel(lo, cpu_env, EA, oi); 3380 } else { 3381 tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ | MO_ALIGN_16, 3382 ctx->mem_idx)); 3383 gen_helper_lq_be_parallel(lo, cpu_env, EA, oi); 3384 } 3385 tcg_temp_free_i32(oi); 3386 tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh)); 3387 #else 3388 /* Restart with exclusive lock. 
*/ 3389 gen_helper_exit_atomic(cpu_env); 3390 ctx->base.is_jmp = DISAS_NORETURN; 3391 tcg_temp_free(EA); 3392 return; 3393 #endif 3394 } else if (ctx->le_mode) { 3395 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEQ | MO_ALIGN_16); 3396 tcg_gen_mov_tl(cpu_reserve, EA); 3397 gen_addr_add(ctx, EA, EA, 8); 3398 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEQ); 3399 } else { 3400 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEQ | MO_ALIGN_16); 3401 tcg_gen_mov_tl(cpu_reserve, EA); 3402 gen_addr_add(ctx, EA, EA, 8); 3403 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEQ); 3404 } 3405 tcg_temp_free(EA); 3406 3407 tcg_gen_st_tl(hi, cpu_env, offsetof(CPUPPCState, reserve_val)); 3408 tcg_gen_st_tl(lo, cpu_env, offsetof(CPUPPCState, reserve_val2)); 3409 } 3410 3411 /* stqcx. */ 3412 static void gen_stqcx_(DisasContext *ctx) 3413 { 3414 int rs = rS(ctx->opcode); 3415 TCGv EA, hi, lo; 3416 3417 if (unlikely(rs & 1)) { 3418 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3419 return; 3420 } 3421 3422 gen_set_access_type(ctx, ACCESS_RES); 3423 EA = tcg_temp_new(); 3424 gen_addr_reg_index(ctx, EA); 3425 3426 /* Note that the low part is always in RS+1, even in LE mode. */ 3427 lo = cpu_gpr[rs + 1]; 3428 hi = cpu_gpr[rs]; 3429 3430 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3431 TCGv_i32 oi = tcg_const_i32(DEF_MEMOP(MO_Q) | MO_ALIGN_16); 3432 #ifdef CONFIG_ATOMIC128 3433 if (ctx->le_mode) { 3434 gen_helper_stqcx_le_parallel(cpu_crf[0], cpu_env, EA, lo, hi, oi); 3435 } else { 3436 gen_helper_stqcx_le_parallel(cpu_crf[0], cpu_env, EA, lo, hi, oi); 3437 } 3438 #else 3439 /* Restart with exclusive lock. 
*/ 3440 gen_helper_exit_atomic(cpu_env); 3441 ctx->base.is_jmp = DISAS_NORETURN; 3442 #endif 3443 tcg_temp_free(EA); 3444 tcg_temp_free_i32(oi); 3445 } else { 3446 TCGLabel *lab_fail = gen_new_label(); 3447 TCGLabel *lab_over = gen_new_label(); 3448 TCGv_i64 t0 = tcg_temp_new_i64(); 3449 TCGv_i64 t1 = tcg_temp_new_i64(); 3450 3451 tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, lab_fail); 3452 tcg_temp_free(EA); 3453 3454 gen_qemu_ld64_i64(ctx, t0, cpu_reserve); 3455 tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode 3456 ? offsetof(CPUPPCState, reserve_val2) 3457 : offsetof(CPUPPCState, reserve_val))); 3458 tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail); 3459 3460 tcg_gen_addi_i64(t0, cpu_reserve, 8); 3461 gen_qemu_ld64_i64(ctx, t0, t0); 3462 tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode 3463 ? offsetof(CPUPPCState, reserve_val) 3464 : offsetof(CPUPPCState, reserve_val2))); 3465 tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail); 3466 3467 /* Success */ 3468 gen_qemu_st64_i64(ctx, ctx->le_mode ? lo : hi, cpu_reserve); 3469 tcg_gen_addi_i64(t0, cpu_reserve, 8); 3470 gen_qemu_st64_i64(ctx, ctx->le_mode ? hi : lo, t0); 3471 3472 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 3473 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ); 3474 tcg_gen_br(lab_over); 3475 3476 gen_set_label(lab_fail); 3477 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 3478 3479 gen_set_label(lab_over); 3480 tcg_gen_movi_tl(cpu_reserve, -1); 3481 tcg_temp_free_i64(t0); 3482 tcg_temp_free_i64(t1); 3483 } 3484 } 3485 #endif /* defined(TARGET_PPC64) */ 3486 3487 /* sync */ 3488 static void gen_sync(DisasContext *ctx) 3489 { 3490 uint32_t l = (ctx->opcode >> 21) & 3; 3491 3492 /* 3493 * We may need to check for a pending TLB flush. 3494 * 3495 * We do this on ptesync (l == 2) on ppc64 and any sync pn ppc32. 3496 * 3497 * Additionally, this can only happen in kernel mode however so 3498 * check MSR_PR as well. 
3499 */ 3500 if (((l == 2) || !(ctx->insns_flags & PPC_64B)) && !ctx->pr) { 3501 gen_check_tlb_flush(ctx, true); 3502 } 3503 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC); 3504 } 3505 3506 /* wait */ 3507 static void gen_wait(DisasContext *ctx) 3508 { 3509 TCGv_i32 t0 = tcg_const_i32(1); 3510 tcg_gen_st_i32(t0, cpu_env, 3511 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted)); 3512 tcg_temp_free_i32(t0); 3513 /* Stop translation, as the CPU is supposed to sleep from now */ 3514 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 3515 } 3516 3517 #if defined(TARGET_PPC64) 3518 static void gen_doze(DisasContext *ctx) 3519 { 3520 #if defined(CONFIG_USER_ONLY) 3521 GEN_PRIV; 3522 #else 3523 TCGv_i32 t; 3524 3525 CHK_HV; 3526 t = tcg_const_i32(PPC_PM_DOZE); 3527 gen_helper_pminsn(cpu_env, t); 3528 tcg_temp_free_i32(t); 3529 gen_stop_exception(ctx); 3530 #endif /* defined(CONFIG_USER_ONLY) */ 3531 } 3532 3533 static void gen_nap(DisasContext *ctx) 3534 { 3535 #if defined(CONFIG_USER_ONLY) 3536 GEN_PRIV; 3537 #else 3538 TCGv_i32 t; 3539 3540 CHK_HV; 3541 t = tcg_const_i32(PPC_PM_NAP); 3542 gen_helper_pminsn(cpu_env, t); 3543 tcg_temp_free_i32(t); 3544 gen_stop_exception(ctx); 3545 #endif /* defined(CONFIG_USER_ONLY) */ 3546 } 3547 3548 static void gen_stop(DisasContext *ctx) 3549 { 3550 gen_nap(ctx); 3551 } 3552 3553 static void gen_sleep(DisasContext *ctx) 3554 { 3555 #if defined(CONFIG_USER_ONLY) 3556 GEN_PRIV; 3557 #else 3558 TCGv_i32 t; 3559 3560 CHK_HV; 3561 t = tcg_const_i32(PPC_PM_SLEEP); 3562 gen_helper_pminsn(cpu_env, t); 3563 tcg_temp_free_i32(t); 3564 gen_stop_exception(ctx); 3565 #endif /* defined(CONFIG_USER_ONLY) */ 3566 } 3567 3568 static void gen_rvwinkle(DisasContext *ctx) 3569 { 3570 #if defined(CONFIG_USER_ONLY) 3571 GEN_PRIV; 3572 #else 3573 TCGv_i32 t; 3574 3575 CHK_HV; 3576 t = tcg_const_i32(PPC_PM_RVWINKLE); 3577 gen_helper_pminsn(cpu_env, t); 3578 tcg_temp_free_i32(t); 3579 gen_stop_exception(ctx); 3580 #endif /* defined(CONFIG_USER_ONLY) */ 3581 } 
3582 #endif /* #if defined(TARGET_PPC64) */ 3583 3584 static inline void gen_update_cfar(DisasContext *ctx, target_ulong nip) 3585 { 3586 #if defined(TARGET_PPC64) 3587 if (ctx->has_cfar) 3588 tcg_gen_movi_tl(cpu_cfar, nip); 3589 #endif 3590 } 3591 3592 static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest) 3593 { 3594 if (unlikely(ctx->singlestep_enabled)) { 3595 return false; 3596 } 3597 3598 #ifndef CONFIG_USER_ONLY 3599 return (ctx->base.tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK); 3600 #else 3601 return true; 3602 #endif 3603 } 3604 3605 /*** Branch ***/ 3606 static void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest) 3607 { 3608 if (NARROW_MODE(ctx)) { 3609 dest = (uint32_t) dest; 3610 } 3611 if (use_goto_tb(ctx, dest)) { 3612 tcg_gen_goto_tb(n); 3613 tcg_gen_movi_tl(cpu_nip, dest & ~3); 3614 tcg_gen_exit_tb(ctx->base.tb, n); 3615 } else { 3616 tcg_gen_movi_tl(cpu_nip, dest & ~3); 3617 if (unlikely(ctx->singlestep_enabled)) { 3618 if ((ctx->singlestep_enabled & 3619 (CPU_BRANCH_STEP | CPU_SINGLE_STEP)) && 3620 (ctx->exception == POWERPC_EXCP_BRANCH || 3621 ctx->exception == POWERPC_EXCP_TRACE)) { 3622 gen_exception_nip(ctx, POWERPC_EXCP_TRACE, dest); 3623 } 3624 if (ctx->singlestep_enabled & GDBSTUB_SINGLE_STEP) { 3625 gen_debug_exception(ctx); 3626 } 3627 } 3628 tcg_gen_lookup_and_goto_ptr(); 3629 } 3630 } 3631 3632 static inline void gen_setlr(DisasContext *ctx, target_ulong nip) 3633 { 3634 if (NARROW_MODE(ctx)) { 3635 nip = (uint32_t)nip; 3636 } 3637 tcg_gen_movi_tl(cpu_lr, nip); 3638 } 3639 3640 /* b ba bl bla */ 3641 static void gen_b(DisasContext *ctx) 3642 { 3643 target_ulong li, target; 3644 3645 ctx->exception = POWERPC_EXCP_BRANCH; 3646 /* sign extend LI */ 3647 li = LI(ctx->opcode); 3648 li = (li ^ 0x02000000) - 0x02000000; 3649 if (likely(AA(ctx->opcode) == 0)) { 3650 target = ctx->base.pc_next + li - 4; 3651 } else { 3652 target = li; 3653 } 3654 if (LK(ctx->opcode)) { 3655 gen_setlr(ctx, ctx->base.pc_next); 
3656 } 3657 gen_update_cfar(ctx, ctx->base.pc_next - 4); 3658 gen_goto_tb(ctx, 0, target); 3659 } 3660 3661 #define BCOND_IM 0 3662 #define BCOND_LR 1 3663 #define BCOND_CTR 2 3664 #define BCOND_TAR 3 3665 3666 static void gen_bcond(DisasContext *ctx, int type) 3667 { 3668 uint32_t bo = BO(ctx->opcode); 3669 TCGLabel *l1; 3670 TCGv target; 3671 3672 ctx->exception = POWERPC_EXCP_BRANCH; 3673 if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) { 3674 target = tcg_temp_local_new(); 3675 if (type == BCOND_CTR) 3676 tcg_gen_mov_tl(target, cpu_ctr); 3677 else if (type == BCOND_TAR) 3678 gen_load_spr(target, SPR_TAR); 3679 else 3680 tcg_gen_mov_tl(target, cpu_lr); 3681 } else { 3682 target = NULL; 3683 } 3684 if (LK(ctx->opcode)) 3685 gen_setlr(ctx, ctx->base.pc_next); 3686 l1 = gen_new_label(); 3687 if ((bo & 0x4) == 0) { 3688 /* Decrement and test CTR */ 3689 TCGv temp = tcg_temp_new(); 3690 if (unlikely(type == BCOND_CTR)) { 3691 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3692 return; 3693 } 3694 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1); 3695 if (NARROW_MODE(ctx)) { 3696 tcg_gen_ext32u_tl(temp, cpu_ctr); 3697 } else { 3698 tcg_gen_mov_tl(temp, cpu_ctr); 3699 } 3700 if (bo & 0x2) { 3701 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1); 3702 } else { 3703 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1); 3704 } 3705 tcg_temp_free(temp); 3706 } 3707 if ((bo & 0x10) == 0) { 3708 /* Test CR */ 3709 uint32_t bi = BI(ctx->opcode); 3710 uint32_t mask = 0x08 >> (bi & 0x03); 3711 TCGv_i32 temp = tcg_temp_new_i32(); 3712 3713 if (bo & 0x8) { 3714 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask); 3715 tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1); 3716 } else { 3717 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask); 3718 tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1); 3719 } 3720 tcg_temp_free_i32(temp); 3721 } 3722 gen_update_cfar(ctx, ctx->base.pc_next - 4); 3723 if (type == BCOND_IM) { 3724 target_ulong li = (target_long)((int16_t)(BD(ctx->opcode))); 3725 if 
(likely(AA(ctx->opcode) == 0)) { 3726 gen_goto_tb(ctx, 0, ctx->base.pc_next + li - 4); 3727 } else { 3728 gen_goto_tb(ctx, 0, li); 3729 } 3730 } else { 3731 if (NARROW_MODE(ctx)) { 3732 tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3); 3733 } else { 3734 tcg_gen_andi_tl(cpu_nip, target, ~3); 3735 } 3736 tcg_gen_lookup_and_goto_ptr(); 3737 tcg_temp_free(target); 3738 } 3739 if ((bo & 0x14) != 0x14) { 3740 gen_set_label(l1); 3741 gen_goto_tb(ctx, 1, ctx->base.pc_next); 3742 } 3743 } 3744 3745 static void gen_bc(DisasContext *ctx) 3746 { 3747 gen_bcond(ctx, BCOND_IM); 3748 } 3749 3750 static void gen_bcctr(DisasContext *ctx) 3751 { 3752 gen_bcond(ctx, BCOND_CTR); 3753 } 3754 3755 static void gen_bclr(DisasContext *ctx) 3756 { 3757 gen_bcond(ctx, BCOND_LR); 3758 } 3759 3760 static void gen_bctar(DisasContext *ctx) 3761 { 3762 gen_bcond(ctx, BCOND_TAR); 3763 } 3764 3765 /*** Condition register logical ***/ 3766 #define GEN_CRLOGIC(name, tcg_op, opc) \ 3767 static void glue(gen_, name)(DisasContext *ctx) \ 3768 { \ 3769 uint8_t bitmask; \ 3770 int sh; \ 3771 TCGv_i32 t0, t1; \ 3772 sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03); \ 3773 t0 = tcg_temp_new_i32(); \ 3774 if (sh > 0) \ 3775 tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh); \ 3776 else if (sh < 0) \ 3777 tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh); \ 3778 else \ 3779 tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]); \ 3780 t1 = tcg_temp_new_i32(); \ 3781 sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03); \ 3782 if (sh > 0) \ 3783 tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh); \ 3784 else if (sh < 0) \ 3785 tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh); \ 3786 else \ 3787 tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]); \ 3788 tcg_op(t0, t0, t1); \ 3789 bitmask = 0x08 >> (crbD(ctx->opcode) & 0x03); \ 3790 tcg_gen_andi_i32(t0, t0, bitmask); \ 3791 tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask); \ 3792 
tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1); \ 3793 tcg_temp_free_i32(t0); \ 3794 tcg_temp_free_i32(t1); \ 3795 } 3796 3797 /* crand */ 3798 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08); 3799 /* crandc */ 3800 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04); 3801 /* creqv */ 3802 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09); 3803 /* crnand */ 3804 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07); 3805 /* crnor */ 3806 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01); 3807 /* cror */ 3808 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E); 3809 /* crorc */ 3810 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D); 3811 /* crxor */ 3812 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06); 3813 3814 /* mcrf */ 3815 static void gen_mcrf(DisasContext *ctx) 3816 { 3817 tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]); 3818 } 3819 3820 /*** System linkage ***/ 3821 3822 /* rfi (supervisor only) */ 3823 static void gen_rfi(DisasContext *ctx) 3824 { 3825 #if defined(CONFIG_USER_ONLY) 3826 GEN_PRIV; 3827 #else 3828 /* This instruction doesn't exist anymore on 64-bit server 3829 * processors compliant with arch 2.x 3830 */ 3831 if (ctx->insns_flags & PPC_SEGMENT_64B) { 3832 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3833 return; 3834 } 3835 /* Restore CPU state */ 3836 CHK_SV; 3837 gen_update_cfar(ctx, ctx->base.pc_next - 4); 3838 gen_helper_rfi(cpu_env); 3839 gen_sync_exception(ctx); 3840 #endif 3841 } 3842 3843 #if defined(TARGET_PPC64) 3844 static void gen_rfid(DisasContext *ctx) 3845 { 3846 #if defined(CONFIG_USER_ONLY) 3847 GEN_PRIV; 3848 #else 3849 /* Restore CPU state */ 3850 CHK_SV; 3851 gen_update_cfar(ctx, ctx->base.pc_next - 4); 3852 gen_helper_rfid(cpu_env); 3853 gen_sync_exception(ctx); 3854 #endif 3855 } 3856 3857 static void gen_hrfid(DisasContext *ctx) 3858 { 3859 #if defined(CONFIG_USER_ONLY) 3860 GEN_PRIV; 3861 #else 3862 /* Restore CPU state */ 3863 CHK_HV; 3864 gen_helper_hrfid(cpu_env); 3865 gen_sync_exception(ctx); 3866 #endif 3867 } 3868 #endif 3869 3870 /* sc */ 
#if defined(CONFIG_USER_ONLY)
#define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
#else
#define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
#endif
static void gen_sc(DisasContext *ctx)
{
    uint32_t lev;

    /* LEV field selects the system-call level (e.g. hypervisor call). */
    lev = (ctx->opcode >> 5) & 0x7F;
    gen_exception_err(ctx, POWERPC_SYSCALL, lev);
}

/*** Trap ***/

/* Check for unconditional traps (always or never) */
static bool check_unconditional_trap(DisasContext *ctx)
{
    /* Trap never */
    if (TO(ctx->opcode) == 0) {
        return true;
    }
    /* Trap always */
    if (TO(ctx->opcode) == 31) {
        gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_TRAP);
        return true;
    }
    return false;
}

/* tw */
static void gen_tw(DisasContext *ctx)
{
    TCGv_i32 t0;

    if (check_unconditional_trap(ctx)) {
        return;
    }
    t0 = tcg_const_i32(TO(ctx->opcode));
    gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                  t0);
    tcg_temp_free_i32(t0);
}

/* twi */
static void gen_twi(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1;

    if (check_unconditional_trap(ctx)) {
        return;
    }
    t0 = tcg_const_tl(SIMM(ctx->opcode));
    t1 = tcg_const_i32(TO(ctx->opcode));
    gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}

#if defined(TARGET_PPC64)
/* td */
static void gen_td(DisasContext *ctx)
{
    TCGv_i32 t0;

    if (check_unconditional_trap(ctx)) {
        return;
    }
    t0 = tcg_const_i32(TO(ctx->opcode));
    gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                  t0);
    tcg_temp_free_i32(t0);
}

/* tdi */
static void gen_tdi(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1;

    if (check_unconditional_trap(ctx)) {
        return;
    }
    t0 = tcg_const_tl(SIMM(ctx->opcode));
    t1 = tcg_const_i32(TO(ctx->opcode));
    gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}
#endif

/*** Processor control ***/

/* Assemble the architected XER value from the split SO/OV/CA (and, for
 * ISA 3.00, OV32/CA32) flag variables into dst. */
static void gen_read_xer(DisasContext *ctx, TCGv dst)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    tcg_gen_mov_tl(dst, cpu_xer);
    tcg_gen_shli_tl(t0, cpu_so, XER_SO);
    tcg_gen_shli_tl(t1, cpu_ov, XER_OV);
    tcg_gen_shli_tl(t2, cpu_ca, XER_CA);
    tcg_gen_or_tl(t0, t0, t1);
    tcg_gen_or_tl(dst, dst, t2);
    tcg_gen_or_tl(dst, dst, t0);
    if (is_isa300(ctx)) {
        tcg_gen_shli_tl(t0, cpu_ov32, XER_OV32);
        tcg_gen_or_tl(dst, dst, t0);
        tcg_gen_shli_tl(t0, cpu_ca32, XER_CA32);
        tcg_gen_or_tl(dst, dst, t0);
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}

/* Scatter an architected XER value from src into the split flag
 * variables; the flag bits are masked out of cpu_xer itself. */
static void gen_write_xer(TCGv src)
{
    /* Write all flags, while reading back check for isa300 */
    tcg_gen_andi_tl(cpu_xer, src,
                    ~((1u << XER_SO) |
                      (1u << XER_OV) | (1u << XER_OV32) |
                      (1u << XER_CA) | (1u << XER_CA32)));
    tcg_gen_extract_tl(cpu_ov32, src, XER_OV32, 1);
    tcg_gen_extract_tl(cpu_ca32, src, XER_CA32, 1);
    tcg_gen_extract_tl(cpu_so, src, XER_SO, 1);
    tcg_gen_extract_tl(cpu_ov, src, XER_OV, 1);
    tcg_gen_extract_tl(cpu_ca, src, XER_CA, 1);
}

/* mcrxr */
static void gen_mcrxr(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();
    TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];

    /* Pack SO/OV/CA into CR field bits 3/2/1, then clear them in XER. */
    tcg_gen_trunc_tl_i32(t0, cpu_so);
    tcg_gen_trunc_tl_i32(t1, cpu_ov);
    tcg_gen_trunc_tl_i32(dst, cpu_ca);
    tcg_gen_shli_i32(t0, t0, 3);
    tcg_gen_shli_i32(t1, t1, 2);
    tcg_gen_shli_i32(dst, dst, 1);
    tcg_gen_or_i32(dst, dst, t0);
    tcg_gen_or_i32(dst, dst, t1);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);

    tcg_gen_movi_tl(cpu_so, 0);
    tcg_gen_movi_tl(cpu_ov, 0);
    tcg_gen_movi_tl(cpu_ca, 0);
}

#ifdef TARGET_PPC64
/* mcrxrx */
static void gen_mcrxrx(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];

    /* copy OV and OV32 */
    tcg_gen_shli_tl(t0, cpu_ov, 1);
    tcg_gen_or_tl(t0, t0, cpu_ov32);
    tcg_gen_shli_tl(t0, t0, 2);
    /* copy CA and CA32 */
    tcg_gen_shli_tl(t1, cpu_ca, 1);
    tcg_gen_or_tl(t1, t1, cpu_ca32);
    tcg_gen_or_tl(t0, t0, t1);
    tcg_gen_trunc_tl_i32(dst, t0);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
#endif

/* mfcr mfocrf */
static void gen_mfcr(DisasContext *ctx)
{
    uint32_t crm, crn;

    /* Bit 20 set selects the one-field mfocrf form. */
    if (likely(ctx->opcode & 0x00100000)) {
        crm = CRM(ctx->opcode);
        /* crm must be a power of two (exactly one CR field selected). */
        if (likely(crm && ((crm & (crm - 1)) == 0))) {
            crn = ctz32 (crm);
            tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]);
            tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)],
                            cpu_gpr[rD(ctx->opcode)], crn * 4);
        }
    } else {
        /* Full mfcr: concatenate all eight 4-bit CR fields. */
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_mov_i32(t0, cpu_crf[0]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[1]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[2]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[3]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[4]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[5]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[6]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[7]);
        tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
        tcg_temp_free_i32(t0);
    }
}

/* mfmsr */
static void gen_mfmsr(DisasContext *ctx)
{
    CHK_SV;
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr);
}

/* Placeholder callback for SPRs that must not be accessed at the current
 * privilege level; used as a sentinel value (see SPR_NOACCESS checks). */
static void spr_noaccess(DisasContext *ctx, int gprn, int sprn)
{
#if 0
    sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
    printf("ERROR: try to access SPR %d !\n", sprn);
#endif
}
#define SPR_NOACCESS (&spr_noaccess)

/* mfspr */
static inline void gen_op_mfspr(DisasContext *ctx)
{
    void (*read_cb)(DisasContext *ctx, int gprn, int sprn);
    uint32_t sprn = SPR(ctx->opcode);

    /* Select the read callback for the current privilege level. */
#if defined(CONFIG_USER_ONLY)
    read_cb = ctx->spr_cb[sprn].uea_read;
#else
    if (ctx->pr) {
        read_cb = ctx->spr_cb[sprn].uea_read;
    } else if (ctx->hv) {
        read_cb = ctx->spr_cb[sprn].hea_read;
    } else {
        read_cb = ctx->spr_cb[sprn].oea_read;
    }
#endif
    if (likely(read_cb != NULL)) {
        if (likely(read_cb != SPR_NOACCESS)) {
            (*read_cb)(ctx, rD(ctx->opcode), sprn);
        } else {
            /* Privilege exception */
            /* This is a hack to avoid warnings when running Linux:
             * this OS breaks the PowerPC virtualisation model,
             * allowing userland application to read the PVR
             */
            if (sprn != SPR_PVR) {
                qemu_log_mask(LOG_GUEST_ERROR, "Trying to read privileged spr "
                              "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
                              ctx->base.pc_next - 4);
            }
            gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
        }
    } else {
        /* ISA 2.07 defines these as no-ops */
        if ((ctx->insns_flags2 & PPC2_ISA207S) &&
            (sprn >= 808 && sprn <= 811)) {
            /* This is a nop */
            return;
        }
        /* Not defined */
        qemu_log_mask(LOG_GUEST_ERROR,
                      "Trying to read invalid spr %d (0x%03x) at "
                      TARGET_FMT_lx "\n", sprn, sprn, ctx->base.pc_next - 4);

        /* The behaviour depends on MSR:PR and SPR# bit 0x10,
         * it can generate a priv, a hv emu or a no-op
         */
        if (sprn & 0x10) {
            if (ctx->pr) {
                gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
            }
        } else {
            if (ctx->pr || sprn == 0 || sprn == 4 || sprn == 5 || sprn == 6) {
                gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
            }
        }
    }
}

static void gen_mfspr(DisasContext *ctx)
{
    gen_op_mfspr(ctx);
}

/* mftb */
static void gen_mftb(DisasContext *ctx)
{
    /* Time base is exposed through the SPR callback machinery. */
    gen_op_mfspr(ctx);
}

/* mtcrf mtocrf*/
static void gen_mtcrf(DisasContext *ctx)
{
    uint32_t crm, crn;

    crm = CRM(ctx->opcode);
    /* Bit 20 set selects the one-field mtocrf form. */
    if (likely((ctx->opcode & 0x00100000))) {
        if (crm && ((crm & (crm - 1)) == 0)) {
            TCGv_i32 temp = tcg_temp_new_i32();
            crn = ctz32 (crm);
            tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
            tcg_gen_shri_i32(temp, temp, crn * 4);
            tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf);
            tcg_temp_free_i32(temp);
        }
    } else {
        /* Full mtcrf: update every CR field selected by the mask. */
        TCGv_i32 temp = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
        for (crn = 0 ; crn < 8 ; crn++) {
            if (crm & (1 << crn)) {
                tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4);
                tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf);
            }
        }
        tcg_temp_free_i32(temp);
    }
}

/* mtmsr */
#if defined(TARGET_PPC64)
static void gen_mtmsrd(DisasContext *ctx)
{
    CHK_SV;

#if !defined(CONFIG_USER_ONLY)
    if (ctx->opcode & 0x00010000) {
        /* Special form that does not need any synchronisation */
        /* L=1: only MSR[RI] and MSR[EE] are updated, inline. */
        TCGv t0 = tcg_temp_new();
        tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE));
        tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE)));
        tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
        tcg_temp_free(t0);
    } else {
        /* XXX: we need to update nip before the store
         * if we enter power saving mode, we will exit the loop
         * directly from ppc_store_msr
         */
        gen_update_nip(ctx, ctx->base.pc_next);
        gen_helper_store_msr(cpu_env, cpu_gpr[rS(ctx->opcode)]);
        /* Must stop the translation as machine state (may have) changed */
        /* Note that mtmsr is not always defined as context-synchronizing */
        gen_stop_exception(ctx);
    }
#endif /* !defined(CONFIG_USER_ONLY) */
}
#endif /* defined(TARGET_PPC64) */

static void gen_mtmsr(DisasContext *ctx)
{
    CHK_SV;

#if !defined(CONFIG_USER_ONLY)
    if (ctx->opcode & 0x00010000) {
        /* Special form that does not need any synchronisation */
        /* L=1: only MSR[RI] and MSR[EE] are updated, inline. */
        TCGv t0 = tcg_temp_new();
        tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE));
        tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE)));
        tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
        tcg_temp_free(t0);
    } else {
        TCGv msr = tcg_temp_new();

        /* XXX: we need to update nip before the store
         * if we enter power saving mode, we will exit the loop
         * directly from ppc_store_msr
         */
        gen_update_nip(ctx, ctx->base.pc_next);
#if defined(TARGET_PPC64)
        /* 32-bit mtmsr on a 64-bit CPU only replaces the low 32 bits. */
        tcg_gen_deposit_tl(msr, cpu_msr, cpu_gpr[rS(ctx->opcode)], 0, 32);
#else
        tcg_gen_mov_tl(msr, cpu_gpr[rS(ctx->opcode)]);
#endif
        gen_helper_store_msr(cpu_env, msr);
        tcg_temp_free(msr);
        /* Must stop the translation as machine state (may have) changed */
        /* Note that mtmsr is not always defined as context-synchronizing */
        gen_stop_exception(ctx);
    }
#endif
}

/* mtspr */
static void gen_mtspr(DisasContext *ctx)
{
    void (*write_cb)(DisasContext *ctx, int sprn, int gprn);
    uint32_t sprn = SPR(ctx->opcode);

    /* Select the write callback for the current privilege level. */
#if defined(CONFIG_USER_ONLY)
    write_cb = ctx->spr_cb[sprn].uea_write;
#else
    if (ctx->pr) {
        write_cb = ctx->spr_cb[sprn].uea_write;
    } else if (ctx->hv) {
        write_cb = ctx->spr_cb[sprn].hea_write;
    } else {
        write_cb = ctx->spr_cb[sprn].oea_write;
    }
#endif
    if (likely(write_cb != NULL)) {
        if (likely(write_cb != SPR_NOACCESS)) {
            (*write_cb)(ctx, sprn, rS(ctx->opcode));
        } else {
            /* Privilege exception */
            qemu_log_mask(LOG_GUEST_ERROR, "Trying to write privileged spr "
                          "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
                          ctx->base.pc_next - 4);
            gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
        }
    } else {
        /* ISA 2.07 defines these as no-ops */
        if ((ctx->insns_flags2 & PPC2_ISA207S) &&
            (sprn >= 808 && sprn <= 811)) {
            /* This is a nop */
            return;
        }

        /* Not defined */
        qemu_log_mask(LOG_GUEST_ERROR,
                      "Trying to write invalid spr %d (0x%03x) at "
                      TARGET_FMT_lx "\n", sprn, sprn, ctx->base.pc_next - 4);


        /* The behaviour depends on MSR:PR and SPR# bit 0x10,
         * it can generate a priv, a hv emu or a no-op
         */
        if (sprn & 0x10) {
            if (ctx->pr) {
                gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
            }
        } else {
            if (ctx->pr || sprn == 0) {
                gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
            }
        }
    }
}

#if defined(TARGET_PPC64)
/* setb */
static void gen_setb(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t8 = tcg_temp_new_i32();
    TCGv_i32 tm1 = tcg_temp_new_i32();
    int crf = crfS(ctx->opcode);

    /* RT = -1 if CR[crf].LT, 1 if CR[crf].GT, else 0. */
    tcg_gen_setcondi_i32(TCG_COND_GEU, t0, cpu_crf[crf], 4);
    tcg_gen_movi_i32(t8, 8);
    tcg_gen_movi_i32(tm1, -1);
    tcg_gen_movcond_i32(TCG_COND_GEU, t0, cpu_crf[crf], t8, tm1, t0);
    tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);

    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t8);
    tcg_temp_free_i32(tm1);
}
#endif

/*** Cache management ***/

/* dcbf */
static void gen_dcbf(DisasContext *ctx)
{
    /* XXX: specification says this is treated as a load by the MMU */
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_CACHE);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    /* Dummy byte load just to trigger the MMU fault semantics. */
    gen_qemu_ld8u(ctx, t0, t0);
    tcg_temp_free(t0);
}

/* dcbi (Supervisor only) */
static void gen_dcbi(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv EA, val;

    CHK_SV;
    EA = tcg_temp_new();
    gen_set_access_type(ctx, ACCESS_CACHE);
    gen_addr_reg_index(ctx, EA);
    val = tcg_temp_new();
    /* XXX: specification says this should be treated as a store by the MMU */
    gen_qemu_ld8u(ctx, val, EA);
    gen_qemu_st8(ctx, val, EA);
    tcg_temp_free(val);
    tcg_temp_free(EA);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* dcdst */
static void gen_dcbst(DisasContext *ctx)
{
    /* XXX: specification say this is treated as a load by the MMU */
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_CACHE);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_qemu_ld8u(ctx, t0, t0);
    tcg_temp_free(t0);
}

/* dcbt */
static void gen_dcbt(DisasContext *ctx)
{
    /* interpreted as no-op */
    /* XXX: specification say this is treated as a load by the MMU
     *      but does not generate any exception
     */
}

/* dcbtst */
static void gen_dcbtst(DisasContext *ctx)
{
    /* interpreted as no-op */
    /* XXX: specification say this is treated as a load by the MMU
     *      but does not generate any exception
     */
}

/* dcbtls */
static void gen_dcbtls(DisasContext *ctx)
{
    /* Always fails locking the cache */
    TCGv t0 = tcg_temp_new();
    gen_load_spr(t0, SPR_Exxx_L1CSR0);
    tcg_gen_ori_tl(t0, t0, L1CSR0_CUL);
    gen_store_spr(SPR_Exxx_L1CSR0, t0);
    tcg_temp_free(t0);
}

/* dcbz */
static void gen_dcbz(DisasContext *ctx)
{
    TCGv tcgv_addr;
    TCGv_i32 tcgv_op;

    gen_set_access_type(ctx, ACCESS_CACHE);
    tcgv_addr = tcg_temp_new();
    /* The helper inspects opcode bits to pick the cache-line size. */
    tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000);
    gen_addr_reg_index(ctx, tcgv_addr);
    gen_helper_dcbz(cpu_env, tcgv_addr, tcgv_op);
    tcg_temp_free(tcgv_addr);
    tcg_temp_free_i32(tcgv_op);
}

/* dst / dstt */
static void gen_dst(DisasContext *ctx)
{
    if (rA(ctx->opcode) == 0) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
    } else {
        /* interpreted as no-op */
    }
}

/* dstst /dststt */
static void gen_dstst(DisasContext *ctx)
{
    if (rA(ctx->opcode) == 0) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
    } else {
        /* interpreted as no-op */
    }

}

/* dss / dssall */
static void gen_dss(DisasContext *ctx)
{
    /* interpreted as no-op */
}

/* icbi */
static void gen_icbi(DisasContext *ctx)
{
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_CACHE);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_icbi(cpu_env, t0);
    tcg_temp_free(t0);
}

/* Optional: */
/* dcba */
static void gen_dcba(DisasContext *ctx)
{
    /* interpreted as no-op */
    /* XXX: specification say this is treated as a store by the MMU
     *      but does not generate any exception
     */
}

/*** Segment register manipulation ***/
/* Supervisor only: */

/* mfsr */
static void gen_mfsr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_const_tl(SR(ctx->opcode));
    gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mfsrin */
static void gen_mfsrin(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    /* SR number comes from rB bits 28..31. */
    tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
    gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mtsr */
static void gen_mtsr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_const_tl(SR(ctx->opcode));
    gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mtsrin */
static void gen_mtsrin(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;
    CHK_SV;

    t0 = tcg_temp_new();
    tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
    /* NOTE(review): uses rD where the _64b variant below uses rS — the rS
     * and rD macros decode the same instruction field, so this is likely
     * equivalent; confirm against the decoder macros. */
    gen_helper_store_sr(cpu_env, t0, cpu_gpr[rD(ctx->opcode)]);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

#if defined(TARGET_PPC64)
/* Specific implementation for PowerPC 64 "bridge" emulation using SLB */

/* mfsr */
static void gen_mfsr_64b(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_const_tl(SR(ctx->opcode));
    gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mfsrin */
static void gen_mfsrin_64b(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
    gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mtsr */
static void gen_mtsr_64b(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_const_tl(SR(ctx->opcode));
    gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mtsrin */
static void gen_mtsrin_64b(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    /* segment register number is bits 28:31 of rB */
    tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
    gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* slbmte — SLB move to entry (ESID in rB, VSID data in rS) */
static void gen_slbmte(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    gen_helper_store_slb(cpu_env, cpu_gpr[rB(ctx->opcode)],
                         cpu_gpr[rS(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* slbmfee — SLB move from entry ESID */
static void gen_slbmfee(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    gen_helper_load_slb_esid(cpu_gpr[rS(ctx->opcode)], cpu_env,
                             cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* slbmfev — SLB move from entry VSID */
static void gen_slbmfev(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    gen_helper_load_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env,
                             cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* slbfee. — SLB find entry ESID; sets CR0, rS = VSID or 0 on miss */
static void gen_slbfee_(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    TCGLabel *l1, *l2;

    if (unlikely(ctx->pr)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    gen_helper_find_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env,
                             cpu_gpr[rB(ctx->opcode)]);
    l1 = gen_new_label();
    l2 = gen_new_label();
    /* CR0 starts as SO; EQ is set when the helper found an entry
     * (helper returns -1 on miss). */
    tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rS(ctx->opcode)], -1, l1);
    tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_gpr[rS(ctx->opcode)], 0);
    gen_set_label(l2);
#endif
}
#endif /* defined(TARGET_PPC64) */

/*** Lookaside buffer management ***/
/* Optional & supervisor only: */

/* tlbia — TLB invalidate all */
static void gen_tlbia(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_HV;

    gen_helper_tlbia(cpu_env);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbiel — TLB invalidate entry local */
static void gen_tlbiel(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbie — TLB invalidate entry (global) */
static void gen_tlbie(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv_i32 t1;

    if (ctx->gtse) {
        CHK_SV; /* If gtse is set then tlbie is supervisor privileged */
    } else {
        CHK_HV; /* Else hypervisor privileged */
    }

    if (NARROW_MODE(ctx)) {
        /* 32-bit mode: only the low 32 bits of rB form the EA */
        TCGv t0 = tcg_temp_new();
        tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
        gen_helper_tlbie(cpu_env, t0);
        tcg_temp_free(t0);
    } else {
        gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
    }
    /* Record that a global flush is pending until the next tlbsync */
    t1 = tcg_temp_new_i32();
    tcg_gen_ld_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
    tcg_gen_ori_i32(t1, t1, TLB_NEED_GLOBAL_FLUSH);
    tcg_gen_st_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
    tcg_temp_free_i32(t1);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbsync */
static void gen_tlbsync(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else

    if (ctx->gtse) {
        CHK_SV; /* If gtse is set then tlbsync is supervisor privileged */
    } else {
        CHK_HV; /* Else hypervisor privileged */
    }

    /* BookS does both ptesync and tlbsync make tlbsync a nop for server */
    if (ctx->insns_flags & PPC_BOOKE) {
gen_check_tlb_flush(ctx, true);
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

#if defined(TARGET_PPC64)
/* slbia — SLB invalidate all */
static void gen_slbia(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    gen_helper_slbia(cpu_env);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* slbie — SLB invalidate entry */
static void gen_slbie(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    gen_helper_slbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* slbieg — SLB invalidate entry global */
static void gen_slbieg(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    gen_helper_slbieg(cpu_env, cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* slbsync — completes preceding SLB invalidations */
static void gen_slbsync(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_check_tlb_flush(ctx, true);
#endif /* defined(CONFIG_USER_ONLY) */
}

#endif /* defined(TARGET_PPC64) */

/*** External control ***/
/* Optional: */

/* eciwx — external control in word indexed */
static void gen_eciwx(DisasContext *ctx)
{
    TCGv t0;
    /* Should check EAR[E] ! */
    gen_set_access_type(ctx, ACCESS_EXT);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    /* aligned 32-bit load from EA = (rA|0) + rB */
    tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
                       DEF_MEMOP(MO_UL | MO_ALIGN));
    tcg_temp_free(t0);
}

/* ecowx — external control out word indexed */
static void gen_ecowx(DisasContext *ctx)
{
    TCGv t0;
    /* Should check EAR[E] ! */
    gen_set_access_type(ctx, ACCESS_EXT);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    /* aligned 32-bit store; rD and rS decode the same opcode field */
    tcg_gen_qemu_st_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
                       DEF_MEMOP(MO_UL | MO_ALIGN));
    tcg_temp_free(t0);
}

/* PowerPC 601 specific instructions */

/* abs - abs. */
static void gen_abs(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    /* rD = (rA >= 0) ? rA : -rA */
    tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l1);
    tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    gen_set_label(l2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* abso - abso. */
static void gen_abso(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    TCGLabel *l3 = gen_new_label();
    /* Start with XER OV disabled, the most likely case */
    tcg_gen_movi_tl(cpu_ov, 0);
    tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l2);
    /* 0x80000000 has no positive counterpart: set OV/SO, keep value */
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[rA(ctx->opcode)], 0x80000000, l1);
    tcg_gen_movi_tl(cpu_ov, 1);
    tcg_gen_movi_tl(cpu_so, 1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_br(l3);
    gen_set_label(l2);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    gen_set_label(l3);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* clcs — cache line compute size */
static void gen_clcs(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_const_i32(rA(ctx->opcode));
    gen_helper_clcs(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free_i32(t0);
    /* Rc=1 sets CR0 to an undefined state */
}

/* div - div.
*/ 4875 static void gen_div(DisasContext *ctx) 4876 { 4877 gen_helper_div(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 4878 cpu_gpr[rB(ctx->opcode)]); 4879 if (unlikely(Rc(ctx->opcode) != 0)) 4880 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4881 } 4882 4883 /* divo - divo. */ 4884 static void gen_divo(DisasContext *ctx) 4885 { 4886 gen_helper_divo(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 4887 cpu_gpr[rB(ctx->opcode)]); 4888 if (unlikely(Rc(ctx->opcode) != 0)) 4889 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4890 } 4891 4892 /* divs - divs. */ 4893 static void gen_divs(DisasContext *ctx) 4894 { 4895 gen_helper_divs(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 4896 cpu_gpr[rB(ctx->opcode)]); 4897 if (unlikely(Rc(ctx->opcode) != 0)) 4898 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4899 } 4900 4901 /* divso - divso. */ 4902 static void gen_divso(DisasContext *ctx) 4903 { 4904 gen_helper_divso(cpu_gpr[rD(ctx->opcode)], cpu_env, 4905 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 4906 if (unlikely(Rc(ctx->opcode) != 0)) 4907 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4908 } 4909 4910 /* doz - doz. */ 4911 static void gen_doz(DisasContext *ctx) 4912 { 4913 TCGLabel *l1 = gen_new_label(); 4914 TCGLabel *l2 = gen_new_label(); 4915 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1); 4916 tcg_gen_sub_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4917 tcg_gen_br(l2); 4918 gen_set_label(l1); 4919 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 4920 gen_set_label(l2); 4921 if (unlikely(Rc(ctx->opcode) != 0)) 4922 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4923 } 4924 4925 /* dozo - dozo. 
*/ 4926 static void gen_dozo(DisasContext *ctx) 4927 { 4928 TCGLabel *l1 = gen_new_label(); 4929 TCGLabel *l2 = gen_new_label(); 4930 TCGv t0 = tcg_temp_new(); 4931 TCGv t1 = tcg_temp_new(); 4932 TCGv t2 = tcg_temp_new(); 4933 /* Start with XER OV disabled, the most likely case */ 4934 tcg_gen_movi_tl(cpu_ov, 0); 4935 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1); 4936 tcg_gen_sub_tl(t0, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4937 tcg_gen_xor_tl(t1, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4938 tcg_gen_xor_tl(t2, cpu_gpr[rA(ctx->opcode)], t0); 4939 tcg_gen_andc_tl(t1, t1, t2); 4940 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0); 4941 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2); 4942 tcg_gen_movi_tl(cpu_ov, 1); 4943 tcg_gen_movi_tl(cpu_so, 1); 4944 tcg_gen_br(l2); 4945 gen_set_label(l1); 4946 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 4947 gen_set_label(l2); 4948 tcg_temp_free(t0); 4949 tcg_temp_free(t1); 4950 tcg_temp_free(t2); 4951 if (unlikely(Rc(ctx->opcode) != 0)) 4952 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4953 } 4954 4955 /* dozi */ 4956 static void gen_dozi(DisasContext *ctx) 4957 { 4958 target_long simm = SIMM(ctx->opcode); 4959 TCGLabel *l1 = gen_new_label(); 4960 TCGLabel *l2 = gen_new_label(); 4961 tcg_gen_brcondi_tl(TCG_COND_LT, cpu_gpr[rA(ctx->opcode)], simm, l1); 4962 tcg_gen_subfi_tl(cpu_gpr[rD(ctx->opcode)], simm, cpu_gpr[rA(ctx->opcode)]); 4963 tcg_gen_br(l2); 4964 gen_set_label(l1); 4965 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 4966 gen_set_label(l2); 4967 if (unlikely(Rc(ctx->opcode) != 0)) 4968 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4969 } 4970 4971 /* lscbx - lscbx. 
*/ 4972 static void gen_lscbx(DisasContext *ctx) 4973 { 4974 TCGv t0 = tcg_temp_new(); 4975 TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode)); 4976 TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode)); 4977 TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode)); 4978 4979 gen_addr_reg_index(ctx, t0); 4980 gen_helper_lscbx(t0, cpu_env, t0, t1, t2, t3); 4981 tcg_temp_free_i32(t1); 4982 tcg_temp_free_i32(t2); 4983 tcg_temp_free_i32(t3); 4984 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F); 4985 tcg_gen_or_tl(cpu_xer, cpu_xer, t0); 4986 if (unlikely(Rc(ctx->opcode) != 0)) 4987 gen_set_Rc0(ctx, t0); 4988 tcg_temp_free(t0); 4989 } 4990 4991 /* maskg - maskg. */ 4992 static void gen_maskg(DisasContext *ctx) 4993 { 4994 TCGLabel *l1 = gen_new_label(); 4995 TCGv t0 = tcg_temp_new(); 4996 TCGv t1 = tcg_temp_new(); 4997 TCGv t2 = tcg_temp_new(); 4998 TCGv t3 = tcg_temp_new(); 4999 tcg_gen_movi_tl(t3, 0xFFFFFFFF); 5000 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5001 tcg_gen_andi_tl(t1, cpu_gpr[rS(ctx->opcode)], 0x1F); 5002 tcg_gen_addi_tl(t2, t0, 1); 5003 tcg_gen_shr_tl(t2, t3, t2); 5004 tcg_gen_shr_tl(t3, t3, t1); 5005 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], t2, t3); 5006 tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1); 5007 tcg_gen_neg_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5008 gen_set_label(l1); 5009 tcg_temp_free(t0); 5010 tcg_temp_free(t1); 5011 tcg_temp_free(t2); 5012 tcg_temp_free(t3); 5013 if (unlikely(Rc(ctx->opcode) != 0)) 5014 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5015 } 5016 5017 /* maskir - maskir. 
*/ 5018 static void gen_maskir(DisasContext *ctx) 5019 { 5020 TCGv t0 = tcg_temp_new(); 5021 TCGv t1 = tcg_temp_new(); 5022 tcg_gen_and_tl(t0, cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 5023 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 5024 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5025 tcg_temp_free(t0); 5026 tcg_temp_free(t1); 5027 if (unlikely(Rc(ctx->opcode) != 0)) 5028 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5029 } 5030 5031 /* mul - mul. */ 5032 static void gen_mul(DisasContext *ctx) 5033 { 5034 TCGv_i64 t0 = tcg_temp_new_i64(); 5035 TCGv_i64 t1 = tcg_temp_new_i64(); 5036 TCGv t2 = tcg_temp_new(); 5037 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]); 5038 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]); 5039 tcg_gen_mul_i64(t0, t0, t1); 5040 tcg_gen_trunc_i64_tl(t2, t0); 5041 gen_store_spr(SPR_MQ, t2); 5042 tcg_gen_shri_i64(t1, t0, 32); 5043 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1); 5044 tcg_temp_free_i64(t0); 5045 tcg_temp_free_i64(t1); 5046 tcg_temp_free(t2); 5047 if (unlikely(Rc(ctx->opcode) != 0)) 5048 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5049 } 5050 5051 /* mulo - mulo. 
*/ 5052 static void gen_mulo(DisasContext *ctx) 5053 { 5054 TCGLabel *l1 = gen_new_label(); 5055 TCGv_i64 t0 = tcg_temp_new_i64(); 5056 TCGv_i64 t1 = tcg_temp_new_i64(); 5057 TCGv t2 = tcg_temp_new(); 5058 /* Start with XER OV disabled, the most likely case */ 5059 tcg_gen_movi_tl(cpu_ov, 0); 5060 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]); 5061 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]); 5062 tcg_gen_mul_i64(t0, t0, t1); 5063 tcg_gen_trunc_i64_tl(t2, t0); 5064 gen_store_spr(SPR_MQ, t2); 5065 tcg_gen_shri_i64(t1, t0, 32); 5066 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1); 5067 tcg_gen_ext32s_i64(t1, t0); 5068 tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1); 5069 tcg_gen_movi_tl(cpu_ov, 1); 5070 tcg_gen_movi_tl(cpu_so, 1); 5071 gen_set_label(l1); 5072 tcg_temp_free_i64(t0); 5073 tcg_temp_free_i64(t1); 5074 tcg_temp_free(t2); 5075 if (unlikely(Rc(ctx->opcode) != 0)) 5076 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5077 } 5078 5079 /* nabs - nabs. */ 5080 static void gen_nabs(DisasContext *ctx) 5081 { 5082 TCGLabel *l1 = gen_new_label(); 5083 TCGLabel *l2 = gen_new_label(); 5084 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1); 5085 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5086 tcg_gen_br(l2); 5087 gen_set_label(l1); 5088 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5089 gen_set_label(l2); 5090 if (unlikely(Rc(ctx->opcode) != 0)) 5091 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5092 } 5093 5094 /* nabso - nabso. 
*/ 5095 static void gen_nabso(DisasContext *ctx) 5096 { 5097 TCGLabel *l1 = gen_new_label(); 5098 TCGLabel *l2 = gen_new_label(); 5099 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1); 5100 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5101 tcg_gen_br(l2); 5102 gen_set_label(l1); 5103 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5104 gen_set_label(l2); 5105 /* nabs never overflows */ 5106 tcg_gen_movi_tl(cpu_ov, 0); 5107 if (unlikely(Rc(ctx->opcode) != 0)) 5108 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5109 } 5110 5111 /* rlmi - rlmi. */ 5112 static void gen_rlmi(DisasContext *ctx) 5113 { 5114 uint32_t mb = MB(ctx->opcode); 5115 uint32_t me = ME(ctx->opcode); 5116 TCGv t0 = tcg_temp_new(); 5117 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5118 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 5119 tcg_gen_andi_tl(t0, t0, MASK(mb, me)); 5120 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~MASK(mb, me)); 5121 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], t0); 5122 tcg_temp_free(t0); 5123 if (unlikely(Rc(ctx->opcode) != 0)) 5124 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5125 } 5126 5127 /* rrib - rrib. */ 5128 static void gen_rrib(DisasContext *ctx) 5129 { 5130 TCGv t0 = tcg_temp_new(); 5131 TCGv t1 = tcg_temp_new(); 5132 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5133 tcg_gen_movi_tl(t1, 0x80000000); 5134 tcg_gen_shr_tl(t1, t1, t0); 5135 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 5136 tcg_gen_and_tl(t0, t0, t1); 5137 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], t1); 5138 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5139 tcg_temp_free(t0); 5140 tcg_temp_free(t1); 5141 if (unlikely(Rc(ctx->opcode) != 0)) 5142 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5143 } 5144 5145 /* sle - sle. 
*/ 5146 static void gen_sle(DisasContext *ctx) 5147 { 5148 TCGv t0 = tcg_temp_new(); 5149 TCGv t1 = tcg_temp_new(); 5150 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5151 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5152 tcg_gen_subfi_tl(t1, 32, t1); 5153 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 5154 tcg_gen_or_tl(t1, t0, t1); 5155 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5156 gen_store_spr(SPR_MQ, t1); 5157 tcg_temp_free(t0); 5158 tcg_temp_free(t1); 5159 if (unlikely(Rc(ctx->opcode) != 0)) 5160 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5161 } 5162 5163 /* sleq - sleq. */ 5164 static void gen_sleq(DisasContext *ctx) 5165 { 5166 TCGv t0 = tcg_temp_new(); 5167 TCGv t1 = tcg_temp_new(); 5168 TCGv t2 = tcg_temp_new(); 5169 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5170 tcg_gen_movi_tl(t2, 0xFFFFFFFF); 5171 tcg_gen_shl_tl(t2, t2, t0); 5172 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 5173 gen_load_spr(t1, SPR_MQ); 5174 gen_store_spr(SPR_MQ, t0); 5175 tcg_gen_and_tl(t0, t0, t2); 5176 tcg_gen_andc_tl(t1, t1, t2); 5177 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5178 tcg_temp_free(t0); 5179 tcg_temp_free(t1); 5180 tcg_temp_free(t2); 5181 if (unlikely(Rc(ctx->opcode) != 0)) 5182 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5183 } 5184 5185 /* sliq - sliq. */ 5186 static void gen_sliq(DisasContext *ctx) 5187 { 5188 int sh = SH(ctx->opcode); 5189 TCGv t0 = tcg_temp_new(); 5190 TCGv t1 = tcg_temp_new(); 5191 tcg_gen_shli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5192 tcg_gen_shri_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 5193 tcg_gen_or_tl(t1, t0, t1); 5194 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5195 gen_store_spr(SPR_MQ, t1); 5196 tcg_temp_free(t0); 5197 tcg_temp_free(t1); 5198 if (unlikely(Rc(ctx->opcode) != 0)) 5199 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5200 } 5201 5202 /* slliq - slliq. 
*/ 5203 static void gen_slliq(DisasContext *ctx) 5204 { 5205 int sh = SH(ctx->opcode); 5206 TCGv t0 = tcg_temp_new(); 5207 TCGv t1 = tcg_temp_new(); 5208 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5209 gen_load_spr(t1, SPR_MQ); 5210 gen_store_spr(SPR_MQ, t0); 5211 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU << sh)); 5212 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU << sh)); 5213 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5214 tcg_temp_free(t0); 5215 tcg_temp_free(t1); 5216 if (unlikely(Rc(ctx->opcode) != 0)) 5217 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5218 } 5219 5220 /* sllq - sllq. */ 5221 static void gen_sllq(DisasContext *ctx) 5222 { 5223 TCGLabel *l1 = gen_new_label(); 5224 TCGLabel *l2 = gen_new_label(); 5225 TCGv t0 = tcg_temp_local_new(); 5226 TCGv t1 = tcg_temp_local_new(); 5227 TCGv t2 = tcg_temp_local_new(); 5228 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 5229 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 5230 tcg_gen_shl_tl(t1, t1, t2); 5231 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 5232 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 5233 gen_load_spr(t0, SPR_MQ); 5234 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5235 tcg_gen_br(l2); 5236 gen_set_label(l1); 5237 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 5238 gen_load_spr(t2, SPR_MQ); 5239 tcg_gen_andc_tl(t1, t2, t1); 5240 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5241 gen_set_label(l2); 5242 tcg_temp_free(t0); 5243 tcg_temp_free(t1); 5244 tcg_temp_free(t2); 5245 if (unlikely(Rc(ctx->opcode) != 0)) 5246 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5247 } 5248 5249 /* slq - slq. 
*/ 5250 static void gen_slq(DisasContext *ctx) 5251 { 5252 TCGLabel *l1 = gen_new_label(); 5253 TCGv t0 = tcg_temp_new(); 5254 TCGv t1 = tcg_temp_new(); 5255 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5256 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5257 tcg_gen_subfi_tl(t1, 32, t1); 5258 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 5259 tcg_gen_or_tl(t1, t0, t1); 5260 gen_store_spr(SPR_MQ, t1); 5261 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20); 5262 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5263 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1); 5264 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 5265 gen_set_label(l1); 5266 tcg_temp_free(t0); 5267 tcg_temp_free(t1); 5268 if (unlikely(Rc(ctx->opcode) != 0)) 5269 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5270 } 5271 5272 /* sraiq - sraiq. */ 5273 static void gen_sraiq(DisasContext *ctx) 5274 { 5275 int sh = SH(ctx->opcode); 5276 TCGLabel *l1 = gen_new_label(); 5277 TCGv t0 = tcg_temp_new(); 5278 TCGv t1 = tcg_temp_new(); 5279 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5280 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 5281 tcg_gen_or_tl(t0, t0, t1); 5282 gen_store_spr(SPR_MQ, t0); 5283 tcg_gen_movi_tl(cpu_ca, 0); 5284 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1); 5285 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1); 5286 tcg_gen_movi_tl(cpu_ca, 1); 5287 gen_set_label(l1); 5288 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh); 5289 tcg_temp_free(t0); 5290 tcg_temp_free(t1); 5291 if (unlikely(Rc(ctx->opcode) != 0)) 5292 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5293 } 5294 5295 /* sraq - sraq. 
*/ 5296 static void gen_sraq(DisasContext *ctx) 5297 { 5298 TCGLabel *l1 = gen_new_label(); 5299 TCGLabel *l2 = gen_new_label(); 5300 TCGv t0 = tcg_temp_new(); 5301 TCGv t1 = tcg_temp_local_new(); 5302 TCGv t2 = tcg_temp_local_new(); 5303 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 5304 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 5305 tcg_gen_sar_tl(t1, cpu_gpr[rS(ctx->opcode)], t2); 5306 tcg_gen_subfi_tl(t2, 32, t2); 5307 tcg_gen_shl_tl(t2, cpu_gpr[rS(ctx->opcode)], t2); 5308 tcg_gen_or_tl(t0, t0, t2); 5309 gen_store_spr(SPR_MQ, t0); 5310 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 5311 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l1); 5312 tcg_gen_mov_tl(t2, cpu_gpr[rS(ctx->opcode)]); 5313 tcg_gen_sari_tl(t1, cpu_gpr[rS(ctx->opcode)], 31); 5314 gen_set_label(l1); 5315 tcg_temp_free(t0); 5316 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t1); 5317 tcg_gen_movi_tl(cpu_ca, 0); 5318 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2); 5319 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l2); 5320 tcg_gen_movi_tl(cpu_ca, 1); 5321 gen_set_label(l2); 5322 tcg_temp_free(t1); 5323 tcg_temp_free(t2); 5324 if (unlikely(Rc(ctx->opcode) != 0)) 5325 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5326 } 5327 5328 /* sre - sre. */ 5329 static void gen_sre(DisasContext *ctx) 5330 { 5331 TCGv t0 = tcg_temp_new(); 5332 TCGv t1 = tcg_temp_new(); 5333 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5334 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5335 tcg_gen_subfi_tl(t1, 32, t1); 5336 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 5337 tcg_gen_or_tl(t1, t0, t1); 5338 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5339 gen_store_spr(SPR_MQ, t1); 5340 tcg_temp_free(t0); 5341 tcg_temp_free(t1); 5342 if (unlikely(Rc(ctx->opcode) != 0)) 5343 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5344 } 5345 5346 /* srea - srea. 
*/ 5347 static void gen_srea(DisasContext *ctx) 5348 { 5349 TCGv t0 = tcg_temp_new(); 5350 TCGv t1 = tcg_temp_new(); 5351 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5352 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5353 gen_store_spr(SPR_MQ, t0); 5354 tcg_gen_sar_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t1); 5355 tcg_temp_free(t0); 5356 tcg_temp_free(t1); 5357 if (unlikely(Rc(ctx->opcode) != 0)) 5358 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5359 } 5360 5361 /* sreq */ 5362 static void gen_sreq(DisasContext *ctx) 5363 { 5364 TCGv t0 = tcg_temp_new(); 5365 TCGv t1 = tcg_temp_new(); 5366 TCGv t2 = tcg_temp_new(); 5367 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5368 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 5369 tcg_gen_shr_tl(t1, t1, t0); 5370 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 5371 gen_load_spr(t2, SPR_MQ); 5372 gen_store_spr(SPR_MQ, t0); 5373 tcg_gen_and_tl(t0, t0, t1); 5374 tcg_gen_andc_tl(t2, t2, t1); 5375 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t2); 5376 tcg_temp_free(t0); 5377 tcg_temp_free(t1); 5378 tcg_temp_free(t2); 5379 if (unlikely(Rc(ctx->opcode) != 0)) 5380 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5381 } 5382 5383 /* sriq */ 5384 static void gen_sriq(DisasContext *ctx) 5385 { 5386 int sh = SH(ctx->opcode); 5387 TCGv t0 = tcg_temp_new(); 5388 TCGv t1 = tcg_temp_new(); 5389 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5390 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 5391 tcg_gen_or_tl(t1, t0, t1); 5392 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5393 gen_store_spr(SPR_MQ, t1); 5394 tcg_temp_free(t0); 5395 tcg_temp_free(t1); 5396 if (unlikely(Rc(ctx->opcode) != 0)) 5397 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5398 } 5399 5400 /* srliq */ 5401 static void gen_srliq(DisasContext *ctx) 5402 { 5403 int sh = SH(ctx->opcode); 5404 TCGv t0 = tcg_temp_new(); 5405 TCGv t1 = tcg_temp_new(); 5406 tcg_gen_rotri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5407 gen_load_spr(t1, SPR_MQ); 5408 
gen_store_spr(SPR_MQ, t0); 5409 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU >> sh)); 5410 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU >> sh)); 5411 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5412 tcg_temp_free(t0); 5413 tcg_temp_free(t1); 5414 if (unlikely(Rc(ctx->opcode) != 0)) 5415 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5416 } 5417 5418 /* srlq */ 5419 static void gen_srlq(DisasContext *ctx) 5420 { 5421 TCGLabel *l1 = gen_new_label(); 5422 TCGLabel *l2 = gen_new_label(); 5423 TCGv t0 = tcg_temp_local_new(); 5424 TCGv t1 = tcg_temp_local_new(); 5425 TCGv t2 = tcg_temp_local_new(); 5426 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 5427 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 5428 tcg_gen_shr_tl(t2, t1, t2); 5429 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 5430 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 5431 gen_load_spr(t0, SPR_MQ); 5432 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t2); 5433 tcg_gen_br(l2); 5434 gen_set_label(l1); 5435 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 5436 tcg_gen_and_tl(t0, t0, t2); 5437 gen_load_spr(t1, SPR_MQ); 5438 tcg_gen_andc_tl(t1, t1, t2); 5439 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5440 gen_set_label(l2); 5441 tcg_temp_free(t0); 5442 tcg_temp_free(t1); 5443 tcg_temp_free(t2); 5444 if (unlikely(Rc(ctx->opcode) != 0)) 5445 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5446 } 5447 5448 /* srq */ 5449 static void gen_srq(DisasContext *ctx) 5450 { 5451 TCGLabel *l1 = gen_new_label(); 5452 TCGv t0 = tcg_temp_new(); 5453 TCGv t1 = tcg_temp_new(); 5454 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5455 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5456 tcg_gen_subfi_tl(t1, 32, t1); 5457 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 5458 tcg_gen_or_tl(t1, t0, t1); 5459 gen_store_spr(SPR_MQ, t1); 5460 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20); 5461 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5462 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 5463 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 
5464 gen_set_label(l1); 5465 tcg_temp_free(t0); 5466 tcg_temp_free(t1); 5467 if (unlikely(Rc(ctx->opcode) != 0)) 5468 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5469 } 5470 5471 /* PowerPC 602 specific instructions */ 5472 5473 /* dsa */ 5474 static void gen_dsa(DisasContext *ctx) 5475 { 5476 /* XXX: TODO */ 5477 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5478 } 5479 5480 /* esa */ 5481 static void gen_esa(DisasContext *ctx) 5482 { 5483 /* XXX: TODO */ 5484 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5485 } 5486 5487 /* mfrom */ 5488 static void gen_mfrom(DisasContext *ctx) 5489 { 5490 #if defined(CONFIG_USER_ONLY) 5491 GEN_PRIV; 5492 #else 5493 CHK_SV; 5494 gen_helper_602_mfrom(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5495 #endif /* defined(CONFIG_USER_ONLY) */ 5496 } 5497 5498 /* 602 - 603 - G2 TLB management */ 5499 5500 /* tlbld */ 5501 static void gen_tlbld_6xx(DisasContext *ctx) 5502 { 5503 #if defined(CONFIG_USER_ONLY) 5504 GEN_PRIV; 5505 #else 5506 CHK_SV; 5507 gen_helper_6xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5508 #endif /* defined(CONFIG_USER_ONLY) */ 5509 } 5510 5511 /* tlbli */ 5512 static void gen_tlbli_6xx(DisasContext *ctx) 5513 { 5514 #if defined(CONFIG_USER_ONLY) 5515 GEN_PRIV; 5516 #else 5517 CHK_SV; 5518 gen_helper_6xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5519 #endif /* defined(CONFIG_USER_ONLY) */ 5520 } 5521 5522 /* 74xx TLB management */ 5523 5524 /* tlbld */ 5525 static void gen_tlbld_74xx(DisasContext *ctx) 5526 { 5527 #if defined(CONFIG_USER_ONLY) 5528 GEN_PRIV; 5529 #else 5530 CHK_SV; 5531 gen_helper_74xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5532 #endif /* defined(CONFIG_USER_ONLY) */ 5533 } 5534 5535 /* tlbli */ 5536 static void gen_tlbli_74xx(DisasContext *ctx) 5537 { 5538 #if defined(CONFIG_USER_ONLY) 5539 GEN_PRIV; 5540 #else 5541 CHK_SV; 5542 gen_helper_74xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5543 #endif /* defined(CONFIG_USER_ONLY) */ 5544 } 5545 5546 /* POWER instructions not in 
PowerPC 601 */ 5547 5548 /* clf */ 5549 static void gen_clf(DisasContext *ctx) 5550 { 5551 /* Cache line flush: implemented as no-op */ 5552 } 5553 5554 /* cli */ 5555 static void gen_cli(DisasContext *ctx) 5556 { 5557 #if defined(CONFIG_USER_ONLY) 5558 GEN_PRIV; 5559 #else 5560 /* Cache line invalidate: privileged and treated as no-op */ 5561 CHK_SV; 5562 #endif /* defined(CONFIG_USER_ONLY) */ 5563 } 5564 5565 /* dclst */ 5566 static void gen_dclst(DisasContext *ctx) 5567 { 5568 /* Data cache line store: treated as no-op */ 5569 } 5570 5571 static void gen_mfsri(DisasContext *ctx) 5572 { 5573 #if defined(CONFIG_USER_ONLY) 5574 GEN_PRIV; 5575 #else 5576 int ra = rA(ctx->opcode); 5577 int rd = rD(ctx->opcode); 5578 TCGv t0; 5579 5580 CHK_SV; 5581 t0 = tcg_temp_new(); 5582 gen_addr_reg_index(ctx, t0); 5583 tcg_gen_extract_tl(t0, t0, 28, 4); 5584 gen_helper_load_sr(cpu_gpr[rd], cpu_env, t0); 5585 tcg_temp_free(t0); 5586 if (ra != 0 && ra != rd) 5587 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rd]); 5588 #endif /* defined(CONFIG_USER_ONLY) */ 5589 } 5590 5591 static void gen_rac(DisasContext *ctx) 5592 { 5593 #if defined(CONFIG_USER_ONLY) 5594 GEN_PRIV; 5595 #else 5596 TCGv t0; 5597 5598 CHK_SV; 5599 t0 = tcg_temp_new(); 5600 gen_addr_reg_index(ctx, t0); 5601 gen_helper_rac(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5602 tcg_temp_free(t0); 5603 #endif /* defined(CONFIG_USER_ONLY) */ 5604 } 5605 5606 static void gen_rfsvc(DisasContext *ctx) 5607 { 5608 #if defined(CONFIG_USER_ONLY) 5609 GEN_PRIV; 5610 #else 5611 CHK_SV; 5612 5613 gen_helper_rfsvc(cpu_env); 5614 gen_sync_exception(ctx); 5615 #endif /* defined(CONFIG_USER_ONLY) */ 5616 } 5617 5618 /* svc is not implemented for now */ 5619 5620 /* BookE specific instructions */ 5621 5622 /* XXX: not implemented on 440 ? */ 5623 static void gen_mfapidi(DisasContext *ctx) 5624 { 5625 /* XXX: TODO */ 5626 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5627 } 5628 5629 /* XXX: not implemented on 440 ? 
*/
static void gen_tlbiva(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    /* NOTE(review): the effective address is computed into t0, but the
     * helper is passed cpu_gpr[rB] directly, leaving t0 unused. Confirm
     * whether the helper should receive t0 instead.
     */
    gen_helper_tlbiva(cpu_env, cpu_gpr[rB(ctx->opcode)]);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* All 405 MAC instructions are translated here.
 *
 * The low bits of opc3 select which 16-bit halves of rA/rB are multiplied
 * and whether they are sign- or zero-extended; opc2 selects plain multiply
 * vs. (negative) multiply-accumulate; opc3 bits 0x10/0x02 request overflow
 * detection and saturation respectively.
 */
static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3,
                                        int ra, int rb, int rt, int Rc)
{
    TCGv t0, t1;

    t0 = tcg_temp_local_new();
    t1 = tcg_temp_local_new();

    switch (opc3 & 0x0D) {
    case 0x05:
        /* macchw - macchw. - macchwo - macchwo. */
        /* macchws - macchws. - macchwso - macchwso. */
        /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */
        /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */
        /* mulchw - mulchw. */
        /* signed: low half of rA, high half of rB */
        tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
        tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
        tcg_gen_ext16s_tl(t1, t1);
        break;
    case 0x04:
        /* macchwu - macchwu. - macchwuo - macchwuo. */
        /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */
        /* mulchwu - mulchwu. */
        /* unsigned: low half of rA, high half of rB */
        tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
        tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
        tcg_gen_ext16u_tl(t1, t1);
        break;
    case 0x01:
        /* machhw - machhw. - machhwo - machhwo. */
        /* machhws - machhws. - machhwso - machhwso. */
        /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */
        /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */
        /* mulhhw - mulhhw. */
        /* signed: high halves of both rA and rB */
        tcg_gen_sari_tl(t0, cpu_gpr[ra], 16);
        tcg_gen_ext16s_tl(t0, t0);
        tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
        tcg_gen_ext16s_tl(t1, t1);
        break;
    case 0x00:
        /* machhwu - machhwu. - machhwuo - machhwuo. */
        /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */
        /* mulhhwu - mulhhwu. */
        /* unsigned: high halves of both rA and rB */
        tcg_gen_shri_tl(t0, cpu_gpr[ra], 16);
        tcg_gen_ext16u_tl(t0, t0);
        tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
        tcg_gen_ext16u_tl(t1, t1);
        break;
    case 0x0D:
        /* maclhw - maclhw. - maclhwo - maclhwo. */
        /* maclhws - maclhws. - maclhwso - maclhwso. */
        /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */
        /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */
        /* mullhw - mullhw. */
        /* signed: low halves of both rA and rB */
        tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
        tcg_gen_ext16s_tl(t1, cpu_gpr[rb]);
        break;
    case 0x0C:
        /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */
        /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */
        /* mullhwu - mullhwu. */
        /* unsigned: low halves of both rA and rB */
        tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
        tcg_gen_ext16u_tl(t1, cpu_gpr[rb]);
        break;
    }
    if (opc2 & 0x04) {
        /* (n)multiply-and-accumulate (0x0C / 0x0E) */
        tcg_gen_mul_tl(t1, t0, t1);
        if (opc2 & 0x02) {
            /* nmultiply-and-accumulate (0x0E) */
            tcg_gen_sub_tl(t0, cpu_gpr[rt], t1);
        } else {
            /* multiply-and-accumulate (0x0C) */
            tcg_gen_add_tl(t0, cpu_gpr[rt], t1);
        }

        if (opc3 & 0x12) {
            /* Check overflow and/or saturate */
            TCGLabel *l1 = gen_new_label();

            if (opc3 & 0x10) {
                /* Start with XER OV disabled, the most likely case */
                tcg_gen_movi_tl(cpu_ov, 0);
            }
            if (opc3 & 0x01) {
                /* Signed: overflow iff operands share a sign that the
                 * result does not (classic sign-xor test); branch to l1
                 * when no overflow occurred.
                 */
                tcg_gen_xor_tl(t1, cpu_gpr[rt], t1);
                tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
                tcg_gen_xor_tl(t1, cpu_gpr[rt], t0);
                tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1);
                if (opc3 & 0x02) {
                    /* Saturate to INT32_MAX/INT32_MIN by the accumulator's
                     * sign */
                    tcg_gen_sari_tl(t0, cpu_gpr[rt], 31);
                    tcg_gen_xori_tl(t0, t0, 0x7fffffff);
                }
            } else {
                /* Unsigned: overflow iff the sum wrapped below an addend */
                tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
                if (opc3 & 0x02) {
                    /* Saturate */
                    tcg_gen_movi_tl(t0, UINT32_MAX);
                }
            }
            if (opc3 & 0x10) {
                /* Check overflow */
                tcg_gen_movi_tl(cpu_ov, 1);
                tcg_gen_movi_tl(cpu_so, 1);
            }
            gen_set_label(l1);
            tcg_gen_mov_tl(cpu_gpr[rt], t0);
        }
    } else {
        /* plain multiply (mul*) - result goes straight into rT */
        tcg_gen_mul_tl(cpu_gpr[rt], t0, t1);
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    /* NOTE(review): spelled "unlikely(Rc) != 0" rather than
     * "unlikely(Rc != 0)"; equivalent assuming unlikely() preserves the
     * truth value of its argument — confirm against the unlikely() macro.
     */
    if (unlikely(Rc) != 0) {
        /* Update Rc0 */
        gen_set_Rc0(ctx, cpu_gpr[rt]);
    }
}

/* Expands to one translator per 405 MAC/multiply opcode, all funneling
 * into gen_405_mulladd_insn with the opcode's field values.
 */
#define GEN_MAC_HANDLER(name, opc2, opc3)                                     \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode),   \
                         rD(ctx->opcode), Rc(ctx->opcode));                   \
}

/* macchw - macchw. */
GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
/* macchwo - macchwo. */
GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
/* macchws - macchws. */
GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
/* macchwso - macchwso. */
GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
/* macchwsu - macchwsu. */
GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
/* macchwsuo - macchwsuo. */
GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
/* macchwu - macchwu. */
GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
/* macchwuo - macchwuo. */
GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
/* machhw - machhw. */
GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
/* machhwo - machhwo. */
GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
/* machhws - machhws. */
GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
/* machhwso - machhwso. */
GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
/* machhwsu - machhwsu. */
GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
/* machhwsuo - machhwsuo. */
GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
/* machhwu - machhwu. */
GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
/* machhwuo - machhwuo. */
GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
/* maclhw - maclhw. */
GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
/* maclhwo - maclhwo. */
GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
/* maclhws - maclhws. */
GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
/* maclhwso - maclhwso.
*/
GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
/* maclhwu - maclhwu. */
GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
/* maclhwuo - maclhwuo. */
GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
/* maclhwsu - maclhwsu. */
GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
/* maclhwsuo - maclhwsuo. */
GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
/* nmacchw - nmacchw. */
GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
/* nmacchwo - nmacchwo. */
GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
/* nmacchws - nmacchws. */
GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
/* nmacchwso - nmacchwso. */
GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
/* nmachhw - nmachhw. */
GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
/* nmachhwo - nmachhwo. */
GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
/* nmachhws - nmachhws. */
GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
/* nmachhwso - nmachhwso. */
GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
/* nmaclhw - nmaclhw. */
GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
/* nmaclhwo - nmaclhwo. */
GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
/* nmaclhws - nmaclhws. */
GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
/* nmaclhwso - nmaclhwso. */
GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);

/* mulchw - mulchw. */
GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
/* mulchwu - mulchwu. */
GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
/* mulhhw - mulhhw. */
GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
/* mulhhwu - mulhhwu. */
GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
/* mullhw - mullhw. */
GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
/* mullhwu - mullhwu. */
GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);

/* mfdcr - supervisor-only; loads rD from the DCR numbered by the SPR field */
static void gen_mfdcr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv dcrn;

    CHK_SV;
    dcrn = tcg_const_tl(SPR(ctx->opcode));
    gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, dcrn);
    tcg_temp_free(dcrn);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mtdcr - supervisor-only; stores rS to the DCR numbered by the SPR field */
static void gen_mtdcr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv dcrn;

    CHK_SV;
    dcrn = tcg_const_tl(SPR(ctx->opcode));
    gen_helper_store_dcr(cpu_env, dcrn, cpu_gpr[rS(ctx->opcode)]);
    tcg_temp_free(dcrn);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mfdcrx - indexed form: the DCR number comes from rA */
/* XXX: not implemented on 440 ? */
static void gen_mfdcrx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
                        cpu_gpr[rA(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mtdcrx */
/* XXX: not implemented on 440 ?
*/
static void gen_mtdcrx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* indexed form: DCR number from rA, value from rS */
    gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
                         cpu_gpr[rS(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mfdcrux (PPC 460) : user-mode access to DCR */
static void gen_mfdcrux(DisasContext *ctx)
{
    gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
                        cpu_gpr[rA(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
}

/* mtdcrux (PPC 460) : user-mode access to DCR */
static void gen_mtdcrux(DisasContext *ctx)
{
    gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
                         cpu_gpr[rS(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
}

/* dccci - supervisor-only cache op */
static void gen_dccci(DisasContext *ctx)
{
    CHK_SV;
    /* interpreted as no-op */
}

/* dcread - supervisor-only; performs a 32-bit load from the EA (the loaded
 * value is discarded) and returns the EA itself in rD
 */
static void gen_dcread(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv EA, val;

    CHK_SV;
    gen_set_access_type(ctx, ACCESS_CACHE);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    val = tcg_temp_new();
    gen_qemu_ld32u(ctx, val, EA);
    tcg_temp_free(val);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
    tcg_temp_free(EA);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* icbt */
static void gen_icbt_40x(DisasContext *ctx)
{
    /* interpreted as no-op */
    /* XXX: specification say this is treated as a load by the MMU
     * but does not generate any exception
     */
}

/* iccci - supervisor-only cache op */
static void gen_iccci(DisasContext *ctx)
{
    CHK_SV;
    /* interpreted as no-op */
}

/* icread - supervisor-only cache op */
static void gen_icread(DisasContext *ctx)
{
    CHK_SV;
    /* interpreted
as no-op */
}

/* rfci (supervisor only) */
static void gen_rfci_40x(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* Restore CPU state */
    gen_helper_40x_rfci(cpu_env);
    gen_sync_exception(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* rfci - return from critical interrupt; supervisor-only */
static void gen_rfci(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* Restore CPU state */
    gen_helper_rfci(cpu_env);
    gen_sync_exception(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* BookE specific */

/* rfdi - return from debug interrupt; supervisor-only */
/* XXX: not implemented on 440 ? */
static void gen_rfdi(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* Restore CPU state */
    gen_helper_rfdi(cpu_env);
    gen_sync_exception(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* rfmci - return from machine-check interrupt; supervisor-only */
/* XXX: not implemented on 440 ? */
static void gen_rfmci(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* Restore CPU state */
    gen_helper_rfmci(cpu_env);
    gen_sync_exception(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* TLB management - PowerPC 405 implementation */

/* tlbre - supervisor-only; rB selects which TLB word (0 = hi, 1 = lo) is
 * read into rD; any other value raises an invalid-instruction exception
 */
static void gen_tlbre_40x(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    switch (rB(ctx->opcode)) {
    case 0:
        gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_env,
                                cpu_gpr[rA(ctx->opcode)]);
        break;
    case 1:
        gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_env,
                                cpu_gpr[rA(ctx->opcode)]);
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbsx - tlbsx.
*/
static void gen_tlbsx_40x(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
    /* For the Rc=1 form: CR0 starts from SO, and bit 0x02 is set unless the
     * helper returned -1 in rD (i.e. unless the search failed)
     */
    if (Rc(ctx->opcode)) {
        TCGLabel *l1 = gen_new_label();
        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
        tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
        tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
        gen_set_label(l1);
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbwe - supervisor-only; rB selects which TLB word (0 = hi, 1 = lo) is
 * written from rS; any other value raises an invalid-instruction exception
 */
static void gen_tlbwe_40x(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    switch (rB(ctx->opcode)) {
    case 0:
        gen_helper_4xx_tlbwe_hi(cpu_env, cpu_gpr[rA(ctx->opcode)],
                                cpu_gpr[rS(ctx->opcode)]);
        break;
    case 1:
        gen_helper_4xx_tlbwe_lo(cpu_env, cpu_gpr[rA(ctx->opcode)],
                                cpu_gpr[rS(ctx->opcode)]);
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* TLB management - PowerPC 440 implementation */

/* tlbre - supervisor-only; rB (0, 1 or 2) selects the TLB word read into rD,
 * passed down to the helper as an immediate
 */
static void gen_tlbre_440(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    switch (rB(ctx->opcode)) {
    case 0:
    case 1:
    case 2:
        {
            TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
            gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], cpu_env,
                                 t0, cpu_gpr[rA(ctx->opcode)]);
            tcg_temp_free_i32(t0);
        }
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbsx - tlbsx.
*/
static void gen_tlbsx_440(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
    /* For the Rc=1 form: CR0 starts from SO, and bit 0x02 is set unless the
     * helper returned -1 in rD (i.e. unless the search failed)
     */
    if (Rc(ctx->opcode)) {
        TCGLabel *l1 = gen_new_label();
        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
        tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
        tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
        gen_set_label(l1);
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbwe - supervisor-only; rB (0, 1 or 2) selects the TLB word written from
 * rS, passed down to the helper as an immediate
 */
static void gen_tlbwe_440(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    switch (rB(ctx->opcode)) {
    case 0:
    case 1:
    case 2:
        {
            TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
            gen_helper_440_tlbwe(cpu_env, t0, cpu_gpr[rA(ctx->opcode)],
                                 cpu_gpr[rS(ctx->opcode)]);
            tcg_temp_free_i32(t0);
        }
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* TLB management - PowerPC BookE 2.06 implementation */

/* tlbre - supervisor-only; all state comes from env, so the helper takes
 * no operands
 */
static void gen_tlbre_booke206(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_helper_booke206_tlbre(cpu_env);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbsx - tlbsx.
*/ 6199 static void gen_tlbsx_booke206(DisasContext *ctx) 6200 { 6201 #if defined(CONFIG_USER_ONLY) 6202 GEN_PRIV; 6203 #else 6204 TCGv t0; 6205 6206 CHK_SV; 6207 if (rA(ctx->opcode)) { 6208 t0 = tcg_temp_new(); 6209 tcg_gen_mov_tl(t0, cpu_gpr[rD(ctx->opcode)]); 6210 } else { 6211 t0 = tcg_const_tl(0); 6212 } 6213 6214 tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]); 6215 gen_helper_booke206_tlbsx(cpu_env, t0); 6216 tcg_temp_free(t0); 6217 #endif /* defined(CONFIG_USER_ONLY) */ 6218 } 6219 6220 /* tlbwe */ 6221 static void gen_tlbwe_booke206(DisasContext *ctx) 6222 { 6223 #if defined(CONFIG_USER_ONLY) 6224 GEN_PRIV; 6225 #else 6226 CHK_SV; 6227 gen_helper_booke206_tlbwe(cpu_env); 6228 #endif /* defined(CONFIG_USER_ONLY) */ 6229 } 6230 6231 static void gen_tlbivax_booke206(DisasContext *ctx) 6232 { 6233 #if defined(CONFIG_USER_ONLY) 6234 GEN_PRIV; 6235 #else 6236 TCGv t0; 6237 6238 CHK_SV; 6239 t0 = tcg_temp_new(); 6240 gen_addr_reg_index(ctx, t0); 6241 gen_helper_booke206_tlbivax(cpu_env, t0); 6242 tcg_temp_free(t0); 6243 #endif /* defined(CONFIG_USER_ONLY) */ 6244 } 6245 6246 static void gen_tlbilx_booke206(DisasContext *ctx) 6247 { 6248 #if defined(CONFIG_USER_ONLY) 6249 GEN_PRIV; 6250 #else 6251 TCGv t0; 6252 6253 CHK_SV; 6254 t0 = tcg_temp_new(); 6255 gen_addr_reg_index(ctx, t0); 6256 6257 switch((ctx->opcode >> 21) & 0x3) { 6258 case 0: 6259 gen_helper_booke206_tlbilx0(cpu_env, t0); 6260 break; 6261 case 1: 6262 gen_helper_booke206_tlbilx1(cpu_env, t0); 6263 break; 6264 case 3: 6265 gen_helper_booke206_tlbilx3(cpu_env, t0); 6266 break; 6267 default: 6268 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6269 break; 6270 } 6271 6272 tcg_temp_free(t0); 6273 #endif /* defined(CONFIG_USER_ONLY) */ 6274 } 6275 6276 6277 /* wrtee */ 6278 static void gen_wrtee(DisasContext *ctx) 6279 { 6280 #if defined(CONFIG_USER_ONLY) 6281 GEN_PRIV; 6282 #else 6283 TCGv t0; 6284 6285 CHK_SV; 6286 t0 = tcg_temp_new(); 6287 tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << 
MSR_EE)); 6288 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE)); 6289 tcg_gen_or_tl(cpu_msr, cpu_msr, t0); 6290 tcg_temp_free(t0); 6291 /* Stop translation to have a chance to raise an exception 6292 * if we just set msr_ee to 1 6293 */ 6294 gen_stop_exception(ctx); 6295 #endif /* defined(CONFIG_USER_ONLY) */ 6296 } 6297 6298 /* wrteei */ 6299 static void gen_wrteei(DisasContext *ctx) 6300 { 6301 #if defined(CONFIG_USER_ONLY) 6302 GEN_PRIV; 6303 #else 6304 CHK_SV; 6305 if (ctx->opcode & 0x00008000) { 6306 tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE)); 6307 /* Stop translation to have a chance to raise an exception */ 6308 gen_stop_exception(ctx); 6309 } else { 6310 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE)); 6311 } 6312 #endif /* defined(CONFIG_USER_ONLY) */ 6313 } 6314 6315 /* PowerPC 440 specific instructions */ 6316 6317 /* dlmzb */ 6318 static void gen_dlmzb(DisasContext *ctx) 6319 { 6320 TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode)); 6321 gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_env, 6322 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); 6323 tcg_temp_free_i32(t0); 6324 } 6325 6326 /* mbar replaces eieio on 440 */ 6327 static void gen_mbar(DisasContext *ctx) 6328 { 6329 /* interpreted as no-op */ 6330 } 6331 6332 /* msync replaces sync on 440 */ 6333 static void gen_msync_4xx(DisasContext *ctx) 6334 { 6335 /* interpreted as no-op */ 6336 } 6337 6338 /* icbt */ 6339 static void gen_icbt_440(DisasContext *ctx) 6340 { 6341 /* interpreted as no-op */ 6342 /* XXX: specification say this is treated as a load by the MMU 6343 * but does not generate any exception 6344 */ 6345 } 6346 6347 /* Embedded.Processor Control */ 6348 6349 static void gen_msgclr(DisasContext *ctx) 6350 { 6351 #if defined(CONFIG_USER_ONLY) 6352 GEN_PRIV; 6353 #else 6354 CHK_HV; 6355 /* 64-bit server processors compliant with arch 2.x */ 6356 if (ctx->insns_flags & PPC_SEGMENT_64B) { 6357 gen_helper_book3s_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]); 6358 } else { 6359 
gen_helper_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]); 6360 } 6361 #endif /* defined(CONFIG_USER_ONLY) */ 6362 } 6363 6364 static void gen_msgsnd(DisasContext *ctx) 6365 { 6366 #if defined(CONFIG_USER_ONLY) 6367 GEN_PRIV; 6368 #else 6369 CHK_HV; 6370 /* 64-bit server processors compliant with arch 2.x */ 6371 if (ctx->insns_flags & PPC_SEGMENT_64B) { 6372 gen_helper_book3s_msgsnd(cpu_gpr[rB(ctx->opcode)]); 6373 } else { 6374 gen_helper_msgsnd(cpu_gpr[rB(ctx->opcode)]); 6375 } 6376 #endif /* defined(CONFIG_USER_ONLY) */ 6377 } 6378 6379 static void gen_msgsync(DisasContext *ctx) 6380 { 6381 #if defined(CONFIG_USER_ONLY) 6382 GEN_PRIV; 6383 #else 6384 CHK_HV; 6385 #endif /* defined(CONFIG_USER_ONLY) */ 6386 /* interpreted as no-op */ 6387 } 6388 6389 #if defined(TARGET_PPC64) 6390 static void gen_maddld(DisasContext *ctx) 6391 { 6392 TCGv_i64 t1 = tcg_temp_new_i64(); 6393 6394 tcg_gen_mul_i64(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 6395 tcg_gen_add_i64(cpu_gpr[rD(ctx->opcode)], t1, cpu_gpr[rC(ctx->opcode)]); 6396 tcg_temp_free_i64(t1); 6397 } 6398 6399 /* maddhd maddhdu */ 6400 static void gen_maddhd_maddhdu(DisasContext *ctx) 6401 { 6402 TCGv_i64 lo = tcg_temp_new_i64(); 6403 TCGv_i64 hi = tcg_temp_new_i64(); 6404 TCGv_i64 t1 = tcg_temp_new_i64(); 6405 6406 if (Rc(ctx->opcode)) { 6407 tcg_gen_mulu2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)], 6408 cpu_gpr[rB(ctx->opcode)]); 6409 tcg_gen_movi_i64(t1, 0); 6410 } else { 6411 tcg_gen_muls2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)], 6412 cpu_gpr[rB(ctx->opcode)]); 6413 tcg_gen_sari_i64(t1, cpu_gpr[rC(ctx->opcode)], 63); 6414 } 6415 tcg_gen_add2_i64(t1, cpu_gpr[rD(ctx->opcode)], lo, hi, 6416 cpu_gpr[rC(ctx->opcode)], t1); 6417 tcg_temp_free_i64(lo); 6418 tcg_temp_free_i64(hi); 6419 tcg_temp_free_i64(t1); 6420 } 6421 #endif /* defined(TARGET_PPC64) */ 6422 6423 static void gen_tbegin(DisasContext *ctx) 6424 { 6425 if (unlikely(!ctx->tm_enabled)) { 6426 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); 6427 return; 
6428 } 6429 gen_helper_tbegin(cpu_env); 6430 } 6431 6432 #define GEN_TM_NOOP(name) \ 6433 static inline void gen_##name(DisasContext *ctx) \ 6434 { \ 6435 if (unlikely(!ctx->tm_enabled)) { \ 6436 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \ 6437 return; \ 6438 } \ 6439 /* Because tbegin always fails in QEMU, these user \ 6440 * space instructions all have a simple implementation: \ 6441 * \ 6442 * CR[0] = 0b0 || MSR[TS] || 0b0 \ 6443 * = 0b0 || 0b00 || 0b0 \ 6444 */ \ 6445 tcg_gen_movi_i32(cpu_crf[0], 0); \ 6446 } 6447 6448 GEN_TM_NOOP(tend); 6449 GEN_TM_NOOP(tabort); 6450 GEN_TM_NOOP(tabortwc); 6451 GEN_TM_NOOP(tabortwci); 6452 GEN_TM_NOOP(tabortdc); 6453 GEN_TM_NOOP(tabortdci); 6454 GEN_TM_NOOP(tsr); 6455 static inline void gen_cp_abort(DisasContext *ctx) 6456 { 6457 // Do Nothing 6458 } 6459 6460 #define GEN_CP_PASTE_NOOP(name) \ 6461 static inline void gen_##name(DisasContext *ctx) \ 6462 { \ 6463 /* Generate invalid exception until \ 6464 * we have an implementation of the copy \ 6465 * paste facility \ 6466 */ \ 6467 gen_invalid(ctx); \ 6468 } 6469 6470 GEN_CP_PASTE_NOOP(copy) 6471 GEN_CP_PASTE_NOOP(paste) 6472 6473 static void gen_tcheck(DisasContext *ctx) 6474 { 6475 if (unlikely(!ctx->tm_enabled)) { 6476 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); 6477 return; 6478 } 6479 /* Because tbegin always fails, the tcheck implementation 6480 * is simple: 6481 * 6482 * CR[CRF] = TDOOMED || MSR[TS] || 0b0 6483 * = 0b1 || 0b00 || 0b0 6484 */ 6485 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0x8); 6486 } 6487 6488 #if defined(CONFIG_USER_ONLY) 6489 #define GEN_TM_PRIV_NOOP(name) \ 6490 static inline void gen_##name(DisasContext *ctx) \ 6491 { \ 6492 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); \ 6493 } 6494 6495 #else 6496 6497 #define GEN_TM_PRIV_NOOP(name) \ 6498 static inline void gen_##name(DisasContext *ctx) \ 6499 { \ 6500 CHK_SV; \ 6501 if (unlikely(!ctx->tm_enabled)) { \ 6502 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \ 6503 
return; \ 6504 } \ 6505 /* Because tbegin always fails, the implementation is \ 6506 * simple: \ 6507 * \ 6508 * CR[0] = 0b0 || MSR[TS] || 0b0 \ 6509 * = 0b0 || 0b00 | 0b0 \ 6510 */ \ 6511 tcg_gen_movi_i32(cpu_crf[0], 0); \ 6512 } 6513 6514 #endif 6515 6516 GEN_TM_PRIV_NOOP(treclaim); 6517 GEN_TM_PRIV_NOOP(trechkpt); 6518 6519 #include "translate/fp-impl.inc.c" 6520 6521 #include "translate/vmx-impl.inc.c" 6522 6523 #include "translate/vsx-impl.inc.c" 6524 6525 #include "translate/dfp-impl.inc.c" 6526 6527 #include "translate/spe-impl.inc.c" 6528 6529 /* Handles lfdp, lxsd, lxssp */ 6530 static void gen_dform39(DisasContext *ctx) 6531 { 6532 switch (ctx->opcode & 0x3) { 6533 case 0: /* lfdp */ 6534 if (ctx->insns_flags2 & PPC2_ISA205) { 6535 return gen_lfdp(ctx); 6536 } 6537 break; 6538 case 2: /* lxsd */ 6539 if (ctx->insns_flags2 & PPC2_ISA300) { 6540 return gen_lxsd(ctx); 6541 } 6542 break; 6543 case 3: /* lxssp */ 6544 if (ctx->insns_flags2 & PPC2_ISA300) { 6545 return gen_lxssp(ctx); 6546 } 6547 break; 6548 } 6549 return gen_invalid(ctx); 6550 } 6551 6552 /* handles stfdp, lxv, stxsd, stxssp lxvx */ 6553 static void gen_dform3D(DisasContext *ctx) 6554 { 6555 if ((ctx->opcode & 3) == 1) { /* DQ-FORM */ 6556 switch (ctx->opcode & 0x7) { 6557 case 1: /* lxv */ 6558 if (ctx->insns_flags2 & PPC2_ISA300) { 6559 return gen_lxv(ctx); 6560 } 6561 break; 6562 case 5: /* stxv */ 6563 if (ctx->insns_flags2 & PPC2_ISA300) { 6564 return gen_stxv(ctx); 6565 } 6566 break; 6567 } 6568 } else { /* DS-FORM */ 6569 switch (ctx->opcode & 0x3) { 6570 case 0: /* stfdp */ 6571 if (ctx->insns_flags2 & PPC2_ISA205) { 6572 return gen_stfdp(ctx); 6573 } 6574 break; 6575 case 2: /* stxsd */ 6576 if (ctx->insns_flags2 & PPC2_ISA300) { 6577 return gen_stxsd(ctx); 6578 } 6579 break; 6580 case 3: /* stxssp */ 6581 if (ctx->insns_flags2 & PPC2_ISA300) { 6582 return gen_stxssp(ctx); 6583 } 6584 break; 6585 } 6586 } 6587 return gen_invalid(ctx); 6588 } 6589 6590 static opcode_t opcodes[] = { 
6591 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE), 6592 GEN_HANDLER(cmp, 0x1F, 0x00, 0x00, 0x00400000, PPC_INTEGER), 6593 GEN_HANDLER(cmpi, 0x0B, 0xFF, 0xFF, 0x00400000, PPC_INTEGER), 6594 GEN_HANDLER(cmpl, 0x1F, 0x00, 0x01, 0x00400001, PPC_INTEGER), 6595 GEN_HANDLER(cmpli, 0x0A, 0xFF, 0xFF, 0x00400000, PPC_INTEGER), 6596 #if defined(TARGET_PPC64) 6597 GEN_HANDLER_E(cmpeqb, 0x1F, 0x00, 0x07, 0x00600000, PPC_NONE, PPC2_ISA300), 6598 #endif 6599 GEN_HANDLER_E(cmpb, 0x1F, 0x1C, 0x0F, 0x00000001, PPC_NONE, PPC2_ISA205), 6600 GEN_HANDLER_E(cmprb, 0x1F, 0x00, 0x06, 0x00400001, PPC_NONE, PPC2_ISA300), 6601 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL), 6602 GEN_HANDLER(addi, 0x0E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6603 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6604 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6605 GEN_HANDLER(addis, 0x0F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6606 GEN_HANDLER_E(addpcis, 0x13, 0x2, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300), 6607 GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER), 6608 GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER), 6609 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER), 6610 GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER), 6611 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6612 #if defined(TARGET_PPC64) 6613 GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B), 6614 #endif 6615 GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER), 6616 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER), 6617 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6618 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6619 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6620 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER), 6621 GEN_HANDLER_E(cnttzw, 0x1F, 0x1A, 0x10, 0x00000000, PPC_NONE, 
PPC2_ISA300), 6622 GEN_HANDLER_E(copy, 0x1F, 0x06, 0x18, 0x03C00001, PPC_NONE, PPC2_ISA300), 6623 GEN_HANDLER_E(cp_abort, 0x1F, 0x06, 0x1A, 0x03FFF801, PPC_NONE, PPC2_ISA300), 6624 GEN_HANDLER_E(paste, 0x1F, 0x06, 0x1C, 0x03C00000, PPC_NONE, PPC2_ISA300), 6625 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER), 6626 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER), 6627 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6628 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6629 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6630 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6631 GEN_HANDLER(popcntb, 0x1F, 0x1A, 0x03, 0x0000F801, PPC_POPCNTB), 6632 GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD), 6633 GEN_HANDLER_E(prtyw, 0x1F, 0x1A, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA205), 6634 #if defined(TARGET_PPC64) 6635 GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD), 6636 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B), 6637 GEN_HANDLER_E(cnttzd, 0x1F, 0x1A, 0x11, 0x00000000, PPC_NONE, PPC2_ISA300), 6638 GEN_HANDLER_E(darn, 0x1F, 0x13, 0x17, 0x001CF801, PPC_NONE, PPC2_ISA300), 6639 GEN_HANDLER_E(prtyd, 0x1F, 0x1A, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA205), 6640 GEN_HANDLER_E(bpermd, 0x1F, 0x1C, 0x07, 0x00000001, PPC_NONE, PPC2_PERM_ISA206), 6641 #endif 6642 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6643 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6644 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6645 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER), 6646 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER), 6647 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER), 6648 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER), 6649 #if defined(TARGET_PPC64) 6650 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B), 6651 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, 
PPC_64B), 6652 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B), 6653 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B), 6654 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B), 6655 GEN_HANDLER2_E(extswsli0, "extswsli", 0x1F, 0x1A, 0x1B, 0x00000000, 6656 PPC_NONE, PPC2_ISA300), 6657 GEN_HANDLER2_E(extswsli1, "extswsli", 0x1F, 0x1B, 0x1B, 0x00000000, 6658 PPC_NONE, PPC2_ISA300), 6659 #endif 6660 #if defined(TARGET_PPC64) 6661 GEN_HANDLER(ld, 0x3A, 0xFF, 0xFF, 0x00000000, PPC_64B), 6662 GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX), 6663 GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B), 6664 #endif 6665 /* handles lfdp, lxsd, lxssp */ 6666 GEN_HANDLER_E(dform39, 0x39, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205), 6667 /* handles stfdp, lxv, stxsd, stxssp, stxv */ 6668 GEN_HANDLER_E(dform3D, 0x3D, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205), 6669 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6670 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6671 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING), 6672 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING), 6673 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING), 6674 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING), 6675 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x01FFF801, PPC_MEM_EIEIO), 6676 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM), 6677 GEN_HANDLER_E(lbarx, 0x1F, 0x14, 0x01, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6678 GEN_HANDLER_E(lharx, 0x1F, 0x14, 0x03, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6679 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES), 6680 GEN_HANDLER_E(lwat, 0x1F, 0x06, 0x12, 0x00000001, PPC_NONE, PPC2_ISA300), 6681 GEN_HANDLER_E(stwat, 0x1F, 0x06, 0x16, 0x00000001, PPC_NONE, PPC2_ISA300), 6682 GEN_HANDLER_E(stbcx_, 0x1F, 0x16, 0x15, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6683 GEN_HANDLER_E(sthcx_, 0x1F, 0x16, 0x16, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6684 
GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES), 6685 #if defined(TARGET_PPC64) 6686 GEN_HANDLER_E(ldat, 0x1F, 0x06, 0x13, 0x00000001, PPC_NONE, PPC2_ISA300), 6687 GEN_HANDLER_E(stdat, 0x1F, 0x06, 0x17, 0x00000001, PPC_NONE, PPC2_ISA300), 6688 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B), 6689 GEN_HANDLER_E(lqarx, 0x1F, 0x14, 0x08, 0, PPC_NONE, PPC2_LSQ_ISA207), 6690 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B), 6691 GEN_HANDLER_E(stqcx_, 0x1F, 0x16, 0x05, 0, PPC_NONE, PPC2_LSQ_ISA207), 6692 #endif 6693 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC), 6694 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT), 6695 GEN_HANDLER_E(wait, 0x1F, 0x1E, 0x00, 0x039FF801, PPC_NONE, PPC2_ISA300), 6696 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 6697 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 6698 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW), 6699 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW), 6700 GEN_HANDLER_E(bctar, 0x13, 0x10, 0x11, 0x0000E000, PPC_NONE, PPC2_BCTAR_ISA207), 6701 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER), 6702 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW), 6703 #if defined(TARGET_PPC64) 6704 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B), 6705 GEN_HANDLER_E(stop, 0x13, 0x12, 0x0b, 0x03FFF801, PPC_NONE, PPC2_ISA300), 6706 GEN_HANDLER_E(doze, 0x13, 0x12, 0x0c, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6707 GEN_HANDLER_E(nap, 0x13, 0x12, 0x0d, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6708 GEN_HANDLER_E(sleep, 0x13, 0x12, 0x0e, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6709 GEN_HANDLER_E(rvwinkle, 0x13, 0x12, 0x0f, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6710 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H), 6711 #endif 6712 GEN_HANDLER(sc, 0x11, 0xFF, 0xFF, 0x03FFF01D, PPC_FLOW), 6713 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW), 6714 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, 
PPC_FLOW), 6715 #if defined(TARGET_PPC64) 6716 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B), 6717 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B), 6718 #endif 6719 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC), 6720 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC), 6721 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC), 6722 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC), 6723 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB), 6724 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC), 6725 #if defined(TARGET_PPC64) 6726 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B), 6727 GEN_HANDLER_E(setb, 0x1F, 0x00, 0x04, 0x0003F801, PPC_NONE, PPC2_ISA300), 6728 GEN_HANDLER_E(mcrxrx, 0x1F, 0x00, 0x12, 0x007FF801, PPC_NONE, PPC2_ISA300), 6729 #endif 6730 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001EF801, PPC_MISC), 6731 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000000, PPC_MISC), 6732 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE), 6733 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE), 6734 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE), 6735 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x00000001, PPC_CACHE), 6736 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x00000001, PPC_CACHE), 6737 GEN_HANDLER_E(dcbtls, 0x1F, 0x06, 0x05, 0x02000001, PPC_BOOKE, PPC2_BOOKE206), 6738 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ), 6739 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC), 6740 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x01800001, PPC_ALTIVEC), 6741 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC), 6742 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI), 6743 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA), 6744 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT), 6745 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT), 6746 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT), 6747 
GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT), 6748 #if defined(TARGET_PPC64) 6749 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B), 6750 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001, 6751 PPC_SEGMENT_64B), 6752 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B), 6753 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001, 6754 PPC_SEGMENT_64B), 6755 GEN_HANDLER2(slbmte, "slbmte", 0x1F, 0x12, 0x0C, 0x001F0001, PPC_SEGMENT_64B), 6756 GEN_HANDLER2(slbmfee, "slbmfee", 0x1F, 0x13, 0x1C, 0x001F0001, PPC_SEGMENT_64B), 6757 GEN_HANDLER2(slbmfev, "slbmfev", 0x1F, 0x13, 0x1A, 0x001F0001, PPC_SEGMENT_64B), 6758 GEN_HANDLER2(slbfee_, "slbfee.", 0x1F, 0x13, 0x1E, 0x001F0000, PPC_SEGMENT_64B), 6759 #endif 6760 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA), 6761 /* XXX Those instructions will need to be handled differently for 6762 * different ISA versions */ 6763 GEN_HANDLER(tlbiel, 0x1F, 0x12, 0x08, 0x001F0001, PPC_MEM_TLBIE), 6764 GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x001F0001, PPC_MEM_TLBIE), 6765 GEN_HANDLER_E(tlbiel, 0x1F, 0x12, 0x08, 0x00100001, PPC_NONE, PPC2_ISA300), 6766 GEN_HANDLER_E(tlbie, 0x1F, 0x12, 0x09, 0x00100001, PPC_NONE, PPC2_ISA300), 6767 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC), 6768 #if defined(TARGET_PPC64) 6769 GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x031FFC01, PPC_SLBI), 6770 GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI), 6771 GEN_HANDLER_E(slbieg, 0x1F, 0x12, 0x0E, 0x001F0001, PPC_NONE, PPC2_ISA300), 6772 GEN_HANDLER_E(slbsync, 0x1F, 0x12, 0x0A, 0x03FFF801, PPC_NONE, PPC2_ISA300), 6773 #endif 6774 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN), 6775 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN), 6776 GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR), 6777 GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR), 6778 GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 
0x0000F800, PPC_POWER_BR), 6779 GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR), 6780 GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR), 6781 GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR), 6782 GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR), 6783 GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR), 6784 GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR), 6785 GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR), 6786 GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR), 6787 GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR), 6788 GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR), 6789 GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR), 6790 GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR), 6791 GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR), 6792 GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR), 6793 GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR), 6794 GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR), 6795 GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR), 6796 GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR), 6797 GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR), 6798 GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR), 6799 GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR), 6800 GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR), 6801 GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR), 6802 GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR), 6803 GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR), 6804 GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR), 6805 GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR), 6806 GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR), 6807 GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, 
PPC_POWER_BR), 6808 GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR), 6809 GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR), 6810 GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC), 6811 GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC), 6812 GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC), 6813 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB), 6814 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB), 6815 GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB), 6816 GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB), 6817 GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER), 6818 GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER), 6819 GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER), 6820 GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER), 6821 GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER), 6822 GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER), 6823 GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 6824 GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 6825 GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2), 6826 GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2), 6827 GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 6828 GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 6829 GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2), 6830 GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2), 6831 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI), 6832 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA), 6833 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR), 6834 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR), 6835 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX), 6836 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, 
PPC_DCRX), 6837 GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX), 6838 GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX), 6839 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON), 6840 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON), 6841 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT), 6842 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON), 6843 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON), 6844 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP), 6845 GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206), 6846 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI), 6847 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI), 6848 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB), 6849 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB), 6850 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB), 6851 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE), 6852 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE), 6853 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE), 6854 GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, 6855 PPC_NONE, PPC2_BOOKE206), 6856 GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, 6857 PPC_NONE, PPC2_BOOKE206), 6858 GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, 6859 PPC_NONE, PPC2_BOOKE206), 6860 GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001, 6861 PPC_NONE, PPC2_BOOKE206), 6862 GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001, 6863 PPC_NONE, PPC2_BOOKE206), 6864 GEN_HANDLER2_E(msgsnd, "msgsnd", 0x1F, 0x0E, 0x06, 0x03ff0001, 6865 PPC_NONE, PPC2_PRCNTL), 6866 GEN_HANDLER2_E(msgclr, "msgclr", 0x1F, 0x0E, 0x07, 0x03ff0001, 6867 
PPC_NONE, PPC2_PRCNTL), 6868 GEN_HANDLER2_E(msgsync, "msgsync", 0x1F, 0x16, 0x1B, 0x00000000, 6869 PPC_NONE, PPC2_PRCNTL), 6870 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE), 6871 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE), 6872 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC), 6873 GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801, 6874 PPC_BOOKE, PPC2_BOOKE206), 6875 GEN_HANDLER(msync_4xx, 0x1F, 0x16, 0x12, 0x03FFF801, PPC_BOOKE), 6876 GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001, 6877 PPC_BOOKE, PPC2_BOOKE206), 6878 GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, 6879 PPC_440_SPEC), 6880 GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC), 6881 GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC), 6882 GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC), 6883 GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC), 6884 GEN_HANDLER(vmladduhm, 0x04, 0x11, 0xFF, 0x00000000, PPC_ALTIVEC), 6885 #if defined(TARGET_PPC64) 6886 GEN_HANDLER_E(maddhd_maddhdu, 0x04, 0x18, 0xFF, 0x00000000, PPC_NONE, 6887 PPC2_ISA300), 6888 GEN_HANDLER_E(maddld, 0x04, 0x19, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300), 6889 #endif 6890 6891 #undef GEN_INT_ARITH_ADD 6892 #undef GEN_INT_ARITH_ADD_CONST 6893 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \ 6894 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER), 6895 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \ 6896 add_ca, compute_ca, compute_ov) \ 6897 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER), 6898 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0) 6899 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1) 6900 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0) 6901 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1) 6902 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0) 6903 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1) 6904 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0) 6905 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1) 6906 
GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0) 6907 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1) 6908 6909 #undef GEN_INT_ARITH_DIVW 6910 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \ 6911 GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER) 6912 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0), 6913 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1), 6914 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0), 6915 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1), 6916 GEN_HANDLER_E(divwe, 0x1F, 0x0B, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206), 6917 GEN_HANDLER_E(divweo, 0x1F, 0x0B, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206), 6918 GEN_HANDLER_E(divweu, 0x1F, 0x0B, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206), 6919 GEN_HANDLER_E(divweuo, 0x1F, 0x0B, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206), 6920 GEN_HANDLER_E(modsw, 0x1F, 0x0B, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300), 6921 GEN_HANDLER_E(moduw, 0x1F, 0x0B, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300), 6922 6923 #if defined(TARGET_PPC64) 6924 #undef GEN_INT_ARITH_DIVD 6925 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \ 6926 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B) 6927 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0), 6928 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1), 6929 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0), 6930 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1), 6931 6932 GEN_HANDLER_E(divdeu, 0x1F, 0x09, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206), 6933 GEN_HANDLER_E(divdeuo, 0x1F, 0x09, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206), 6934 GEN_HANDLER_E(divde, 0x1F, 0x09, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206), 6935 GEN_HANDLER_E(divdeo, 0x1F, 0x09, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206), 6936 GEN_HANDLER_E(modsd, 0x1F, 0x09, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300), 6937 GEN_HANDLER_E(modud, 0x1F, 0x09, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300), 6938 6939 #undef GEN_INT_ARITH_MUL_HELPER 6940 #define GEN_INT_ARITH_MUL_HELPER(name, opc3) \ 6941 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B) 6942 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00), 6943 GEN_INT_ARITH_MUL_HELPER(mulhd, 
0x02), 6944 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17), 6945 #endif 6946 6947 #undef GEN_INT_ARITH_SUBF 6948 #undef GEN_INT_ARITH_SUBF_CONST 6949 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \ 6950 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER), 6951 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \ 6952 add_ca, compute_ca, compute_ov) \ 6953 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER), 6954 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0) 6955 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1) 6956 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0) 6957 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1) 6958 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0) 6959 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1) 6960 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0) 6961 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1) 6962 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0) 6963 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1) 6964 6965 #undef GEN_LOGICAL1 6966 #undef GEN_LOGICAL2 6967 #define GEN_LOGICAL2(name, tcg_op, opc, type) \ 6968 GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type) 6969 #define GEN_LOGICAL1(name, tcg_op, opc, type) \ 6970 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type) 6971 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER), 6972 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER), 6973 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER), 6974 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER), 6975 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER), 6976 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER), 6977 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER), 6978 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER), 6979 #if defined(TARGET_PPC64) 6980 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B), 6981 #endif 6982 6983 #if defined(TARGET_PPC64) 6984 #undef GEN_PPC64_R2 6985 #undef GEN_PPC64_R4 6986 #define GEN_PPC64_R2(name, opc1, opc2) \ 6987 GEN_HANDLER2(name##0, 
stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\ 6988 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \ 6989 PPC_64B) 6990 #define GEN_PPC64_R4(name, opc1, opc2) \ 6991 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\ 6992 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \ 6993 PPC_64B), \ 6994 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \ 6995 PPC_64B), \ 6996 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \ 6997 PPC_64B) 6998 GEN_PPC64_R4(rldicl, 0x1E, 0x00), 6999 GEN_PPC64_R4(rldicr, 0x1E, 0x02), 7000 GEN_PPC64_R4(rldic, 0x1E, 0x04), 7001 GEN_PPC64_R2(rldcl, 0x1E, 0x08), 7002 GEN_PPC64_R2(rldcr, 0x1E, 0x09), 7003 GEN_PPC64_R4(rldimi, 0x1E, 0x06), 7004 #endif 7005 7006 #undef GEN_LD 7007 #undef GEN_LDU 7008 #undef GEN_LDUX 7009 #undef GEN_LDX_E 7010 #undef GEN_LDS 7011 #define GEN_LD(name, ldop, opc, type) \ 7012 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type), 7013 #define GEN_LDU(name, ldop, opc, type) \ 7014 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type), 7015 #define GEN_LDUX(name, ldop, opc2, opc3, type) \ 7016 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type), 7017 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \ 7018 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2), 7019 #define GEN_LDS(name, ldop, op, type) \ 7020 GEN_LD(name, ldop, op | 0x20, type) \ 7021 GEN_LDU(name, ldop, op | 0x21, type) \ 7022 GEN_LDUX(name, ldop, 0x17, op | 0x01, type) \ 7023 GEN_LDX(name, ldop, 0x17, op | 0x00, type) 7024 7025 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER) 7026 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER) 7027 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER) 7028 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER) 7029 #if defined(TARGET_PPC64) 7030 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B) 7031 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B) 7032 GEN_LDUX(ld, ld64_i64, 0x15, 0x01, PPC_64B) 7033 
GEN_LDX(ld, ld64_i64, 0x15, 0x00, PPC_64B) 7034 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE) 7035 7036 /* HV/P7 and later only */ 7037 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST) 7038 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x18, PPC_CILDST) 7039 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST) 7040 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST) 7041 #endif 7042 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER) 7043 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER) 7044 7045 #undef GEN_ST 7046 #undef GEN_STU 7047 #undef GEN_STUX 7048 #undef GEN_STX_E 7049 #undef GEN_STS 7050 #define GEN_ST(name, stop, opc, type) \ 7051 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type), 7052 #define GEN_STU(name, stop, opc, type) \ 7053 GEN_HANDLER(stop##u, opc, 0xFF, 0xFF, 0x00000000, type), 7054 #define GEN_STUX(name, stop, opc2, opc3, type) \ 7055 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type), 7056 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \ 7057 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000000, type, type2), 7058 #define GEN_STS(name, stop, op, type) \ 7059 GEN_ST(name, stop, op | 0x20, type) \ 7060 GEN_STU(name, stop, op | 0x21, type) \ 7061 GEN_STUX(name, stop, 0x17, op | 0x01, type) \ 7062 GEN_STX(name, stop, 0x17, op | 0x00, type) 7063 7064 GEN_STS(stb, st8, 0x06, PPC_INTEGER) 7065 GEN_STS(sth, st16, 0x0C, PPC_INTEGER) 7066 GEN_STS(stw, st32, 0x04, PPC_INTEGER) 7067 #if defined(TARGET_PPC64) 7068 GEN_STUX(std, st64_i64, 0x15, 0x05, PPC_64B) 7069 GEN_STX(std, st64_i64, 0x15, 0x04, PPC_64B) 7070 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE) 7071 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST) 7072 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST) 7073 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST) 7074 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST) 7075 #endif 7076 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER) 7077 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER) 7078 7079 
#undef GEN_CRLOGIC 7080 #define GEN_CRLOGIC(name, tcg_op, opc) \ 7081 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER) 7082 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08), 7083 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04), 7084 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09), 7085 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07), 7086 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01), 7087 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E), 7088 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D), 7089 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06), 7090 7091 #undef GEN_MAC_HANDLER 7092 #define GEN_MAC_HANDLER(name, opc2, opc3) \ 7093 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC) 7094 GEN_MAC_HANDLER(macchw, 0x0C, 0x05), 7095 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15), 7096 GEN_MAC_HANDLER(macchws, 0x0C, 0x07), 7097 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17), 7098 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06), 7099 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16), 7100 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04), 7101 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14), 7102 GEN_MAC_HANDLER(machhw, 0x0C, 0x01), 7103 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11), 7104 GEN_MAC_HANDLER(machhws, 0x0C, 0x03), 7105 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13), 7106 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02), 7107 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12), 7108 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00), 7109 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10), 7110 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D), 7111 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D), 7112 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F), 7113 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F), 7114 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C), 7115 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C), 7116 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E), 7117 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E), 7118 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05), 7119 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15), 7120 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07), 7121 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17), 7122 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01), 7123 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11), 7124 
GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03), 7125 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13), 7126 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D), 7127 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D), 7128 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F), 7129 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F), 7130 GEN_MAC_HANDLER(mulchw, 0x08, 0x05), 7131 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04), 7132 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01), 7133 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00), 7134 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D), 7135 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C), 7136 7137 GEN_HANDLER2_E(tbegin, "tbegin", 0x1F, 0x0E, 0x14, 0x01DFF800, \ 7138 PPC_NONE, PPC2_TM), 7139 GEN_HANDLER2_E(tend, "tend", 0x1F, 0x0E, 0x15, 0x01FFF800, \ 7140 PPC_NONE, PPC2_TM), 7141 GEN_HANDLER2_E(tabort, "tabort", 0x1F, 0x0E, 0x1C, 0x03E0F800, \ 7142 PPC_NONE, PPC2_TM), 7143 GEN_HANDLER2_E(tabortwc, "tabortwc", 0x1F, 0x0E, 0x18, 0x00000000, \ 7144 PPC_NONE, PPC2_TM), 7145 GEN_HANDLER2_E(tabortwci, "tabortwci", 0x1F, 0x0E, 0x1A, 0x00000000, \ 7146 PPC_NONE, PPC2_TM), 7147 GEN_HANDLER2_E(tabortdc, "tabortdc", 0x1F, 0x0E, 0x19, 0x00000000, \ 7148 PPC_NONE, PPC2_TM), 7149 GEN_HANDLER2_E(tabortdci, "tabortdci", 0x1F, 0x0E, 0x1B, 0x00000000, \ 7150 PPC_NONE, PPC2_TM), 7151 GEN_HANDLER2_E(tsr, "tsr", 0x1F, 0x0E, 0x17, 0x03DFF800, \ 7152 PPC_NONE, PPC2_TM), 7153 GEN_HANDLER2_E(tcheck, "tcheck", 0x1F, 0x0E, 0x16, 0x007FF800, \ 7154 PPC_NONE, PPC2_TM), 7155 GEN_HANDLER2_E(treclaim, "treclaim", 0x1F, 0x0E, 0x1D, 0x03E0F800, \ 7156 PPC_NONE, PPC2_TM), 7157 GEN_HANDLER2_E(trechkpt, "trechkpt", 0x1F, 0x0E, 0x1F, 0x03FFF800, \ 7158 PPC_NONE, PPC2_TM), 7159 7160 #include "translate/fp-ops.inc.c" 7161 7162 #include "translate/vmx-ops.inc.c" 7163 7164 #include "translate/vsx-ops.inc.c" 7165 7166 #include "translate/dfp-ops.inc.c" 7167 7168 #include "translate/spe-ops.inc.c" 7169 }; 7170 7171 #include "helper_regs.h" 7172 #include "translate_init.inc.c" 7173 7174 /*****************************************************************************/ 7175 /* Misc PowerPC 
helpers */ 7176 void ppc_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf, 7177 int flags) 7178 { 7179 #define RGPL 4 7180 #define RFPL 4 7181 7182 PowerPCCPU *cpu = POWERPC_CPU(cs); 7183 CPUPPCState *env = &cpu->env; 7184 int i; 7185 7186 cpu_fprintf(f, "NIP " TARGET_FMT_lx " LR " TARGET_FMT_lx " CTR " 7187 TARGET_FMT_lx " XER " TARGET_FMT_lx " CPU#%d\n", 7188 env->nip, env->lr, env->ctr, cpu_read_xer(env), 7189 cs->cpu_index); 7190 cpu_fprintf(f, "MSR " TARGET_FMT_lx " HID0 " TARGET_FMT_lx " HF " 7191 TARGET_FMT_lx " iidx %d didx %d\n", 7192 env->msr, env->spr[SPR_HID0], 7193 env->hflags, env->immu_idx, env->dmmu_idx); 7194 #if !defined(NO_TIMER_DUMP) 7195 cpu_fprintf(f, "TB %08" PRIu32 " %08" PRIu64 7196 #if !defined(CONFIG_USER_ONLY) 7197 " DECR %08" PRIu32 7198 #endif 7199 "\n", 7200 cpu_ppc_load_tbu(env), cpu_ppc_load_tbl(env) 7201 #if !defined(CONFIG_USER_ONLY) 7202 , cpu_ppc_load_decr(env) 7203 #endif 7204 ); 7205 #endif 7206 for (i = 0; i < 32; i++) { 7207 if ((i & (RGPL - 1)) == 0) 7208 cpu_fprintf(f, "GPR%02d", i); 7209 cpu_fprintf(f, " %016" PRIx64, ppc_dump_gpr(env, i)); 7210 if ((i & (RGPL - 1)) == (RGPL - 1)) 7211 cpu_fprintf(f, "\n"); 7212 } 7213 cpu_fprintf(f, "CR "); 7214 for (i = 0; i < 8; i++) 7215 cpu_fprintf(f, "%01x", env->crf[i]); 7216 cpu_fprintf(f, " ["); 7217 for (i = 0; i < 8; i++) { 7218 char a = '-'; 7219 if (env->crf[i] & 0x08) 7220 a = 'L'; 7221 else if (env->crf[i] & 0x04) 7222 a = 'G'; 7223 else if (env->crf[i] & 0x02) 7224 a = 'E'; 7225 cpu_fprintf(f, " %c%c", a, env->crf[i] & 0x01 ? 
'O' : ' '); 7226 } 7227 cpu_fprintf(f, " ] RES " TARGET_FMT_lx "\n", 7228 env->reserve_addr); 7229 7230 if (flags & CPU_DUMP_FPU) { 7231 for (i = 0; i < 32; i++) { 7232 if ((i & (RFPL - 1)) == 0) { 7233 cpu_fprintf(f, "FPR%02d", i); 7234 } 7235 cpu_fprintf(f, " %016" PRIx64, *((uint64_t *)&env->fpr[i])); 7236 if ((i & (RFPL - 1)) == (RFPL - 1)) { 7237 cpu_fprintf(f, "\n"); 7238 } 7239 } 7240 cpu_fprintf(f, "FPSCR " TARGET_FMT_lx "\n", env->fpscr); 7241 } 7242 7243 #if !defined(CONFIG_USER_ONLY) 7244 cpu_fprintf(f, " SRR0 " TARGET_FMT_lx " SRR1 " TARGET_FMT_lx 7245 " PVR " TARGET_FMT_lx " VRSAVE " TARGET_FMT_lx "\n", 7246 env->spr[SPR_SRR0], env->spr[SPR_SRR1], 7247 env->spr[SPR_PVR], env->spr[SPR_VRSAVE]); 7248 7249 cpu_fprintf(f, "SPRG0 " TARGET_FMT_lx " SPRG1 " TARGET_FMT_lx 7250 " SPRG2 " TARGET_FMT_lx " SPRG3 " TARGET_FMT_lx "\n", 7251 env->spr[SPR_SPRG0], env->spr[SPR_SPRG1], 7252 env->spr[SPR_SPRG2], env->spr[SPR_SPRG3]); 7253 7254 cpu_fprintf(f, "SPRG4 " TARGET_FMT_lx " SPRG5 " TARGET_FMT_lx 7255 " SPRG6 " TARGET_FMT_lx " SPRG7 " TARGET_FMT_lx "\n", 7256 env->spr[SPR_SPRG4], env->spr[SPR_SPRG5], 7257 env->spr[SPR_SPRG6], env->spr[SPR_SPRG7]); 7258 7259 #if defined(TARGET_PPC64) 7260 if (env->excp_model == POWERPC_EXCP_POWER7 || 7261 env->excp_model == POWERPC_EXCP_POWER8) { 7262 cpu_fprintf(f, "HSRR0 " TARGET_FMT_lx " HSRR1 " TARGET_FMT_lx "\n", 7263 env->spr[SPR_HSRR0], env->spr[SPR_HSRR1]); 7264 } 7265 #endif 7266 if (env->excp_model == POWERPC_EXCP_BOOKE) { 7267 cpu_fprintf(f, "CSRR0 " TARGET_FMT_lx " CSRR1 " TARGET_FMT_lx 7268 " MCSRR0 " TARGET_FMT_lx " MCSRR1 " TARGET_FMT_lx "\n", 7269 env->spr[SPR_BOOKE_CSRR0], env->spr[SPR_BOOKE_CSRR1], 7270 env->spr[SPR_BOOKE_MCSRR0], env->spr[SPR_BOOKE_MCSRR1]); 7271 7272 cpu_fprintf(f, " TCR " TARGET_FMT_lx " TSR " TARGET_FMT_lx 7273 " ESR " TARGET_FMT_lx " DEAR " TARGET_FMT_lx "\n", 7274 env->spr[SPR_BOOKE_TCR], env->spr[SPR_BOOKE_TSR], 7275 env->spr[SPR_BOOKE_ESR], env->spr[SPR_BOOKE_DEAR]); 7276 7277 
cpu_fprintf(f, " PIR " TARGET_FMT_lx " DECAR " TARGET_FMT_lx 7278 " IVPR " TARGET_FMT_lx " EPCR " TARGET_FMT_lx "\n", 7279 env->spr[SPR_BOOKE_PIR], env->spr[SPR_BOOKE_DECAR], 7280 env->spr[SPR_BOOKE_IVPR], env->spr[SPR_BOOKE_EPCR]); 7281 7282 cpu_fprintf(f, " MCSR " TARGET_FMT_lx " SPRG8 " TARGET_FMT_lx 7283 " EPR " TARGET_FMT_lx "\n", 7284 env->spr[SPR_BOOKE_MCSR], env->spr[SPR_BOOKE_SPRG8], 7285 env->spr[SPR_BOOKE_EPR]); 7286 7287 /* FSL-specific */ 7288 cpu_fprintf(f, " MCAR " TARGET_FMT_lx " PID1 " TARGET_FMT_lx 7289 " PID2 " TARGET_FMT_lx " SVR " TARGET_FMT_lx "\n", 7290 env->spr[SPR_Exxx_MCAR], env->spr[SPR_BOOKE_PID1], 7291 env->spr[SPR_BOOKE_PID2], env->spr[SPR_E500_SVR]); 7292 7293 /* 7294 * IVORs are left out as they are large and do not change often -- 7295 * they can be read with "p $ivor0", "p $ivor1", etc. 7296 */ 7297 } 7298 7299 #if defined(TARGET_PPC64) 7300 if (env->flags & POWERPC_FLAG_CFAR) { 7301 cpu_fprintf(f, " CFAR " TARGET_FMT_lx"\n", env->cfar); 7302 } 7303 #endif 7304 7305 if (env->spr_cb[SPR_LPCR].name) 7306 cpu_fprintf(f, " LPCR " TARGET_FMT_lx "\n", env->spr[SPR_LPCR]); 7307 7308 switch (env->mmu_model) { 7309 case POWERPC_MMU_32B: 7310 case POWERPC_MMU_601: 7311 case POWERPC_MMU_SOFT_6xx: 7312 case POWERPC_MMU_SOFT_74xx: 7313 #if defined(TARGET_PPC64) 7314 case POWERPC_MMU_64B: 7315 case POWERPC_MMU_2_03: 7316 case POWERPC_MMU_2_06: 7317 case POWERPC_MMU_2_07: 7318 case POWERPC_MMU_3_00: 7319 #endif 7320 if (env->spr_cb[SPR_SDR1].name) { /* SDR1 Exists */ 7321 cpu_fprintf(f, " SDR1 " TARGET_FMT_lx " ", env->spr[SPR_SDR1]); 7322 } 7323 if (env->spr_cb[SPR_PTCR].name) { /* PTCR Exists */ 7324 cpu_fprintf(f, " PTCR " TARGET_FMT_lx " ", env->spr[SPR_PTCR]); 7325 } 7326 cpu_fprintf(f, " DAR " TARGET_FMT_lx " DSISR " TARGET_FMT_lx "\n", 7327 env->spr[SPR_DAR], env->spr[SPR_DSISR]); 7328 break; 7329 case POWERPC_MMU_BOOKE206: 7330 cpu_fprintf(f, " MAS0 " TARGET_FMT_lx " MAS1 " TARGET_FMT_lx 7331 " MAS2 " TARGET_FMT_lx " MAS3 " TARGET_FMT_lx 
"\n", 7332 env->spr[SPR_BOOKE_MAS0], env->spr[SPR_BOOKE_MAS1], 7333 env->spr[SPR_BOOKE_MAS2], env->spr[SPR_BOOKE_MAS3]); 7334 7335 cpu_fprintf(f, " MAS4 " TARGET_FMT_lx " MAS6 " TARGET_FMT_lx 7336 " MAS7 " TARGET_FMT_lx " PID " TARGET_FMT_lx "\n", 7337 env->spr[SPR_BOOKE_MAS4], env->spr[SPR_BOOKE_MAS6], 7338 env->spr[SPR_BOOKE_MAS7], env->spr[SPR_BOOKE_PID]); 7339 7340 cpu_fprintf(f, "MMUCFG " TARGET_FMT_lx " TLB0CFG " TARGET_FMT_lx 7341 " TLB1CFG " TARGET_FMT_lx "\n", 7342 env->spr[SPR_MMUCFG], env->spr[SPR_BOOKE_TLB0CFG], 7343 env->spr[SPR_BOOKE_TLB1CFG]); 7344 break; 7345 default: 7346 break; 7347 } 7348 #endif 7349 7350 #undef RGPL 7351 #undef RFPL 7352 } 7353 7354 void ppc_cpu_dump_statistics(CPUState *cs, FILE*f, 7355 fprintf_function cpu_fprintf, int flags) 7356 { 7357 #if defined(DO_PPC_STATISTICS) 7358 PowerPCCPU *cpu = POWERPC_CPU(cs); 7359 opc_handler_t **t1, **t2, **t3, *handler; 7360 int op1, op2, op3; 7361 7362 t1 = cpu->env.opcodes; 7363 for (op1 = 0; op1 < 64; op1++) { 7364 handler = t1[op1]; 7365 if (is_indirect_opcode(handler)) { 7366 t2 = ind_table(handler); 7367 for (op2 = 0; op2 < 32; op2++) { 7368 handler = t2[op2]; 7369 if (is_indirect_opcode(handler)) { 7370 t3 = ind_table(handler); 7371 for (op3 = 0; op3 < 32; op3++) { 7372 handler = t3[op3]; 7373 if (handler->count == 0) 7374 continue; 7375 cpu_fprintf(f, "%02x %02x %02x (%02x %04d) %16s: " 7376 "%016" PRIx64 " %" PRId64 "\n", 7377 op1, op2, op3, op1, (op3 << 5) | op2, 7378 handler->oname, 7379 handler->count, handler->count); 7380 } 7381 } else { 7382 if (handler->count == 0) 7383 continue; 7384 cpu_fprintf(f, "%02x %02x (%02x %04d) %16s: " 7385 "%016" PRIx64 " %" PRId64 "\n", 7386 op1, op2, op1, op2, handler->oname, 7387 handler->count, handler->count); 7388 } 7389 } 7390 } else { 7391 if (handler->count == 0) 7392 continue; 7393 cpu_fprintf(f, "%02x (%02x ) %16s: %016" PRIx64 7394 " %" PRId64 "\n", 7395 op1, op1, handler->oname, 7396 handler->count, handler->count); 7397 } 7398 } 7399 
#endif
}

/*
 * Translator hook: initialize the DisasContext for a new translation
 * block from the current CPU state (MSR bits, MMU model, and which
 * optional facilities -- FPU, SPE, AltiVec, VSX, TM -- are enabled).
 */
static void ppc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    CPUPPCState *env = cs->env_ptr;
    int bound;

    ctx->exception = POWERPC_EXCP_NONE;
    ctx->spr_cb = env->spr_cb;
    ctx->pr = msr_pr;
    ctx->mem_idx = env->dmmu_idx;
    ctx->dr = msr_dr;
#if !defined(CONFIG_USER_ONLY)
    /* CPUs without a hypervisor mode are treated as always-HV. */
    ctx->hv = msr_hv || !env->has_hv_mode;
#endif
    ctx->insns_flags = env->insns_flags;
    ctx->insns_flags2 = env->insns_flags2;
    /* -1 marks "no access type set yet" for this instruction. */
    ctx->access_type = -1;
    ctx->need_access_type = !(env->mmu_model & POWERPC_MMU_64B);
    ctx->le_mode = !!(env->hflags & (1 << MSR_LE));
    ctx->default_tcg_memop_mask = ctx->le_mode ? MO_LE : MO_BE;
#if defined(TARGET_PPC64)
    ctx->sf_mode = msr_is_64bit(env, env->msr);
    ctx->has_cfar = !!(env->flags & POWERPC_FLAG_CFAR);
#endif
    ctx->lazy_tlb_flush = env->mmu_model == POWERPC_MMU_32B
        || env->mmu_model == POWERPC_MMU_601
        || (env->mmu_model & POWERPC_MMU_64B);

    ctx->fpu_enabled = !!msr_fp;
    /* Each optional facility must be both present and enabled in MSR. */
    if ((env->flags & POWERPC_FLAG_SPE) && msr_spe)
        ctx->spe_enabled = !!msr_spe;
    else
        ctx->spe_enabled = false;
    if ((env->flags & POWERPC_FLAG_VRE) && msr_vr)
        ctx->altivec_enabled = !!msr_vr;
    else
        ctx->altivec_enabled = false;
    if ((env->flags & POWERPC_FLAG_VSX) && msr_vsx) {
        ctx->vsx_enabled = !!msr_vsx;
    } else {
        ctx->vsx_enabled = false;
    }
#if defined(TARGET_PPC64)
    if ((env->flags & POWERPC_FLAG_TM) && msr_tm) {
        ctx->tm_enabled = !!msr_tm;
    } else {
        ctx->tm_enabled = false;
    }
#endif
    ctx->gtse = !!(env->spr[SPR_LPCR] & LPCR_GTSE);
    if ((env->flags & POWERPC_FLAG_SE) && msr_se)
        ctx->singlestep_enabled = CPU_SINGLE_STEP;
    else
        ctx->singlestep_enabled = 0;
    if ((env->flags & POWERPC_FLAG_BE) && msr_be)
        ctx->singlestep_enabled |= CPU_BRANCH_STEP;
    /* gdbstub single-stepping is tracked alongside MSR-driven stepping. */
    if (unlikely(ctx->base.singlestep_enabled)) {
        ctx->singlestep_enabled |= GDBSTUB_SINGLE_STEP;
    }
#if defined (DO_SINGLE_STEP) && 0
    /* Single step trace mode */
    msr_se = 1;
#endif

    /* Cap the TB so it cannot run past the end of the current page. */
    bound = -(ctx->base.pc_first | TARGET_PAGE_MASK) / 4;
    ctx->base.max_insns = MIN(ctx->base.max_insns, bound);
}

/* Translator hook: nothing to emit at TB start for PPC. */
static void ppc_tr_tb_start(DisasContextBase *db, CPUState *cs)
{
}

/* Translator hook: record the insn's PC for restore_state_to_opc(). */
static void ppc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
{
    tcg_gen_insn_start(dcbase->pc_next);
}

/* Translator hook: emit a debug exception when a breakpoint is hit. */
static bool ppc_tr_breakpoint_check(DisasContextBase *dcbase, CPUState *cs,
                                    const CPUBreakpoint *bp)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    gen_debug_exception(ctx);
    dcbase->is_jmp = DISAS_NORETURN;
    /* The address covered by the breakpoint must be included in
       [tb->pc, tb->pc + tb->size) in order for it to be
       properly cleared -- thus we increment the PC here so that
       the logic setting tb->size below does the right thing. */
    ctx->base.pc_next += 4;
    return true;
}

/*
 * Translator hook: fetch, decode and emit TCG ops for one instruction.
 */
static void ppc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    CPUPPCState *env = cs->env_ptr;
    opc_handler_t **table, *handler;

    LOG_DISAS("----------------\n");
    LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
              ctx->base.pc_next, ctx->mem_idx, (int)msr_ir);

    /* Fetch the opcode, byte-swapping if guest and TB endianness differ. */
    if (unlikely(need_byteswap(ctx))) {
        ctx->opcode = bswap32(cpu_ldl_code(env, ctx->base.pc_next));
    } else {
        ctx->opcode = cpu_ldl_code(env, ctx->base.pc_next);
    }
    LOG_DISAS("translate opcode %08x (%02x %02x %02x %02x) (%s)\n",
              ctx->opcode, opc1(ctx->opcode), opc2(ctx->opcode),
              opc3(ctx->opcode), opc4(ctx->opcode),
              ctx->le_mode ?
              "little" : "big");
    ctx->base.pc_next += 4;
    /* Resolve the handler through up to four levels of dispatch tables. */
    table = env->opcodes;
    handler = table[opc1(ctx->opcode)];
    if (is_indirect_opcode(handler)) {
        table = ind_table(handler);
        handler = table[opc2(ctx->opcode)];
        if (is_indirect_opcode(handler)) {
            table = ind_table(handler);
            handler = table[opc3(ctx->opcode)];
            if (is_indirect_opcode(handler)) {
                table = ind_table(handler);
                handler = table[opc4(ctx->opcode)];
            }
        }
    }
    /* Is opcode *REALLY* valid ? */
    if (unlikely(handler->handler == &gen_invalid)) {
        qemu_log_mask(LOG_GUEST_ERROR, "invalid/unsupported opcode: "
                      "%02x - %02x - %02x - %02x (%08x) "
                      TARGET_FMT_lx " %d\n",
                      opc1(ctx->opcode), opc2(ctx->opcode),
                      opc3(ctx->opcode), opc4(ctx->opcode),
                      ctx->opcode, ctx->base.pc_next - 4, (int)msr_ir);
    } else {
        uint32_t inval;

        /* SPE instructions with Rc set use the alternate invalid mask. */
        if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE)
                     && Rc(ctx->opcode))) {
            inval = handler->inval2;
        } else {
            inval = handler->inval1;
        }

        /* Reject encodings with bits set that must be zero. */
        if (unlikely((ctx->opcode & inval) != 0)) {
            qemu_log_mask(LOG_GUEST_ERROR, "invalid bits: %08x for opcode: "
                          "%02x - %02x - %02x - %02x (%08x) "
                          TARGET_FMT_lx "\n", ctx->opcode & inval,
                          opc1(ctx->opcode), opc2(ctx->opcode),
                          opc3(ctx->opcode), opc4(ctx->opcode),
                          ctx->opcode, ctx->base.pc_next - 4);
            gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
            ctx->base.is_jmp = DISAS_NORETURN;
            return;
        }
    }
    (*(handler->handler))(ctx);
#if defined(DO_PPC_STATISTICS)
    handler->count++;
#endif
    /* Check trace mode exceptions */
    if (unlikely(ctx->singlestep_enabled & CPU_SINGLE_STEP &&
                 (ctx->base.pc_next <= 0x100 || ctx->base.pc_next > 0xF00) &&
                 ctx->exception != POWERPC_SYSCALL &&
                 ctx->exception != POWERPC_EXCP_TRAP &&
                 ctx->exception != POWERPC_EXCP_BRANCH)) {
        gen_exception_nip(ctx, POWERPC_EXCP_TRACE,
                          ctx->base.pc_next);
    }

    /* Diagnose TCG temporaries leaked by the opcode handler. */
    if (tcg_check_temp_count()) {
        qemu_log("Opcode %02x %02x %02x %02x (%08x) leaked "
                 "temporaries\n", opc1(ctx->opcode), opc2(ctx->opcode),
                 opc3(ctx->opcode), opc4(ctx->opcode), ctx->opcode);
    }

    ctx->base.is_jmp = ctx->exception == POWERPC_EXCP_NONE ?
        DISAS_NEXT : DISAS_NORETURN;
}

/* Translator hook: emit the TB epilogue. */
static void ppc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    if (ctx->exception == POWERPC_EXCP_NONE) {
        /* Fell off the end of the TB: chain to the next one. */
        gen_goto_tb(ctx, 0, ctx->base.pc_next);
    } else if (ctx->exception != POWERPC_EXCP_BRANCH) {
        if (unlikely(ctx->base.singlestep_enabled)) {
            gen_debug_exception(ctx);
        }
        /* Generate the return instruction */
        tcg_gen_exit_tb(NULL, 0);
    }
}

/* Translator hook: log the guest disassembly of this TB. */
static void ppc_tr_disas_log(const DisasContextBase *dcbase, CPUState *cs)
{
    qemu_log("IN: %s\n", lookup_symbol(dcbase->pc_first));
    log_target_disas(cs, dcbase->pc_first, dcbase->tb->size);
}

/* Hook table consumed by the generic translator loop. */
static const TranslatorOps ppc_tr_ops = {
    .init_disas_context = ppc_tr_init_disas_context,
    .tb_start = ppc_tr_tb_start,
    .insn_start = ppc_tr_insn_start,
    .breakpoint_check = ppc_tr_breakpoint_check,
    .translate_insn = ppc_tr_translate_insn,
    .tb_stop = ppc_tr_tb_stop,
    .disas_log = ppc_tr_disas_log,
};

/* Main entry point: translate one TB via the generic translator loop. */
void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb)
{
    DisasContext ctx;

    translator_loop(&ppc_tr_ops, &ctx.base, cs, tb);
}

/* Restore env->nip from the insn_start data recorded for this TB. */
void restore_state_to_opc(CPUPPCState *env, TranslationBlock *tb,
                          target_ulong *data)
{
    env->nip = data[0];
}