1 /* 2 * PowerPC emulation for qemu: main translation routines. 3 * 4 * Copyright (c) 2003-2007 Jocelyn Mayer 5 * Copyright (C) 2011 Freescale Semiconductor, Inc. 6 * 7 * This library is free software; you can redistribute it and/or 8 * modify it under the terms of the GNU Lesser General Public 9 * License as published by the Free Software Foundation; either 10 * version 2 of the License, or (at your option) any later version. 11 * 12 * This library is distributed in the hope that it will be useful, 13 * but WITHOUT ANY WARRANTY; without even the implied warranty of 14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 15 * Lesser General Public License for more details. 16 * 17 * You should have received a copy of the GNU Lesser General Public 18 * License along with this library; if not, see <http://www.gnu.org/licenses/>. 19 */ 20 21 #include "qemu/osdep.h" 22 #include "cpu.h" 23 #include "internal.h" 24 #include "disas/disas.h" 25 #include "exec/exec-all.h" 26 #include "tcg-op.h" 27 #include "tcg-op-gvec.h" 28 #include "qemu/host-utils.h" 29 #include "exec/cpu_ldst.h" 30 31 #include "exec/helper-proto.h" 32 #include "exec/helper-gen.h" 33 34 #include "trace-tcg.h" 35 #include "exec/translator.h" 36 #include "exec/log.h" 37 #include "qemu/atomic128.h" 38 39 40 #define CPU_SINGLE_STEP 0x1 41 #define CPU_BRANCH_STEP 0x2 42 #define GDBSTUB_SINGLE_STEP 0x4 43 44 /* Include definitions for instructions classes and implementations flags */ 45 //#define PPC_DEBUG_DISAS 46 //#define DO_PPC_STATISTICS 47 48 #ifdef PPC_DEBUG_DISAS 49 # define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__) 50 #else 51 # define LOG_DISAS(...) 
do { } while (0) 52 #endif 53 /*****************************************************************************/ 54 /* Code translation helpers */ 55 56 /* global register indexes */ 57 static char cpu_reg_names[10*3 + 22*4 /* GPR */ 58 + 10*4 + 22*5 /* SPE GPRh */ 59 + 8*5 /* CRF */]; 60 static TCGv cpu_gpr[32]; 61 static TCGv cpu_gprh[32]; 62 static TCGv_i32 cpu_crf[8]; 63 static TCGv cpu_nip; 64 static TCGv cpu_msr; 65 static TCGv cpu_ctr; 66 static TCGv cpu_lr; 67 #if defined(TARGET_PPC64) 68 static TCGv cpu_cfar; 69 #endif 70 static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca, cpu_ov32, cpu_ca32; 71 static TCGv cpu_reserve; 72 static TCGv cpu_reserve_val; 73 static TCGv cpu_fpscr; 74 static TCGv_i32 cpu_access_type; 75 76 #include "exec/gen-icount.h" 77 78 void ppc_translate_init(void) 79 { 80 int i; 81 char* p; 82 size_t cpu_reg_names_size; 83 84 p = cpu_reg_names; 85 cpu_reg_names_size = sizeof(cpu_reg_names); 86 87 for (i = 0; i < 8; i++) { 88 snprintf(p, cpu_reg_names_size, "crf%d", i); 89 cpu_crf[i] = tcg_global_mem_new_i32(cpu_env, 90 offsetof(CPUPPCState, crf[i]), p); 91 p += 5; 92 cpu_reg_names_size -= 5; 93 } 94 95 for (i = 0; i < 32; i++) { 96 snprintf(p, cpu_reg_names_size, "r%d", i); 97 cpu_gpr[i] = tcg_global_mem_new(cpu_env, 98 offsetof(CPUPPCState, gpr[i]), p); 99 p += (i < 10) ? 3 : 4; 100 cpu_reg_names_size -= (i < 10) ? 3 : 4; 101 snprintf(p, cpu_reg_names_size, "r%dH", i); 102 cpu_gprh[i] = tcg_global_mem_new(cpu_env, 103 offsetof(CPUPPCState, gprh[i]), p); 104 p += (i < 10) ? 4 : 5; 105 cpu_reg_names_size -= (i < 10) ? 
4 : 5; 106 } 107 108 cpu_nip = tcg_global_mem_new(cpu_env, 109 offsetof(CPUPPCState, nip), "nip"); 110 111 cpu_msr = tcg_global_mem_new(cpu_env, 112 offsetof(CPUPPCState, msr), "msr"); 113 114 cpu_ctr = tcg_global_mem_new(cpu_env, 115 offsetof(CPUPPCState, ctr), "ctr"); 116 117 cpu_lr = tcg_global_mem_new(cpu_env, 118 offsetof(CPUPPCState, lr), "lr"); 119 120 #if defined(TARGET_PPC64) 121 cpu_cfar = tcg_global_mem_new(cpu_env, 122 offsetof(CPUPPCState, cfar), "cfar"); 123 #endif 124 125 cpu_xer = tcg_global_mem_new(cpu_env, 126 offsetof(CPUPPCState, xer), "xer"); 127 cpu_so = tcg_global_mem_new(cpu_env, 128 offsetof(CPUPPCState, so), "SO"); 129 cpu_ov = tcg_global_mem_new(cpu_env, 130 offsetof(CPUPPCState, ov), "OV"); 131 cpu_ca = tcg_global_mem_new(cpu_env, 132 offsetof(CPUPPCState, ca), "CA"); 133 cpu_ov32 = tcg_global_mem_new(cpu_env, 134 offsetof(CPUPPCState, ov32), "OV32"); 135 cpu_ca32 = tcg_global_mem_new(cpu_env, 136 offsetof(CPUPPCState, ca32), "CA32"); 137 138 cpu_reserve = tcg_global_mem_new(cpu_env, 139 offsetof(CPUPPCState, reserve_addr), 140 "reserve_addr"); 141 cpu_reserve_val = tcg_global_mem_new(cpu_env, 142 offsetof(CPUPPCState, reserve_val), 143 "reserve_val"); 144 145 cpu_fpscr = tcg_global_mem_new(cpu_env, 146 offsetof(CPUPPCState, fpscr), "fpscr"); 147 148 cpu_access_type = tcg_global_mem_new_i32(cpu_env, 149 offsetof(CPUPPCState, access_type), "access_type"); 150 } 151 152 /* internal defines */ 153 struct DisasContext { 154 DisasContextBase base; 155 uint32_t opcode; 156 uint32_t exception; 157 /* Routine used to access memory */ 158 bool pr, hv, dr, le_mode; 159 bool lazy_tlb_flush; 160 bool need_access_type; 161 int mem_idx; 162 int access_type; 163 /* Translation flags */ 164 TCGMemOp default_tcg_memop_mask; 165 #if defined(TARGET_PPC64) 166 bool sf_mode; 167 bool has_cfar; 168 #endif 169 bool fpu_enabled; 170 bool altivec_enabled; 171 bool vsx_enabled; 172 bool spe_enabled; 173 bool tm_enabled; 174 bool gtse; 175 ppc_spr_t *spr_cb; /* 
Needed to check rights for mfspr/mtspr */ 176 int singlestep_enabled; 177 uint32_t flags; 178 uint64_t insns_flags; 179 uint64_t insns_flags2; 180 }; 181 182 /* Return true iff byteswap is needed in a scalar memop */ 183 static inline bool need_byteswap(const DisasContext *ctx) 184 { 185 #if defined(TARGET_WORDS_BIGENDIAN) 186 return ctx->le_mode; 187 #else 188 return !ctx->le_mode; 189 #endif 190 } 191 192 /* True when active word size < size of target_long. */ 193 #ifdef TARGET_PPC64 194 # define NARROW_MODE(C) (!(C)->sf_mode) 195 #else 196 # define NARROW_MODE(C) 0 197 #endif 198 199 struct opc_handler_t { 200 /* invalid bits for instruction 1 (Rc(opcode) == 0) */ 201 uint32_t inval1; 202 /* invalid bits for instruction 2 (Rc(opcode) == 1) */ 203 uint32_t inval2; 204 /* instruction type */ 205 uint64_t type; 206 /* extended instruction type */ 207 uint64_t type2; 208 /* handler */ 209 void (*handler)(DisasContext *ctx); 210 #if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU) 211 const char *oname; 212 #endif 213 #if defined(DO_PPC_STATISTICS) 214 uint64_t count; 215 #endif 216 }; 217 218 /* SPR load/store helpers */ 219 static inline void gen_load_spr(TCGv t, int reg) 220 { 221 tcg_gen_ld_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg])); 222 } 223 224 static inline void gen_store_spr(int reg, TCGv t) 225 { 226 tcg_gen_st_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg])); 227 } 228 229 static inline void gen_set_access_type(DisasContext *ctx, int access_type) 230 { 231 if (ctx->need_access_type && ctx->access_type != access_type) { 232 tcg_gen_movi_i32(cpu_access_type, access_type); 233 ctx->access_type = access_type; 234 } 235 } 236 237 static inline void gen_update_nip(DisasContext *ctx, target_ulong nip) 238 { 239 if (NARROW_MODE(ctx)) { 240 nip = (uint32_t)nip; 241 } 242 tcg_gen_movi_tl(cpu_nip, nip); 243 } 244 245 static void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error) 246 { 247 TCGv_i32 t0, t1; 248 249 /* These are all synchronous 
exceptions, we set the PC back to 250 * the faulting instruction 251 */ 252 if (ctx->exception == POWERPC_EXCP_NONE) { 253 gen_update_nip(ctx, ctx->base.pc_next - 4); 254 } 255 t0 = tcg_const_i32(excp); 256 t1 = tcg_const_i32(error); 257 gen_helper_raise_exception_err(cpu_env, t0, t1); 258 tcg_temp_free_i32(t0); 259 tcg_temp_free_i32(t1); 260 ctx->exception = (excp); 261 } 262 263 static void gen_exception(DisasContext *ctx, uint32_t excp) 264 { 265 TCGv_i32 t0; 266 267 /* These are all synchronous exceptions, we set the PC back to 268 * the faulting instruction 269 */ 270 if (ctx->exception == POWERPC_EXCP_NONE) { 271 gen_update_nip(ctx, ctx->base.pc_next - 4); 272 } 273 t0 = tcg_const_i32(excp); 274 gen_helper_raise_exception(cpu_env, t0); 275 tcg_temp_free_i32(t0); 276 ctx->exception = (excp); 277 } 278 279 static void gen_exception_nip(DisasContext *ctx, uint32_t excp, 280 target_ulong nip) 281 { 282 TCGv_i32 t0; 283 284 gen_update_nip(ctx, nip); 285 t0 = tcg_const_i32(excp); 286 gen_helper_raise_exception(cpu_env, t0); 287 tcg_temp_free_i32(t0); 288 ctx->exception = (excp); 289 } 290 291 /* 292 * Tells the caller what is the appropriate exception to generate and prepares 293 * SPR registers for this exception. 294 * 295 * The exception can be either POWERPC_EXCP_TRACE (on most PowerPCs) or 296 * POWERPC_EXCP_DEBUG (on BookE). 
297 */ 298 static uint32_t gen_prep_dbgex(DisasContext *ctx) 299 { 300 if (ctx->flags & POWERPC_FLAG_DE) { 301 target_ulong dbsr = 0; 302 if (ctx->singlestep_enabled & CPU_SINGLE_STEP) { 303 dbsr = DBCR0_ICMP; 304 } else { 305 /* Must have been branch */ 306 dbsr = DBCR0_BRT; 307 } 308 TCGv t0 = tcg_temp_new(); 309 gen_load_spr(t0, SPR_BOOKE_DBSR); 310 tcg_gen_ori_tl(t0, t0, dbsr); 311 gen_store_spr(SPR_BOOKE_DBSR, t0); 312 tcg_temp_free(t0); 313 return POWERPC_EXCP_DEBUG; 314 } else { 315 return POWERPC_EXCP_TRACE; 316 } 317 } 318 319 static void gen_debug_exception(DisasContext *ctx) 320 { 321 TCGv_i32 t0; 322 323 /* These are all synchronous exceptions, we set the PC back to 324 * the faulting instruction 325 */ 326 if ((ctx->exception != POWERPC_EXCP_BRANCH) && 327 (ctx->exception != POWERPC_EXCP_SYNC)) { 328 gen_update_nip(ctx, ctx->base.pc_next); 329 } 330 t0 = tcg_const_i32(EXCP_DEBUG); 331 gen_helper_raise_exception(cpu_env, t0); 332 tcg_temp_free_i32(t0); 333 } 334 335 static inline void gen_inval_exception(DisasContext *ctx, uint32_t error) 336 { 337 /* Will be converted to program check if needed */ 338 gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_INVAL | error); 339 } 340 341 static inline void gen_priv_exception(DisasContext *ctx, uint32_t error) 342 { 343 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_PRIV | error); 344 } 345 346 static inline void gen_hvpriv_exception(DisasContext *ctx, uint32_t error) 347 { 348 /* Will be converted to program check if needed */ 349 gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_PRIV | error); 350 } 351 352 /* Stop translation */ 353 static inline void gen_stop_exception(DisasContext *ctx) 354 { 355 gen_update_nip(ctx, ctx->base.pc_next); 356 ctx->exception = POWERPC_EXCP_STOP; 357 } 358 359 #ifndef CONFIG_USER_ONLY 360 /* No need to update nip here, as execution flow will change */ 361 static inline void gen_sync_exception(DisasContext *ctx) 362 { 363 ctx->exception = 
POWERPC_EXCP_SYNC; 364 } 365 #endif 366 367 #define GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \ 368 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE) 369 370 #define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2) \ 371 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2) 372 373 #define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type) \ 374 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE) 375 376 #define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2) \ 377 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2) 378 379 #define GEN_HANDLER_E_2(name, opc1, opc2, opc3, opc4, inval, type, type2) \ 380 GEN_OPCODE3(name, opc1, opc2, opc3, opc4, inval, type, type2) 381 382 #define GEN_HANDLER2_E_2(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) \ 383 GEN_OPCODE4(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) 384 385 typedef struct opcode_t { 386 unsigned char opc1, opc2, opc3, opc4; 387 #if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */ 388 unsigned char pad[4]; 389 #endif 390 opc_handler_t handler; 391 const char *oname; 392 } opcode_t; 393 394 /* Helpers for priv. 
check */ 395 #define GEN_PRIV \ 396 do { \ 397 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); return; \ 398 } while (0) 399 400 #if defined(CONFIG_USER_ONLY) 401 #define CHK_HV GEN_PRIV 402 #define CHK_SV GEN_PRIV 403 #define CHK_HVRM GEN_PRIV 404 #else 405 #define CHK_HV \ 406 do { \ 407 if (unlikely(ctx->pr || !ctx->hv)) { \ 408 GEN_PRIV; \ 409 } \ 410 } while (0) 411 #define CHK_SV \ 412 do { \ 413 if (unlikely(ctx->pr)) { \ 414 GEN_PRIV; \ 415 } \ 416 } while (0) 417 #define CHK_HVRM \ 418 do { \ 419 if (unlikely(ctx->pr || !ctx->hv || ctx->dr)) { \ 420 GEN_PRIV; \ 421 } \ 422 } while (0) 423 #endif 424 425 #define CHK_NONE 426 427 /*****************************************************************************/ 428 /* PowerPC instructions table */ 429 430 #if defined(DO_PPC_STATISTICS) 431 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \ 432 { \ 433 .opc1 = op1, \ 434 .opc2 = op2, \ 435 .opc3 = op3, \ 436 .opc4 = 0xff, \ 437 .handler = { \ 438 .inval1 = invl, \ 439 .type = _typ, \ 440 .type2 = _typ2, \ 441 .handler = &gen_##name, \ 442 .oname = stringify(name), \ 443 }, \ 444 .oname = stringify(name), \ 445 } 446 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \ 447 { \ 448 .opc1 = op1, \ 449 .opc2 = op2, \ 450 .opc3 = op3, \ 451 .opc4 = 0xff, \ 452 .handler = { \ 453 .inval1 = invl1, \ 454 .inval2 = invl2, \ 455 .type = _typ, \ 456 .type2 = _typ2, \ 457 .handler = &gen_##name, \ 458 .oname = stringify(name), \ 459 }, \ 460 .oname = stringify(name), \ 461 } 462 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \ 463 { \ 464 .opc1 = op1, \ 465 .opc2 = op2, \ 466 .opc3 = op3, \ 467 .opc4 = 0xff, \ 468 .handler = { \ 469 .inval1 = invl, \ 470 .type = _typ, \ 471 .type2 = _typ2, \ 472 .handler = &gen_##name, \ 473 .oname = onam, \ 474 }, \ 475 .oname = onam, \ 476 } 477 #define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2) \ 478 { \ 479 .opc1 = op1, \ 480 .opc2 = op2, \ 481 .opc3 = op3, \ 482 .opc4 = 
op4, \ 483 .handler = { \ 484 .inval1 = invl, \ 485 .type = _typ, \ 486 .type2 = _typ2, \ 487 .handler = &gen_##name, \ 488 .oname = stringify(name), \ 489 }, \ 490 .oname = stringify(name), \ 491 } 492 #define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2) \ 493 { \ 494 .opc1 = op1, \ 495 .opc2 = op2, \ 496 .opc3 = op3, \ 497 .opc4 = op4, \ 498 .handler = { \ 499 .inval1 = invl, \ 500 .type = _typ, \ 501 .type2 = _typ2, \ 502 .handler = &gen_##name, \ 503 .oname = onam, \ 504 }, \ 505 .oname = onam, \ 506 } 507 #else 508 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \ 509 { \ 510 .opc1 = op1, \ 511 .opc2 = op2, \ 512 .opc3 = op3, \ 513 .opc4 = 0xff, \ 514 .handler = { \ 515 .inval1 = invl, \ 516 .type = _typ, \ 517 .type2 = _typ2, \ 518 .handler = &gen_##name, \ 519 }, \ 520 .oname = stringify(name), \ 521 } 522 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \ 523 { \ 524 .opc1 = op1, \ 525 .opc2 = op2, \ 526 .opc3 = op3, \ 527 .opc4 = 0xff, \ 528 .handler = { \ 529 .inval1 = invl1, \ 530 .inval2 = invl2, \ 531 .type = _typ, \ 532 .type2 = _typ2, \ 533 .handler = &gen_##name, \ 534 }, \ 535 .oname = stringify(name), \ 536 } 537 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \ 538 { \ 539 .opc1 = op1, \ 540 .opc2 = op2, \ 541 .opc3 = op3, \ 542 .opc4 = 0xff, \ 543 .handler = { \ 544 .inval1 = invl, \ 545 .type = _typ, \ 546 .type2 = _typ2, \ 547 .handler = &gen_##name, \ 548 }, \ 549 .oname = onam, \ 550 } 551 #define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2) \ 552 { \ 553 .opc1 = op1, \ 554 .opc2 = op2, \ 555 .opc3 = op3, \ 556 .opc4 = op4, \ 557 .handler = { \ 558 .inval1 = invl, \ 559 .type = _typ, \ 560 .type2 = _typ2, \ 561 .handler = &gen_##name, \ 562 }, \ 563 .oname = stringify(name), \ 564 } 565 #define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2) \ 566 { \ 567 .opc1 = op1, \ 568 .opc2 = op2, \ 569 .opc3 = op3, \ 570 .opc4 = op4, \ 571 .handler = { \ 
572 .inval1 = invl, \ 573 .type = _typ, \ 574 .type2 = _typ2, \ 575 .handler = &gen_##name, \ 576 }, \ 577 .oname = onam, \ 578 } 579 #endif 580 581 /* Invalid instruction */ 582 static void gen_invalid(DisasContext *ctx) 583 { 584 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 585 } 586 587 static opc_handler_t invalid_handler = { 588 .inval1 = 0xFFFFFFFF, 589 .inval2 = 0xFFFFFFFF, 590 .type = PPC_NONE, 591 .type2 = PPC_NONE, 592 .handler = gen_invalid, 593 }; 594 595 /*** Integer comparison ***/ 596 597 static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf) 598 { 599 TCGv t0 = tcg_temp_new(); 600 TCGv t1 = tcg_temp_new(); 601 TCGv_i32 t = tcg_temp_new_i32(); 602 603 tcg_gen_movi_tl(t0, CRF_EQ); 604 tcg_gen_movi_tl(t1, CRF_LT); 605 tcg_gen_movcond_tl((s ? TCG_COND_LT : TCG_COND_LTU), t0, arg0, arg1, t1, t0); 606 tcg_gen_movi_tl(t1, CRF_GT); 607 tcg_gen_movcond_tl((s ? TCG_COND_GT : TCG_COND_GTU), t0, arg0, arg1, t1, t0); 608 609 tcg_gen_trunc_tl_i32(t, t0); 610 tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so); 611 tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t); 612 613 tcg_temp_free(t0); 614 tcg_temp_free(t1); 615 tcg_temp_free_i32(t); 616 } 617 618 static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf) 619 { 620 TCGv t0 = tcg_const_tl(arg1); 621 gen_op_cmp(arg0, t0, s, crf); 622 tcg_temp_free(t0); 623 } 624 625 static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf) 626 { 627 TCGv t0, t1; 628 t0 = tcg_temp_new(); 629 t1 = tcg_temp_new(); 630 if (s) { 631 tcg_gen_ext32s_tl(t0, arg0); 632 tcg_gen_ext32s_tl(t1, arg1); 633 } else { 634 tcg_gen_ext32u_tl(t0, arg0); 635 tcg_gen_ext32u_tl(t1, arg1); 636 } 637 gen_op_cmp(t0, t1, s, crf); 638 tcg_temp_free(t1); 639 tcg_temp_free(t0); 640 } 641 642 static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf) 643 { 644 TCGv t0 = tcg_const_tl(arg1); 645 gen_op_cmp32(arg0, t0, s, crf); 646 tcg_temp_free(t0); 647 } 648 649 static inline void 
gen_set_Rc0(DisasContext *ctx, TCGv reg) 650 { 651 if (NARROW_MODE(ctx)) { 652 gen_op_cmpi32(reg, 0, 1, 0); 653 } else { 654 gen_op_cmpi(reg, 0, 1, 0); 655 } 656 } 657 658 /* cmp */ 659 static void gen_cmp(DisasContext *ctx) 660 { 661 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) { 662 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 663 1, crfD(ctx->opcode)); 664 } else { 665 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 666 1, crfD(ctx->opcode)); 667 } 668 } 669 670 /* cmpi */ 671 static void gen_cmpi(DisasContext *ctx) 672 { 673 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) { 674 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode), 675 1, crfD(ctx->opcode)); 676 } else { 677 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode), 678 1, crfD(ctx->opcode)); 679 } 680 } 681 682 /* cmpl */ 683 static void gen_cmpl(DisasContext *ctx) 684 { 685 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) { 686 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 687 0, crfD(ctx->opcode)); 688 } else { 689 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 690 0, crfD(ctx->opcode)); 691 } 692 } 693 694 /* cmpli */ 695 static void gen_cmpli(DisasContext *ctx) 696 { 697 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) { 698 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode), 699 0, crfD(ctx->opcode)); 700 } else { 701 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode), 702 0, crfD(ctx->opcode)); 703 } 704 } 705 706 /* cmprb - range comparison: isupper, isaplha, islower*/ 707 static void gen_cmprb(DisasContext *ctx) 708 { 709 TCGv_i32 src1 = tcg_temp_new_i32(); 710 TCGv_i32 src2 = tcg_temp_new_i32(); 711 TCGv_i32 src2lo = tcg_temp_new_i32(); 712 TCGv_i32 src2hi = tcg_temp_new_i32(); 713 TCGv_i32 crf = cpu_crf[crfD(ctx->opcode)]; 714 715 tcg_gen_trunc_tl_i32(src1, cpu_gpr[rA(ctx->opcode)]); 716 tcg_gen_trunc_tl_i32(src2, 
cpu_gpr[rB(ctx->opcode)]); 717 718 tcg_gen_andi_i32(src1, src1, 0xFF); 719 tcg_gen_ext8u_i32(src2lo, src2); 720 tcg_gen_shri_i32(src2, src2, 8); 721 tcg_gen_ext8u_i32(src2hi, src2); 722 723 tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1); 724 tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi); 725 tcg_gen_and_i32(crf, src2lo, src2hi); 726 727 if (ctx->opcode & 0x00200000) { 728 tcg_gen_shri_i32(src2, src2, 8); 729 tcg_gen_ext8u_i32(src2lo, src2); 730 tcg_gen_shri_i32(src2, src2, 8); 731 tcg_gen_ext8u_i32(src2hi, src2); 732 tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1); 733 tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi); 734 tcg_gen_and_i32(src2lo, src2lo, src2hi); 735 tcg_gen_or_i32(crf, crf, src2lo); 736 } 737 tcg_gen_shli_i32(crf, crf, CRF_GT_BIT); 738 tcg_temp_free_i32(src1); 739 tcg_temp_free_i32(src2); 740 tcg_temp_free_i32(src2lo); 741 tcg_temp_free_i32(src2hi); 742 } 743 744 #if defined(TARGET_PPC64) 745 /* cmpeqb */ 746 static void gen_cmpeqb(DisasContext *ctx) 747 { 748 gen_helper_cmpeqb(cpu_crf[crfD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 749 cpu_gpr[rB(ctx->opcode)]); 750 } 751 #endif 752 753 /* isel (PowerPC 2.03 specification) */ 754 static void gen_isel(DisasContext *ctx) 755 { 756 uint32_t bi = rC(ctx->opcode); 757 uint32_t mask = 0x08 >> (bi & 0x03); 758 TCGv t0 = tcg_temp_new(); 759 TCGv zr; 760 761 tcg_gen_extu_i32_tl(t0, cpu_crf[bi >> 2]); 762 tcg_gen_andi_tl(t0, t0, mask); 763 764 zr = tcg_const_tl(0); 765 tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rD(ctx->opcode)], t0, zr, 766 rA(ctx->opcode) ? 
cpu_gpr[rA(ctx->opcode)] : zr, 767 cpu_gpr[rB(ctx->opcode)]); 768 tcg_temp_free(zr); 769 tcg_temp_free(t0); 770 } 771 772 /* cmpb: PowerPC 2.05 specification */ 773 static void gen_cmpb(DisasContext *ctx) 774 { 775 gen_helper_cmpb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 776 cpu_gpr[rB(ctx->opcode)]); 777 } 778 779 /*** Integer arithmetic ***/ 780 781 static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0, 782 TCGv arg1, TCGv arg2, int sub) 783 { 784 TCGv t0 = tcg_temp_new(); 785 786 tcg_gen_xor_tl(cpu_ov, arg0, arg2); 787 tcg_gen_xor_tl(t0, arg1, arg2); 788 if (sub) { 789 tcg_gen_and_tl(cpu_ov, cpu_ov, t0); 790 } else { 791 tcg_gen_andc_tl(cpu_ov, cpu_ov, t0); 792 } 793 tcg_temp_free(t0); 794 if (NARROW_MODE(ctx)) { 795 tcg_gen_extract_tl(cpu_ov, cpu_ov, 31, 1); 796 if (is_isa300(ctx)) { 797 tcg_gen_mov_tl(cpu_ov32, cpu_ov); 798 } 799 } else { 800 if (is_isa300(ctx)) { 801 tcg_gen_extract_tl(cpu_ov32, cpu_ov, 31, 1); 802 } 803 tcg_gen_extract_tl(cpu_ov, cpu_ov, TARGET_LONG_BITS - 1, 1); 804 } 805 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 806 } 807 808 static inline void gen_op_arith_compute_ca32(DisasContext *ctx, 809 TCGv res, TCGv arg0, TCGv arg1, 810 TCGv ca32, int sub) 811 { 812 TCGv t0; 813 814 if (!is_isa300(ctx)) { 815 return; 816 } 817 818 t0 = tcg_temp_new(); 819 if (sub) { 820 tcg_gen_eqv_tl(t0, arg0, arg1); 821 } else { 822 tcg_gen_xor_tl(t0, arg0, arg1); 823 } 824 tcg_gen_xor_tl(t0, t0, res); 825 tcg_gen_extract_tl(ca32, t0, 32, 1); 826 tcg_temp_free(t0); 827 } 828 829 /* Common add function */ 830 static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1, 831 TCGv arg2, TCGv ca, TCGv ca32, 832 bool add_ca, bool compute_ca, 833 bool compute_ov, bool compute_rc0) 834 { 835 TCGv t0 = ret; 836 837 if (compute_ca || compute_ov) { 838 t0 = tcg_temp_new(); 839 } 840 841 if (compute_ca) { 842 if (NARROW_MODE(ctx)) { 843 /* Caution: a non-obvious corner case of the spec is that we 844 must produce the *entire* 64-bit 
addition, but produce the 845 carry into bit 32. */ 846 TCGv t1 = tcg_temp_new(); 847 tcg_gen_xor_tl(t1, arg1, arg2); /* add without carry */ 848 tcg_gen_add_tl(t0, arg1, arg2); 849 if (add_ca) { 850 tcg_gen_add_tl(t0, t0, ca); 851 } 852 tcg_gen_xor_tl(ca, t0, t1); /* bits changed w/ carry */ 853 tcg_temp_free(t1); 854 tcg_gen_extract_tl(ca, ca, 32, 1); 855 if (is_isa300(ctx)) { 856 tcg_gen_mov_tl(ca32, ca); 857 } 858 } else { 859 TCGv zero = tcg_const_tl(0); 860 if (add_ca) { 861 tcg_gen_add2_tl(t0, ca, arg1, zero, ca, zero); 862 tcg_gen_add2_tl(t0, ca, t0, ca, arg2, zero); 863 } else { 864 tcg_gen_add2_tl(t0, ca, arg1, zero, arg2, zero); 865 } 866 gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, ca32, 0); 867 tcg_temp_free(zero); 868 } 869 } else { 870 tcg_gen_add_tl(t0, arg1, arg2); 871 if (add_ca) { 872 tcg_gen_add_tl(t0, t0, ca); 873 } 874 } 875 876 if (compute_ov) { 877 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0); 878 } 879 if (unlikely(compute_rc0)) { 880 gen_set_Rc0(ctx, t0); 881 } 882 883 if (t0 != ret) { 884 tcg_gen_mov_tl(ret, t0); 885 tcg_temp_free(t0); 886 } 887 } 888 /* Add functions with two operands */ 889 #define GEN_INT_ARITH_ADD(name, opc3, ca, add_ca, compute_ca, compute_ov) \ 890 static void glue(gen_, name)(DisasContext *ctx) \ 891 { \ 892 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \ 893 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 894 ca, glue(ca, 32), \ 895 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 896 } 897 /* Add functions with one operand and one immediate */ 898 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, ca, \ 899 add_ca, compute_ca, compute_ov) \ 900 static void glue(gen_, name)(DisasContext *ctx) \ 901 { \ 902 TCGv t0 = tcg_const_tl(const_val); \ 903 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \ 904 cpu_gpr[rA(ctx->opcode)], t0, \ 905 ca, glue(ca, 32), \ 906 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 907 tcg_temp_free(t0); \ 908 } 909 910 /* add add. addo addo. 
*/ 911 GEN_INT_ARITH_ADD(add, 0x08, cpu_ca, 0, 0, 0) 912 GEN_INT_ARITH_ADD(addo, 0x18, cpu_ca, 0, 0, 1) 913 /* addc addc. addco addco. */ 914 GEN_INT_ARITH_ADD(addc, 0x00, cpu_ca, 0, 1, 0) 915 GEN_INT_ARITH_ADD(addco, 0x10, cpu_ca, 0, 1, 1) 916 /* adde adde. addeo addeo. */ 917 GEN_INT_ARITH_ADD(adde, 0x04, cpu_ca, 1, 1, 0) 918 GEN_INT_ARITH_ADD(addeo, 0x14, cpu_ca, 1, 1, 1) 919 /* addme addme. addmeo addmeo. */ 920 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, cpu_ca, 1, 1, 0) 921 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, cpu_ca, 1, 1, 1) 922 /* addex */ 923 GEN_INT_ARITH_ADD(addex, 0x05, cpu_ov, 1, 1, 0); 924 /* addze addze. addzeo addzeo.*/ 925 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, cpu_ca, 1, 1, 0) 926 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, cpu_ca, 1, 1, 1) 927 /* addi */ 928 static void gen_addi(DisasContext *ctx) 929 { 930 target_long simm = SIMM(ctx->opcode); 931 932 if (rA(ctx->opcode) == 0) { 933 /* li case */ 934 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm); 935 } else { 936 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], 937 cpu_gpr[rA(ctx->opcode)], simm); 938 } 939 } 940 /* addic addic.*/ 941 static inline void gen_op_addic(DisasContext *ctx, bool compute_rc0) 942 { 943 TCGv c = tcg_const_tl(SIMM(ctx->opcode)); 944 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 945 c, cpu_ca, cpu_ca32, 0, 1, 0, compute_rc0); 946 tcg_temp_free(c); 947 } 948 949 static void gen_addic(DisasContext *ctx) 950 { 951 gen_op_addic(ctx, 0); 952 } 953 954 static void gen_addic_(DisasContext *ctx) 955 { 956 gen_op_addic(ctx, 1); 957 } 958 959 /* addis */ 960 static void gen_addis(DisasContext *ctx) 961 { 962 target_long simm = SIMM(ctx->opcode); 963 964 if (rA(ctx->opcode) == 0) { 965 /* lis case */ 966 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm << 16); 967 } else { 968 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], 969 cpu_gpr[rA(ctx->opcode)], simm << 16); 970 } 971 } 972 973 /* addpcis */ 974 static void gen_addpcis(DisasContext *ctx) 975 { 976 
target_long d = DX(ctx->opcode); 977 978 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], ctx->base.pc_next + (d << 16)); 979 } 980 981 static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1, 982 TCGv arg2, int sign, int compute_ov) 983 { 984 TCGv_i32 t0 = tcg_temp_new_i32(); 985 TCGv_i32 t1 = tcg_temp_new_i32(); 986 TCGv_i32 t2 = tcg_temp_new_i32(); 987 TCGv_i32 t3 = tcg_temp_new_i32(); 988 989 tcg_gen_trunc_tl_i32(t0, arg1); 990 tcg_gen_trunc_tl_i32(t1, arg2); 991 if (sign) { 992 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN); 993 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1); 994 tcg_gen_and_i32(t2, t2, t3); 995 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0); 996 tcg_gen_or_i32(t2, t2, t3); 997 tcg_gen_movi_i32(t3, 0); 998 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1); 999 tcg_gen_div_i32(t3, t0, t1); 1000 tcg_gen_extu_i32_tl(ret, t3); 1001 } else { 1002 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t1, 0); 1003 tcg_gen_movi_i32(t3, 0); 1004 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1); 1005 tcg_gen_divu_i32(t3, t0, t1); 1006 tcg_gen_extu_i32_tl(ret, t3); 1007 } 1008 if (compute_ov) { 1009 tcg_gen_extu_i32_tl(cpu_ov, t2); 1010 if (is_isa300(ctx)) { 1011 tcg_gen_extu_i32_tl(cpu_ov32, t2); 1012 } 1013 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 1014 } 1015 tcg_temp_free_i32(t0); 1016 tcg_temp_free_i32(t1); 1017 tcg_temp_free_i32(t2); 1018 tcg_temp_free_i32(t3); 1019 1020 if (unlikely(Rc(ctx->opcode) != 0)) 1021 gen_set_Rc0(ctx, ret); 1022 } 1023 /* Div functions */ 1024 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \ 1025 static void glue(gen_, name)(DisasContext *ctx) \ 1026 { \ 1027 gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)], \ 1028 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 1029 sign, compute_ov); \ 1030 } 1031 /* divwu divwu. divwuo divwuo. */ 1032 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0); 1033 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1); 1034 /* divw divw. divwo divwo. 
*/ 1035 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0); 1036 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1); 1037 1038 /* div[wd]eu[o][.] */ 1039 #define GEN_DIVE(name, hlpr, compute_ov) \ 1040 static void gen_##name(DisasContext *ctx) \ 1041 { \ 1042 TCGv_i32 t0 = tcg_const_i32(compute_ov); \ 1043 gen_helper_##hlpr(cpu_gpr[rD(ctx->opcode)], cpu_env, \ 1044 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); \ 1045 tcg_temp_free_i32(t0); \ 1046 if (unlikely(Rc(ctx->opcode) != 0)) { \ 1047 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); \ 1048 } \ 1049 } 1050 1051 GEN_DIVE(divweu, divweu, 0); 1052 GEN_DIVE(divweuo, divweu, 1); 1053 GEN_DIVE(divwe, divwe, 0); 1054 GEN_DIVE(divweo, divwe, 1); 1055 1056 #if defined(TARGET_PPC64) 1057 static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1, 1058 TCGv arg2, int sign, int compute_ov) 1059 { 1060 TCGv_i64 t0 = tcg_temp_new_i64(); 1061 TCGv_i64 t1 = tcg_temp_new_i64(); 1062 TCGv_i64 t2 = tcg_temp_new_i64(); 1063 TCGv_i64 t3 = tcg_temp_new_i64(); 1064 1065 tcg_gen_mov_i64(t0, arg1); 1066 tcg_gen_mov_i64(t1, arg2); 1067 if (sign) { 1068 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN); 1069 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1); 1070 tcg_gen_and_i64(t2, t2, t3); 1071 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0); 1072 tcg_gen_or_i64(t2, t2, t3); 1073 tcg_gen_movi_i64(t3, 0); 1074 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1); 1075 tcg_gen_div_i64(ret, t0, t1); 1076 } else { 1077 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t1, 0); 1078 tcg_gen_movi_i64(t3, 0); 1079 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1); 1080 tcg_gen_divu_i64(ret, t0, t1); 1081 } 1082 if (compute_ov) { 1083 tcg_gen_mov_tl(cpu_ov, t2); 1084 if (is_isa300(ctx)) { 1085 tcg_gen_mov_tl(cpu_ov32, t2); 1086 } 1087 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 1088 } 1089 tcg_temp_free_i64(t0); 1090 tcg_temp_free_i64(t1); 1091 tcg_temp_free_i64(t2); 1092 tcg_temp_free_i64(t3); 1093 1094 if (unlikely(Rc(ctx->opcode) != 0)) 1095 gen_set_Rc0(ctx, 
ret); 1096 } 1097 1098 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \ 1099 static void glue(gen_, name)(DisasContext *ctx) \ 1100 { \ 1101 gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)], \ 1102 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 1103 sign, compute_ov); \ 1104 } 1105 /* divdu divdu. divduo divduo. */ 1106 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0); 1107 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1); 1108 /* divd divd. divdo divdo. */ 1109 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0); 1110 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1); 1111 1112 GEN_DIVE(divdeu, divdeu, 0); 1113 GEN_DIVE(divdeuo, divdeu, 1); 1114 GEN_DIVE(divde, divde, 0); 1115 GEN_DIVE(divdeo, divde, 1); 1116 #endif 1117 1118 static inline void gen_op_arith_modw(DisasContext *ctx, TCGv ret, TCGv arg1, 1119 TCGv arg2, int sign) 1120 { 1121 TCGv_i32 t0 = tcg_temp_new_i32(); 1122 TCGv_i32 t1 = tcg_temp_new_i32(); 1123 1124 tcg_gen_trunc_tl_i32(t0, arg1); 1125 tcg_gen_trunc_tl_i32(t1, arg2); 1126 if (sign) { 1127 TCGv_i32 t2 = tcg_temp_new_i32(); 1128 TCGv_i32 t3 = tcg_temp_new_i32(); 1129 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN); 1130 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1); 1131 tcg_gen_and_i32(t2, t2, t3); 1132 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0); 1133 tcg_gen_or_i32(t2, t2, t3); 1134 tcg_gen_movi_i32(t3, 0); 1135 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1); 1136 tcg_gen_rem_i32(t3, t0, t1); 1137 tcg_gen_ext_i32_tl(ret, t3); 1138 tcg_temp_free_i32(t2); 1139 tcg_temp_free_i32(t3); 1140 } else { 1141 TCGv_i32 t2 = tcg_const_i32(1); 1142 TCGv_i32 t3 = tcg_const_i32(0); 1143 tcg_gen_movcond_i32(TCG_COND_EQ, t1, t1, t3, t2, t1); 1144 tcg_gen_remu_i32(t3, t0, t1); 1145 tcg_gen_extu_i32_tl(ret, t3); 1146 tcg_temp_free_i32(t2); 1147 tcg_temp_free_i32(t3); 1148 } 1149 tcg_temp_free_i32(t0); 1150 tcg_temp_free_i32(t1); 1151 } 1152 1153 #define GEN_INT_ARITH_MODW(name, opc3, sign) \ 1154 static void glue(gen_, name)(DisasContext *ctx) \ 1155 { \ 1156 gen_op_arith_modw(ctx, 
cpu_gpr[rD(ctx->opcode)], \ 1157 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 1158 sign); \ 1159 } 1160 1161 GEN_INT_ARITH_MODW(moduw, 0x08, 0); 1162 GEN_INT_ARITH_MODW(modsw, 0x18, 1); 1163 1164 #if defined(TARGET_PPC64) 1165 static inline void gen_op_arith_modd(DisasContext *ctx, TCGv ret, TCGv arg1, 1166 TCGv arg2, int sign) 1167 { 1168 TCGv_i64 t0 = tcg_temp_new_i64(); 1169 TCGv_i64 t1 = tcg_temp_new_i64(); 1170 1171 tcg_gen_mov_i64(t0, arg1); 1172 tcg_gen_mov_i64(t1, arg2); 1173 if (sign) { 1174 TCGv_i64 t2 = tcg_temp_new_i64(); 1175 TCGv_i64 t3 = tcg_temp_new_i64(); 1176 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN); 1177 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1); 1178 tcg_gen_and_i64(t2, t2, t3); 1179 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0); 1180 tcg_gen_or_i64(t2, t2, t3); 1181 tcg_gen_movi_i64(t3, 0); 1182 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1); 1183 tcg_gen_rem_i64(ret, t0, t1); 1184 tcg_temp_free_i64(t2); 1185 tcg_temp_free_i64(t3); 1186 } else { 1187 TCGv_i64 t2 = tcg_const_i64(1); 1188 TCGv_i64 t3 = tcg_const_i64(0); 1189 tcg_gen_movcond_i64(TCG_COND_EQ, t1, t1, t3, t2, t1); 1190 tcg_gen_remu_i64(ret, t0, t1); 1191 tcg_temp_free_i64(t2); 1192 tcg_temp_free_i64(t3); 1193 } 1194 tcg_temp_free_i64(t0); 1195 tcg_temp_free_i64(t1); 1196 } 1197 1198 #define GEN_INT_ARITH_MODD(name, opc3, sign) \ 1199 static void glue(gen_, name)(DisasContext *ctx) \ 1200 { \ 1201 gen_op_arith_modd(ctx, cpu_gpr[rD(ctx->opcode)], \ 1202 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 1203 sign); \ 1204 } 1205 1206 GEN_INT_ARITH_MODD(modud, 0x08, 0); 1207 GEN_INT_ARITH_MODD(modsd, 0x18, 1); 1208 #endif 1209 1210 /* mulhw mulhw. 
*/ 1211 static void gen_mulhw(DisasContext *ctx) 1212 { 1213 TCGv_i32 t0 = tcg_temp_new_i32(); 1214 TCGv_i32 t1 = tcg_temp_new_i32(); 1215 1216 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); 1217 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); 1218 tcg_gen_muls2_i32(t0, t1, t0, t1); 1219 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1); 1220 tcg_temp_free_i32(t0); 1221 tcg_temp_free_i32(t1); 1222 if (unlikely(Rc(ctx->opcode) != 0)) 1223 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1224 } 1225 1226 /* mulhwu mulhwu. */ 1227 static void gen_mulhwu(DisasContext *ctx) 1228 { 1229 TCGv_i32 t0 = tcg_temp_new_i32(); 1230 TCGv_i32 t1 = tcg_temp_new_i32(); 1231 1232 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); 1233 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); 1234 tcg_gen_mulu2_i32(t0, t1, t0, t1); 1235 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1); 1236 tcg_temp_free_i32(t0); 1237 tcg_temp_free_i32(t1); 1238 if (unlikely(Rc(ctx->opcode) != 0)) 1239 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1240 } 1241 1242 /* mullw mullw. */ 1243 static void gen_mullw(DisasContext *ctx) 1244 { 1245 #if defined(TARGET_PPC64) 1246 TCGv_i64 t0, t1; 1247 t0 = tcg_temp_new_i64(); 1248 t1 = tcg_temp_new_i64(); 1249 tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]); 1250 tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]); 1251 tcg_gen_mul_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); 1252 tcg_temp_free(t0); 1253 tcg_temp_free(t1); 1254 #else 1255 tcg_gen_mul_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1256 cpu_gpr[rB(ctx->opcode)]); 1257 #endif 1258 if (unlikely(Rc(ctx->opcode) != 0)) 1259 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1260 } 1261 1262 /* mullwo mullwo. 
*/ 1263 static void gen_mullwo(DisasContext *ctx) 1264 { 1265 TCGv_i32 t0 = tcg_temp_new_i32(); 1266 TCGv_i32 t1 = tcg_temp_new_i32(); 1267 1268 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); 1269 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); 1270 tcg_gen_muls2_i32(t0, t1, t0, t1); 1271 #if defined(TARGET_PPC64) 1272 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); 1273 #else 1274 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], t0); 1275 #endif 1276 1277 tcg_gen_sari_i32(t0, t0, 31); 1278 tcg_gen_setcond_i32(TCG_COND_NE, t0, t0, t1); 1279 tcg_gen_extu_i32_tl(cpu_ov, t0); 1280 if (is_isa300(ctx)) { 1281 tcg_gen_mov_tl(cpu_ov32, cpu_ov); 1282 } 1283 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 1284 1285 tcg_temp_free_i32(t0); 1286 tcg_temp_free_i32(t1); 1287 if (unlikely(Rc(ctx->opcode) != 0)) 1288 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1289 } 1290 1291 /* mulli */ 1292 static void gen_mulli(DisasContext *ctx) 1293 { 1294 tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1295 SIMM(ctx->opcode)); 1296 } 1297 1298 #if defined(TARGET_PPC64) 1299 /* mulhd mulhd. */ 1300 static void gen_mulhd(DisasContext *ctx) 1301 { 1302 TCGv lo = tcg_temp_new(); 1303 tcg_gen_muls2_tl(lo, cpu_gpr[rD(ctx->opcode)], 1304 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 1305 tcg_temp_free(lo); 1306 if (unlikely(Rc(ctx->opcode) != 0)) { 1307 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1308 } 1309 } 1310 1311 /* mulhdu mulhdu. */ 1312 static void gen_mulhdu(DisasContext *ctx) 1313 { 1314 TCGv lo = tcg_temp_new(); 1315 tcg_gen_mulu2_tl(lo, cpu_gpr[rD(ctx->opcode)], 1316 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 1317 tcg_temp_free(lo); 1318 if (unlikely(Rc(ctx->opcode) != 0)) { 1319 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1320 } 1321 } 1322 1323 /* mulld mulld. 
*/ 1324 static void gen_mulld(DisasContext *ctx) 1325 { 1326 tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1327 cpu_gpr[rB(ctx->opcode)]); 1328 if (unlikely(Rc(ctx->opcode) != 0)) 1329 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1330 } 1331 1332 /* mulldo mulldo. */ 1333 static void gen_mulldo(DisasContext *ctx) 1334 { 1335 TCGv_i64 t0 = tcg_temp_new_i64(); 1336 TCGv_i64 t1 = tcg_temp_new_i64(); 1337 1338 tcg_gen_muls2_i64(t0, t1, cpu_gpr[rA(ctx->opcode)], 1339 cpu_gpr[rB(ctx->opcode)]); 1340 tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], t0); 1341 1342 tcg_gen_sari_i64(t0, t0, 63); 1343 tcg_gen_setcond_i64(TCG_COND_NE, cpu_ov, t0, t1); 1344 if (is_isa300(ctx)) { 1345 tcg_gen_mov_tl(cpu_ov32, cpu_ov); 1346 } 1347 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 1348 1349 tcg_temp_free_i64(t0); 1350 tcg_temp_free_i64(t1); 1351 1352 if (unlikely(Rc(ctx->opcode) != 0)) { 1353 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1354 } 1355 } 1356 #endif 1357 1358 /* Common subf function */ 1359 static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1, 1360 TCGv arg2, bool add_ca, bool compute_ca, 1361 bool compute_ov, bool compute_rc0) 1362 { 1363 TCGv t0 = ret; 1364 1365 if (compute_ca || compute_ov) { 1366 t0 = tcg_temp_new(); 1367 } 1368 1369 if (compute_ca) { 1370 /* dest = ~arg1 + arg2 [+ ca]. */ 1371 if (NARROW_MODE(ctx)) { 1372 /* Caution: a non-obvious corner case of the spec is that we 1373 must produce the *entire* 64-bit addition, but produce the 1374 carry into bit 32. 
*/ 1375 TCGv inv1 = tcg_temp_new(); 1376 TCGv t1 = tcg_temp_new(); 1377 tcg_gen_not_tl(inv1, arg1); 1378 if (add_ca) { 1379 tcg_gen_add_tl(t0, arg2, cpu_ca); 1380 } else { 1381 tcg_gen_addi_tl(t0, arg2, 1); 1382 } 1383 tcg_gen_xor_tl(t1, arg2, inv1); /* add without carry */ 1384 tcg_gen_add_tl(t0, t0, inv1); 1385 tcg_temp_free(inv1); 1386 tcg_gen_xor_tl(cpu_ca, t0, t1); /* bits changes w/ carry */ 1387 tcg_temp_free(t1); 1388 tcg_gen_extract_tl(cpu_ca, cpu_ca, 32, 1); 1389 if (is_isa300(ctx)) { 1390 tcg_gen_mov_tl(cpu_ca32, cpu_ca); 1391 } 1392 } else if (add_ca) { 1393 TCGv zero, inv1 = tcg_temp_new(); 1394 tcg_gen_not_tl(inv1, arg1); 1395 zero = tcg_const_tl(0); 1396 tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero); 1397 tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero); 1398 gen_op_arith_compute_ca32(ctx, t0, inv1, arg2, cpu_ca32, 0); 1399 tcg_temp_free(zero); 1400 tcg_temp_free(inv1); 1401 } else { 1402 tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1); 1403 tcg_gen_sub_tl(t0, arg2, arg1); 1404 gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, cpu_ca32, 1); 1405 } 1406 } else if (add_ca) { 1407 /* Since we're ignoring carry-out, we can simplify the 1408 standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1. 
*/ 1409 tcg_gen_sub_tl(t0, arg2, arg1); 1410 tcg_gen_add_tl(t0, t0, cpu_ca); 1411 tcg_gen_subi_tl(t0, t0, 1); 1412 } else { 1413 tcg_gen_sub_tl(t0, arg2, arg1); 1414 } 1415 1416 if (compute_ov) { 1417 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1); 1418 } 1419 if (unlikely(compute_rc0)) { 1420 gen_set_Rc0(ctx, t0); 1421 } 1422 1423 if (t0 != ret) { 1424 tcg_gen_mov_tl(ret, t0); 1425 tcg_temp_free(t0); 1426 } 1427 } 1428 /* Sub functions with Two operands functions */ 1429 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \ 1430 static void glue(gen_, name)(DisasContext *ctx) \ 1431 { \ 1432 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \ 1433 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 1434 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 1435 } 1436 /* Sub functions with one operand and one immediate */ 1437 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \ 1438 add_ca, compute_ca, compute_ov) \ 1439 static void glue(gen_, name)(DisasContext *ctx) \ 1440 { \ 1441 TCGv t0 = tcg_const_tl(const_val); \ 1442 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \ 1443 cpu_gpr[rA(ctx->opcode)], t0, \ 1444 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 1445 tcg_temp_free(t0); \ 1446 } 1447 /* subf subf. subfo subfo. */ 1448 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0) 1449 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1) 1450 /* subfc subfc. subfco subfco. */ 1451 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0) 1452 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1) 1453 /* subfe subfe. subfeo subfo. */ 1454 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0) 1455 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1) 1456 /* subfme subfme. subfmeo subfmeo. */ 1457 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0) 1458 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1) 1459 /* subfze subfze. 
subfzeo subfzeo.*/ 1460 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0) 1461 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1) 1462 1463 /* subfic */ 1464 static void gen_subfic(DisasContext *ctx) 1465 { 1466 TCGv c = tcg_const_tl(SIMM(ctx->opcode)); 1467 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1468 c, 0, 1, 0, 0); 1469 tcg_temp_free(c); 1470 } 1471 1472 /* neg neg. nego nego. */ 1473 static inline void gen_op_arith_neg(DisasContext *ctx, bool compute_ov) 1474 { 1475 TCGv zero = tcg_const_tl(0); 1476 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1477 zero, 0, 0, compute_ov, Rc(ctx->opcode)); 1478 tcg_temp_free(zero); 1479 } 1480 1481 static void gen_neg(DisasContext *ctx) 1482 { 1483 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 1484 if (unlikely(Rc(ctx->opcode))) { 1485 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1486 } 1487 } 1488 1489 static void gen_nego(DisasContext *ctx) 1490 { 1491 gen_op_arith_neg(ctx, 1); 1492 } 1493 1494 /*** Integer logical ***/ 1495 #define GEN_LOGICAL2(name, tcg_op, opc, type) \ 1496 static void glue(gen_, name)(DisasContext *ctx) \ 1497 { \ 1498 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], \ 1499 cpu_gpr[rB(ctx->opcode)]); \ 1500 if (unlikely(Rc(ctx->opcode) != 0)) \ 1501 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \ 1502 } 1503 1504 #define GEN_LOGICAL1(name, tcg_op, opc, type) \ 1505 static void glue(gen_, name)(DisasContext *ctx) \ 1506 { \ 1507 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); \ 1508 if (unlikely(Rc(ctx->opcode) != 0)) \ 1509 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \ 1510 } 1511 1512 /* and & and. */ 1513 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER); 1514 /* andc & andc. */ 1515 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER); 1516 1517 /* andi. 
*/ 1518 static void gen_andi_(DisasContext *ctx) 1519 { 1520 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode)); 1521 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 1522 } 1523 1524 /* andis. */ 1525 static void gen_andis_(DisasContext *ctx) 1526 { 1527 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode) << 16); 1528 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 1529 } 1530 1531 /* cntlzw */ 1532 static void gen_cntlzw(DisasContext *ctx) 1533 { 1534 TCGv_i32 t = tcg_temp_new_i32(); 1535 1536 tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]); 1537 tcg_gen_clzi_i32(t, t, 32); 1538 tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t); 1539 tcg_temp_free_i32(t); 1540 1541 if (unlikely(Rc(ctx->opcode) != 0)) 1542 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 1543 } 1544 1545 /* cnttzw */ 1546 static void gen_cnttzw(DisasContext *ctx) 1547 { 1548 TCGv_i32 t = tcg_temp_new_i32(); 1549 1550 tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]); 1551 tcg_gen_ctzi_i32(t, t, 32); 1552 tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t); 1553 tcg_temp_free_i32(t); 1554 1555 if (unlikely(Rc(ctx->opcode) != 0)) { 1556 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 1557 } 1558 } 1559 1560 /* eqv & eqv. */ 1561 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER); 1562 /* extsb & extsb. */ 1563 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER); 1564 /* extsh & extsh. */ 1565 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER); 1566 /* nand & nand. */ 1567 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER); 1568 /* nor & nor. 
*/ 1569 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER); 1570 1571 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) 1572 static void gen_pause(DisasContext *ctx) 1573 { 1574 TCGv_i32 t0 = tcg_const_i32(0); 1575 tcg_gen_st_i32(t0, cpu_env, 1576 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted)); 1577 tcg_temp_free_i32(t0); 1578 1579 /* Stop translation, this gives other CPUs a chance to run */ 1580 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 1581 } 1582 #endif /* defined(TARGET_PPC64) */ 1583 1584 /* or & or. */ 1585 static void gen_or(DisasContext *ctx) 1586 { 1587 int rs, ra, rb; 1588 1589 rs = rS(ctx->opcode); 1590 ra = rA(ctx->opcode); 1591 rb = rB(ctx->opcode); 1592 /* Optimisation for mr. ri case */ 1593 if (rs != ra || rs != rb) { 1594 if (rs != rb) 1595 tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]); 1596 else 1597 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]); 1598 if (unlikely(Rc(ctx->opcode) != 0)) 1599 gen_set_Rc0(ctx, cpu_gpr[ra]); 1600 } else if (unlikely(Rc(ctx->opcode) != 0)) { 1601 gen_set_Rc0(ctx, cpu_gpr[rs]); 1602 #if defined(TARGET_PPC64) 1603 } else if (rs != 0) { /* 0 is nop */ 1604 int prio = 0; 1605 1606 switch (rs) { 1607 case 1: 1608 /* Set process priority to low */ 1609 prio = 2; 1610 break; 1611 case 6: 1612 /* Set process priority to medium-low */ 1613 prio = 3; 1614 break; 1615 case 2: 1616 /* Set process priority to normal */ 1617 prio = 4; 1618 break; 1619 #if !defined(CONFIG_USER_ONLY) 1620 case 31: 1621 if (!ctx->pr) { 1622 /* Set process priority to very low */ 1623 prio = 1; 1624 } 1625 break; 1626 case 5: 1627 if (!ctx->pr) { 1628 /* Set process priority to medium-hight */ 1629 prio = 5; 1630 } 1631 break; 1632 case 3: 1633 if (!ctx->pr) { 1634 /* Set process priority to high */ 1635 prio = 6; 1636 } 1637 break; 1638 case 7: 1639 if (ctx->hv && !ctx->pr) { 1640 /* Set process priority to very high */ 1641 prio = 7; 1642 } 1643 break; 1644 #endif 1645 default: 1646 break; 1647 } 1648 if (prio) { 1649 
TCGv t0 = tcg_temp_new(); 1650 gen_load_spr(t0, SPR_PPR); 1651 tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL); 1652 tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50); 1653 gen_store_spr(SPR_PPR, t0); 1654 tcg_temp_free(t0); 1655 } 1656 #if !defined(CONFIG_USER_ONLY) 1657 /* Pause out of TCG otherwise spin loops with smt_low eat too much 1658 * CPU and the kernel hangs. This applies to all encodings other 1659 * than no-op, e.g., miso(rs=26), yield(27), mdoio(29), mdoom(30), 1660 * and all currently undefined. 1661 */ 1662 gen_pause(ctx); 1663 #endif 1664 #endif 1665 } 1666 } 1667 /* orc & orc. */ 1668 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER); 1669 1670 /* xor & xor. */ 1671 static void gen_xor(DisasContext *ctx) 1672 { 1673 /* Optimisation for "set to zero" case */ 1674 if (rS(ctx->opcode) != rB(ctx->opcode)) 1675 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 1676 else 1677 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 1678 if (unlikely(Rc(ctx->opcode) != 0)) 1679 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 1680 } 1681 1682 /* ori */ 1683 static void gen_ori(DisasContext *ctx) 1684 { 1685 target_ulong uimm = UIMM(ctx->opcode); 1686 1687 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 1688 return; 1689 } 1690 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm); 1691 } 1692 1693 /* oris */ 1694 static void gen_oris(DisasContext *ctx) 1695 { 1696 target_ulong uimm = UIMM(ctx->opcode); 1697 1698 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 1699 /* NOP */ 1700 return; 1701 } 1702 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16); 1703 } 1704 1705 /* xori */ 1706 static void gen_xori(DisasContext *ctx) 1707 { 1708 target_ulong uimm = UIMM(ctx->opcode); 1709 1710 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 1711 /* NOP */ 1712 return; 1713 } 1714 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm); 1715 } 1716 1717 /* 
xoris */ 1718 static void gen_xoris(DisasContext *ctx) 1719 { 1720 target_ulong uimm = UIMM(ctx->opcode); 1721 1722 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 1723 /* NOP */ 1724 return; 1725 } 1726 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16); 1727 } 1728 1729 /* popcntb : PowerPC 2.03 specification */ 1730 static void gen_popcntb(DisasContext *ctx) 1731 { 1732 gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 1733 } 1734 1735 static void gen_popcntw(DisasContext *ctx) 1736 { 1737 #if defined(TARGET_PPC64) 1738 gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 1739 #else 1740 tcg_gen_ctpop_i32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 1741 #endif 1742 } 1743 1744 #if defined(TARGET_PPC64) 1745 /* popcntd: PowerPC 2.06 specification */ 1746 static void gen_popcntd(DisasContext *ctx) 1747 { 1748 tcg_gen_ctpop_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 1749 } 1750 #endif 1751 1752 /* prtyw: PowerPC 2.05 specification */ 1753 static void gen_prtyw(DisasContext *ctx) 1754 { 1755 TCGv ra = cpu_gpr[rA(ctx->opcode)]; 1756 TCGv rs = cpu_gpr[rS(ctx->opcode)]; 1757 TCGv t0 = tcg_temp_new(); 1758 tcg_gen_shri_tl(t0, rs, 16); 1759 tcg_gen_xor_tl(ra, rs, t0); 1760 tcg_gen_shri_tl(t0, ra, 8); 1761 tcg_gen_xor_tl(ra, ra, t0); 1762 tcg_gen_andi_tl(ra, ra, (target_ulong)0x100000001ULL); 1763 tcg_temp_free(t0); 1764 } 1765 1766 #if defined(TARGET_PPC64) 1767 /* prtyd: PowerPC 2.05 specification */ 1768 static void gen_prtyd(DisasContext *ctx) 1769 { 1770 TCGv ra = cpu_gpr[rA(ctx->opcode)]; 1771 TCGv rs = cpu_gpr[rS(ctx->opcode)]; 1772 TCGv t0 = tcg_temp_new(); 1773 tcg_gen_shri_tl(t0, rs, 32); 1774 tcg_gen_xor_tl(ra, rs, t0); 1775 tcg_gen_shri_tl(t0, ra, 16); 1776 tcg_gen_xor_tl(ra, ra, t0); 1777 tcg_gen_shri_tl(t0, ra, 8); 1778 tcg_gen_xor_tl(ra, ra, t0); 1779 tcg_gen_andi_tl(ra, ra, 1); 1780 tcg_temp_free(t0); 1781 } 1782 #endif 1783 1784 #if defined(TARGET_PPC64) 
1785 /* bpermd */ 1786 static void gen_bpermd(DisasContext *ctx) 1787 { 1788 gen_helper_bpermd(cpu_gpr[rA(ctx->opcode)], 1789 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 1790 } 1791 #endif 1792 1793 #if defined(TARGET_PPC64) 1794 /* extsw & extsw. */ 1795 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B); 1796 1797 /* cntlzd */ 1798 static void gen_cntlzd(DisasContext *ctx) 1799 { 1800 tcg_gen_clzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64); 1801 if (unlikely(Rc(ctx->opcode) != 0)) 1802 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 1803 } 1804 1805 /* cnttzd */ 1806 static void gen_cnttzd(DisasContext *ctx) 1807 { 1808 tcg_gen_ctzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64); 1809 if (unlikely(Rc(ctx->opcode) != 0)) { 1810 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 1811 } 1812 } 1813 1814 /* darn */ 1815 static void gen_darn(DisasContext *ctx) 1816 { 1817 int l = L(ctx->opcode); 1818 1819 if (l == 0) { 1820 gen_helper_darn32(cpu_gpr[rD(ctx->opcode)]); 1821 } else if (l <= 2) { 1822 /* Return 64-bit random for both CRN and RRN */ 1823 gen_helper_darn64(cpu_gpr[rD(ctx->opcode)]); 1824 } else { 1825 tcg_gen_movi_i64(cpu_gpr[rD(ctx->opcode)], -1); 1826 } 1827 } 1828 #endif 1829 1830 /*** Integer rotate ***/ 1831 1832 /* rlwimi & rlwimi. 
*/ 1833 static void gen_rlwimi(DisasContext *ctx) 1834 { 1835 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 1836 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 1837 uint32_t sh = SH(ctx->opcode); 1838 uint32_t mb = MB(ctx->opcode); 1839 uint32_t me = ME(ctx->opcode); 1840 1841 if (sh == (31-me) && mb <= me) { 1842 tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1); 1843 } else { 1844 target_ulong mask; 1845 TCGv t1; 1846 1847 #if defined(TARGET_PPC64) 1848 mb += 32; 1849 me += 32; 1850 #endif 1851 mask = MASK(mb, me); 1852 1853 t1 = tcg_temp_new(); 1854 if (mask <= 0xffffffffu) { 1855 TCGv_i32 t0 = tcg_temp_new_i32(); 1856 tcg_gen_trunc_tl_i32(t0, t_rs); 1857 tcg_gen_rotli_i32(t0, t0, sh); 1858 tcg_gen_extu_i32_tl(t1, t0); 1859 tcg_temp_free_i32(t0); 1860 } else { 1861 #if defined(TARGET_PPC64) 1862 tcg_gen_deposit_i64(t1, t_rs, t_rs, 32, 32); 1863 tcg_gen_rotli_i64(t1, t1, sh); 1864 #else 1865 g_assert_not_reached(); 1866 #endif 1867 } 1868 1869 tcg_gen_andi_tl(t1, t1, mask); 1870 tcg_gen_andi_tl(t_ra, t_ra, ~mask); 1871 tcg_gen_or_tl(t_ra, t_ra, t1); 1872 tcg_temp_free(t1); 1873 } 1874 if (unlikely(Rc(ctx->opcode) != 0)) { 1875 gen_set_Rc0(ctx, t_ra); 1876 } 1877 } 1878 1879 /* rlwinm & rlwinm. 
*/ 1880 static void gen_rlwinm(DisasContext *ctx) 1881 { 1882 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 1883 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 1884 int sh = SH(ctx->opcode); 1885 int mb = MB(ctx->opcode); 1886 int me = ME(ctx->opcode); 1887 int len = me - mb + 1; 1888 int rsh = (32 - sh) & 31; 1889 1890 if (sh != 0 && len > 0 && me == (31 - sh)) { 1891 tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len); 1892 } else if (me == 31 && rsh + len <= 32) { 1893 tcg_gen_extract_tl(t_ra, t_rs, rsh, len); 1894 } else { 1895 target_ulong mask; 1896 #if defined(TARGET_PPC64) 1897 mb += 32; 1898 me += 32; 1899 #endif 1900 mask = MASK(mb, me); 1901 if (sh == 0) { 1902 tcg_gen_andi_tl(t_ra, t_rs, mask); 1903 } else if (mask <= 0xffffffffu) { 1904 TCGv_i32 t0 = tcg_temp_new_i32(); 1905 tcg_gen_trunc_tl_i32(t0, t_rs); 1906 tcg_gen_rotli_i32(t0, t0, sh); 1907 tcg_gen_andi_i32(t0, t0, mask); 1908 tcg_gen_extu_i32_tl(t_ra, t0); 1909 tcg_temp_free_i32(t0); 1910 } else { 1911 #if defined(TARGET_PPC64) 1912 tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32); 1913 tcg_gen_rotli_i64(t_ra, t_ra, sh); 1914 tcg_gen_andi_i64(t_ra, t_ra, mask); 1915 #else 1916 g_assert_not_reached(); 1917 #endif 1918 } 1919 } 1920 if (unlikely(Rc(ctx->opcode) != 0)) { 1921 gen_set_Rc0(ctx, t_ra); 1922 } 1923 } 1924 1925 /* rlwnm & rlwnm. 
*/ 1926 static void gen_rlwnm(DisasContext *ctx) 1927 { 1928 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 1929 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 1930 TCGv t_rb = cpu_gpr[rB(ctx->opcode)]; 1931 uint32_t mb = MB(ctx->opcode); 1932 uint32_t me = ME(ctx->opcode); 1933 target_ulong mask; 1934 1935 #if defined(TARGET_PPC64) 1936 mb += 32; 1937 me += 32; 1938 #endif 1939 mask = MASK(mb, me); 1940 1941 if (mask <= 0xffffffffu) { 1942 TCGv_i32 t0 = tcg_temp_new_i32(); 1943 TCGv_i32 t1 = tcg_temp_new_i32(); 1944 tcg_gen_trunc_tl_i32(t0, t_rb); 1945 tcg_gen_trunc_tl_i32(t1, t_rs); 1946 tcg_gen_andi_i32(t0, t0, 0x1f); 1947 tcg_gen_rotl_i32(t1, t1, t0); 1948 tcg_gen_extu_i32_tl(t_ra, t1); 1949 tcg_temp_free_i32(t0); 1950 tcg_temp_free_i32(t1); 1951 } else { 1952 #if defined(TARGET_PPC64) 1953 TCGv_i64 t0 = tcg_temp_new_i64(); 1954 tcg_gen_andi_i64(t0, t_rb, 0x1f); 1955 tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32); 1956 tcg_gen_rotl_i64(t_ra, t_ra, t0); 1957 tcg_temp_free_i64(t0); 1958 #else 1959 g_assert_not_reached(); 1960 #endif 1961 } 1962 1963 tcg_gen_andi_tl(t_ra, t_ra, mask); 1964 1965 if (unlikely(Rc(ctx->opcode) != 0)) { 1966 gen_set_Rc0(ctx, t_ra); 1967 } 1968 } 1969 1970 #if defined(TARGET_PPC64) 1971 #define GEN_PPC64_R2(name, opc1, opc2) \ 1972 static void glue(gen_, name##0)(DisasContext *ctx) \ 1973 { \ 1974 gen_##name(ctx, 0); \ 1975 } \ 1976 \ 1977 static void glue(gen_, name##1)(DisasContext *ctx) \ 1978 { \ 1979 gen_##name(ctx, 1); \ 1980 } 1981 #define GEN_PPC64_R4(name, opc1, opc2) \ 1982 static void glue(gen_, name##0)(DisasContext *ctx) \ 1983 { \ 1984 gen_##name(ctx, 0, 0); \ 1985 } \ 1986 \ 1987 static void glue(gen_, name##1)(DisasContext *ctx) \ 1988 { \ 1989 gen_##name(ctx, 0, 1); \ 1990 } \ 1991 \ 1992 static void glue(gen_, name##2)(DisasContext *ctx) \ 1993 { \ 1994 gen_##name(ctx, 1, 0); \ 1995 } \ 1996 \ 1997 static void glue(gen_, name##3)(DisasContext *ctx) \ 1998 { \ 1999 gen_##name(ctx, 1, 1); \ 2000 } 2001 2002 static void 
gen_rldinm(DisasContext *ctx, int mb, int me, int sh) 2003 { 2004 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2005 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2006 int len = me - mb + 1; 2007 int rsh = (64 - sh) & 63; 2008 2009 if (sh != 0 && len > 0 && me == (63 - sh)) { 2010 tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len); 2011 } else if (me == 63 && rsh + len <= 64) { 2012 tcg_gen_extract_tl(t_ra, t_rs, rsh, len); 2013 } else { 2014 tcg_gen_rotli_tl(t_ra, t_rs, sh); 2015 tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me)); 2016 } 2017 if (unlikely(Rc(ctx->opcode) != 0)) { 2018 gen_set_Rc0(ctx, t_ra); 2019 } 2020 } 2021 2022 /* rldicl - rldicl. */ 2023 static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn) 2024 { 2025 uint32_t sh, mb; 2026 2027 sh = SH(ctx->opcode) | (shn << 5); 2028 mb = MB(ctx->opcode) | (mbn << 5); 2029 gen_rldinm(ctx, mb, 63, sh); 2030 } 2031 GEN_PPC64_R4(rldicl, 0x1E, 0x00); 2032 2033 /* rldicr - rldicr. */ 2034 static inline void gen_rldicr(DisasContext *ctx, int men, int shn) 2035 { 2036 uint32_t sh, me; 2037 2038 sh = SH(ctx->opcode) | (shn << 5); 2039 me = MB(ctx->opcode) | (men << 5); 2040 gen_rldinm(ctx, 0, me, sh); 2041 } 2042 GEN_PPC64_R4(rldicr, 0x1E, 0x02); 2043 2044 /* rldic - rldic. 
*/ 2045 static inline void gen_rldic(DisasContext *ctx, int mbn, int shn) 2046 { 2047 uint32_t sh, mb; 2048 2049 sh = SH(ctx->opcode) | (shn << 5); 2050 mb = MB(ctx->opcode) | (mbn << 5); 2051 gen_rldinm(ctx, mb, 63 - sh, sh); 2052 } 2053 GEN_PPC64_R4(rldic, 0x1E, 0x04); 2054 2055 static void gen_rldnm(DisasContext *ctx, int mb, int me) 2056 { 2057 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2058 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2059 TCGv t_rb = cpu_gpr[rB(ctx->opcode)]; 2060 TCGv t0; 2061 2062 t0 = tcg_temp_new(); 2063 tcg_gen_andi_tl(t0, t_rb, 0x3f); 2064 tcg_gen_rotl_tl(t_ra, t_rs, t0); 2065 tcg_temp_free(t0); 2066 2067 tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me)); 2068 if (unlikely(Rc(ctx->opcode) != 0)) { 2069 gen_set_Rc0(ctx, t_ra); 2070 } 2071 } 2072 2073 /* rldcl - rldcl. */ 2074 static inline void gen_rldcl(DisasContext *ctx, int mbn) 2075 { 2076 uint32_t mb; 2077 2078 mb = MB(ctx->opcode) | (mbn << 5); 2079 gen_rldnm(ctx, mb, 63); 2080 } 2081 GEN_PPC64_R2(rldcl, 0x1E, 0x08); 2082 2083 /* rldcr - rldcr. */ 2084 static inline void gen_rldcr(DisasContext *ctx, int men) 2085 { 2086 uint32_t me; 2087 2088 me = MB(ctx->opcode) | (men << 5); 2089 gen_rldnm(ctx, 0, me); 2090 } 2091 GEN_PPC64_R2(rldcr, 0x1E, 0x09); 2092 2093 /* rldimi - rldimi. 
*/ 2094 static void gen_rldimi(DisasContext *ctx, int mbn, int shn) 2095 { 2096 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2097 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2098 uint32_t sh = SH(ctx->opcode) | (shn << 5); 2099 uint32_t mb = MB(ctx->opcode) | (mbn << 5); 2100 uint32_t me = 63 - sh; 2101 2102 if (mb <= me) { 2103 tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1); 2104 } else { 2105 target_ulong mask = MASK(mb, me); 2106 TCGv t1 = tcg_temp_new(); 2107 2108 tcg_gen_rotli_tl(t1, t_rs, sh); 2109 tcg_gen_andi_tl(t1, t1, mask); 2110 tcg_gen_andi_tl(t_ra, t_ra, ~mask); 2111 tcg_gen_or_tl(t_ra, t_ra, t1); 2112 tcg_temp_free(t1); 2113 } 2114 if (unlikely(Rc(ctx->opcode) != 0)) { 2115 gen_set_Rc0(ctx, t_ra); 2116 } 2117 } 2118 GEN_PPC64_R4(rldimi, 0x1E, 0x06); 2119 #endif 2120 2121 /*** Integer shift ***/ 2122 2123 /* slw & slw. */ 2124 static void gen_slw(DisasContext *ctx) 2125 { 2126 TCGv t0, t1; 2127 2128 t0 = tcg_temp_new(); 2129 /* AND rS with a mask that is 0 when rB >= 0x20 */ 2130 #if defined(TARGET_PPC64) 2131 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a); 2132 tcg_gen_sari_tl(t0, t0, 0x3f); 2133 #else 2134 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a); 2135 tcg_gen_sari_tl(t0, t0, 0x1f); 2136 #endif 2137 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 2138 t1 = tcg_temp_new(); 2139 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f); 2140 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 2141 tcg_temp_free(t1); 2142 tcg_temp_free(t0); 2143 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 2144 if (unlikely(Rc(ctx->opcode) != 0)) 2145 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2146 } 2147 2148 /* sraw & sraw. */ 2149 static void gen_sraw(DisasContext *ctx) 2150 { 2151 gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], cpu_env, 2152 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2153 if (unlikely(Rc(ctx->opcode) != 0)) 2154 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2155 } 2156 2157 /* srawi & srawi. 
*/ 2158 static void gen_srawi(DisasContext *ctx) 2159 { 2160 int sh = SH(ctx->opcode); 2161 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 2162 TCGv src = cpu_gpr[rS(ctx->opcode)]; 2163 if (sh == 0) { 2164 tcg_gen_ext32s_tl(dst, src); 2165 tcg_gen_movi_tl(cpu_ca, 0); 2166 if (is_isa300(ctx)) { 2167 tcg_gen_movi_tl(cpu_ca32, 0); 2168 } 2169 } else { 2170 TCGv t0; 2171 tcg_gen_ext32s_tl(dst, src); 2172 tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1); 2173 t0 = tcg_temp_new(); 2174 tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1); 2175 tcg_gen_and_tl(cpu_ca, cpu_ca, t0); 2176 tcg_temp_free(t0); 2177 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0); 2178 if (is_isa300(ctx)) { 2179 tcg_gen_mov_tl(cpu_ca32, cpu_ca); 2180 } 2181 tcg_gen_sari_tl(dst, dst, sh); 2182 } 2183 if (unlikely(Rc(ctx->opcode) != 0)) { 2184 gen_set_Rc0(ctx, dst); 2185 } 2186 } 2187 2188 /* srw & srw. */ 2189 static void gen_srw(DisasContext *ctx) 2190 { 2191 TCGv t0, t1; 2192 2193 t0 = tcg_temp_new(); 2194 /* AND rS with a mask that is 0 when rB >= 0x20 */ 2195 #if defined(TARGET_PPC64) 2196 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a); 2197 tcg_gen_sari_tl(t0, t0, 0x3f); 2198 #else 2199 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a); 2200 tcg_gen_sari_tl(t0, t0, 0x1f); 2201 #endif 2202 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 2203 tcg_gen_ext32u_tl(t0, t0); 2204 t1 = tcg_temp_new(); 2205 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f); 2206 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 2207 tcg_temp_free(t1); 2208 tcg_temp_free(t0); 2209 if (unlikely(Rc(ctx->opcode) != 0)) 2210 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2211 } 2212 2213 #if defined(TARGET_PPC64) 2214 /* sld & sld. 
*/ 2215 static void gen_sld(DisasContext *ctx) 2216 { 2217 TCGv t0, t1; 2218 2219 t0 = tcg_temp_new(); 2220 /* AND rS with a mask that is 0 when rB >= 0x40 */ 2221 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39); 2222 tcg_gen_sari_tl(t0, t0, 0x3f); 2223 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 2224 t1 = tcg_temp_new(); 2225 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f); 2226 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 2227 tcg_temp_free(t1); 2228 tcg_temp_free(t0); 2229 if (unlikely(Rc(ctx->opcode) != 0)) 2230 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2231 } 2232 2233 /* srad & srad. */ 2234 static void gen_srad(DisasContext *ctx) 2235 { 2236 gen_helper_srad(cpu_gpr[rA(ctx->opcode)], cpu_env, 2237 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2238 if (unlikely(Rc(ctx->opcode) != 0)) 2239 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2240 } 2241 /* sradi & sradi. */ 2242 static inline void gen_sradi(DisasContext *ctx, int n) 2243 { 2244 int sh = SH(ctx->opcode) + (n << 5); 2245 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 2246 TCGv src = cpu_gpr[rS(ctx->opcode)]; 2247 if (sh == 0) { 2248 tcg_gen_mov_tl(dst, src); 2249 tcg_gen_movi_tl(cpu_ca, 0); 2250 if (is_isa300(ctx)) { 2251 tcg_gen_movi_tl(cpu_ca32, 0); 2252 } 2253 } else { 2254 TCGv t0; 2255 tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1); 2256 t0 = tcg_temp_new(); 2257 tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1); 2258 tcg_gen_and_tl(cpu_ca, cpu_ca, t0); 2259 tcg_temp_free(t0); 2260 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0); 2261 if (is_isa300(ctx)) { 2262 tcg_gen_mov_tl(cpu_ca32, cpu_ca); 2263 } 2264 tcg_gen_sari_tl(dst, src, sh); 2265 } 2266 if (unlikely(Rc(ctx->opcode) != 0)) { 2267 gen_set_Rc0(ctx, dst); 2268 } 2269 } 2270 2271 static void gen_sradi0(DisasContext *ctx) 2272 { 2273 gen_sradi(ctx, 0); 2274 } 2275 2276 static void gen_sradi1(DisasContext *ctx) 2277 { 2278 gen_sradi(ctx, 1); 2279 } 2280 2281 /* extswsli & extswsli. 
 */
/*
 * extswsli & extswsli. : extend sign word and shift left immediate
 * (ISA 3.00).  @n supplies the high bit of the 6-bit shift amount.
 */
static inline void gen_extswsli(DisasContext *ctx, int n)
{
    int sh = SH(ctx->opcode) + (n << 5);
    TCGv dst = cpu_gpr[rA(ctx->opcode)];
    TCGv src = cpu_gpr[rS(ctx->opcode)];

    tcg_gen_ext32s_tl(dst, src);
    tcg_gen_shli_tl(dst, dst, sh);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, dst);
    }
}

static void gen_extswsli0(DisasContext *ctx)
{
    gen_extswsli(ctx, 0);
}

static void gen_extswsli1(DisasContext *ctx)
{
    gen_extswsli(ctx, 1);
}

/*
 * srd & srd. : shift right doubleword (shift amount from rB).
 * Shift amounts >= 0x40 yield zero via the smeared-sign mask trick.
 */
static void gen_srd(DisasContext *ctx)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    /* AND rS with a mask that is 0 when rB >= 0x40 */
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
    tcg_gen_sari_tl(t0, t0, 0x3f);
    tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
    tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
#endif

/*** Addressing modes ***/
/*
 * Register indirect with immediate index : EA = (rA|0) + SIMM.
 * @maskl clears low bits of the displacement (DS/DQ-form alignment).
 * In narrow (32-bit) mode the EA is truncated to 32 bits.
 */
static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA,
                                      target_long maskl)
{
    target_long simm = SIMM(ctx->opcode);

    simm &= ~maskl;
    if (rA(ctx->opcode) == 0) {
        /* rA = 0 reads as the value zero, not GPR0. */
        if (NARROW_MODE(ctx)) {
            simm = (uint32_t)simm;
        }
        tcg_gen_movi_tl(EA, simm);
    } else if (likely(simm != 0)) {
        tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
        if (NARROW_MODE(ctx)) {
            tcg_gen_ext32u_tl(EA, EA);
        }
    } else {
        if (NARROW_MODE(ctx)) {
            tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
        } else {
            tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
        }
    }
}

/* Register indirect indexed : EA = (rA|0) + rB */
static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA)
{
    if (rA(ctx->opcode) == 0) {
        if (NARROW_MODE(ctx)) {
            tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]);
        } else {
            tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]);
        }
    } else {
        tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
        if (NARROW_MODE(ctx)) {
            tcg_gen_ext32u_tl(EA, EA);
        }
    }
}

/* Register indirect : EA = (rA|0) */
static inline void gen_addr_register(DisasContext *ctx, TCGv EA)
{
    if (rA(ctx->opcode) == 0) {
        tcg_gen_movi_tl(EA, 0);
    } else if (NARROW_MODE(ctx)) {
        tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
    } else {
        tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* ret = arg1 + val, truncated to 32 bits in narrow mode. */
static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1,
                                target_long val)
{
    tcg_gen_addi_tl(ret, arg1, val);
    if (NARROW_MODE(ctx)) {
        tcg_gen_ext32u_tl(ret, ret);
    }
}

/*
 * Raise an alignment interrupt for an instruction that is not supported
 * in little-endian mode; the opcode's reserved field is folded into the
 * error code.
 */
static inline void gen_align_no_le(DisasContext *ctx)
{
    gen_exception_err(ctx, POWERPC_EXCP_ALIGN,
                      (ctx->opcode & 0x03FF0000) | POWERPC_EXCP_ALIGN_LE);
}

/*** Integer load ***/
/* Memop in guest endianness (default_tcg_memop_mask carries MO_BSWAP). */
#define DEF_MEMOP(op) ((op) | ctx->default_tcg_memop_mask)
/* Memop in the opposite of guest endianness (byte-reversed accesses). */
#define BSWAP_MEMOP(op) ((op) | (ctx->default_tcg_memop_mask ^ MO_BSWAP))

/* Emit a target-long-sized guest load with the given memop. */
#define GEN_QEMU_LOAD_TL(ldop, op)                                      \
static void glue(gen_qemu_, ldop)(DisasContext *ctx,                    \
                                  TCGv val,                             \
                                  TCGv addr)                            \
{                                                                       \
    tcg_gen_qemu_ld_tl(val, addr, ctx->mem_idx, op);                    \
}

GEN_QEMU_LOAD_TL(ld8u,  DEF_MEMOP(MO_UB))
GEN_QEMU_LOAD_TL(ld16u, DEF_MEMOP(MO_UW))
GEN_QEMU_LOAD_TL(ld16s, DEF_MEMOP(MO_SW))
GEN_QEMU_LOAD_TL(ld32u, DEF_MEMOP(MO_UL))
GEN_QEMU_LOAD_TL(ld32s, DEF_MEMOP(MO_SL))

/* Byte-reversed variants. */
GEN_QEMU_LOAD_TL(ld16ur, BSWAP_MEMOP(MO_UW))
GEN_QEMU_LOAD_TL(ld32ur, BSWAP_MEMOP(MO_UL))

/* Emit a 64-bit guest load with the given memop. */
#define GEN_QEMU_LOAD_64(ldop, op)                                      \
static void glue(gen_qemu_, glue(ldop, _i64))(DisasContext *ctx,        \
                                              TCGv_i64 val,             \
                                              TCGv addr)                \
{                                                                       \
    tcg_gen_qemu_ld_i64(val, addr, ctx->mem_idx, op);                   \
}

GEN_QEMU_LOAD_64(ld8u,  DEF_MEMOP(MO_UB))
GEN_QEMU_LOAD_64(ld16u, DEF_MEMOP(MO_UW))
GEN_QEMU_LOAD_64(ld32u, DEF_MEMOP(MO_UL))
GEN_QEMU_LOAD_64(ld32s, DEF_MEMOP(MO_SL))
GEN_QEMU_LOAD_64(ld64,  DEF_MEMOP(MO_Q))

#if defined(TARGET_PPC64)
GEN_QEMU_LOAD_64(ld64ur, BSWAP_MEMOP(MO_Q))
#endif

/* Emit a target-long-sized guest store with the given memop. */
#define GEN_QEMU_STORE_TL(stop, op)                                     \
static void glue(gen_qemu_, stop)(DisasContext *ctx,                    \
                                  TCGv val,                             \
                                  TCGv addr)                            \
{                                                                       \
    tcg_gen_qemu_st_tl(val, addr, ctx->mem_idx, op);                    \
}

GEN_QEMU_STORE_TL(st8,  DEF_MEMOP(MO_UB))
GEN_QEMU_STORE_TL(st16, DEF_MEMOP(MO_UW))
GEN_QEMU_STORE_TL(st32, DEF_MEMOP(MO_UL))

/* Byte-reversed variants. */
GEN_QEMU_STORE_TL(st16r, BSWAP_MEMOP(MO_UW))
GEN_QEMU_STORE_TL(st32r, BSWAP_MEMOP(MO_UL))

/* Emit a 64-bit guest store with the given memop. */
#define GEN_QEMU_STORE_64(stop, op)                                     \
static void glue(gen_qemu_, glue(stop, _i64))(DisasContext *ctx,        \
                                              TCGv_i64 val,             \
                                              TCGv addr)                \
{                                                                       \
    tcg_gen_qemu_st_i64(val, addr, ctx->mem_idx, op);                   \
}

GEN_QEMU_STORE_64(st8,  DEF_MEMOP(MO_UB))
GEN_QEMU_STORE_64(st16, DEF_MEMOP(MO_UW))
GEN_QEMU_STORE_64(st32, DEF_MEMOP(MO_UL))
GEN_QEMU_STORE_64(st64, DEF_MEMOP(MO_Q))

#if defined(TARGET_PPC64)
GEN_QEMU_STORE_64(st64r, BSWAP_MEMOP(MO_Q))
#endif

/* Integer load, D-form: EA = (rA|0) + SIMM. */
#define GEN_LD(name, ldop, opc, type)                                         \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    TCGv EA;                                                                  \
    gen_set_access_type(ctx, ACCESS_INT);                                     \
    EA = tcg_temp_new();                                                      \
    gen_addr_imm_index(ctx, EA, 0);                                           \
    gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA);                       \
    tcg_temp_free(EA);                                                        \
}

/*
 * Integer load with update: rA is written back with the EA, so rA = 0
 * or rA = rD is invalid.  DS-form (PPC_64B) masks the low 2 bits of
 * the displacement.
 */
#define GEN_LDU(name, ldop, opc, type)                                        \
static void glue(gen_, name##u)(DisasContext *ctx)                            \
{                                                                             \
    TCGv EA;                                                                  \
    if (unlikely(rA(ctx->opcode) == 0 ||                                      \
                 rA(ctx->opcode) == rD(ctx->opcode))) {                       \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);                   \
        return;                                                               \
    }                                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                                     \
    EA = tcg_temp_new();                                                      \
    if (type == PPC_64B)                                                      \
        gen_addr_imm_index(ctx, EA, 0x03);                                    \
    else                                                                      \
        gen_addr_imm_index(ctx, EA, 0);                                       \
    gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA);                       \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);                             \
    tcg_temp_free(EA);                                                        \
}

/* Integer load indexed with update: EA = rA + rB, rA written back. */
#define GEN_LDUX(name, ldop, opc2, opc3, type)                                \
static void glue(gen_, name##ux)(DisasContext *ctx)                           \
{                                                                             \
    TCGv EA;                                                                  \
    if (unlikely(rA(ctx->opcode) == 0 ||                                      \
                 rA(ctx->opcode) == rD(ctx->opcode))) {                       \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);                   \
        return;                                                               \
    }                                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                                     \
    EA = tcg_temp_new();                                                      \
    gen_addr_reg_index(ctx, EA);                                              \
    gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA);                       \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);                             \
    tcg_temp_free(EA);                                                        \
}

/* Integer load indexed; @chk emits a privilege check (or CHK_NONE). */
#define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk)                   \
static void glue(gen_, name##x)(DisasContext *ctx)                            \
{                                                                             \
    TCGv EA;                                                                  \
    chk;                                                                      \
    gen_set_access_type(ctx, ACCESS_INT);                                     \
    EA = tcg_temp_new();                                                      \
    gen_addr_reg_index(ctx, EA);                                              \
    gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA);                       \
    tcg_temp_free(EA);                                                        \
}

#define GEN_LDX(name, ldop, opc2, opc3, type)                                 \
    GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_NONE)

/* Hypervisor-real-mode-only indexed load. */
#define GEN_LDX_HVRM(name, ldop, opc2, opc3, type)                            \
    GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_HVRM)

/* Generate all four forms (base / update / update-indexed / indexed). */
#define GEN_LDS(name, ldop, op, type)                                         \
GEN_LD(name, ldop, op | 0x20, type);                                          \
GEN_LDU(name, ldop, op | 0x21, type);                                         \
GEN_LDUX(name, ldop, 0x17, op | 0x01, type);                                  \
GEN_LDX(name, ldop, 0x17, op | 0x00, type)

/* lbz lbzu lbzux lbzx */
GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER);
/* lha lhau lhaux lhax */
GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER);
/* lhz lhzu lhzux
lhzx */ 2542 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER); 2543 /* lwz lwzu lwzux lwzx */ 2544 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER); 2545 2546 #define GEN_LDEPX(name, ldop, opc2, opc3) \ 2547 static void glue(gen_, name##epx)(DisasContext *ctx) \ 2548 { \ 2549 TCGv EA; \ 2550 CHK_SV; \ 2551 gen_set_access_type(ctx, ACCESS_INT); \ 2552 EA = tcg_temp_new(); \ 2553 gen_addr_reg_index(ctx, EA); \ 2554 tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_LOAD, ldop);\ 2555 tcg_temp_free(EA); \ 2556 } 2557 2558 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02) 2559 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08) 2560 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00) 2561 #if defined(TARGET_PPC64) 2562 GEN_LDEPX(ld, DEF_MEMOP(MO_Q), 0x1D, 0x00) 2563 #endif 2564 2565 #if defined(TARGET_PPC64) 2566 /* lwaux */ 2567 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B); 2568 /* lwax */ 2569 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B); 2570 /* ldux */ 2571 GEN_LDUX(ld, ld64_i64, 0x15, 0x01, PPC_64B); 2572 /* ldx */ 2573 GEN_LDX(ld, ld64_i64, 0x15, 0x00, PPC_64B); 2574 2575 /* CI load/store variants */ 2576 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST) 2577 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x15, PPC_CILDST) 2578 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST) 2579 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST) 2580 2581 static void gen_ld(DisasContext *ctx) 2582 { 2583 TCGv EA; 2584 if (Rc(ctx->opcode)) { 2585 if (unlikely(rA(ctx->opcode) == 0 || 2586 rA(ctx->opcode) == rD(ctx->opcode))) { 2587 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2588 return; 2589 } 2590 } 2591 gen_set_access_type(ctx, ACCESS_INT); 2592 EA = tcg_temp_new(); 2593 gen_addr_imm_index(ctx, EA, 0x03); 2594 if (ctx->opcode & 0x02) { 2595 /* lwa (lwau is undefined) */ 2596 gen_qemu_ld32s(ctx, cpu_gpr[rD(ctx->opcode)], EA); 2597 } else { 2598 /* ld - ldu */ 2599 gen_qemu_ld64_i64(ctx, cpu_gpr[rD(ctx->opcode)], EA); 2600 } 2601 if (Rc(ctx->opcode)) 2602 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); 2603 
tcg_temp_free(EA); 2604 } 2605 2606 /* lq */ 2607 static void gen_lq(DisasContext *ctx) 2608 { 2609 int ra, rd; 2610 TCGv EA, hi, lo; 2611 2612 /* lq is a legal user mode instruction starting in ISA 2.07 */ 2613 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2614 bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2615 2616 if (!legal_in_user_mode && ctx->pr) { 2617 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); 2618 return; 2619 } 2620 2621 if (!le_is_supported && ctx->le_mode) { 2622 gen_align_no_le(ctx); 2623 return; 2624 } 2625 ra = rA(ctx->opcode); 2626 rd = rD(ctx->opcode); 2627 if (unlikely((rd & 1) || rd == ra)) { 2628 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2629 return; 2630 } 2631 2632 gen_set_access_type(ctx, ACCESS_INT); 2633 EA = tcg_temp_new(); 2634 gen_addr_imm_index(ctx, EA, 0x0F); 2635 2636 /* Note that the low part is always in RD+1, even in LE mode. */ 2637 lo = cpu_gpr[rd + 1]; 2638 hi = cpu_gpr[rd]; 2639 2640 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 2641 if (HAVE_ATOMIC128) { 2642 TCGv_i32 oi = tcg_temp_new_i32(); 2643 if (ctx->le_mode) { 2644 tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ, ctx->mem_idx)); 2645 gen_helper_lq_le_parallel(lo, cpu_env, EA, oi); 2646 } else { 2647 tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ, ctx->mem_idx)); 2648 gen_helper_lq_be_parallel(lo, cpu_env, EA, oi); 2649 } 2650 tcg_temp_free_i32(oi); 2651 tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh)); 2652 } else { 2653 /* Restart with exclusive lock. 
*/ 2654 gen_helper_exit_atomic(cpu_env); 2655 ctx->base.is_jmp = DISAS_NORETURN; 2656 } 2657 } else if (ctx->le_mode) { 2658 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEQ); 2659 gen_addr_add(ctx, EA, EA, 8); 2660 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEQ); 2661 } else { 2662 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEQ); 2663 gen_addr_add(ctx, EA, EA, 8); 2664 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEQ); 2665 } 2666 tcg_temp_free(EA); 2667 } 2668 #endif 2669 2670 /*** Integer store ***/ 2671 #define GEN_ST(name, stop, opc, type) \ 2672 static void glue(gen_, name)(DisasContext *ctx) \ 2673 { \ 2674 TCGv EA; \ 2675 gen_set_access_type(ctx, ACCESS_INT); \ 2676 EA = tcg_temp_new(); \ 2677 gen_addr_imm_index(ctx, EA, 0); \ 2678 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2679 tcg_temp_free(EA); \ 2680 } 2681 2682 #define GEN_STU(name, stop, opc, type) \ 2683 static void glue(gen_, stop##u)(DisasContext *ctx) \ 2684 { \ 2685 TCGv EA; \ 2686 if (unlikely(rA(ctx->opcode) == 0)) { \ 2687 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 2688 return; \ 2689 } \ 2690 gen_set_access_type(ctx, ACCESS_INT); \ 2691 EA = tcg_temp_new(); \ 2692 if (type == PPC_64B) \ 2693 gen_addr_imm_index(ctx, EA, 0x03); \ 2694 else \ 2695 gen_addr_imm_index(ctx, EA, 0); \ 2696 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2697 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 2698 tcg_temp_free(EA); \ 2699 } 2700 2701 #define GEN_STUX(name, stop, opc2, opc3, type) \ 2702 static void glue(gen_, name##ux)(DisasContext *ctx) \ 2703 { \ 2704 TCGv EA; \ 2705 if (unlikely(rA(ctx->opcode) == 0)) { \ 2706 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 2707 return; \ 2708 } \ 2709 gen_set_access_type(ctx, ACCESS_INT); \ 2710 EA = tcg_temp_new(); \ 2711 gen_addr_reg_index(ctx, EA); \ 2712 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2713 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 2714 tcg_temp_free(EA); \ 2715 } 2716 2717 #define 
GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \ 2718 static void glue(gen_, name##x)(DisasContext *ctx) \ 2719 { \ 2720 TCGv EA; \ 2721 chk; \ 2722 gen_set_access_type(ctx, ACCESS_INT); \ 2723 EA = tcg_temp_new(); \ 2724 gen_addr_reg_index(ctx, EA); \ 2725 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2726 tcg_temp_free(EA); \ 2727 } 2728 #define GEN_STX(name, stop, opc2, opc3, type) \ 2729 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_NONE) 2730 2731 #define GEN_STX_HVRM(name, stop, opc2, opc3, type) \ 2732 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_HVRM) 2733 2734 #define GEN_STS(name, stop, op, type) \ 2735 GEN_ST(name, stop, op | 0x20, type); \ 2736 GEN_STU(name, stop, op | 0x21, type); \ 2737 GEN_STUX(name, stop, 0x17, op | 0x01, type); \ 2738 GEN_STX(name, stop, 0x17, op | 0x00, type) 2739 2740 /* stb stbu stbux stbx */ 2741 GEN_STS(stb, st8, 0x06, PPC_INTEGER); 2742 /* sth sthu sthux sthx */ 2743 GEN_STS(sth, st16, 0x0C, PPC_INTEGER); 2744 /* stw stwu stwux stwx */ 2745 GEN_STS(stw, st32, 0x04, PPC_INTEGER); 2746 2747 #define GEN_STEPX(name, stop, opc2, opc3) \ 2748 static void glue(gen_, name##epx)(DisasContext *ctx) \ 2749 { \ 2750 TCGv EA; \ 2751 CHK_SV; \ 2752 gen_set_access_type(ctx, ACCESS_INT); \ 2753 EA = tcg_temp_new(); \ 2754 gen_addr_reg_index(ctx, EA); \ 2755 tcg_gen_qemu_st_tl( \ 2756 cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_STORE, stop); \ 2757 tcg_temp_free(EA); \ 2758 } 2759 2760 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06) 2761 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C) 2762 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04) 2763 #if defined(TARGET_PPC64) 2764 GEN_STEPX(std, DEF_MEMOP(MO_Q), 0x1d, 0x04) 2765 #endif 2766 2767 #if defined(TARGET_PPC64) 2768 GEN_STUX(std, st64_i64, 0x15, 0x05, PPC_64B); 2769 GEN_STX(std, st64_i64, 0x15, 0x04, PPC_64B); 2770 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST) 2771 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST) 2772 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, 
PPC_CILDST) 2773 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST) 2774 2775 static void gen_std(DisasContext *ctx) 2776 { 2777 int rs; 2778 TCGv EA; 2779 2780 rs = rS(ctx->opcode); 2781 if ((ctx->opcode & 0x3) == 0x2) { /* stq */ 2782 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2783 bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2784 TCGv hi, lo; 2785 2786 if (!(ctx->insns_flags & PPC_64BX)) { 2787 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2788 } 2789 2790 if (!legal_in_user_mode && ctx->pr) { 2791 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); 2792 return; 2793 } 2794 2795 if (!le_is_supported && ctx->le_mode) { 2796 gen_align_no_le(ctx); 2797 return; 2798 } 2799 2800 if (unlikely(rs & 1)) { 2801 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2802 return; 2803 } 2804 gen_set_access_type(ctx, ACCESS_INT); 2805 EA = tcg_temp_new(); 2806 gen_addr_imm_index(ctx, EA, 0x03); 2807 2808 /* Note that the low part is always in RS+1, even in LE mode. */ 2809 lo = cpu_gpr[rs + 1]; 2810 hi = cpu_gpr[rs]; 2811 2812 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 2813 if (HAVE_ATOMIC128) { 2814 TCGv_i32 oi = tcg_temp_new_i32(); 2815 if (ctx->le_mode) { 2816 tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ, ctx->mem_idx)); 2817 gen_helper_stq_le_parallel(cpu_env, EA, lo, hi, oi); 2818 } else { 2819 tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ, ctx->mem_idx)); 2820 gen_helper_stq_be_parallel(cpu_env, EA, lo, hi, oi); 2821 } 2822 tcg_temp_free_i32(oi); 2823 } else { 2824 /* Restart with exclusive lock. 
*/ 2825 gen_helper_exit_atomic(cpu_env); 2826 ctx->base.is_jmp = DISAS_NORETURN; 2827 } 2828 } else if (ctx->le_mode) { 2829 tcg_gen_qemu_st_i64(lo, EA, ctx->mem_idx, MO_LEQ); 2830 gen_addr_add(ctx, EA, EA, 8); 2831 tcg_gen_qemu_st_i64(hi, EA, ctx->mem_idx, MO_LEQ); 2832 } else { 2833 tcg_gen_qemu_st_i64(hi, EA, ctx->mem_idx, MO_BEQ); 2834 gen_addr_add(ctx, EA, EA, 8); 2835 tcg_gen_qemu_st_i64(lo, EA, ctx->mem_idx, MO_BEQ); 2836 } 2837 tcg_temp_free(EA); 2838 } else { 2839 /* std / stdu */ 2840 if (Rc(ctx->opcode)) { 2841 if (unlikely(rA(ctx->opcode) == 0)) { 2842 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2843 return; 2844 } 2845 } 2846 gen_set_access_type(ctx, ACCESS_INT); 2847 EA = tcg_temp_new(); 2848 gen_addr_imm_index(ctx, EA, 0x03); 2849 gen_qemu_st64_i64(ctx, cpu_gpr[rs], EA); 2850 if (Rc(ctx->opcode)) 2851 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); 2852 tcg_temp_free(EA); 2853 } 2854 } 2855 #endif 2856 /*** Integer load and store with byte reverse ***/ 2857 2858 /* lhbrx */ 2859 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER); 2860 2861 /* lwbrx */ 2862 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER); 2863 2864 #if defined(TARGET_PPC64) 2865 /* ldbrx */ 2866 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE); 2867 /* stdbrx */ 2868 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE); 2869 #endif /* TARGET_PPC64 */ 2870 2871 /* sthbrx */ 2872 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER); 2873 /* stwbrx */ 2874 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER); 2875 2876 /*** Integer load and store multiple ***/ 2877 2878 /* lmw */ 2879 static void gen_lmw(DisasContext *ctx) 2880 { 2881 TCGv t0; 2882 TCGv_i32 t1; 2883 2884 if (ctx->le_mode) { 2885 gen_align_no_le(ctx); 2886 return; 2887 } 2888 gen_set_access_type(ctx, ACCESS_INT); 2889 t0 = tcg_temp_new(); 2890 t1 = tcg_const_i32(rD(ctx->opcode)); 2891 gen_addr_imm_index(ctx, t0, 0); 2892 gen_helper_lmw(cpu_env, t0, t1); 2893 tcg_temp_free(t0); 2894 
tcg_temp_free_i32(t1); 2895 } 2896 2897 /* stmw */ 2898 static void gen_stmw(DisasContext *ctx) 2899 { 2900 TCGv t0; 2901 TCGv_i32 t1; 2902 2903 if (ctx->le_mode) { 2904 gen_align_no_le(ctx); 2905 return; 2906 } 2907 gen_set_access_type(ctx, ACCESS_INT); 2908 t0 = tcg_temp_new(); 2909 t1 = tcg_const_i32(rS(ctx->opcode)); 2910 gen_addr_imm_index(ctx, t0, 0); 2911 gen_helper_stmw(cpu_env, t0, t1); 2912 tcg_temp_free(t0); 2913 tcg_temp_free_i32(t1); 2914 } 2915 2916 /*** Integer load and store strings ***/ 2917 2918 /* lswi */ 2919 /* PowerPC32 specification says we must generate an exception if 2920 * rA is in the range of registers to be loaded. 2921 * In an other hand, IBM says this is valid, but rA won't be loaded. 2922 * For now, I'll follow the spec... 2923 */ 2924 static void gen_lswi(DisasContext *ctx) 2925 { 2926 TCGv t0; 2927 TCGv_i32 t1, t2; 2928 int nb = NB(ctx->opcode); 2929 int start = rD(ctx->opcode); 2930 int ra = rA(ctx->opcode); 2931 int nr; 2932 2933 if (ctx->le_mode) { 2934 gen_align_no_le(ctx); 2935 return; 2936 } 2937 if (nb == 0) 2938 nb = 32; 2939 nr = DIV_ROUND_UP(nb, 4); 2940 if (unlikely(lsw_reg_in_range(start, nr, ra))) { 2941 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX); 2942 return; 2943 } 2944 gen_set_access_type(ctx, ACCESS_INT); 2945 t0 = tcg_temp_new(); 2946 gen_addr_register(ctx, t0); 2947 t1 = tcg_const_i32(nb); 2948 t2 = tcg_const_i32(start); 2949 gen_helper_lsw(cpu_env, t0, t1, t2); 2950 tcg_temp_free(t0); 2951 tcg_temp_free_i32(t1); 2952 tcg_temp_free_i32(t2); 2953 } 2954 2955 /* lswx */ 2956 static void gen_lswx(DisasContext *ctx) 2957 { 2958 TCGv t0; 2959 TCGv_i32 t1, t2, t3; 2960 2961 if (ctx->le_mode) { 2962 gen_align_no_le(ctx); 2963 return; 2964 } 2965 gen_set_access_type(ctx, ACCESS_INT); 2966 t0 = tcg_temp_new(); 2967 gen_addr_reg_index(ctx, t0); 2968 t1 = tcg_const_i32(rD(ctx->opcode)); 2969 t2 = tcg_const_i32(rA(ctx->opcode)); 2970 t3 = tcg_const_i32(rB(ctx->opcode)); 2971 gen_helper_lswx(cpu_env, t0, t1, t2, t3); 
2972 tcg_temp_free(t0); 2973 tcg_temp_free_i32(t1); 2974 tcg_temp_free_i32(t2); 2975 tcg_temp_free_i32(t3); 2976 } 2977 2978 /* stswi */ 2979 static void gen_stswi(DisasContext *ctx) 2980 { 2981 TCGv t0; 2982 TCGv_i32 t1, t2; 2983 int nb = NB(ctx->opcode); 2984 2985 if (ctx->le_mode) { 2986 gen_align_no_le(ctx); 2987 return; 2988 } 2989 gen_set_access_type(ctx, ACCESS_INT); 2990 t0 = tcg_temp_new(); 2991 gen_addr_register(ctx, t0); 2992 if (nb == 0) 2993 nb = 32; 2994 t1 = tcg_const_i32(nb); 2995 t2 = tcg_const_i32(rS(ctx->opcode)); 2996 gen_helper_stsw(cpu_env, t0, t1, t2); 2997 tcg_temp_free(t0); 2998 tcg_temp_free_i32(t1); 2999 tcg_temp_free_i32(t2); 3000 } 3001 3002 /* stswx */ 3003 static void gen_stswx(DisasContext *ctx) 3004 { 3005 TCGv t0; 3006 TCGv_i32 t1, t2; 3007 3008 if (ctx->le_mode) { 3009 gen_align_no_le(ctx); 3010 return; 3011 } 3012 gen_set_access_type(ctx, ACCESS_INT); 3013 t0 = tcg_temp_new(); 3014 gen_addr_reg_index(ctx, t0); 3015 t1 = tcg_temp_new_i32(); 3016 tcg_gen_trunc_tl_i32(t1, cpu_xer); 3017 tcg_gen_andi_i32(t1, t1, 0x7F); 3018 t2 = tcg_const_i32(rS(ctx->opcode)); 3019 gen_helper_stsw(cpu_env, t0, t1, t2); 3020 tcg_temp_free(t0); 3021 tcg_temp_free_i32(t1); 3022 tcg_temp_free_i32(t2); 3023 } 3024 3025 /*** Memory synchronisation ***/ 3026 /* eieio */ 3027 static void gen_eieio(DisasContext *ctx) 3028 { 3029 TCGBar bar = TCG_MO_LD_ST; 3030 3031 /* 3032 * POWER9 has a eieio instruction variant using bit 6 as a hint to 3033 * tell the CPU it is a store-forwarding barrier. 3034 */ 3035 if (ctx->opcode & 0x2000000) { 3036 /* 3037 * ISA says that "Reserved fields in instructions are ignored 3038 * by the processor". So ignore the bit 6 on non-POWER9 CPU but 3039 * as this is not an instruction software should be using, 3040 * complain to the user. 
3041 */ 3042 if (!(ctx->insns_flags2 & PPC2_ISA300)) { 3043 qemu_log_mask(LOG_GUEST_ERROR, "invalid eieio using bit 6 at @" 3044 TARGET_FMT_lx "\n", ctx->base.pc_next - 4); 3045 } else { 3046 bar = TCG_MO_ST_LD; 3047 } 3048 } 3049 3050 tcg_gen_mb(bar | TCG_BAR_SC); 3051 } 3052 3053 #if !defined(CONFIG_USER_ONLY) 3054 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) 3055 { 3056 TCGv_i32 t; 3057 TCGLabel *l; 3058 3059 if (!ctx->lazy_tlb_flush) { 3060 return; 3061 } 3062 l = gen_new_label(); 3063 t = tcg_temp_new_i32(); 3064 tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, tlb_need_flush)); 3065 tcg_gen_brcondi_i32(TCG_COND_EQ, t, 0, l); 3066 if (global) { 3067 gen_helper_check_tlb_flush_global(cpu_env); 3068 } else { 3069 gen_helper_check_tlb_flush_local(cpu_env); 3070 } 3071 gen_set_label(l); 3072 tcg_temp_free_i32(t); 3073 } 3074 #else 3075 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) { } 3076 #endif 3077 3078 /* isync */ 3079 static void gen_isync(DisasContext *ctx) 3080 { 3081 /* 3082 * We need to check for a pending TLB flush. 
This can only happen in 3083 * kernel mode however so check MSR_PR 3084 */ 3085 if (!ctx->pr) { 3086 gen_check_tlb_flush(ctx, false); 3087 } 3088 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC); 3089 gen_stop_exception(ctx); 3090 } 3091 3092 #define MEMOP_GET_SIZE(x) (1 << ((x) & MO_SIZE)) 3093 3094 static void gen_load_locked(DisasContext *ctx, TCGMemOp memop) 3095 { 3096 TCGv gpr = cpu_gpr[rD(ctx->opcode)]; 3097 TCGv t0 = tcg_temp_new(); 3098 3099 gen_set_access_type(ctx, ACCESS_RES); 3100 gen_addr_reg_index(ctx, t0); 3101 tcg_gen_qemu_ld_tl(gpr, t0, ctx->mem_idx, memop | MO_ALIGN); 3102 tcg_gen_mov_tl(cpu_reserve, t0); 3103 tcg_gen_mov_tl(cpu_reserve_val, gpr); 3104 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_LDAQ); 3105 tcg_temp_free(t0); 3106 } 3107 3108 #define LARX(name, memop) \ 3109 static void gen_##name(DisasContext *ctx) \ 3110 { \ 3111 gen_load_locked(ctx, memop); \ 3112 } 3113 3114 /* lwarx */ 3115 LARX(lbarx, DEF_MEMOP(MO_UB)) 3116 LARX(lharx, DEF_MEMOP(MO_UW)) 3117 LARX(lwarx, DEF_MEMOP(MO_UL)) 3118 3119 static void gen_fetch_inc_conditional(DisasContext *ctx, TCGMemOp memop, 3120 TCGv EA, TCGCond cond, int addend) 3121 { 3122 TCGv t = tcg_temp_new(); 3123 TCGv t2 = tcg_temp_new(); 3124 TCGv u = tcg_temp_new(); 3125 3126 tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop); 3127 tcg_gen_addi_tl(t2, EA, MEMOP_GET_SIZE(memop)); 3128 tcg_gen_qemu_ld_tl(t2, t2, ctx->mem_idx, memop); 3129 tcg_gen_addi_tl(u, t, addend); 3130 3131 /* E.g. for fetch and increment bounded... */ 3132 /* mem(EA,s) = (t != t2 ? u = t + 1 : t) */ 3133 tcg_gen_movcond_tl(cond, u, t, t2, u, t); 3134 tcg_gen_qemu_st_tl(u, EA, ctx->mem_idx, memop); 3135 3136 /* RT = (t != t2 ? 
t : u = 1<<(s*8-1)) */ 3137 tcg_gen_movi_tl(u, 1 << (MEMOP_GET_SIZE(memop) * 8 - 1)); 3138 tcg_gen_movcond_tl(cond, cpu_gpr[rD(ctx->opcode)], t, t2, t, u); 3139 3140 tcg_temp_free(t); 3141 tcg_temp_free(t2); 3142 tcg_temp_free(u); 3143 } 3144 3145 static void gen_ld_atomic(DisasContext *ctx, TCGMemOp memop) 3146 { 3147 uint32_t gpr_FC = FC(ctx->opcode); 3148 TCGv EA = tcg_temp_new(); 3149 int rt = rD(ctx->opcode); 3150 bool need_serial; 3151 TCGv src, dst; 3152 3153 gen_addr_register(ctx, EA); 3154 dst = cpu_gpr[rt]; 3155 src = cpu_gpr[(rt + 1) & 31]; 3156 3157 need_serial = false; 3158 memop |= MO_ALIGN; 3159 switch (gpr_FC) { 3160 case 0: /* Fetch and add */ 3161 tcg_gen_atomic_fetch_add_tl(dst, EA, src, ctx->mem_idx, memop); 3162 break; 3163 case 1: /* Fetch and xor */ 3164 tcg_gen_atomic_fetch_xor_tl(dst, EA, src, ctx->mem_idx, memop); 3165 break; 3166 case 2: /* Fetch and or */ 3167 tcg_gen_atomic_fetch_or_tl(dst, EA, src, ctx->mem_idx, memop); 3168 break; 3169 case 3: /* Fetch and 'and' */ 3170 tcg_gen_atomic_fetch_and_tl(dst, EA, src, ctx->mem_idx, memop); 3171 break; 3172 case 4: /* Fetch and max unsigned */ 3173 tcg_gen_atomic_fetch_umax_tl(dst, EA, src, ctx->mem_idx, memop); 3174 break; 3175 case 5: /* Fetch and max signed */ 3176 tcg_gen_atomic_fetch_smax_tl(dst, EA, src, ctx->mem_idx, memop); 3177 break; 3178 case 6: /* Fetch and min unsigned */ 3179 tcg_gen_atomic_fetch_umin_tl(dst, EA, src, ctx->mem_idx, memop); 3180 break; 3181 case 7: /* Fetch and min signed */ 3182 tcg_gen_atomic_fetch_smin_tl(dst, EA, src, ctx->mem_idx, memop); 3183 break; 3184 case 8: /* Swap */ 3185 tcg_gen_atomic_xchg_tl(dst, EA, src, ctx->mem_idx, memop); 3186 break; 3187 3188 case 16: /* Compare and swap not equal */ 3189 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3190 need_serial = true; 3191 } else { 3192 TCGv t0 = tcg_temp_new(); 3193 TCGv t1 = tcg_temp_new(); 3194 3195 tcg_gen_qemu_ld_tl(t0, EA, ctx->mem_idx, memop); 3196 if ((memop & MO_SIZE) == MO_64 || 
TARGET_LONG_BITS == 32) { 3197 tcg_gen_mov_tl(t1, src); 3198 } else { 3199 tcg_gen_ext32u_tl(t1, src); 3200 } 3201 tcg_gen_movcond_tl(TCG_COND_NE, t1, t0, t1, 3202 cpu_gpr[(rt + 2) & 31], t0); 3203 tcg_gen_qemu_st_tl(t1, EA, ctx->mem_idx, memop); 3204 tcg_gen_mov_tl(dst, t0); 3205 3206 tcg_temp_free(t0); 3207 tcg_temp_free(t1); 3208 } 3209 break; 3210 3211 case 24: /* Fetch and increment bounded */ 3212 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3213 need_serial = true; 3214 } else { 3215 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, 1); 3216 } 3217 break; 3218 case 25: /* Fetch and increment equal */ 3219 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3220 need_serial = true; 3221 } else { 3222 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_EQ, 1); 3223 } 3224 break; 3225 case 28: /* Fetch and decrement bounded */ 3226 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3227 need_serial = true; 3228 } else { 3229 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, -1); 3230 } 3231 break; 3232 3233 default: 3234 /* invoke data storage error handler */ 3235 gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL); 3236 } 3237 tcg_temp_free(EA); 3238 3239 if (need_serial) { 3240 /* Restart with exclusive lock. 
*/ 3241 gen_helper_exit_atomic(cpu_env); 3242 ctx->base.is_jmp = DISAS_NORETURN; 3243 } 3244 } 3245 3246 static void gen_lwat(DisasContext *ctx) 3247 { 3248 gen_ld_atomic(ctx, DEF_MEMOP(MO_UL)); 3249 } 3250 3251 #ifdef TARGET_PPC64 3252 static void gen_ldat(DisasContext *ctx) 3253 { 3254 gen_ld_atomic(ctx, DEF_MEMOP(MO_Q)); 3255 } 3256 #endif 3257 3258 static void gen_st_atomic(DisasContext *ctx, TCGMemOp memop) 3259 { 3260 uint32_t gpr_FC = FC(ctx->opcode); 3261 TCGv EA = tcg_temp_new(); 3262 TCGv src, discard; 3263 3264 gen_addr_register(ctx, EA); 3265 src = cpu_gpr[rD(ctx->opcode)]; 3266 discard = tcg_temp_new(); 3267 3268 memop |= MO_ALIGN; 3269 switch (gpr_FC) { 3270 case 0: /* add and Store */ 3271 tcg_gen_atomic_add_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3272 break; 3273 case 1: /* xor and Store */ 3274 tcg_gen_atomic_xor_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3275 break; 3276 case 2: /* Or and Store */ 3277 tcg_gen_atomic_or_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3278 break; 3279 case 3: /* 'and' and Store */ 3280 tcg_gen_atomic_and_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3281 break; 3282 case 4: /* Store max unsigned */ 3283 tcg_gen_atomic_umax_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3284 break; 3285 case 5: /* Store max signed */ 3286 tcg_gen_atomic_smax_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3287 break; 3288 case 6: /* Store min unsigned */ 3289 tcg_gen_atomic_umin_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3290 break; 3291 case 7: /* Store min signed */ 3292 tcg_gen_atomic_smin_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3293 break; 3294 case 24: /* Store twin */ 3295 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3296 /* Restart with exclusive lock. 
*/ 3297 gen_helper_exit_atomic(cpu_env); 3298 ctx->base.is_jmp = DISAS_NORETURN; 3299 } else { 3300 TCGv t = tcg_temp_new(); 3301 TCGv t2 = tcg_temp_new(); 3302 TCGv s = tcg_temp_new(); 3303 TCGv s2 = tcg_temp_new(); 3304 TCGv ea_plus_s = tcg_temp_new(); 3305 3306 tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop); 3307 tcg_gen_addi_tl(ea_plus_s, EA, MEMOP_GET_SIZE(memop)); 3308 tcg_gen_qemu_ld_tl(t2, ea_plus_s, ctx->mem_idx, memop); 3309 tcg_gen_movcond_tl(TCG_COND_EQ, s, t, t2, src, t); 3310 tcg_gen_movcond_tl(TCG_COND_EQ, s2, t, t2, src, t2); 3311 tcg_gen_qemu_st_tl(s, EA, ctx->mem_idx, memop); 3312 tcg_gen_qemu_st_tl(s2, ea_plus_s, ctx->mem_idx, memop); 3313 3314 tcg_temp_free(ea_plus_s); 3315 tcg_temp_free(s2); 3316 tcg_temp_free(s); 3317 tcg_temp_free(t2); 3318 tcg_temp_free(t); 3319 } 3320 break; 3321 default: 3322 /* invoke data storage error handler */ 3323 gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL); 3324 } 3325 tcg_temp_free(discard); 3326 tcg_temp_free(EA); 3327 } 3328 3329 static void gen_stwat(DisasContext *ctx) 3330 { 3331 gen_st_atomic(ctx, DEF_MEMOP(MO_UL)); 3332 } 3333 3334 #ifdef TARGET_PPC64 3335 static void gen_stdat(DisasContext *ctx) 3336 { 3337 gen_st_atomic(ctx, DEF_MEMOP(MO_Q)); 3338 } 3339 #endif 3340 3341 static void gen_conditional_store(DisasContext *ctx, TCGMemOp memop) 3342 { 3343 TCGLabel *l1 = gen_new_label(); 3344 TCGLabel *l2 = gen_new_label(); 3345 TCGv t0 = tcg_temp_new(); 3346 int reg = rS(ctx->opcode); 3347 3348 gen_set_access_type(ctx, ACCESS_RES); 3349 gen_addr_reg_index(ctx, t0); 3350 tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1); 3351 tcg_temp_free(t0); 3352 3353 t0 = tcg_temp_new(); 3354 tcg_gen_atomic_cmpxchg_tl(t0, cpu_reserve, cpu_reserve_val, 3355 cpu_gpr[reg], ctx->mem_idx, 3356 DEF_MEMOP(memop) | MO_ALIGN); 3357 tcg_gen_setcond_tl(TCG_COND_EQ, t0, t0, cpu_reserve_val); 3358 tcg_gen_shli_tl(t0, t0, CRF_EQ_BIT); 3359 tcg_gen_or_tl(t0, t0, cpu_so); 3360 tcg_gen_trunc_tl_i32(cpu_crf[0], t0); 3361 
    tcg_temp_free(t0);
    tcg_gen_br(l2);

    gen_set_label(l1);

    /* Address mismatch implies failure. But we still need to provide the
       memory barrier semantics of the instruction. */
    tcg_gen_mb(TCG_MO_ALL | TCG_BAR_STRL);
    tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);

    gen_set_label(l2);
    /* Kill the reservation in either case. */
    tcg_gen_movi_tl(cpu_reserve, -1);
}

#define STCX(name, memop)                  \
static void gen_##name(DisasContext *ctx)  \
{                                          \
    gen_conditional_store(ctx, memop);     \
}

STCX(stbcx_, DEF_MEMOP(MO_UB))
STCX(sthcx_, DEF_MEMOP(MO_UW))
STCX(stwcx_, DEF_MEMOP(MO_UL))

#if defined(TARGET_PPC64)
/* ldarx */
LARX(ldarx, DEF_MEMOP(MO_Q))
/* stdcx. */
STCX(stdcx_, DEF_MEMOP(MO_Q))

/* lqarx: load quadword and reserve.  rD must be even and must not
 * overlap rA/rB, otherwise the form is invalid. */
static void gen_lqarx(DisasContext *ctx)
{
    int rd = rD(ctx->opcode);
    TCGv EA, hi, lo;

    if (unlikely((rd & 1) || (rd == rA(ctx->opcode)) ||
                 (rd == rB(ctx->opcode)))) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }

    gen_set_access_type(ctx, ACCESS_RES);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);

    /* Note that the low part is always in RD+1, even in LE mode.  */
    lo = cpu_gpr[rd + 1];
    hi = cpu_gpr[rd];

    if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
        if (HAVE_ATOMIC128) {
            /* Use the 128-bit atomic load helpers; the high half is
             * returned out-of-band through env->retxh. */
            TCGv_i32 oi = tcg_temp_new_i32();
            if (ctx->le_mode) {
                tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ | MO_ALIGN_16,
                                                    ctx->mem_idx));
                gen_helper_lq_le_parallel(lo, cpu_env, EA, oi);
            } else {
                tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ | MO_ALIGN_16,
                                                    ctx->mem_idx));
                gen_helper_lq_be_parallel(lo, cpu_env, EA, oi);
            }
            tcg_temp_free_i32(oi);
            tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh));
        } else {
            /* Restart with exclusive lock.  */
            gen_helper_exit_atomic(cpu_env);
            ctx->base.is_jmp = DISAS_NORETURN;
            tcg_temp_free(EA);
            return;
        }
    } else if (ctx->le_mode) {
        tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEQ | MO_ALIGN_16);
        tcg_gen_mov_tl(cpu_reserve, EA);
        gen_addr_add(ctx, EA, EA, 8);
        tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEQ);
    } else {
        tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEQ | MO_ALIGN_16);
        tcg_gen_mov_tl(cpu_reserve, EA);
        gen_addr_add(ctx, EA, EA, 8);
        tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);

    /* Record the loaded pair so stqcx. can verify it later. */
    tcg_gen_st_tl(hi, cpu_env, offsetof(CPUPPCState, reserve_val));
    tcg_gen_st_tl(lo, cpu_env, offsetof(CPUPPCState, reserve_val2));
}

/* stqcx. */
static void gen_stqcx_(DisasContext *ctx)
{
    int rs = rS(ctx->opcode);
    TCGv EA, hi, lo;

    if (unlikely(rs & 1)) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }

    gen_set_access_type(ctx, ACCESS_RES);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);

    /* Note that the low part is always in RS+1, even in LE mode.  */
    lo = cpu_gpr[rs + 1];
    hi = cpu_gpr[rs];

    if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
        if (HAVE_CMPXCHG128) {
            TCGv_i32 oi = tcg_const_i32(DEF_MEMOP(MO_Q) | MO_ALIGN_16);
            if (ctx->le_mode) {
                gen_helper_stqcx_le_parallel(cpu_crf[0], cpu_env,
                                             EA, lo, hi, oi);
            } else {
                gen_helper_stqcx_be_parallel(cpu_crf[0], cpu_env,
                                             EA, lo, hi, oi);
            }
            tcg_temp_free_i32(oi);
        } else {
            /* Restart with exclusive lock.  */
            gen_helper_exit_atomic(cpu_env);
            ctx->base.is_jmp = DISAS_NORETURN;
        }
        tcg_temp_free(EA);
    } else {
        /* Serial context: compare address and both halves against the
         * reservation, then perform the paired store inline. */
        TCGLabel *lab_fail = gen_new_label();
        TCGLabel *lab_over = gen_new_label();
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, lab_fail);
        tcg_temp_free(EA);

        gen_qemu_ld64_i64(ctx, t0, cpu_reserve);
        tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode
                                     ? offsetof(CPUPPCState, reserve_val2)
                                     : offsetof(CPUPPCState, reserve_val)));
        tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail);

        tcg_gen_addi_i64(t0, cpu_reserve, 8);
        gen_qemu_ld64_i64(ctx, t0, t0);
        tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode
                                     ? offsetof(CPUPPCState, reserve_val)
                                     : offsetof(CPUPPCState, reserve_val2)));
        tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail);

        /* Success */
        gen_qemu_st64_i64(ctx, ctx->le_mode ? lo : hi, cpu_reserve);
        tcg_gen_addi_i64(t0, cpu_reserve, 8);
        gen_qemu_st64_i64(ctx, ctx->le_mode ? hi : lo, t0);

        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
        tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ);
        tcg_gen_br(lab_over);

        gen_set_label(lab_fail);
        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);

        gen_set_label(lab_over);
        tcg_gen_movi_tl(cpu_reserve, -1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
#endif /* defined(TARGET_PPC64) */

/* sync */
static void gen_sync(DisasContext *ctx)
{
    uint32_t l = (ctx->opcode >> 21) & 3;

    /*
     * We may need to check for a pending TLB flush.
     *
     * We do this on ptesync (l == 2) on ppc64 and any sync on ppc32.
     *
     * Additionally, this can only happen in kernel mode however so
     * check MSR_PR as well.
3539 */ 3540 if (((l == 2) || !(ctx->insns_flags & PPC_64B)) && !ctx->pr) { 3541 gen_check_tlb_flush(ctx, true); 3542 } 3543 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC); 3544 } 3545 3546 /* wait */ 3547 static void gen_wait(DisasContext *ctx) 3548 { 3549 TCGv_i32 t0 = tcg_const_i32(1); 3550 tcg_gen_st_i32(t0, cpu_env, 3551 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted)); 3552 tcg_temp_free_i32(t0); 3553 /* Stop translation, as the CPU is supposed to sleep from now */ 3554 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 3555 } 3556 3557 #if defined(TARGET_PPC64) 3558 static void gen_doze(DisasContext *ctx) 3559 { 3560 #if defined(CONFIG_USER_ONLY) 3561 GEN_PRIV; 3562 #else 3563 TCGv_i32 t; 3564 3565 CHK_HV; 3566 t = tcg_const_i32(PPC_PM_DOZE); 3567 gen_helper_pminsn(cpu_env, t); 3568 tcg_temp_free_i32(t); 3569 /* Stop translation, as the CPU is supposed to sleep from now */ 3570 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 3571 #endif /* defined(CONFIG_USER_ONLY) */ 3572 } 3573 3574 static void gen_nap(DisasContext *ctx) 3575 { 3576 #if defined(CONFIG_USER_ONLY) 3577 GEN_PRIV; 3578 #else 3579 TCGv_i32 t; 3580 3581 CHK_HV; 3582 t = tcg_const_i32(PPC_PM_NAP); 3583 gen_helper_pminsn(cpu_env, t); 3584 tcg_temp_free_i32(t); 3585 /* Stop translation, as the CPU is supposed to sleep from now */ 3586 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 3587 #endif /* defined(CONFIG_USER_ONLY) */ 3588 } 3589 3590 static void gen_stop(DisasContext *ctx) 3591 { 3592 #if defined(CONFIG_USER_ONLY) 3593 GEN_PRIV; 3594 #else 3595 TCGv_i32 t; 3596 3597 CHK_HV; 3598 t = tcg_const_i32(PPC_PM_STOP); 3599 gen_helper_pminsn(cpu_env, t); 3600 tcg_temp_free_i32(t); 3601 /* Stop translation, as the CPU is supposed to sleep from now */ 3602 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 3603 #endif /* defined(CONFIG_USER_ONLY) */ 3604 } 3605 3606 static void gen_sleep(DisasContext *ctx) 3607 { 3608 #if defined(CONFIG_USER_ONLY) 3609 GEN_PRIV; 3610 #else 3611 TCGv_i32 t; 
3612 3613 CHK_HV; 3614 t = tcg_const_i32(PPC_PM_SLEEP); 3615 gen_helper_pminsn(cpu_env, t); 3616 tcg_temp_free_i32(t); 3617 /* Stop translation, as the CPU is supposed to sleep from now */ 3618 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 3619 #endif /* defined(CONFIG_USER_ONLY) */ 3620 } 3621 3622 static void gen_rvwinkle(DisasContext *ctx) 3623 { 3624 #if defined(CONFIG_USER_ONLY) 3625 GEN_PRIV; 3626 #else 3627 TCGv_i32 t; 3628 3629 CHK_HV; 3630 t = tcg_const_i32(PPC_PM_RVWINKLE); 3631 gen_helper_pminsn(cpu_env, t); 3632 tcg_temp_free_i32(t); 3633 /* Stop translation, as the CPU is supposed to sleep from now */ 3634 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 3635 #endif /* defined(CONFIG_USER_ONLY) */ 3636 } 3637 #endif /* #if defined(TARGET_PPC64) */ 3638 3639 static inline void gen_update_cfar(DisasContext *ctx, target_ulong nip) 3640 { 3641 #if defined(TARGET_PPC64) 3642 if (ctx->has_cfar) 3643 tcg_gen_movi_tl(cpu_cfar, nip); 3644 #endif 3645 } 3646 3647 static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest) 3648 { 3649 if (unlikely(ctx->singlestep_enabled)) { 3650 return false; 3651 } 3652 3653 #ifndef CONFIG_USER_ONLY 3654 return (ctx->base.tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK); 3655 #else 3656 return true; 3657 #endif 3658 } 3659 3660 static void gen_lookup_and_goto_ptr(DisasContext *ctx) 3661 { 3662 int sse = ctx->singlestep_enabled; 3663 if (unlikely(sse)) { 3664 if (sse & GDBSTUB_SINGLE_STEP) { 3665 gen_debug_exception(ctx); 3666 } else if (sse & (CPU_SINGLE_STEP | CPU_BRANCH_STEP)) { 3667 uint32_t excp = gen_prep_dbgex(ctx); 3668 gen_exception(ctx, excp); 3669 } 3670 tcg_gen_exit_tb(NULL, 0); 3671 } else { 3672 tcg_gen_lookup_and_goto_ptr(); 3673 } 3674 } 3675 3676 /*** Branch ***/ 3677 static void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest) 3678 { 3679 if (NARROW_MODE(ctx)) { 3680 dest = (uint32_t) dest; 3681 } 3682 if (use_goto_tb(ctx, dest)) { 3683 tcg_gen_goto_tb(n); 3684 
        tcg_gen_movi_tl(cpu_nip, dest & ~3);
        tcg_gen_exit_tb(ctx->base.tb, n);
    } else {
        tcg_gen_movi_tl(cpu_nip, dest & ~3);
        gen_lookup_and_goto_ptr(ctx);
    }
}

/* Set LR to the (narrowed, if 32-bit mode) return address. */
static inline void gen_setlr(DisasContext *ctx, target_ulong nip)
{
    if (NARROW_MODE(ctx)) {
        nip = (uint32_t)nip;
    }
    tcg_gen_movi_tl(cpu_lr, nip);
}

/* b ba bl bla */
static void gen_b(DisasContext *ctx)
{
    target_ulong li, target;

    ctx->exception = POWERPC_EXCP_BRANCH;
    /* sign extend LI */
    li = LI(ctx->opcode);
    li = (li ^ 0x02000000) - 0x02000000;
    if (likely(AA(ctx->opcode) == 0)) {
        /* pc_next already points past the insn, hence the -4. */
        target = ctx->base.pc_next + li - 4;
    } else {
        target = li;
    }
    if (LK(ctx->opcode)) {
        gen_setlr(ctx, ctx->base.pc_next);
    }
    gen_update_cfar(ctx, ctx->base.pc_next - 4);
    gen_goto_tb(ctx, 0, target);
}

#define BCOND_IM 0
#define BCOND_LR 1
#define BCOND_CTR 2
#define BCOND_TAR 3

/* Common translation for bc/bclr/bcctr/bctar; type selects the target. */
static void gen_bcond(DisasContext *ctx, int type)
{
    uint32_t bo = BO(ctx->opcode);
    TCGLabel *l1;
    TCGv target;
    ctx->exception = POWERPC_EXCP_BRANCH;

    if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) {
        target = tcg_temp_local_new();
        if (type == BCOND_CTR)
            tcg_gen_mov_tl(target, cpu_ctr);
        else if (type == BCOND_TAR)
            gen_load_spr(target, SPR_TAR);
        else
            tcg_gen_mov_tl(target, cpu_lr);
    } else {
        target = NULL;
    }
    if (LK(ctx->opcode))
        gen_setlr(ctx, ctx->base.pc_next);
    l1 = gen_new_label();
    if ((bo & 0x4) == 0) {
        /* Decrement and test CTR */
        TCGv temp = tcg_temp_new();

        if (type == BCOND_CTR) {
            /*
             * All ISAs up to v3 describe this form of bcctr as invalid but
             * some processors, ie. 64-bit server processors compliant with
             * arch 2.x, do implement a "test and decrement" logic instead,
             * as described in their respective UMs. This logic involves CTR
             * to act as both the branch target and a counter, which makes
             * it basically useless and thus never used in real code.
             *
             * This form was hence chosen to trigger extra micro-architectural
             * side-effect on real HW needed for the Spectre v2 workaround.
             * It is up to guests that implement such workaround, ie. linux, to
             * use this form in a way it just triggers the side-effect without
             * doing anything else harmful.
             */
            if (unlikely(!is_book3s_arch2x(ctx))) {
                gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
                tcg_temp_free(temp);
                tcg_temp_free(target);
                return;
            }

            /* Test first, then decrement (see comment above). */
            if (NARROW_MODE(ctx)) {
                tcg_gen_ext32u_tl(temp, cpu_ctr);
            } else {
                tcg_gen_mov_tl(temp, cpu_ctr);
            }
            if (bo & 0x2) {
                tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
            } else {
                tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
            }
            tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
        } else {
            /* Normal form: decrement CTR first, then test. */
            tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
            if (NARROW_MODE(ctx)) {
                tcg_gen_ext32u_tl(temp, cpu_ctr);
            } else {
                tcg_gen_mov_tl(temp, cpu_ctr);
            }
            if (bo & 0x2) {
                tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
            } else {
                tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
            }
        }
        tcg_temp_free(temp);
    }
    if ((bo & 0x10) == 0) {
        /* Test CR */
        uint32_t bi = BI(ctx->opcode);
        uint32_t mask = 0x08 >> (bi & 0x03);
        TCGv_i32 temp = tcg_temp_new_i32();

        if (bo & 0x8) {
            tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
            tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1);
        } else {
            tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
            tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1);
        }
        tcg_temp_free_i32(temp);
    }
    gen_update_cfar(ctx, ctx->base.pc_next - 4);
    if (type == BCOND_IM) {
        target_ulong li = (target_long)((int16_t)(BD(ctx->opcode)));
        if (likely(AA(ctx->opcode) == 0)) {
            gen_goto_tb(ctx, 0, ctx->base.pc_next + li - 4);
        } else {
            gen_goto_tb(ctx, 0, li);
        }
    } else {
        if (NARROW_MODE(ctx)) {
            tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3);
        } else {
            tcg_gen_andi_tl(cpu_nip, target, ~3);
        }
        gen_lookup_and_goto_ptr(ctx);
        tcg_temp_free(target);
    }
    if ((bo & 0x14) != 0x14) {
        /* fallthrough case */
        gen_set_label(l1);
        gen_goto_tb(ctx, 1, ctx->base.pc_next);
    }
}

static void gen_bc(DisasContext *ctx)
{
    gen_bcond(ctx, BCOND_IM);
}

static void gen_bcctr(DisasContext *ctx)
{
    gen_bcond(ctx, BCOND_CTR);
}

static void gen_bclr(DisasContext *ctx)
{
    gen_bcond(ctx, BCOND_LR);
}

static void gen_bctar(DisasContext *ctx)
{
    gen_bcond(ctx, BCOND_TAR);
}

/*** Condition register logical ***/
/* Extract crbA and crbB, align them to crbD's bit position, apply tcg_op,
 * then merge the single result bit back into the destination CR field. */
#define GEN_CRLOGIC(name, tcg_op, opc)                                        \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    uint8_t bitmask;                                                          \
    int sh;                                                                   \
    TCGv_i32 t0, t1;                                                          \
    sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03);             \
    t0 = tcg_temp_new_i32();                                                  \
    if (sh > 0)                                                               \
        tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh);            \
    else if (sh < 0)                                                          \
        tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh);           \
    else                                                                      \
        tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]);                 \
    t1 = tcg_temp_new_i32();                                                  \
    sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03);             \
    if (sh > 0)                                                               \
        tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh);            \
    else if (sh < 0)                                                          \
        tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh);           \
    else                                                                      \
        tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]);                 \
    tcg_op(t0, t0, t1);                                                       \
    bitmask = 0x08 >> (crbD(ctx->opcode) & 0x03);                             \
    tcg_gen_andi_i32(t0, t0, bitmask);                                        \
    tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask);          \
    tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1);                  \
    tcg_temp_free_i32(t0);                                                    \
    tcg_temp_free_i32(t1);                                                    \
}

/* crand */
GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08);
/* crandc */
GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04);
/* creqv */
GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09);
/* crnand */
GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07);
/* crnor */
GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01);
/* cror */
GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E);
/* crorc */
GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D);
/* crxor */
GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06);

/* mcrf */
static void gen_mcrf(DisasContext *ctx)
{
    tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]);
}

/*** System linkage ***/

/* rfi (supervisor only) */
static void gen_rfi(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    /* This instruction doesn't exist anymore on 64-bit server
     * processors compliant with arch 2.x
     */
    if (is_book3s_arch2x(ctx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }
    /* Restore CPU state */
    CHK_SV;
    if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
        gen_io_start();
    }
    gen_update_cfar(ctx, ctx->base.pc_next - 4);
    gen_helper_rfi(cpu_env);
    gen_sync_exception(ctx);
    if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
        gen_io_end();
    }
#endif
}

#if defined(TARGET_PPC64)
static void gen_rfid(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    /* Restore CPU state */
    CHK_SV;
    if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
        gen_io_start();
    }
    gen_update_cfar(ctx, ctx->base.pc_next - 4);
    gen_helper_rfid(cpu_env);
    gen_sync_exception(ctx);
    if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
        gen_io_end();
    }
#endif
}

static void gen_hrfid(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    /* Restore CPU state */
    CHK_HV;
    gen_helper_hrfid(cpu_env);
    gen_sync_exception(ctx);
#endif
}
#endif

/* sc */
#if defined(CONFIG_USER_ONLY)
#define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
#else
#define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
#endif
static void gen_sc(DisasContext *ctx)
{
    uint32_t lev;

    lev = (ctx->opcode >> 5) & 0x7F;
    gen_exception_err(ctx, POWERPC_SYSCALL, lev);
}

/*** Trap ***/

/* Check for unconditional traps (always or never) */
static bool check_unconditional_trap(DisasContext *ctx)
{
    /* Trap never */
    if (TO(ctx->opcode) == 0) {
        return true;
    }
    /* Trap always */
    if (TO(ctx->opcode) == 31) {
        gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_TRAP);
        return true;
    }
    return false;
}

/* tw */
static void gen_tw(DisasContext *ctx)
{
    TCGv_i32 t0;

    if (check_unconditional_trap(ctx)) {
        return;
    }
    t0 = tcg_const_i32(TO(ctx->opcode));
    gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                  t0);
    tcg_temp_free_i32(t0);
}

/* twi */
static void gen_twi(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1;

    if (check_unconditional_trap(ctx)) {
        return;
    }
    t0 = tcg_const_tl(SIMM(ctx->opcode));
    t1 = tcg_const_i32(TO(ctx->opcode));
    gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}

#if defined(TARGET_PPC64)
/* td */
static void gen_td(DisasContext *ctx)
{
    TCGv_i32 t0;

    if (check_unconditional_trap(ctx)) {
        return;
    }
    t0 = tcg_const_i32(TO(ctx->opcode));
    gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                  t0);
    tcg_temp_free_i32(t0);
}

/* tdi */
static void gen_tdi(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1;

    if (check_unconditional_trap(ctx)) {
        return;
    }
    t0 = tcg_const_tl(SIMM(ctx->opcode));
    t1 = tcg_const_i32(TO(ctx->opcode));
    gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}
#endif

/*** Processor control ***/

/* Reassemble the architectural XER value from its split-out flag
 * globals (SO/OV/CA, plus OV32/CA32 on ISA 3.00). */
static void gen_read_xer(DisasContext *ctx, TCGv dst)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    tcg_gen_mov_tl(dst, cpu_xer);
    tcg_gen_shli_tl(t0, cpu_so, XER_SO);
    tcg_gen_shli_tl(t1, cpu_ov, XER_OV);
    tcg_gen_shli_tl(t2, cpu_ca, XER_CA);
    tcg_gen_or_tl(t0, t0, t1);
    tcg_gen_or_tl(dst, dst, t2);
    tcg_gen_or_tl(dst, dst, t0);
    if (is_isa300(ctx)) {
        tcg_gen_shli_tl(t0, cpu_ov32, XER_OV32);
        tcg_gen_or_tl(dst, dst, t0);
        tcg_gen_shli_tl(t0, cpu_ca32, XER_CA32);
        tcg_gen_or_tl(dst, dst, t0);
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}

/* Scatter a full XER value back into cpu_xer and the flag globals. */
static void gen_write_xer(TCGv src)
{
    /* Write all flags, while reading back check for isa300 */
    tcg_gen_andi_tl(cpu_xer, src,
                    ~((1u << XER_SO) |
                      (1u << XER_OV) | (1u << XER_OV32) |
                      (1u << XER_CA) | (1u << XER_CA32)));
    tcg_gen_extract_tl(cpu_ov32, src, XER_OV32, 1);
    tcg_gen_extract_tl(cpu_ca32, src, XER_CA32, 1);
    tcg_gen_extract_tl(cpu_so, src, XER_SO, 1);
    tcg_gen_extract_tl(cpu_ov, src, XER_OV, 1);
    tcg_gen_extract_tl(cpu_ca, src, XER_CA, 1);
}

/* mcrxr: copy XER[SO,OV,CA] into a CR field and clear them in XER */
static void gen_mcrxr(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();
    TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];

    tcg_gen_trunc_tl_i32(t0, cpu_so);
    tcg_gen_trunc_tl_i32(t1, cpu_ov);
    tcg_gen_trunc_tl_i32(dst, cpu_ca);
    tcg_gen_shli_i32(t0, t0, 3);
    tcg_gen_shli_i32(t1, t1, 2);
    tcg_gen_shli_i32(dst, dst, 1);
    tcg_gen_or_i32(dst, dst, t0);
    tcg_gen_or_i32(dst, dst, t1);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);

    tcg_gen_movi_tl(cpu_so, 0);
    tcg_gen_movi_tl(cpu_ov, 0);
    tcg_gen_movi_tl(cpu_ca, 0);
}

#ifdef TARGET_PPC64
/* mcrxrx */
static void gen_mcrxrx(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];

    /* copy OV and OV32 */
    tcg_gen_shli_tl(t0, cpu_ov, 1);
    tcg_gen_or_tl(t0, t0, cpu_ov32);
    tcg_gen_shli_tl(t0, t0, 2);
    /* copy CA and CA32 */
    tcg_gen_shli_tl(t1, cpu_ca, 1);
    tcg_gen_or_tl(t1, t1, cpu_ca32);
    tcg_gen_or_tl(t0, t0, t1);
    tcg_gen_trunc_tl_i32(dst, t0);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
#endif

/* mfcr mfocrf */
static void gen_mfcr(DisasContext *ctx)
{
    uint32_t crm, crn;

    if (likely(ctx->opcode & 0x00100000)) {
        /* mfocrf: single CR field selected by a one-hot CRM mask */
        crm = CRM(ctx->opcode);
        if (likely(crm && ((crm & (crm - 1)) == 0))) {
            crn = ctz32 (crm);
            tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]);
            tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)],
                            cpu_gpr[rD(ctx->opcode)], crn * 4);
        }
    } else {
        /* mfcr: concatenate all eight 4-bit CR fields */
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_mov_i32(t0, cpu_crf[0]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[1]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[2]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[3]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[4]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[5]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[6]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[7]);
        tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
        tcg_temp_free_i32(t0);
    }
}

/* mfmsr */
static void gen_mfmsr(DisasContext *ctx)
{
    CHK_SV;
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr);
}

/* Placeholder SPR callback marking registers with no access at the
 * current privilege level; compared against by address below. */
static void spr_noaccess(DisasContext *ctx, int gprn, int sprn)
{
#if 0
    sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
    printf("ERROR: try to access SPR %d !\n", sprn);
#endif
}
#define SPR_NOACCESS (&spr_noaccess)

/* mfspr */
static inline void gen_op_mfspr(DisasContext *ctx)
{
    void (*read_cb)(DisasContext *ctx, int gprn, int sprn);
    uint32_t sprn = SPR(ctx->opcode);

    /* Pick the read callback for the current privilege level. */
#if defined(CONFIG_USER_ONLY)
    read_cb = ctx->spr_cb[sprn].uea_read;
#else
    if (ctx->pr) {
        read_cb = ctx->spr_cb[sprn].uea_read;
    } else if (ctx->hv) {
        read_cb = ctx->spr_cb[sprn].hea_read;
    } else {
        read_cb = ctx->spr_cb[sprn].oea_read;
    }
#endif
    if (likely(read_cb != NULL)) {
        if (likely(read_cb != SPR_NOACCESS)) {
            (*read_cb)(ctx, rD(ctx->opcode), sprn);
        } else {
            /* Privilege exception */
            /* This is a hack to avoid warnings when running Linux:
             * this OS breaks the PowerPC virtualisation model,
             * allowing userland application to read the PVR
             */
            if (sprn != SPR_PVR) {
                qemu_log_mask(LOG_GUEST_ERROR, "Trying to read privileged spr "
                              "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
                              ctx->base.pc_next - 4);
            }
            gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
        }
    } else {
        /* ISA 2.07 defines these as no-ops */
        if ((ctx->insns_flags2 & PPC2_ISA207S) &&
            (sprn >= 808 && sprn <= 811)) {
            /* This is a nop */
            return;
        }
        /* Not defined */
        qemu_log_mask(LOG_GUEST_ERROR,
                      "Trying to read invalid spr %d (0x%03x) at "
                      TARGET_FMT_lx "\n", sprn, sprn, ctx->base.pc_next - 4);

        /* The behaviour depends on MSR:PR and SPR# bit 0x10,
         * it can generate a priv, a hv emu or a no-op
         */
        if (sprn & 0x10) {
            if (ctx->pr) {
                gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
            }
        } else {
            if (ctx->pr || sprn == 0 || sprn == 4 || sprn == 5 || sprn == 6) {
                gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
            }
        }
    }
}

static void gen_mfspr(DisasContext *ctx)
{
    gen_op_mfspr(ctx);
}

/* mftb */
static void gen_mftb(DisasContext *ctx)
{
    gen_op_mfspr(ctx);
}

/* mtcrf mtocrf*/
static void gen_mtcrf(DisasContext *ctx)
{
    uint32_t crm, crn;

    crm = CRM(ctx->opcode);
    if (likely((ctx->opcode & 0x00100000))) {
        /* mtocrf: single CR field selected by a one-hot CRM mask */
        if (crm && ((crm & (crm - 1)) == 0)) {
            TCGv_i32 temp = tcg_temp_new_i32();
            crn = ctz32 (crm);
            tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
            tcg_gen_shri_i32(temp, temp, crn * 4);
            tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf);
            tcg_temp_free_i32(temp);
        }
    } else {
        /* mtcrf: update every CR field whose CRM bit is set */
        TCGv_i32 temp = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
        for (crn = 0 ; crn < 8 ; crn++) {
            if (crm & (1 << crn)) {
                tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4);
                tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf);
            }
        }
        tcg_temp_free_i32(temp);
    }
}

/* mtmsr */
#if defined(TARGET_PPC64)
static void gen_mtmsrd(DisasContext *ctx)
{
    CHK_SV;

#if !defined(CONFIG_USER_ONLY)
    if (ctx->opcode & 0x00010000) {
        /* Special form that does not need any synchronisation */
        TCGv t0 = tcg_temp_new();
        tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE));
        /* Update only MSR[RI] and MSR[EE] from rS. */
        tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE)));
        tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
        tcg_temp_free(t0);
    } else {
        /* XXX: we need to update nip before the store
         * if we enter power saving mode, we will exit the loop
         * directly from ppc_store_msr
         */
        if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
            gen_io_start();
        }
        gen_update_nip(ctx, ctx->base.pc_next);
        gen_helper_store_msr(cpu_env, cpu_gpr[rS(ctx->opcode)]);
        /* Must stop the translation as machine state (may have) changed */
        /* Note that mtmsr is not always defined as context-synchronizing */
        gen_stop_exception(ctx);
        if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
            gen_io_end();
        }
    }
#endif /* !defined(CONFIG_USER_ONLY) */
}
#endif /* defined(TARGET_PPC64) */

static void gen_mtmsr(DisasContext *ctx)
{
    CHK_SV;

#if !defined(CONFIG_USER_ONLY)
    if (ctx->opcode & 0x00010000) {
        /* Special form that does not need any synchronisation */
        TCGv t0 = tcg_temp_new();
        tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE));
        tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE)));
        tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
        tcg_temp_free(t0);
    } else {
        TCGv msr = tcg_temp_new();

        /* XXX: we need to update nip before the store
         * if we enter power saving mode, we will exit the loop
         * directly from ppc_store_msr
         */
        if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
            gen_io_start();
        }
        gen_update_nip(ctx, ctx->base.pc_next);
        /* mtmsr only writes the low 32 bits on a 64-bit CPU. */
#if defined(TARGET_PPC64)
        tcg_gen_deposit_tl(msr, cpu_msr, cpu_gpr[rS(ctx->opcode)], 0, 32);
#else
        tcg_gen_mov_tl(msr, cpu_gpr[rS(ctx->opcode)]);
#endif
        gen_helper_store_msr(cpu_env, msr);
        if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
            gen_io_end();
        }
        tcg_temp_free(msr);
        /* Must stop the translation as machine state (may have) changed */
        /* Note that mtmsr is not always defined as context-synchronizing */
        gen_stop_exception(ctx);
    }
#endif
}

/* mtspr */
static void gen_mtspr(DisasContext *ctx)
{
    void (*write_cb)(DisasContext *ctx, int sprn, int gprn);
    uint32_t sprn = SPR(ctx->opcode);

    /* Pick the write callback for the current privilege level. */
#if defined(CONFIG_USER_ONLY)
    write_cb = ctx->spr_cb[sprn].uea_write;
#else
    if (ctx->pr) {
        write_cb = ctx->spr_cb[sprn].uea_write;
    } else if (ctx->hv) {
        write_cb = ctx->spr_cb[sprn].hea_write;
    } else {
        write_cb = ctx->spr_cb[sprn].oea_write;
    }
#endif
    if (likely(write_cb != NULL)) {
        if (likely(write_cb != SPR_NOACCESS)) {
            (*write_cb)(ctx, sprn, rS(ctx->opcode));
        } else {
            /* Privilege exception */
            qemu_log_mask(LOG_GUEST_ERROR, "Trying to write privileged spr "
                          "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
                          ctx->base.pc_next - 4);
            gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
        }
    } else {
        /* ISA 2.07 defines these as no-ops */
        if ((ctx->insns_flags2 & PPC2_ISA207S) &&
            (sprn >= 808 && sprn <= 811)) {
            /* This is a nop */
            return;
        }

        /* Not defined */
        qemu_log_mask(LOG_GUEST_ERROR,
                      "Trying to write invalid spr %d (0x%03x) at "
                      TARGET_FMT_lx "\n", sprn, sprn, ctx->base.pc_next - 4);


        /* The behaviour depends on MSR:PR and SPR# bit 0x10,
         * it can generate a priv, a hv emu or a no-op
         */
        if (sprn & 0x10) {
            if (ctx->pr) {
                gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
            }
        } else {
            if (ctx->pr || sprn == 0) {
                gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
            }
        }
    }
}

#if defined(TARGET_PPC64)
/* setb: rD = -1, 0 or 1 depending on CR field crfS (LT/none/GT-or-EQ-set;
 * derived here from the field value ranges: >=8 -> -1, >=4 -> 1, else 0) */
static void gen_setb(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t8 = tcg_temp_new_i32();
    TCGv_i32 tm1 = tcg_temp_new_i32();
    int crf = crfS(ctx->opcode);

    tcg_gen_setcondi_i32(TCG_COND_GEU, t0, cpu_crf[crf], 4);
    tcg_gen_movi_i32(t8, 8);
    tcg_gen_movi_i32(tm1, -1);
    tcg_gen_movcond_i32(TCG_COND_GEU, t0, cpu_crf[crf], t8, tm1, t0);
    tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);

    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t8);
    tcg_temp_free_i32(tm1);
}
#endif

/*** Cache management ***/

/* dcbf */
static void gen_dcbf(DisasContext *ctx)
{
    /* XXX: specification says this is treated as a load by the MMU */
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_CACHE);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_qemu_ld8u(ctx, t0, t0);
    tcg_temp_free(t0);
}

/* dcbfep (external PID dcbf) */
static void gen_dcbfep(DisasContext *ctx)
{
    /* XXX: specification says this is treated as a load by the MMU */
    TCGv t0;
    CHK_SV;
    gen_set_access_type(ctx, ACCESS_CACHE);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB));
    tcg_temp_free(t0);
}

/* dcbi (Supervisor only) */
static void gen_dcbi(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv EA, val;

    CHK_SV;
    EA = tcg_temp_new();
    gen_set_access_type(ctx, ACCESS_CACHE);
    gen_addr_reg_index(ctx, EA);
    val = tcg_temp_new();
    /* XXX: specification says this should be treated as a store by the MMU */
    gen_qemu_ld8u(ctx, val, EA);
    gen_qemu_st8(ctx, val, EA);
    tcg_temp_free(val);
    tcg_temp_free(EA);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* dcbst */
static void gen_dcbst(DisasContext *ctx)
{
    /* XXX: specification say this is treated as a load by the MMU */
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_CACHE);
    t0 = tcg_temp_new();
gen_addr_reg_index(ctx, t0); 4510 gen_qemu_ld8u(ctx, t0, t0); 4511 tcg_temp_free(t0); 4512 } 4513 4514 /* dcbstep (dcbstep External PID version) */ 4515 static void gen_dcbstep(DisasContext *ctx) 4516 { 4517 /* XXX: specification say this is treated as a load by the MMU */ 4518 TCGv t0; 4519 gen_set_access_type(ctx, ACCESS_CACHE); 4520 t0 = tcg_temp_new(); 4521 gen_addr_reg_index(ctx, t0); 4522 tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB)); 4523 tcg_temp_free(t0); 4524 } 4525 4526 /* dcbt */ 4527 static void gen_dcbt(DisasContext *ctx) 4528 { 4529 /* interpreted as no-op */ 4530 /* XXX: specification say this is treated as a load by the MMU 4531 * but does not generate any exception 4532 */ 4533 } 4534 4535 /* dcbtep */ 4536 static void gen_dcbtep(DisasContext *ctx) 4537 { 4538 /* interpreted as no-op */ 4539 /* XXX: specification say this is treated as a load by the MMU 4540 * but does not generate any exception 4541 */ 4542 } 4543 4544 /* dcbtst */ 4545 static void gen_dcbtst(DisasContext *ctx) 4546 { 4547 /* interpreted as no-op */ 4548 /* XXX: specification say this is treated as a load by the MMU 4549 * but does not generate any exception 4550 */ 4551 } 4552 4553 /* dcbtstep */ 4554 static void gen_dcbtstep(DisasContext *ctx) 4555 { 4556 /* interpreted as no-op */ 4557 /* XXX: specification say this is treated as a load by the MMU 4558 * but does not generate any exception 4559 */ 4560 } 4561 4562 /* dcbtls */ 4563 static void gen_dcbtls(DisasContext *ctx) 4564 { 4565 /* Always fails locking the cache */ 4566 TCGv t0 = tcg_temp_new(); 4567 gen_load_spr(t0, SPR_Exxx_L1CSR0); 4568 tcg_gen_ori_tl(t0, t0, L1CSR0_CUL); 4569 gen_store_spr(SPR_Exxx_L1CSR0, t0); 4570 tcg_temp_free(t0); 4571 } 4572 4573 /* dcbz */ 4574 static void gen_dcbz(DisasContext *ctx) 4575 { 4576 TCGv tcgv_addr; 4577 TCGv_i32 tcgv_op; 4578 4579 gen_set_access_type(ctx, ACCESS_CACHE); 4580 tcgv_addr = tcg_temp_new(); 4581 tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000); 4582 
gen_addr_reg_index(ctx, tcgv_addr); 4583 gen_helper_dcbz(cpu_env, tcgv_addr, tcgv_op); 4584 tcg_temp_free(tcgv_addr); 4585 tcg_temp_free_i32(tcgv_op); 4586 } 4587 4588 /* dcbzep */ 4589 static void gen_dcbzep(DisasContext *ctx) 4590 { 4591 TCGv tcgv_addr; 4592 TCGv_i32 tcgv_op; 4593 4594 gen_set_access_type(ctx, ACCESS_CACHE); 4595 tcgv_addr = tcg_temp_new(); 4596 tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000); 4597 gen_addr_reg_index(ctx, tcgv_addr); 4598 gen_helper_dcbzep(cpu_env, tcgv_addr, tcgv_op); 4599 tcg_temp_free(tcgv_addr); 4600 tcg_temp_free_i32(tcgv_op); 4601 } 4602 4603 /* dst / dstt */ 4604 static void gen_dst(DisasContext *ctx) 4605 { 4606 if (rA(ctx->opcode) == 0) { 4607 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 4608 } else { 4609 /* interpreted as no-op */ 4610 } 4611 } 4612 4613 /* dstst /dststt */ 4614 static void gen_dstst(DisasContext *ctx) 4615 { 4616 if (rA(ctx->opcode) == 0) { 4617 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 4618 } else { 4619 /* interpreted as no-op */ 4620 } 4621 4622 } 4623 4624 /* dss / dssall */ 4625 static void gen_dss(DisasContext *ctx) 4626 { 4627 /* interpreted as no-op */ 4628 } 4629 4630 /* icbi */ 4631 static void gen_icbi(DisasContext *ctx) 4632 { 4633 TCGv t0; 4634 gen_set_access_type(ctx, ACCESS_CACHE); 4635 t0 = tcg_temp_new(); 4636 gen_addr_reg_index(ctx, t0); 4637 gen_helper_icbi(cpu_env, t0); 4638 tcg_temp_free(t0); 4639 } 4640 4641 /* icbiep */ 4642 static void gen_icbiep(DisasContext *ctx) 4643 { 4644 TCGv t0; 4645 gen_set_access_type(ctx, ACCESS_CACHE); 4646 t0 = tcg_temp_new(); 4647 gen_addr_reg_index(ctx, t0); 4648 gen_helper_icbiep(cpu_env, t0); 4649 tcg_temp_free(t0); 4650 } 4651 4652 /* Optional: */ 4653 /* dcba */ 4654 static void gen_dcba(DisasContext *ctx) 4655 { 4656 /* interpreted as no-op */ 4657 /* XXX: specification say this is treated as a store by the MMU 4658 * but does not generate any exception 4659 */ 4660 } 4661 4662 /*** Segment register manipulation ***/ 4663 /* 
Supervisor only: */

/* mfsr: move from segment register (immediate SR number) */
static void gen_mfsr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_const_tl(SR(ctx->opcode));
    gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mfsrin: move from segment register, SR number taken from rB[28:31] */
static void gen_mfsrin(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
    gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mtsr: move to segment register (immediate SR number) */
static void gen_mtsr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_const_tl(SR(ctx->opcode));
    gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mtsrin: move to segment register, SR number taken from rB[28:31].
 * NOTE(review): the source register is read via rD() here while mtsr uses
 * rS(); the two field extractions cover the same opcode bits, so behaviour
 * is the same — presumably intentional, but verify against the ISA.
 */
static void gen_mtsrin(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;
    CHK_SV;

    t0 = tcg_temp_new();
    tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
    gen_helper_store_sr(cpu_env, t0, cpu_gpr[rD(ctx->opcode)]);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

#if defined(TARGET_PPC64)
/* Specific implementation for PowerPC 64 "bridge" emulation using SLB */

/* mfsr */
static void gen_mfsr_64b(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_const_tl(SR(ctx->opcode));
    gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mfsrin */
static void gen_mfsrin_64b(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
    gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mtsr */
static void gen_mtsr_64b(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_const_tl(SR(ctx->opcode));
    gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mtsrin */
static void gen_mtsrin_64b(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
    gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* slbmte: SLB move to entry (rB = ESID selector, rS = VSID data) */
static void gen_slbmte(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    gen_helper_store_slb(cpu_env, cpu_gpr[rB(ctx->opcode)],
                         cpu_gpr[rS(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* slbmfee: SLB move from entry ESID */
static void gen_slbmfee(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    gen_helper_load_slb_esid(cpu_gpr[rS(ctx->opcode)], cpu_env,
                             cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* slbmfev: SLB move from entry VSID */
static void gen_slbmfev(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    gen_helper_load_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env,
                             cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* slbfee.: SLB find entry ESID; sets CR0.EQ when an entry was found
 * (the helper returns -1 on failure).
 */
static void gen_slbfee_(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    TCGLabel *l1, *l2;

    if (unlikely(ctx->pr)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    gen_helper_find_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env,
                             cpu_gpr[rB(ctx->opcode)]);
    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rS(ctx->opcode)], -1, l1);
    tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ);
    tcg_gen_br(l2);
    gen_set_label(l1);
    /* not found: rS is set to 0 and CR0.EQ stays clear */
    tcg_gen_movi_tl(cpu_gpr[rS(ctx->opcode)], 0);
    gen_set_label(l2);
#endif
}
#endif /* defined(TARGET_PPC64) */

/*** Lookaside buffer management ***/
/* Optional & supervisor only: */

/* tlbia: TLB invalidate all */
static void gen_tlbia(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_HV;

    gen_helper_tlbia(cpu_env);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbiel: TLB invalidate entry, local */
static void gen_tlbiel(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbie: TLB invalidate entry (broadcast); flags a global flush as pending */
static void gen_tlbie(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv_i32 t1;

    if (ctx->gtse) {
        CHK_SV; /* If gtse is set then tlbie is supervisor privileged */
    } else {
        CHK_HV; /* Else hypervisor privileged */
    }

    if (NARROW_MODE(ctx)) {
        TCGv t0 = tcg_temp_new();
        tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
        gen_helper_tlbie(cpu_env, t0);
        tcg_temp_free(t0);
    } else {
        gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
    }
    t1 = tcg_temp_new_i32();
    tcg_gen_ld_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
    tcg_gen_ori_i32(t1, t1, TLB_NEED_GLOBAL_FLUSH);
    tcg_gen_st_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
    tcg_temp_free_i32(t1);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbsync: synchronize TLB invalidations */
static void gen_tlbsync(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else

    if (ctx->gtse) {
        CHK_SV; /* If gtse is set then tlbsync is supervisor privileged */
    } else {
        CHK_HV; /* Else hypervisor privileged */
    }

    /* BookS does both ptesync and tlbsync make tlbsync a nop for server */
    if (ctx->insns_flags & PPC_BOOKE) {
        gen_check_tlb_flush(ctx, true);
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

#if defined(TARGET_PPC64)
/* slbia: SLB invalidate all */
static void gen_slbia(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    gen_helper_slbia(cpu_env);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* slbie: SLB invalidate entry */
static void gen_slbie(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    gen_helper_slbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* slbieg: SLB invalidate entry, global */
static void gen_slbieg(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    gen_helper_slbieg(cpu_env, cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* slbsync: synchronize SLB invalidations */
static void gen_slbsync(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_check_tlb_flush(ctx, true);
#endif /* defined(CONFIG_USER_ONLY) */
}

#endif /* defined(TARGET_PPC64) */

/*** External control ***/
/* Optional: */

/* eciwx: external control in word indexed (aligned 32-bit load) */
static void gen_eciwx(DisasContext *ctx)
{
    TCGv t0;
    /* Should check EAR[E] ! */
    gen_set_access_type(ctx, ACCESS_EXT);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
                       DEF_MEMOP(MO_UL | MO_ALIGN));
    tcg_temp_free(t0);
}

/* ecowx: external control out word indexed (aligned 32-bit store) */
static void gen_ecowx(DisasContext *ctx)
{
    TCGv t0;
    /* Should check EAR[E] ! */
    gen_set_access_type(ctx, ACCESS_EXT);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    tcg_gen_qemu_st_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
                       DEF_MEMOP(MO_UL | MO_ALIGN));
    tcg_temp_free(t0);
}

/* PowerPC 601 specific instructions */

/* abs - abs.: rD = |rA| */
static void gen_abs(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l1);
    tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    gen_set_label(l2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* abso - abso.
*/ 5029 static void gen_abso(DisasContext *ctx) 5030 { 5031 TCGLabel *l1 = gen_new_label(); 5032 TCGLabel *l2 = gen_new_label(); 5033 TCGLabel *l3 = gen_new_label(); 5034 /* Start with XER OV disabled, the most likely case */ 5035 tcg_gen_movi_tl(cpu_ov, 0); 5036 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l2); 5037 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[rA(ctx->opcode)], 0x80000000, l1); 5038 tcg_gen_movi_tl(cpu_ov, 1); 5039 tcg_gen_movi_tl(cpu_so, 1); 5040 tcg_gen_br(l2); 5041 gen_set_label(l1); 5042 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5043 tcg_gen_br(l3); 5044 gen_set_label(l2); 5045 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5046 gen_set_label(l3); 5047 if (unlikely(Rc(ctx->opcode) != 0)) 5048 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5049 } 5050 5051 /* clcs */ 5052 static void gen_clcs(DisasContext *ctx) 5053 { 5054 TCGv_i32 t0 = tcg_const_i32(rA(ctx->opcode)); 5055 gen_helper_clcs(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5056 tcg_temp_free_i32(t0); 5057 /* Rc=1 sets CR0 to an undefined state */ 5058 } 5059 5060 /* div - div. */ 5061 static void gen_div(DisasContext *ctx) 5062 { 5063 gen_helper_div(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 5064 cpu_gpr[rB(ctx->opcode)]); 5065 if (unlikely(Rc(ctx->opcode) != 0)) 5066 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5067 } 5068 5069 /* divo - divo. */ 5070 static void gen_divo(DisasContext *ctx) 5071 { 5072 gen_helper_divo(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 5073 cpu_gpr[rB(ctx->opcode)]); 5074 if (unlikely(Rc(ctx->opcode) != 0)) 5075 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5076 } 5077 5078 /* divs - divs. */ 5079 static void gen_divs(DisasContext *ctx) 5080 { 5081 gen_helper_divs(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 5082 cpu_gpr[rB(ctx->opcode)]); 5083 if (unlikely(Rc(ctx->opcode) != 0)) 5084 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5085 } 5086 5087 /* divso - divso. 
*/ 5088 static void gen_divso(DisasContext *ctx) 5089 { 5090 gen_helper_divso(cpu_gpr[rD(ctx->opcode)], cpu_env, 5091 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 5092 if (unlikely(Rc(ctx->opcode) != 0)) 5093 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5094 } 5095 5096 /* doz - doz. */ 5097 static void gen_doz(DisasContext *ctx) 5098 { 5099 TCGLabel *l1 = gen_new_label(); 5100 TCGLabel *l2 = gen_new_label(); 5101 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1); 5102 tcg_gen_sub_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5103 tcg_gen_br(l2); 5104 gen_set_label(l1); 5105 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 5106 gen_set_label(l2); 5107 if (unlikely(Rc(ctx->opcode) != 0)) 5108 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5109 } 5110 5111 /* dozo - dozo. */ 5112 static void gen_dozo(DisasContext *ctx) 5113 { 5114 TCGLabel *l1 = gen_new_label(); 5115 TCGLabel *l2 = gen_new_label(); 5116 TCGv t0 = tcg_temp_new(); 5117 TCGv t1 = tcg_temp_new(); 5118 TCGv t2 = tcg_temp_new(); 5119 /* Start with XER OV disabled, the most likely case */ 5120 tcg_gen_movi_tl(cpu_ov, 0); 5121 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1); 5122 tcg_gen_sub_tl(t0, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5123 tcg_gen_xor_tl(t1, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5124 tcg_gen_xor_tl(t2, cpu_gpr[rA(ctx->opcode)], t0); 5125 tcg_gen_andc_tl(t1, t1, t2); 5126 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0); 5127 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2); 5128 tcg_gen_movi_tl(cpu_ov, 1); 5129 tcg_gen_movi_tl(cpu_so, 1); 5130 tcg_gen_br(l2); 5131 gen_set_label(l1); 5132 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 5133 gen_set_label(l2); 5134 tcg_temp_free(t0); 5135 tcg_temp_free(t1); 5136 tcg_temp_free(t2); 5137 if (unlikely(Rc(ctx->opcode) != 0)) 5138 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5139 } 5140 5141 /* dozi */ 5142 static void 
gen_dozi(DisasContext *ctx) 5143 { 5144 target_long simm = SIMM(ctx->opcode); 5145 TCGLabel *l1 = gen_new_label(); 5146 TCGLabel *l2 = gen_new_label(); 5147 tcg_gen_brcondi_tl(TCG_COND_LT, cpu_gpr[rA(ctx->opcode)], simm, l1); 5148 tcg_gen_subfi_tl(cpu_gpr[rD(ctx->opcode)], simm, cpu_gpr[rA(ctx->opcode)]); 5149 tcg_gen_br(l2); 5150 gen_set_label(l1); 5151 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 5152 gen_set_label(l2); 5153 if (unlikely(Rc(ctx->opcode) != 0)) 5154 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5155 } 5156 5157 /* lscbx - lscbx. */ 5158 static void gen_lscbx(DisasContext *ctx) 5159 { 5160 TCGv t0 = tcg_temp_new(); 5161 TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode)); 5162 TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode)); 5163 TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode)); 5164 5165 gen_addr_reg_index(ctx, t0); 5166 gen_helper_lscbx(t0, cpu_env, t0, t1, t2, t3); 5167 tcg_temp_free_i32(t1); 5168 tcg_temp_free_i32(t2); 5169 tcg_temp_free_i32(t3); 5170 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F); 5171 tcg_gen_or_tl(cpu_xer, cpu_xer, t0); 5172 if (unlikely(Rc(ctx->opcode) != 0)) 5173 gen_set_Rc0(ctx, t0); 5174 tcg_temp_free(t0); 5175 } 5176 5177 /* maskg - maskg. 
*/ 5178 static void gen_maskg(DisasContext *ctx) 5179 { 5180 TCGLabel *l1 = gen_new_label(); 5181 TCGv t0 = tcg_temp_new(); 5182 TCGv t1 = tcg_temp_new(); 5183 TCGv t2 = tcg_temp_new(); 5184 TCGv t3 = tcg_temp_new(); 5185 tcg_gen_movi_tl(t3, 0xFFFFFFFF); 5186 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5187 tcg_gen_andi_tl(t1, cpu_gpr[rS(ctx->opcode)], 0x1F); 5188 tcg_gen_addi_tl(t2, t0, 1); 5189 tcg_gen_shr_tl(t2, t3, t2); 5190 tcg_gen_shr_tl(t3, t3, t1); 5191 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], t2, t3); 5192 tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1); 5193 tcg_gen_neg_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5194 gen_set_label(l1); 5195 tcg_temp_free(t0); 5196 tcg_temp_free(t1); 5197 tcg_temp_free(t2); 5198 tcg_temp_free(t3); 5199 if (unlikely(Rc(ctx->opcode) != 0)) 5200 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5201 } 5202 5203 /* maskir - maskir. */ 5204 static void gen_maskir(DisasContext *ctx) 5205 { 5206 TCGv t0 = tcg_temp_new(); 5207 TCGv t1 = tcg_temp_new(); 5208 tcg_gen_and_tl(t0, cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 5209 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 5210 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5211 tcg_temp_free(t0); 5212 tcg_temp_free(t1); 5213 if (unlikely(Rc(ctx->opcode) != 0)) 5214 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5215 } 5216 5217 /* mul - mul. 
*/ 5218 static void gen_mul(DisasContext *ctx) 5219 { 5220 TCGv_i64 t0 = tcg_temp_new_i64(); 5221 TCGv_i64 t1 = tcg_temp_new_i64(); 5222 TCGv t2 = tcg_temp_new(); 5223 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]); 5224 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]); 5225 tcg_gen_mul_i64(t0, t0, t1); 5226 tcg_gen_trunc_i64_tl(t2, t0); 5227 gen_store_spr(SPR_MQ, t2); 5228 tcg_gen_shri_i64(t1, t0, 32); 5229 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1); 5230 tcg_temp_free_i64(t0); 5231 tcg_temp_free_i64(t1); 5232 tcg_temp_free(t2); 5233 if (unlikely(Rc(ctx->opcode) != 0)) 5234 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5235 } 5236 5237 /* mulo - mulo. */ 5238 static void gen_mulo(DisasContext *ctx) 5239 { 5240 TCGLabel *l1 = gen_new_label(); 5241 TCGv_i64 t0 = tcg_temp_new_i64(); 5242 TCGv_i64 t1 = tcg_temp_new_i64(); 5243 TCGv t2 = tcg_temp_new(); 5244 /* Start with XER OV disabled, the most likely case */ 5245 tcg_gen_movi_tl(cpu_ov, 0); 5246 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]); 5247 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]); 5248 tcg_gen_mul_i64(t0, t0, t1); 5249 tcg_gen_trunc_i64_tl(t2, t0); 5250 gen_store_spr(SPR_MQ, t2); 5251 tcg_gen_shri_i64(t1, t0, 32); 5252 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1); 5253 tcg_gen_ext32s_i64(t1, t0); 5254 tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1); 5255 tcg_gen_movi_tl(cpu_ov, 1); 5256 tcg_gen_movi_tl(cpu_so, 1); 5257 gen_set_label(l1); 5258 tcg_temp_free_i64(t0); 5259 tcg_temp_free_i64(t1); 5260 tcg_temp_free(t2); 5261 if (unlikely(Rc(ctx->opcode) != 0)) 5262 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5263 } 5264 5265 /* nabs - nabs. 
 */
/* nabs - nabs.: rD = -|rA| (never overflows) */
static void gen_nabs(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    gen_set_label(l2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* nabso - nabso.: same as nabs; OV is always cleared */
static void gen_nabso(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    gen_set_label(l2);
    /* nabs never overflows */
    tcg_gen_movi_tl(cpu_ov, 0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* rlmi - rlmi.: rotate left then mask insert (601) */
static void gen_rlmi(DisasContext *ctx)
{
    uint32_t mb = MB(ctx->opcode);
    uint32_t me = ME(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    tcg_gen_andi_tl(t0, t0, MASK(mb, me));
    tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~MASK(mb, me));
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* rrib - rrib.: rotate right and insert bit 0 of rS into rA (601) */
static void gen_rrib(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_movi_tl(t1, 0x80000000);
    tcg_gen_shr_tl(t1, t1, t0);
    tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    tcg_gen_and_tl(t0, t0, t1);
    tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], t1);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* sle - sle.: shift left extended; the rotated value goes to MQ (601) */
static void gen_sle(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_subfi_tl(t1, 32, t1);
    tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_or_tl(t1, t0, t1);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    gen_store_spr(SPR_MQ, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* sleq - sleq.: shift left extended with MQ (601) */
static void gen_sleq(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_movi_tl(t2, 0xFFFFFFFF);
    tcg_gen_shl_tl(t2, t2, t0);
    tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    gen_load_spr(t1, SPR_MQ);
    gen_store_spr(SPR_MQ, t0);
    tcg_gen_and_tl(t0, t0, t2);
    tcg_gen_andc_tl(t1, t1, t2);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* sliq - sliq.: shift left immediate with MQ (601) */
static void gen_sliq(DisasContext *ctx)
{
    int sh = SH(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_shli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
    tcg_gen_shri_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
    tcg_gen_or_tl(t1, t0, t1);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    gen_store_spr(SPR_MQ, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* slliq - slliq.: shift left long immediate with MQ (601) */
static void gen_slliq(DisasContext *ctx)
{
    int sh = SH(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
    gen_load_spr(t1, SPR_MQ);
    gen_store_spr(SPR_MQ, t0);
    tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU << sh));
    tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU << sh));
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* sllq - sllq.: shift left long with MQ (601); local temps because values
 * live across branches
 */
static void gen_sllq(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    TCGv t0 = tcg_temp_local_new();
    TCGv t1 = tcg_temp_local_new();
    TCGv t2 = tcg_temp_local_new();
    tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_movi_tl(t1, 0xFFFFFFFF);
    tcg_gen_shl_tl(t1, t1, t2);
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
    /* shift count >= 32: result comes from MQ under the mask */
    gen_load_spr(t0, SPR_MQ);
    tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
    gen_load_spr(t2, SPR_MQ);
    tcg_gen_andc_tl(t1, t2, t1);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    gen_set_label(l2);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* slq - slq.: shift left with MQ (601); rA is zeroed when bit 0x20 of the
 * shift count is set
 */
static void gen_slq(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_subfi_tl(t1, 32, t1);
    tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_or_tl(t1, t0, t1);
    gen_store_spr(SPR_MQ, t1);
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
    tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
    gen_set_label(l1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* sraiq - sraiq.
*/ 5459 static void gen_sraiq(DisasContext *ctx) 5460 { 5461 int sh = SH(ctx->opcode); 5462 TCGLabel *l1 = gen_new_label(); 5463 TCGv t0 = tcg_temp_new(); 5464 TCGv t1 = tcg_temp_new(); 5465 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5466 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 5467 tcg_gen_or_tl(t0, t0, t1); 5468 gen_store_spr(SPR_MQ, t0); 5469 tcg_gen_movi_tl(cpu_ca, 0); 5470 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1); 5471 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1); 5472 tcg_gen_movi_tl(cpu_ca, 1); 5473 gen_set_label(l1); 5474 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh); 5475 tcg_temp_free(t0); 5476 tcg_temp_free(t1); 5477 if (unlikely(Rc(ctx->opcode) != 0)) 5478 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5479 } 5480 5481 /* sraq - sraq. */ 5482 static void gen_sraq(DisasContext *ctx) 5483 { 5484 TCGLabel *l1 = gen_new_label(); 5485 TCGLabel *l2 = gen_new_label(); 5486 TCGv t0 = tcg_temp_new(); 5487 TCGv t1 = tcg_temp_local_new(); 5488 TCGv t2 = tcg_temp_local_new(); 5489 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 5490 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 5491 tcg_gen_sar_tl(t1, cpu_gpr[rS(ctx->opcode)], t2); 5492 tcg_gen_subfi_tl(t2, 32, t2); 5493 tcg_gen_shl_tl(t2, cpu_gpr[rS(ctx->opcode)], t2); 5494 tcg_gen_or_tl(t0, t0, t2); 5495 gen_store_spr(SPR_MQ, t0); 5496 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 5497 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l1); 5498 tcg_gen_mov_tl(t2, cpu_gpr[rS(ctx->opcode)]); 5499 tcg_gen_sari_tl(t1, cpu_gpr[rS(ctx->opcode)], 31); 5500 gen_set_label(l1); 5501 tcg_temp_free(t0); 5502 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t1); 5503 tcg_gen_movi_tl(cpu_ca, 0); 5504 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2); 5505 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l2); 5506 tcg_gen_movi_tl(cpu_ca, 1); 5507 gen_set_label(l2); 5508 tcg_temp_free(t1); 5509 tcg_temp_free(t2); 5510 if (unlikely(Rc(ctx->opcode) != 0)) 5511 gen_set_Rc0(ctx, 
cpu_gpr[rA(ctx->opcode)]);
}

/* sre - sre. */
static void gen_sre(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    /* Shift count is the low 5 bits of rB. */
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
    /* Build rotate-right(rS, sh) = (rS >> sh) | (rS << (32 - sh)). */
    tcg_gen_subfi_tl(t1, 32, t1);
    tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_or_tl(t1, t0, t1);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    /* The rotated source is kept in MQ, as the POWER MQ-shift insns require. */
    gen_store_spr(SPR_MQ, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* srea - srea. */
static void gen_srea(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
    /* MQ receives the source rotated right by the shift count... */
    tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
    gen_store_spr(SPR_MQ, t0);
    /* ...while rA receives the arithmetic (sign-propagating) shift. */
    tcg_gen_sar_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* sreq */
static void gen_sreq(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
    /* t1 = mask selecting the bits that come from the rotated source. */
    tcg_gen_movi_tl(t1, 0xFFFFFFFF);
    tcg_gen_shr_tl(t1, t1, t0);
    tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    /* Merge the rotated source with the previous MQ under the mask,
     * then store the rotated source as the new MQ. */
    gen_load_spr(t2, SPR_MQ);
    gen_store_spr(SPR_MQ, t0);
    tcg_gen_and_tl(t0, t0, t1);
    tcg_gen_andc_tl(t2, t2, t1);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* sriq */
static void gen_sriq(DisasContext *ctx)
{
    int sh = SH(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
    /* NOTE(review): for sh == 0 this shifts left by 32 -- fine when
     * target_long is 64-bit, verify behavior for 32-bit targets. */
    tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
    tcg_gen_or_tl(t1, t0, t1);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    gen_store_spr(SPR_MQ, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* srliq */
static void gen_srliq(DisasContext *ctx)
{
    int sh = SH(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_rotri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
    gen_load_spr(t1, SPR_MQ);
    gen_store_spr(SPR_MQ, t0);
    /* rA = (rotated & mask) | (old MQ & ~mask). */
    tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU >> sh));
    tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU >> sh));
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* srlq */
static void gen_srlq(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    /* Values must survive the branches, hence tcg_temp_local_new(). */
    TCGv t0 = tcg_temp_local_new();
    TCGv t1 = tcg_temp_local_new();
    TCGv t2 = tcg_temp_local_new();
    tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_movi_tl(t1, 0xFFFFFFFF);
    tcg_gen_shr_tl(t2, t1, t2);
    /* rB bit 0x20 selects between the MQ-masked and merged result forms. */
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
    gen_load_spr(t0, SPR_MQ);
    tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
    tcg_gen_br(l2);
    gen_set_label(l1);
    /* NOTE(review): the shift count here is the mask t2, not the 5-bit
     * count -- looks suspicious but matches upstream; confirm against the
     * POWER srlq specification before changing. */
    tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
    tcg_gen_and_tl(t0, t0, t2);
    gen_load_spr(t1, SPR_MQ);
    tcg_gen_andc_tl(t1, t1, t2);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    gen_set_label(l2);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx,
cpu_gpr[rA(ctx->opcode)]); 5632 } 5633 5634 /* srq */ 5635 static void gen_srq(DisasContext *ctx) 5636 { 5637 TCGLabel *l1 = gen_new_label(); 5638 TCGv t0 = tcg_temp_new(); 5639 TCGv t1 = tcg_temp_new(); 5640 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5641 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5642 tcg_gen_subfi_tl(t1, 32, t1); 5643 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 5644 tcg_gen_or_tl(t1, t0, t1); 5645 gen_store_spr(SPR_MQ, t1); 5646 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20); 5647 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5648 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 5649 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 5650 gen_set_label(l1); 5651 tcg_temp_free(t0); 5652 tcg_temp_free(t1); 5653 if (unlikely(Rc(ctx->opcode) != 0)) 5654 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5655 } 5656 5657 /* PowerPC 602 specific instructions */ 5658 5659 /* dsa */ 5660 static void gen_dsa(DisasContext *ctx) 5661 { 5662 /* XXX: TODO */ 5663 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5664 } 5665 5666 /* esa */ 5667 static void gen_esa(DisasContext *ctx) 5668 { 5669 /* XXX: TODO */ 5670 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5671 } 5672 5673 /* mfrom */ 5674 static void gen_mfrom(DisasContext *ctx) 5675 { 5676 #if defined(CONFIG_USER_ONLY) 5677 GEN_PRIV; 5678 #else 5679 CHK_SV; 5680 gen_helper_602_mfrom(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5681 #endif /* defined(CONFIG_USER_ONLY) */ 5682 } 5683 5684 /* 602 - 603 - G2 TLB management */ 5685 5686 /* tlbld */ 5687 static void gen_tlbld_6xx(DisasContext *ctx) 5688 { 5689 #if defined(CONFIG_USER_ONLY) 5690 GEN_PRIV; 5691 #else 5692 CHK_SV; 5693 gen_helper_6xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5694 #endif /* defined(CONFIG_USER_ONLY) */ 5695 } 5696 5697 /* tlbli */ 5698 static void gen_tlbli_6xx(DisasContext *ctx) 5699 { 5700 #if defined(CONFIG_USER_ONLY) 5701 GEN_PRIV; 5702 #else 5703 CHK_SV; 5704 gen_helper_6xx_tlbi(cpu_env, 
cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* 74xx TLB management */

/* tlbld */
static void gen_tlbld_74xx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    /* Supervisor-only: software data-TLB load, EA taken from rB. */
    CHK_SV;
    gen_helper_74xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbli */
static void gen_tlbli_74xx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    /* Supervisor-only: software instruction-TLB load, EA taken from rB. */
    CHK_SV;
    gen_helper_74xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* POWER instructions not in PowerPC 601 */

/* clf */
static void gen_clf(DisasContext *ctx)
{
    /* Cache line flush: implemented as no-op */
}

/* cli */
static void gen_cli(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    /* Cache line invalidate: privileged and treated as no-op */
    CHK_SV;
#endif /* defined(CONFIG_USER_ONLY) */
}

/* dclst */
static void gen_dclst(DisasContext *ctx)
{
    /* Data cache line store: treated as no-op */
}

/* mfsri */
static void gen_mfsri(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    int ra = rA(ctx->opcode);
    int rd = rD(ctx->opcode);
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    /* The segment register index is the top 4 bits of the 32-bit EA. */
    tcg_gen_extract_tl(t0, t0, 28, 4);
    gen_helper_load_sr(cpu_gpr[rd], cpu_env, t0);
    tcg_temp_free(t0);
    /* Also copy the result into rA, unless rA is r0 or already rD. */
    if (ra != 0 && ra != rd)
        tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rd]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* rac */
static void gen_rac(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_rac(cpu_gpr[rD(ctx->opcode)],
cpu_env, t0); 5788 tcg_temp_free(t0); 5789 #endif /* defined(CONFIG_USER_ONLY) */ 5790 } 5791 5792 static void gen_rfsvc(DisasContext *ctx) 5793 { 5794 #if defined(CONFIG_USER_ONLY) 5795 GEN_PRIV; 5796 #else 5797 CHK_SV; 5798 5799 gen_helper_rfsvc(cpu_env); 5800 gen_sync_exception(ctx); 5801 #endif /* defined(CONFIG_USER_ONLY) */ 5802 } 5803 5804 /* svc is not implemented for now */ 5805 5806 /* BookE specific instructions */ 5807 5808 /* XXX: not implemented on 440 ? */ 5809 static void gen_mfapidi(DisasContext *ctx) 5810 { 5811 /* XXX: TODO */ 5812 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5813 } 5814 5815 /* XXX: not implemented on 440 ? */ 5816 static void gen_tlbiva(DisasContext *ctx) 5817 { 5818 #if defined(CONFIG_USER_ONLY) 5819 GEN_PRIV; 5820 #else 5821 TCGv t0; 5822 5823 CHK_SV; 5824 t0 = tcg_temp_new(); 5825 gen_addr_reg_index(ctx, t0); 5826 gen_helper_tlbiva(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5827 tcg_temp_free(t0); 5828 #endif /* defined(CONFIG_USER_ONLY) */ 5829 } 5830 5831 /* All 405 MAC instructions are translated here */ 5832 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3, 5833 int ra, int rb, int rt, int Rc) 5834 { 5835 TCGv t0, t1; 5836 5837 t0 = tcg_temp_local_new(); 5838 t1 = tcg_temp_local_new(); 5839 5840 switch (opc3 & 0x0D) { 5841 case 0x05: 5842 /* macchw - macchw. - macchwo - macchwo. */ 5843 /* macchws - macchws. - macchwso - macchwso. */ 5844 /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */ 5845 /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */ 5846 /* mulchw - mulchw. */ 5847 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]); 5848 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16); 5849 tcg_gen_ext16s_tl(t1, t1); 5850 break; 5851 case 0x04: 5852 /* macchwu - macchwu. - macchwuo - macchwuo. */ 5853 /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */ 5854 /* mulchwu - mulchwu. 
*/
        tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
        tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
        tcg_gen_ext16u_tl(t1, t1);
        break;
    case 0x01:
        /* machhw - machhw. - machhwo - machhwo. */
        /* machhws - machhws. - machhwso - machhwso. */
        /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */
        /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */
        /* mulhhw - mulhhw. */
        tcg_gen_sari_tl(t0, cpu_gpr[ra], 16);
        tcg_gen_ext16s_tl(t0, t0);
        tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
        tcg_gen_ext16s_tl(t1, t1);
        break;
    case 0x00:
        /* machhwu - machhwu. - machhwuo - machhwuo. */
        /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */
        /* mulhhwu - mulhhwu. */
        tcg_gen_shri_tl(t0, cpu_gpr[ra], 16);
        tcg_gen_ext16u_tl(t0, t0);
        tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
        tcg_gen_ext16u_tl(t1, t1);
        break;
    case 0x0D:
        /* maclhw - maclhw. - maclhwo - maclhwo. */
        /* maclhws - maclhws. - maclhwso - maclhwso. */
        /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */
        /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */
        /* mullhw - mullhw. */
        tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
        tcg_gen_ext16s_tl(t1, cpu_gpr[rb]);
        break;
    case 0x0C:
        /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */
        /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */
        /* mullhwu - mullhwu. */
        tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
        tcg_gen_ext16u_tl(t1, cpu_gpr[rb]);
        break;
    }
    if (opc2 & 0x04) {
        /* (n)multiply-and-accumulate (0x0C / 0x0E) */
        tcg_gen_mul_tl(t1, t0, t1);
        if (opc2 & 0x02) {
            /* nmultiply-and-accumulate (0x0E) */
            tcg_gen_sub_tl(t0, cpu_gpr[rt], t1);
        } else {
            /* multiply-and-accumulate (0x0C) */
            tcg_gen_add_tl(t0, cpu_gpr[rt], t1);
        }

        if (opc3 & 0x12) {
            /* Check overflow and/or saturate */
            TCGLabel *l1 = gen_new_label();

            if (opc3 & 0x10) {
                /* Start with XER OV disabled, the most likely case */
                tcg_gen_movi_tl(cpu_ov, 0);
            }
            if (opc3 & 0x01) {
                /* Signed */
                /* Two sign-xor tests decide whether the accumulate
                 * overflowed; either branch to l1 means "no overflow". */
                tcg_gen_xor_tl(t1, cpu_gpr[rt], t1);
                tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
                tcg_gen_xor_tl(t1, cpu_gpr[rt], t0);
                tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1);
                if (opc3 & 0x02) {
                    /* Saturate */
                    /* Clamp to INT32_MAX/INT32_MIN based on rt's sign. */
                    tcg_gen_sari_tl(t0, cpu_gpr[rt], 31);
                    tcg_gen_xori_tl(t0, t0, 0x7fffffff);
                }
            } else {
                /* Unsigned */
                tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
                if (opc3 & 0x02) {
                    /* Saturate */
                    tcg_gen_movi_tl(t0, UINT32_MAX);
                }
            }
            if (opc3 & 0x10) {
                /* Check overflow */
                tcg_gen_movi_tl(cpu_ov, 1);
                tcg_gen_movi_tl(cpu_so, 1);
            }
            gen_set_label(l1);
            tcg_gen_mov_tl(cpu_gpr[rt], t0);
        }
    } else {
        /* Plain multiply (opc2 0x08): no accumulate, no overflow logic. */
        tcg_gen_mul_tl(cpu_gpr[rt], t0, t1);
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    /* NOTE(review): parenthesized as (unlikely(Rc)) != 0 -- same value as
     * unlikely(Rc != 0), but probably not what was meant stylistically. */
    if (unlikely(Rc) != 0) {
        /* Update Rc0 */
        gen_set_Rc0(ctx, cpu_gpr[rt]);
    }
}

/* Expand one named 405 MAC handler delegating to gen_405_mulladd_insn. */
#define GEN_MAC_HANDLER(name, opc2, opc3)                                     \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode),   \
                         rD(ctx->opcode), Rc(ctx->opcode));                   \
}

/* macchw - macchw. */
GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
/* macchwo - macchwo.
*/
GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
/* macchws - macchws. */
GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
/* macchwso - macchwso. */
GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
/* macchwsu - macchwsu. */
GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
/* macchwsuo - macchwsuo. */
GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
/* macchwu - macchwu. */
GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
/* macchwuo - macchwuo. */
GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
/* machhw - machhw. */
GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
/* machhwo - machhwo. */
GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
/* machhws - machhws. */
GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
/* machhwso - machhwso. */
GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
/* machhwsu - machhwsu. */
GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
/* machhwsuo - machhwsuo. */
GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
/* machhwu - machhwu. */
GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
/* machhwuo - machhwuo. */
GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
/* maclhw - maclhw. */
GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
/* maclhwo - maclhwo. */
GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
/* maclhws - maclhws. */
GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
/* maclhwso - maclhwso. */
GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
/* maclhwu - maclhwu. */
GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
/* maclhwuo - maclhwuo. */
GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
/* maclhwsu - maclhwsu. */
GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
/* maclhwsuo - maclhwsuo. */
GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
/* nmacchw - nmacchw. */
GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
/* nmacchwo - nmacchwo. */
GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
/* nmacchws - nmacchws. */
GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
/* nmacchwso - nmacchwso. */
GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
/* nmachhw - nmachhw. */
GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
/* nmachhwo - nmachhwo. */
GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
/* nmachhws - nmachhws. */
GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
/* nmachhwso - nmachhwso. */
GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
/* nmaclhw - nmaclhw. */
GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
/* nmaclhwo - nmaclhwo. */
GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
/* nmaclhws - nmaclhws. */
GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
/* nmaclhwso - nmaclhwso. */
GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);

/* mulchw - mulchw. */
GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
/* mulchwu - mulchwu. */
GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
/* mulhhw - mulhhw. */
GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
/* mulhhwu - mulhhwu. */
GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
/* mullhw - mullhw. */
GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
/* mullhwu - mullhwu. */
GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);

/* mfdcr */
static void gen_mfdcr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv dcrn;

    CHK_SV;
    /* The DCR number is encoded in the opcode's SPR field. */
    dcrn = tcg_const_tl(SPR(ctx->opcode));
    gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, dcrn);
    tcg_temp_free(dcrn);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mtdcr */
static void gen_mtdcr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv dcrn;

    CHK_SV;
    dcrn = tcg_const_tl(SPR(ctx->opcode));
    gen_helper_store_dcr(cpu_env, dcrn, cpu_gpr[rS(ctx->opcode)]);
    tcg_temp_free(dcrn);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mfdcrx */
/* XXX: not implemented on 440 ?
*/
static void gen_mfdcrx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* Indexed form: the DCR number comes from rA at run time. */
    gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
                        cpu_gpr[rA(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mtdcrx */
/* XXX: not implemented on 440 ? */
static void gen_mtdcrx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
                         cpu_gpr[rS(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mfdcrux (PPC 460) : user-mode access to DCR */
static void gen_mfdcrux(DisasContext *ctx)
{
    gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
                        cpu_gpr[rA(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
}

/* mtdcrux (PPC 460) : user-mode access to DCR */
static void gen_mtdcrux(DisasContext *ctx)
{
    gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
                         cpu_gpr[rS(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
}

/* dccci */
static void gen_dccci(DisasContext *ctx)
{
    CHK_SV;
    /* interpreted as no-op */
}

/* dcread */
static void gen_dcread(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv EA, val;

    CHK_SV;
    gen_set_access_type(ctx, ACCESS_CACHE);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    val = tcg_temp_new();
    /* Perform the load (so MMU faults are taken) but discard the value;
     * rD receives the EA itself. */
    gen_qemu_ld32u(ctx, val, EA);
    tcg_temp_free(val);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
    tcg_temp_free(EA);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* icbt */
static void gen_icbt_40x(DisasContext *ctx)
{
    /* interpreted as no-op */
    /* XXX: specification say this is treated as a load by the MMU
     * but does not generate any exception
     */
}

/* iccci */
static void gen_iccci(DisasContext *ctx)
{
    CHK_SV;
    /* interpreted as no-op */
}

/* icread */
static void gen_icread(DisasContext *ctx)
{
    CHK_SV;
    /* interpreted as no-op */
}

/* rfci (supervisor only) */
static void gen_rfci_40x(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* Restore CPU state */
    gen_helper_40x_rfci(cpu_env);
    gen_sync_exception(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}

static void gen_rfci(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* Restore CPU state */
    gen_helper_rfci(cpu_env);
    gen_sync_exception(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* BookE specific */

/* XXX: not implemented on 440 ? */
static void gen_rfdi(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* Restore CPU state */
    gen_helper_rfdi(cpu_env);
    gen_sync_exception(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* XXX: not implemented on 440 ?
*/
static void gen_rfmci(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* Restore CPU state */
    gen_helper_rfmci(cpu_env);
    gen_sync_exception(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* TLB management - PowerPC 405 implementation */

/* tlbre */
static void gen_tlbre_40x(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* rB selects which word of the TLB entry to read (0 = hi, 1 = lo). */
    switch (rB(ctx->opcode)) {
    case 0:
        gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_env,
                                cpu_gpr[rA(ctx->opcode)]);
        break;
    case 1:
        gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_env,
                                cpu_gpr[rA(ctx->opcode)]);
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbsx - tlbsx. */
static void gen_tlbsx_40x(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
    if (Rc(ctx->opcode)) {
        TCGLabel *l1 = gen_new_label();
        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
        /* Set CR0.EQ unless rD == -1 (presumably "no match" from the
         * helper -- confirm against helper_4xx_tlbsx). */
        tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
        tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
        gen_set_label(l1);
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbwe */
static void gen_tlbwe_40x(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    switch (rB(ctx->opcode)) {
    case 0:
        gen_helper_4xx_tlbwe_hi(cpu_env, cpu_gpr[rA(ctx->opcode)],
                                cpu_gpr[rS(ctx->opcode)]);
        break;
    case 1:
        gen_helper_4xx_tlbwe_lo(cpu_env, cpu_gpr[rA(ctx->opcode)],
                                cpu_gpr[rS(ctx->opcode)]);
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* TLB management - PowerPC 440 implementation */

/* tlbre */
static void gen_tlbre_440(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    /* rB (0..2) selects the TLB entry word; anything else is invalid. */
    switch (rB(ctx->opcode)) {
    case 0:
    case 1:
    case 2:
        {
            TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
            gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], cpu_env,
                                 t0, cpu_gpr[rA(ctx->opcode)]);
            tcg_temp_free_i32(t0);
        }
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbsx - tlbsx. */
static void gen_tlbsx_440(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
    if (Rc(ctx->opcode)) {
        TCGLabel *l1 = gen_new_label();
        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
        tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
        tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
        gen_set_label(l1);
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbwe */
static void gen_tlbwe_440(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    switch (rB(ctx->opcode)) {
    case 0:
    case 1:
    case 2:
        {
            TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
            gen_helper_440_tlbwe(cpu_env, t0, cpu_gpr[rA(ctx->opcode)],
                                 cpu_gpr[rS(ctx->opcode)]);
            tcg_temp_free_i32(t0);
        }
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* TLB management -
PowerPC BookE 2.06 implementation */ 6372 6373 /* tlbre */ 6374 static void gen_tlbre_booke206(DisasContext *ctx) 6375 { 6376 #if defined(CONFIG_USER_ONLY) 6377 GEN_PRIV; 6378 #else 6379 CHK_SV; 6380 gen_helper_booke206_tlbre(cpu_env); 6381 #endif /* defined(CONFIG_USER_ONLY) */ 6382 } 6383 6384 /* tlbsx - tlbsx. */ 6385 static void gen_tlbsx_booke206(DisasContext *ctx) 6386 { 6387 #if defined(CONFIG_USER_ONLY) 6388 GEN_PRIV; 6389 #else 6390 TCGv t0; 6391 6392 CHK_SV; 6393 if (rA(ctx->opcode)) { 6394 t0 = tcg_temp_new(); 6395 tcg_gen_mov_tl(t0, cpu_gpr[rD(ctx->opcode)]); 6396 } else { 6397 t0 = tcg_const_tl(0); 6398 } 6399 6400 tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]); 6401 gen_helper_booke206_tlbsx(cpu_env, t0); 6402 tcg_temp_free(t0); 6403 #endif /* defined(CONFIG_USER_ONLY) */ 6404 } 6405 6406 /* tlbwe */ 6407 static void gen_tlbwe_booke206(DisasContext *ctx) 6408 { 6409 #if defined(CONFIG_USER_ONLY) 6410 GEN_PRIV; 6411 #else 6412 CHK_SV; 6413 gen_helper_booke206_tlbwe(cpu_env); 6414 #endif /* defined(CONFIG_USER_ONLY) */ 6415 } 6416 6417 static void gen_tlbivax_booke206(DisasContext *ctx) 6418 { 6419 #if defined(CONFIG_USER_ONLY) 6420 GEN_PRIV; 6421 #else 6422 TCGv t0; 6423 6424 CHK_SV; 6425 t0 = tcg_temp_new(); 6426 gen_addr_reg_index(ctx, t0); 6427 gen_helper_booke206_tlbivax(cpu_env, t0); 6428 tcg_temp_free(t0); 6429 #endif /* defined(CONFIG_USER_ONLY) */ 6430 } 6431 6432 static void gen_tlbilx_booke206(DisasContext *ctx) 6433 { 6434 #if defined(CONFIG_USER_ONLY) 6435 GEN_PRIV; 6436 #else 6437 TCGv t0; 6438 6439 CHK_SV; 6440 t0 = tcg_temp_new(); 6441 gen_addr_reg_index(ctx, t0); 6442 6443 switch((ctx->opcode >> 21) & 0x3) { 6444 case 0: 6445 gen_helper_booke206_tlbilx0(cpu_env, t0); 6446 break; 6447 case 1: 6448 gen_helper_booke206_tlbilx1(cpu_env, t0); 6449 break; 6450 case 3: 6451 gen_helper_booke206_tlbilx3(cpu_env, t0); 6452 break; 6453 default: 6454 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6455 break; 6456 } 6457 6458 
tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}


/* wrtee */
static void gen_wrtee(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    /* Copy only the EE bit from rD into MSR, leaving all other bits. */
    tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE));
    tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
    tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
    tcg_temp_free(t0);
    /* Stop translation to have a chance to raise an exception
     * if we just set msr_ee to 1
     */
    gen_stop_exception(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* wrteei */
static void gen_wrteei(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* The immediate E value is encoded directly in the opcode. */
    if (ctx->opcode & 0x00008000) {
        tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE));
        /* Stop translation to have a chance to raise an exception */
        gen_stop_exception(ctx);
    } else {
        tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* PowerPC 440 specific instructions */

/* dlmzb */
static void gen_dlmzb(DisasContext *ctx)
{
    /* The Rc flag is passed down so the helper handles the record form. */
    TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode));
    gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_env,
                     cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
    tcg_temp_free_i32(t0);
}

/* mbar replaces eieio on 440 */
static void gen_mbar(DisasContext *ctx)
{
    /* interpreted as no-op */
}

/* msync replaces sync on 440 */
static void gen_msync_4xx(DisasContext *ctx)
{
    /* Only e500 seems to treat reserved bits as invalid */
    if ((ctx->insns_flags2 & PPC2_BOOKE206) &&
        (ctx->opcode & 0x03FFF801)) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
    }
    /* otherwise interpreted as no-op */
}

/* icbt */
static void gen_icbt_440(DisasContext *ctx)
{
    /* interpreted as no-op */
    /* XXX: specification say this is treated as a load by the MMU
     * but does not generate any exception
     */
}

/* Embedded.Processor Control */

static void gen_msgclr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_HV;
    /* Book3S arch 2.x has its own message helper; BookE uses the default. */
    if (is_book3s_arch2x(ctx)) {
        gen_helper_book3s_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]);
    } else {
        gen_helper_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]);
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

static void gen_msgsnd(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_HV;
    if (is_book3s_arch2x(ctx)) {
        gen_helper_book3s_msgsnd(cpu_gpr[rB(ctx->opcode)]);
    } else {
        gen_helper_msgsnd(cpu_gpr[rB(ctx->opcode)]);
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

static void gen_msgsync(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_HV;
#endif /* defined(CONFIG_USER_ONLY) */
    /* interpreted as no-op */
}

#if defined(TARGET_PPC64)
/* maddld: rD = low 64 bits of (rA * rB) + rC. */
static void gen_maddld(DisasContext *ctx)
{
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_mul_i64(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_add_i64(cpu_gpr[rD(ctx->opcode)], t1, cpu_gpr[rC(ctx->opcode)]);
    tcg_temp_free_i64(t1);
}

/* maddhd maddhdu */
static void gen_maddhd_maddhdu(DisasContext *ctx)
{
    TCGv_i64 lo = tcg_temp_new_i64();
    TCGv_i64 hi = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    /* Rc bit set: unsigned 128-bit multiply (maddhdu) with zero high
     * addend; clear: signed multiply with rC sign-extended into t1. */
    if (Rc(ctx->opcode)) {
        tcg_gen_mulu2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)],
                          cpu_gpr[rB(ctx->opcode)]);
        tcg_gen_movi_i64(t1, 0);
    } else {
        tcg_gen_muls2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)],
                          cpu_gpr[rB(ctx->opcode)]);
        tcg_gen_sari_i64(t1, cpu_gpr[rC(ctx->opcode)], 63);
    }
tcg_gen_add2_i64(t1, cpu_gpr[rD(ctx->opcode)], lo, hi,
                     cpu_gpr[rC(ctx->opcode)], t1);
    tcg_temp_free_i64(lo);
    tcg_temp_free_i64(hi);
    tcg_temp_free_i64(t1);
}
#endif /* defined(TARGET_PPC64) */

static void gen_tbegin(DisasContext *ctx)
{
    /* Transactional memory: facility-unavailable unless TM is enabled. */
    if (unlikely(!ctx->tm_enabled)) {
        gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
        return;
    }
    gen_helper_tbegin(cpu_env);
}

#define GEN_TM_NOOP(name)                                                     \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    if (unlikely(!ctx->tm_enabled)) {                                         \
        gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);                  \
        return;                                                               \
    }                                                                         \
    /* Because tbegin always fails in QEMU, these user                        \
     * space instructions all have a simple implementation:                   \
     *                                                                        \
     * CR[0] = 0b0 || MSR[TS] || 0b0                                          \
     *       = 0b0 || 0b00    || 0b0                                          \
     */                                                                       \
    tcg_gen_movi_i32(cpu_crf[0], 0);                                          \
}

GEN_TM_NOOP(tend);
GEN_TM_NOOP(tabort);
GEN_TM_NOOP(tabortwc);
GEN_TM_NOOP(tabortwci);
GEN_TM_NOOP(tabortdc);
GEN_TM_NOOP(tabortdci);
GEN_TM_NOOP(tsr);

static inline void gen_cp_abort(DisasContext *ctx)
{
    /* Do Nothing */
}

#define GEN_CP_PASTE_NOOP(name)                                               \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    /* Generate invalid exception until                                       \
     * we have an implementation of the copy                                  \
     * paste facility                                                         \
     */                                                                       \
    gen_invalid(ctx);                                                         \
}

GEN_CP_PASTE_NOOP(copy)
GEN_CP_PASTE_NOOP(paste)

static void gen_tcheck(DisasContext *ctx)
{
    if (unlikely(!ctx->tm_enabled)) {
        gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
        return;
    }
    /* Because tbegin always fails, the tcheck implementation
     * is simple:
     *
     * CR[CRF] = TDOOMED || MSR[TS] || 0b0
     *         = 0b1 || 0b00 || 0b0
     */
    tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0x8);
}

#if defined(CONFIG_USER_ONLY)
/* In user mode the TM privileged ops simply raise a privilege exception. */
#define GEN_TM_PRIV_NOOP(name)                                                \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC);                           \
}

#else

#define GEN_TM_PRIV_NOOP(name)                                                \
static inline void gen_##name(DisasContext *ctx)                              \
{                                                                             \
    CHK_SV;                                                                   \
    if (unlikely(!ctx->tm_enabled)) {                                         \
        gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);                  \
        return;                                                               \
    }                                                                         \
    /* Because tbegin always fails, the implementation is                     \
     * simple:                                                                \
     *                                                                        \
     * CR[0] = 0b0 || MSR[TS] || 0b0                                          \
     *       = 0b0 || 0b00 | 0b0                                              \
     */                                                                       \
    tcg_gen_movi_i32(cpu_crf[0], 0);                                          \
}

#endif

GEN_TM_PRIV_NOOP(treclaim);
GEN_TM_PRIV_NOOP(trechkpt);

/* Load/store one FP register between TCG and CPUPPCState memory. */
static inline void get_fpr(TCGv_i64 dst, int regno)
{
    tcg_gen_ld_i64(dst, cpu_env, fpr_offset(regno));
}

static inline void set_fpr(int regno, TCGv_i64 src)
{
    tcg_gen_st_i64(src, cpu_env, fpr_offset(regno));
}

/* Load/store one 64-bit half ('high' selects which) of an AVR register. */
static inline void get_avr64(TCGv_i64 dst, int regno, bool high)
{
    tcg_gen_ld_i64(dst, cpu_env, avr64_offset(regno, high));
}

static inline void set_avr64(int regno, TCGv_i64 src, bool high)
{
    tcg_gen_st_i64(src, cpu_env, avr64_offset(regno, high));
}

#include "translate/fp-impl.inc.c"

#include "translate/vmx-impl.inc.c"

#include "translate/vsx-impl.inc.c"

#include "translate/dfp-impl.inc.c"

#include "translate/spe-impl.inc.c"

/* Handles lfdp, lxsd, lxssp */
static void gen_dform39(DisasContext *ctx)
{
    /* The shared major opcode is disambiguated by the low two bits,
     * gated on the ISA level the CPU implements. */
    switch (ctx->opcode & 0x3) {
    case 0: /* lfdp */
        if (ctx->insns_flags2 & PPC2_ISA205) {
            return gen_lfdp(ctx);
        }
        break;
    case 2: /* lxsd */
        if (ctx->insns_flags2 & PPC2_ISA300) {
            return gen_lxsd(ctx);
        }
        break;
    case 3: /* lxssp */
        if (ctx->insns_flags2 & PPC2_ISA300) {
            return gen_lxssp(ctx);
        }
        break;
    }
return gen_invalid(ctx); 6759 } 6760 6761 /* handles stfdp, lxv, stxsd, stxssp lxvx */ 6762 static void gen_dform3D(DisasContext *ctx) 6763 { 6764 if ((ctx->opcode & 3) == 1) { /* DQ-FORM */ 6765 switch (ctx->opcode & 0x7) { 6766 case 1: /* lxv */ 6767 if (ctx->insns_flags2 & PPC2_ISA300) { 6768 return gen_lxv(ctx); 6769 } 6770 break; 6771 case 5: /* stxv */ 6772 if (ctx->insns_flags2 & PPC2_ISA300) { 6773 return gen_stxv(ctx); 6774 } 6775 break; 6776 } 6777 } else { /* DS-FORM */ 6778 switch (ctx->opcode & 0x3) { 6779 case 0: /* stfdp */ 6780 if (ctx->insns_flags2 & PPC2_ISA205) { 6781 return gen_stfdp(ctx); 6782 } 6783 break; 6784 case 2: /* stxsd */ 6785 if (ctx->insns_flags2 & PPC2_ISA300) { 6786 return gen_stxsd(ctx); 6787 } 6788 break; 6789 case 3: /* stxssp */ 6790 if (ctx->insns_flags2 & PPC2_ISA300) { 6791 return gen_stxssp(ctx); 6792 } 6793 break; 6794 } 6795 } 6796 return gen_invalid(ctx); 6797 } 6798 6799 static opcode_t opcodes[] = { 6800 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE), 6801 GEN_HANDLER(cmp, 0x1F, 0x00, 0x00, 0x00400000, PPC_INTEGER), 6802 GEN_HANDLER(cmpi, 0x0B, 0xFF, 0xFF, 0x00400000, PPC_INTEGER), 6803 GEN_HANDLER(cmpl, 0x1F, 0x00, 0x01, 0x00400001, PPC_INTEGER), 6804 GEN_HANDLER(cmpli, 0x0A, 0xFF, 0xFF, 0x00400000, PPC_INTEGER), 6805 #if defined(TARGET_PPC64) 6806 GEN_HANDLER_E(cmpeqb, 0x1F, 0x00, 0x07, 0x00600000, PPC_NONE, PPC2_ISA300), 6807 #endif 6808 GEN_HANDLER_E(cmpb, 0x1F, 0x1C, 0x0F, 0x00000001, PPC_NONE, PPC2_ISA205), 6809 GEN_HANDLER_E(cmprb, 0x1F, 0x00, 0x06, 0x00400001, PPC_NONE, PPC2_ISA300), 6810 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL), 6811 GEN_HANDLER(addi, 0x0E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6812 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6813 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6814 GEN_HANDLER(addis, 0x0F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6815 GEN_HANDLER_E(addpcis, 0x13, 0x2, 0xFF, 0x00000000, PPC_NONE, 
PPC2_ISA300), 6816 GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER), 6817 GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER), 6818 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER), 6819 GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER), 6820 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6821 #if defined(TARGET_PPC64) 6822 GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B), 6823 #endif 6824 GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER), 6825 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER), 6826 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6827 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6828 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6829 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER), 6830 GEN_HANDLER_E(cnttzw, 0x1F, 0x1A, 0x10, 0x00000000, PPC_NONE, PPC2_ISA300), 6831 GEN_HANDLER_E(copy, 0x1F, 0x06, 0x18, 0x03C00001, PPC_NONE, PPC2_ISA300), 6832 GEN_HANDLER_E(cp_abort, 0x1F, 0x06, 0x1A, 0x03FFF801, PPC_NONE, PPC2_ISA300), 6833 GEN_HANDLER_E(paste, 0x1F, 0x06, 0x1C, 0x03C00000, PPC_NONE, PPC2_ISA300), 6834 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER), 6835 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER), 6836 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6837 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6838 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6839 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6840 GEN_HANDLER(popcntb, 0x1F, 0x1A, 0x03, 0x0000F801, PPC_POPCNTB), 6841 GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD), 6842 GEN_HANDLER_E(prtyw, 0x1F, 0x1A, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA205), 6843 #if defined(TARGET_PPC64) 6844 GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD), 6845 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, 
PPC_64B), 6846 GEN_HANDLER_E(cnttzd, 0x1F, 0x1A, 0x11, 0x00000000, PPC_NONE, PPC2_ISA300), 6847 GEN_HANDLER_E(darn, 0x1F, 0x13, 0x17, 0x001CF801, PPC_NONE, PPC2_ISA300), 6848 GEN_HANDLER_E(prtyd, 0x1F, 0x1A, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA205), 6849 GEN_HANDLER_E(bpermd, 0x1F, 0x1C, 0x07, 0x00000001, PPC_NONE, PPC2_PERM_ISA206), 6850 #endif 6851 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6852 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6853 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6854 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER), 6855 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER), 6856 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER), 6857 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER), 6858 #if defined(TARGET_PPC64) 6859 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B), 6860 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B), 6861 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B), 6862 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B), 6863 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B), 6864 GEN_HANDLER2_E(extswsli0, "extswsli", 0x1F, 0x1A, 0x1B, 0x00000000, 6865 PPC_NONE, PPC2_ISA300), 6866 GEN_HANDLER2_E(extswsli1, "extswsli", 0x1F, 0x1B, 0x1B, 0x00000000, 6867 PPC_NONE, PPC2_ISA300), 6868 #endif 6869 #if defined(TARGET_PPC64) 6870 GEN_HANDLER(ld, 0x3A, 0xFF, 0xFF, 0x00000000, PPC_64B), 6871 GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX), 6872 GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B), 6873 #endif 6874 /* handles lfdp, lxsd, lxssp */ 6875 GEN_HANDLER_E(dform39, 0x39, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205), 6876 /* handles stfdp, lxv, stxsd, stxssp, stxv */ 6877 GEN_HANDLER_E(dform3D, 0x3D, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205), 6878 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6879 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 
0x00000000, PPC_INTEGER), 6880 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING), 6881 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING), 6882 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING), 6883 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING), 6884 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x01FFF801, PPC_MEM_EIEIO), 6885 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM), 6886 GEN_HANDLER_E(lbarx, 0x1F, 0x14, 0x01, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6887 GEN_HANDLER_E(lharx, 0x1F, 0x14, 0x03, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6888 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES), 6889 GEN_HANDLER_E(lwat, 0x1F, 0x06, 0x12, 0x00000001, PPC_NONE, PPC2_ISA300), 6890 GEN_HANDLER_E(stwat, 0x1F, 0x06, 0x16, 0x00000001, PPC_NONE, PPC2_ISA300), 6891 GEN_HANDLER_E(stbcx_, 0x1F, 0x16, 0x15, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6892 GEN_HANDLER_E(sthcx_, 0x1F, 0x16, 0x16, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6893 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES), 6894 #if defined(TARGET_PPC64) 6895 GEN_HANDLER_E(ldat, 0x1F, 0x06, 0x13, 0x00000001, PPC_NONE, PPC2_ISA300), 6896 GEN_HANDLER_E(stdat, 0x1F, 0x06, 0x17, 0x00000001, PPC_NONE, PPC2_ISA300), 6897 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B), 6898 GEN_HANDLER_E(lqarx, 0x1F, 0x14, 0x08, 0, PPC_NONE, PPC2_LSQ_ISA207), 6899 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B), 6900 GEN_HANDLER_E(stqcx_, 0x1F, 0x16, 0x05, 0, PPC_NONE, PPC2_LSQ_ISA207), 6901 #endif 6902 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC), 6903 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT), 6904 GEN_HANDLER_E(wait, 0x1F, 0x1E, 0x00, 0x039FF801, PPC_NONE, PPC2_ISA300), 6905 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 6906 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 6907 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW), 6908 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW), 6909 
GEN_HANDLER_E(bctar, 0x13, 0x10, 0x11, 0x0000E000, PPC_NONE, PPC2_BCTAR_ISA207), 6910 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER), 6911 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW), 6912 #if defined(TARGET_PPC64) 6913 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B), 6914 GEN_HANDLER_E(stop, 0x13, 0x12, 0x0b, 0x03FFF801, PPC_NONE, PPC2_ISA300), 6915 GEN_HANDLER_E(doze, 0x13, 0x12, 0x0c, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6916 GEN_HANDLER_E(nap, 0x13, 0x12, 0x0d, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6917 GEN_HANDLER_E(sleep, 0x13, 0x12, 0x0e, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6918 GEN_HANDLER_E(rvwinkle, 0x13, 0x12, 0x0f, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6919 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H), 6920 #endif 6921 GEN_HANDLER(sc, 0x11, 0xFF, 0xFF, 0x03FFF01D, PPC_FLOW), 6922 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW), 6923 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 6924 #if defined(TARGET_PPC64) 6925 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B), 6926 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B), 6927 #endif 6928 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC), 6929 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC), 6930 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC), 6931 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC), 6932 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB), 6933 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC), 6934 #if defined(TARGET_PPC64) 6935 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B), 6936 GEN_HANDLER_E(setb, 0x1F, 0x00, 0x04, 0x0003F801, PPC_NONE, PPC2_ISA300), 6937 GEN_HANDLER_E(mcrxrx, 0x1F, 0x00, 0x12, 0x007FF801, PPC_NONE, PPC2_ISA300), 6938 #endif 6939 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001EF801, PPC_MISC), 6940 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000000, PPC_MISC), 6941 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE), 
6942 GEN_HANDLER_E(dcbfep, 0x1F, 0x1F, 0x03, 0x03C00001, PPC_NONE, PPC2_BOOKE206), 6943 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE), 6944 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE), 6945 GEN_HANDLER_E(dcbstep, 0x1F, 0x1F, 0x01, 0x03E00001, PPC_NONE, PPC2_BOOKE206), 6946 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x00000001, PPC_CACHE), 6947 GEN_HANDLER_E(dcbtep, 0x1F, 0x1F, 0x09, 0x00000001, PPC_NONE, PPC2_BOOKE206), 6948 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x00000001, PPC_CACHE), 6949 GEN_HANDLER_E(dcbtstep, 0x1F, 0x1F, 0x07, 0x00000001, PPC_NONE, PPC2_BOOKE206), 6950 GEN_HANDLER_E(dcbtls, 0x1F, 0x06, 0x05, 0x02000001, PPC_BOOKE, PPC2_BOOKE206), 6951 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ), 6952 GEN_HANDLER_E(dcbzep, 0x1F, 0x1F, 0x1F, 0x03C00001, PPC_NONE, PPC2_BOOKE206), 6953 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC), 6954 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x01800001, PPC_ALTIVEC), 6955 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC), 6956 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI), 6957 GEN_HANDLER_E(icbiep, 0x1F, 0x1F, 0x1E, 0x03E00001, PPC_NONE, PPC2_BOOKE206), 6958 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA), 6959 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT), 6960 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT), 6961 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT), 6962 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT), 6963 #if defined(TARGET_PPC64) 6964 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B), 6965 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001, 6966 PPC_SEGMENT_64B), 6967 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B), 6968 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001, 6969 PPC_SEGMENT_64B), 6970 GEN_HANDLER2(slbmte, "slbmte", 0x1F, 0x12, 0x0C, 0x001F0001, PPC_SEGMENT_64B), 
6971 GEN_HANDLER2(slbmfee, "slbmfee", 0x1F, 0x13, 0x1C, 0x001F0001, PPC_SEGMENT_64B), 6972 GEN_HANDLER2(slbmfev, "slbmfev", 0x1F, 0x13, 0x1A, 0x001F0001, PPC_SEGMENT_64B), 6973 GEN_HANDLER2(slbfee_, "slbfee.", 0x1F, 0x13, 0x1E, 0x001F0000, PPC_SEGMENT_64B), 6974 #endif 6975 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA), 6976 /* XXX Those instructions will need to be handled differently for 6977 * different ISA versions */ 6978 GEN_HANDLER(tlbiel, 0x1F, 0x12, 0x08, 0x001F0001, PPC_MEM_TLBIE), 6979 GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x001F0001, PPC_MEM_TLBIE), 6980 GEN_HANDLER_E(tlbiel, 0x1F, 0x12, 0x08, 0x00100001, PPC_NONE, PPC2_ISA300), 6981 GEN_HANDLER_E(tlbie, 0x1F, 0x12, 0x09, 0x00100001, PPC_NONE, PPC2_ISA300), 6982 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC), 6983 #if defined(TARGET_PPC64) 6984 GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x031FFC01, PPC_SLBI), 6985 GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI), 6986 GEN_HANDLER_E(slbieg, 0x1F, 0x12, 0x0E, 0x001F0001, PPC_NONE, PPC2_ISA300), 6987 GEN_HANDLER_E(slbsync, 0x1F, 0x12, 0x0A, 0x03FFF801, PPC_NONE, PPC2_ISA300), 6988 #endif 6989 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN), 6990 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN), 6991 GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR), 6992 GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR), 6993 GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR), 6994 GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR), 6995 GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR), 6996 GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR), 6997 GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR), 6998 GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR), 6999 GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR), 7000 GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR), 7001 GEN_HANDLER(lscbx, 0x1F, 
0x15, 0x08, 0x00000000, PPC_POWER_BR), 7002 GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR), 7003 GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR), 7004 GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR), 7005 GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR), 7006 GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR), 7007 GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR), 7008 GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR), 7009 GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR), 7010 GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR), 7011 GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR), 7012 GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR), 7013 GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR), 7014 GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR), 7015 GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR), 7016 GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR), 7017 GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR), 7018 GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR), 7019 GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR), 7020 GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR), 7021 GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR), 7022 GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR), 7023 GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR), 7024 GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR), 7025 GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC), 7026 GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC), 7027 GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC), 7028 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB), 7029 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB), 7030 
GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB),
GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB),
/* Legacy POWER (pre-PowerPC) instructions */
GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER),
GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER),
GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER),
GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER),
GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER),
/* FIXME(review): the inval mask below has nine hex digits (0x03FFF0001);
 * every other entry uses an eight-digit 32-bit mask (e.g. 0x03FF8001).
 * Looks like a typo — confirm the intended mask before changing it. */
GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER),
/* POWER2 quad floating-point load/store */
GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2),
GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2),
GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2),
GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2),
GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI),
GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA),
/* Device control registers (embedded cores) */
GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR),
GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR),
GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX),
GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX),
GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX),
GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX),
/* 4xx cache control */
GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON),
GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON),
GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT),
GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON),
GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON),
GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP), 7060 GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206), 7061 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI), 7062 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI), 7063 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB), 7064 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB), 7065 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB), 7066 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE), 7067 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE), 7068 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE), 7069 GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, 7070 PPC_NONE, PPC2_BOOKE206), 7071 GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, 7072 PPC_NONE, PPC2_BOOKE206), 7073 GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, 7074 PPC_NONE, PPC2_BOOKE206), 7075 GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001, 7076 PPC_NONE, PPC2_BOOKE206), 7077 GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001, 7078 PPC_NONE, PPC2_BOOKE206), 7079 GEN_HANDLER2_E(msgsnd, "msgsnd", 0x1F, 0x0E, 0x06, 0x03ff0001, 7080 PPC_NONE, PPC2_PRCNTL), 7081 GEN_HANDLER2_E(msgclr, "msgclr", 0x1F, 0x0E, 0x07, 0x03ff0001, 7082 PPC_NONE, PPC2_PRCNTL), 7083 GEN_HANDLER2_E(msgsync, "msgsync", 0x1F, 0x16, 0x1B, 0x00000000, 7084 PPC_NONE, PPC2_PRCNTL), 7085 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE), 7086 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE), 7087 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC), 7088 GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801, 7089 PPC_BOOKE, PPC2_BOOKE206), 7090 GEN_HANDLER(msync_4xx, 0x1F, 0x16, 0x12, 0x039FF801, PPC_BOOKE), 7091 GEN_HANDLER2_E(icbt_440, "icbt", 
0x1F, 0x16, 0x00, 0x03E00001, 7092 PPC_BOOKE, PPC2_BOOKE206), 7093 GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, 7094 PPC_440_SPEC), 7095 GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC), 7096 GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC), 7097 GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC), 7098 GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC), 7099 GEN_HANDLER(vmladduhm, 0x04, 0x11, 0xFF, 0x00000000, PPC_ALTIVEC), 7100 #if defined(TARGET_PPC64) 7101 GEN_HANDLER_E(maddhd_maddhdu, 0x04, 0x18, 0xFF, 0x00000000, PPC_NONE, 7102 PPC2_ISA300), 7103 GEN_HANDLER_E(maddld, 0x04, 0x19, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300), 7104 #endif 7105 7106 #undef GEN_INT_ARITH_ADD 7107 #undef GEN_INT_ARITH_ADD_CONST 7108 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \ 7109 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER), 7110 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \ 7111 add_ca, compute_ca, compute_ov) \ 7112 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER), 7113 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0) 7114 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1) 7115 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0) 7116 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1) 7117 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0) 7118 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1) 7119 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0) 7120 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1) 7121 GEN_HANDLER_E(addex, 0x1F, 0x0A, 0x05, 0x00000000, PPC_NONE, PPC2_ISA300), 7122 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0) 7123 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1) 7124 7125 #undef GEN_INT_ARITH_DIVW 7126 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \ 7127 GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER) 7128 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0), 7129 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1), 7130 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0), 7131 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 
1), 7132 GEN_HANDLER_E(divwe, 0x1F, 0x0B, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206), 7133 GEN_HANDLER_E(divweo, 0x1F, 0x0B, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206), 7134 GEN_HANDLER_E(divweu, 0x1F, 0x0B, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206), 7135 GEN_HANDLER_E(divweuo, 0x1F, 0x0B, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206), 7136 GEN_HANDLER_E(modsw, 0x1F, 0x0B, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300), 7137 GEN_HANDLER_E(moduw, 0x1F, 0x0B, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300), 7138 7139 #if defined(TARGET_PPC64) 7140 #undef GEN_INT_ARITH_DIVD 7141 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \ 7142 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B) 7143 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0), 7144 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1), 7145 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0), 7146 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1), 7147 7148 GEN_HANDLER_E(divdeu, 0x1F, 0x09, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206), 7149 GEN_HANDLER_E(divdeuo, 0x1F, 0x09, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206), 7150 GEN_HANDLER_E(divde, 0x1F, 0x09, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206), 7151 GEN_HANDLER_E(divdeo, 0x1F, 0x09, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206), 7152 GEN_HANDLER_E(modsd, 0x1F, 0x09, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300), 7153 GEN_HANDLER_E(modud, 0x1F, 0x09, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300), 7154 7155 #undef GEN_INT_ARITH_MUL_HELPER 7156 #define GEN_INT_ARITH_MUL_HELPER(name, opc3) \ 7157 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B) 7158 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00), 7159 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02), 7160 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17), 7161 #endif 7162 7163 #undef GEN_INT_ARITH_SUBF 7164 #undef GEN_INT_ARITH_SUBF_CONST 7165 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \ 7166 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER), 7167 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \ 7168 add_ca, compute_ca, compute_ov) \ 7169 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, 
PPC_INTEGER), 7170 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0) 7171 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1) 7172 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0) 7173 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1) 7174 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0) 7175 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1) 7176 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0) 7177 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1) 7178 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0) 7179 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1) 7180 7181 #undef GEN_LOGICAL1 7182 #undef GEN_LOGICAL2 7183 #define GEN_LOGICAL2(name, tcg_op, opc, type) \ 7184 GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type) 7185 #define GEN_LOGICAL1(name, tcg_op, opc, type) \ 7186 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type) 7187 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER), 7188 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER), 7189 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER), 7190 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER), 7191 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER), 7192 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER), 7193 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER), 7194 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER), 7195 #if defined(TARGET_PPC64) 7196 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B), 7197 #endif 7198 7199 #if defined(TARGET_PPC64) 7200 #undef GEN_PPC64_R2 7201 #undef GEN_PPC64_R4 7202 #define GEN_PPC64_R2(name, opc1, opc2) \ 7203 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\ 7204 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \ 7205 PPC_64B) 7206 #define GEN_PPC64_R4(name, opc1, opc2) \ 7207 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\ 7208 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \ 7209 PPC_64B), \ 7210 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 
0xFF, 0x00000000, \ 7211 PPC_64B), \ 7212 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \ 7213 PPC_64B) 7214 GEN_PPC64_R4(rldicl, 0x1E, 0x00), 7215 GEN_PPC64_R4(rldicr, 0x1E, 0x02), 7216 GEN_PPC64_R4(rldic, 0x1E, 0x04), 7217 GEN_PPC64_R2(rldcl, 0x1E, 0x08), 7218 GEN_PPC64_R2(rldcr, 0x1E, 0x09), 7219 GEN_PPC64_R4(rldimi, 0x1E, 0x06), 7220 #endif 7221 7222 #undef GEN_LD 7223 #undef GEN_LDU 7224 #undef GEN_LDUX 7225 #undef GEN_LDX_E 7226 #undef GEN_LDS 7227 #define GEN_LD(name, ldop, opc, type) \ 7228 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type), 7229 #define GEN_LDU(name, ldop, opc, type) \ 7230 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type), 7231 #define GEN_LDUX(name, ldop, opc2, opc3, type) \ 7232 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type), 7233 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \ 7234 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2), 7235 #define GEN_LDS(name, ldop, op, type) \ 7236 GEN_LD(name, ldop, op | 0x20, type) \ 7237 GEN_LDU(name, ldop, op | 0x21, type) \ 7238 GEN_LDUX(name, ldop, 0x17, op | 0x01, type) \ 7239 GEN_LDX(name, ldop, 0x17, op | 0x00, type) 7240 7241 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER) 7242 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER) 7243 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER) 7244 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER) 7245 #if defined(TARGET_PPC64) 7246 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B) 7247 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B) 7248 GEN_LDUX(ld, ld64_i64, 0x15, 0x01, PPC_64B) 7249 GEN_LDX(ld, ld64_i64, 0x15, 0x00, PPC_64B) 7250 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE) 7251 7252 /* HV/P7 and later only */ 7253 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST) 7254 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x18, PPC_CILDST) 7255 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST) 7256 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST) 7257 #endif 7258 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER) 7259 
GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER) 7260 7261 /* External PID based load */ 7262 #undef GEN_LDEPX 7263 #define GEN_LDEPX(name, ldop, opc2, opc3) \ 7264 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3, \ 7265 0x00000001, PPC_NONE, PPC2_BOOKE206), 7266 7267 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02) 7268 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08) 7269 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00) 7270 #if defined(TARGET_PPC64) 7271 GEN_LDEPX(ld, DEF_MEMOP(MO_Q), 0x1D, 0x00) 7272 #endif 7273 7274 #undef GEN_ST 7275 #undef GEN_STU 7276 #undef GEN_STUX 7277 #undef GEN_STX_E 7278 #undef GEN_STS 7279 #define GEN_ST(name, stop, opc, type) \ 7280 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type), 7281 #define GEN_STU(name, stop, opc, type) \ 7282 GEN_HANDLER(stop##u, opc, 0xFF, 0xFF, 0x00000000, type), 7283 #define GEN_STUX(name, stop, opc2, opc3, type) \ 7284 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type), 7285 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \ 7286 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000000, type, type2), 7287 #define GEN_STS(name, stop, op, type) \ 7288 GEN_ST(name, stop, op | 0x20, type) \ 7289 GEN_STU(name, stop, op | 0x21, type) \ 7290 GEN_STUX(name, stop, 0x17, op | 0x01, type) \ 7291 GEN_STX(name, stop, 0x17, op | 0x00, type) 7292 7293 GEN_STS(stb, st8, 0x06, PPC_INTEGER) 7294 GEN_STS(sth, st16, 0x0C, PPC_INTEGER) 7295 GEN_STS(stw, st32, 0x04, PPC_INTEGER) 7296 #if defined(TARGET_PPC64) 7297 GEN_STUX(std, st64_i64, 0x15, 0x05, PPC_64B) 7298 GEN_STX(std, st64_i64, 0x15, 0x04, PPC_64B) 7299 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE) 7300 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST) 7301 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST) 7302 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST) 7303 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST) 7304 #endif 7305 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER) 7306 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER) 7307 7308 #undef 
GEN_STEPX 7309 #define GEN_STEPX(name, ldop, opc2, opc3) \ 7310 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3, \ 7311 0x00000001, PPC_NONE, PPC2_BOOKE206), 7312 7313 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06) 7314 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C) 7315 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04) 7316 #if defined(TARGET_PPC64) 7317 GEN_STEPX(std, DEF_MEMOP(MO_Q), 0x1D, 0x04) 7318 #endif 7319 7320 #undef GEN_CRLOGIC 7321 #define GEN_CRLOGIC(name, tcg_op, opc) \ 7322 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER) 7323 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08), 7324 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04), 7325 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09), 7326 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07), 7327 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01), 7328 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E), 7329 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D), 7330 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06), 7331 7332 #undef GEN_MAC_HANDLER 7333 #define GEN_MAC_HANDLER(name, opc2, opc3) \ 7334 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC) 7335 GEN_MAC_HANDLER(macchw, 0x0C, 0x05), 7336 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15), 7337 GEN_MAC_HANDLER(macchws, 0x0C, 0x07), 7338 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17), 7339 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06), 7340 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16), 7341 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04), 7342 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14), 7343 GEN_MAC_HANDLER(machhw, 0x0C, 0x01), 7344 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11), 7345 GEN_MAC_HANDLER(machhws, 0x0C, 0x03), 7346 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13), 7347 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02), 7348 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12), 7349 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00), 7350 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10), 7351 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D), 7352 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D), 7353 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F), 7354 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F), 7355 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C), 7356 
GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C), 7357 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E), 7358 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E), 7359 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05), 7360 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15), 7361 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07), 7362 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17), 7363 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01), 7364 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11), 7365 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03), 7366 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13), 7367 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D), 7368 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D), 7369 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F), 7370 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F), 7371 GEN_MAC_HANDLER(mulchw, 0x08, 0x05), 7372 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04), 7373 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01), 7374 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00), 7375 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D), 7376 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C), 7377 7378 GEN_HANDLER2_E(tbegin, "tbegin", 0x1F, 0x0E, 0x14, 0x01DFF800, \ 7379 PPC_NONE, PPC2_TM), 7380 GEN_HANDLER2_E(tend, "tend", 0x1F, 0x0E, 0x15, 0x01FFF800, \ 7381 PPC_NONE, PPC2_TM), 7382 GEN_HANDLER2_E(tabort, "tabort", 0x1F, 0x0E, 0x1C, 0x03E0F800, \ 7383 PPC_NONE, PPC2_TM), 7384 GEN_HANDLER2_E(tabortwc, "tabortwc", 0x1F, 0x0E, 0x18, 0x00000000, \ 7385 PPC_NONE, PPC2_TM), 7386 GEN_HANDLER2_E(tabortwci, "tabortwci", 0x1F, 0x0E, 0x1A, 0x00000000, \ 7387 PPC_NONE, PPC2_TM), 7388 GEN_HANDLER2_E(tabortdc, "tabortdc", 0x1F, 0x0E, 0x19, 0x00000000, \ 7389 PPC_NONE, PPC2_TM), 7390 GEN_HANDLER2_E(tabortdci, "tabortdci", 0x1F, 0x0E, 0x1B, 0x00000000, \ 7391 PPC_NONE, PPC2_TM), 7392 GEN_HANDLER2_E(tsr, "tsr", 0x1F, 0x0E, 0x17, 0x03DFF800, \ 7393 PPC_NONE, PPC2_TM), 7394 GEN_HANDLER2_E(tcheck, "tcheck", 0x1F, 0x0E, 0x16, 0x007FF800, \ 7395 PPC_NONE, PPC2_TM), 7396 GEN_HANDLER2_E(treclaim, "treclaim", 0x1F, 0x0E, 0x1D, 0x03E0F800, \ 7397 PPC_NONE, PPC2_TM), 7398 GEN_HANDLER2_E(trechkpt, "trechkpt", 0x1F, 0x0E, 0x1F, 0x03FFF800, \ 7399 PPC_NONE, PPC2_TM), 7400 7401 #include 
"translate/fp-ops.inc.c"

#include "translate/vmx-ops.inc.c"

#include "translate/vsx-ops.inc.c"

#include "translate/dfp-ops.inc.c"

#include "translate/spe-ops.inc.c"
};

#include "helper_regs.h"
#include "translate_init.inc.c"

/*****************************************************************************/
/* Misc PowerPC helpers */

/*
 * Dump the architectural register state of @cs to @f via @cpu_fprintf:
 * NIP/LR/CTR/XER, MSR/HID0, the time base, GPRs, CR (hex plus a decoded
 * L/G/E/O view per field), the reservation address, optionally the FPRs
 * (when @flags has CPU_DUMP_FPU), and -- in softmmu builds only -- the
 * privileged SPRs relevant to the configured exception/MMU model.
 */
void ppc_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf,
                        int flags)
{
/* Registers printed per output row (GPRs and FPRs respectively). */
#define RGPL  4
#define RFPL  4

    PowerPCCPU *cpu = POWERPC_CPU(cs);
    CPUPPCState *env = &cpu->env;
    int i;

    cpu_fprintf(f, "NIP " TARGET_FMT_lx " LR " TARGET_FMT_lx " CTR "
                TARGET_FMT_lx " XER " TARGET_FMT_lx " CPU#%d\n",
                env->nip, env->lr, env->ctr, cpu_read_xer(env),
                cs->cpu_index);
    cpu_fprintf(f, "MSR " TARGET_FMT_lx " HID0 " TARGET_FMT_lx " HF "
                TARGET_FMT_lx " iidx %d didx %d\n",
                env->msr, env->spr[SPR_HID0],
                env->hflags, env->immu_idx, env->dmmu_idx);
#if !defined(NO_TIMER_DUMP)
    /* Time base (upper/lower 32 bits); DECR only exists in softmmu. */
    cpu_fprintf(f, "TB %08" PRIu32 " %08" PRIu64
#if !defined(CONFIG_USER_ONLY)
                " DECR " TARGET_FMT_lu
#endif
                "\n",
                cpu_ppc_load_tbu(env), cpu_ppc_load_tbl(env)
#if !defined(CONFIG_USER_ONLY)
                , cpu_ppc_load_decr(env)
#endif
                );
#endif
    /* General-purpose registers, RGPL per row. */
    for (i = 0; i < 32; i++) {
        if ((i & (RGPL - 1)) == 0)
            cpu_fprintf(f, "GPR%02d", i);
        cpu_fprintf(f, " %016" PRIx64, ppc_dump_gpr(env, i));
        if ((i & (RGPL - 1)) == (RGPL - 1))
            cpu_fprintf(f, "\n");
    }
    /* CR: raw hex nibbles first, then a decoded view of each field. */
    cpu_fprintf(f, "CR ");
    for (i = 0; i < 8; i++)
        cpu_fprintf(f, "%01x", env->crf[i]);
    cpu_fprintf(f, " [");
    for (i = 0; i < 8; i++) {
        /* Bits 3..1 of a CR field are LT/GT/EQ; bit 0 is SO ('O'). */
        char a = '-';
        if (env->crf[i] & 0x08)
            a = 'L';
        else if (env->crf[i] & 0x04)
            a = 'G';
        else if (env->crf[i] & 0x02)
            a = 'E';
        cpu_fprintf(f, " %c%c", a, env->crf[i] & 0x01 ? 'O' : ' ');
    }
    cpu_fprintf(f, " ] RES " TARGET_FMT_lx "\n",
                env->reserve_addr);

    if (flags & CPU_DUMP_FPU) {
        /* Floating-point registers (raw 64-bit images), then FPSCR. */
        for (i = 0; i < 32; i++) {
            if ((i & (RFPL - 1)) == 0) {
                cpu_fprintf(f, "FPR%02d", i);
            }
            cpu_fprintf(f, " %016" PRIx64, *cpu_fpr_ptr(env, i));
            if ((i & (RFPL - 1)) == (RFPL - 1)) {
                cpu_fprintf(f, "\n");
            }
        }
        cpu_fprintf(f, "FPSCR " TARGET_FMT_lx "\n", env->fpscr);
    }

#if !defined(CONFIG_USER_ONLY)
    /* Privileged SPRs only exist in softmmu (system emulation) builds. */
    cpu_fprintf(f, " SRR0 " TARGET_FMT_lx "  SRR1 " TARGET_FMT_lx
                " PVR " TARGET_FMT_lx " VRSAVE " TARGET_FMT_lx "\n",
                env->spr[SPR_SRR0], env->spr[SPR_SRR1],
                env->spr[SPR_PVR], env->spr[SPR_VRSAVE]);

    cpu_fprintf(f, "SPRG0 " TARGET_FMT_lx " SPRG1 " TARGET_FMT_lx
                "  SPRG2 " TARGET_FMT_lx "  SPRG3 " TARGET_FMT_lx "\n",
                env->spr[SPR_SPRG0], env->spr[SPR_SPRG1],
                env->spr[SPR_SPRG2], env->spr[SPR_SPRG3]);

    cpu_fprintf(f, "SPRG4 " TARGET_FMT_lx " SPRG5 " TARGET_FMT_lx
                "  SPRG6 " TARGET_FMT_lx "  SPRG7 " TARGET_FMT_lx "\n",
                env->spr[SPR_SPRG4], env->spr[SPR_SPRG5],
                env->spr[SPR_SPRG6], env->spr[SPR_SPRG7]);

#if defined(TARGET_PPC64)
    /* Hypervisor save/restore registers on server-class CPUs. */
    if (env->excp_model == POWERPC_EXCP_POWER7 ||
        env->excp_model == POWERPC_EXCP_POWER8 ||
        env->excp_model == POWERPC_EXCP_POWER9) {
        cpu_fprintf(f, "HSRR0 " TARGET_FMT_lx " HSRR1 " TARGET_FMT_lx "\n",
                    env->spr[SPR_HSRR0], env->spr[SPR_HSRR1]);
    }
#endif
    if (env->excp_model == POWERPC_EXCP_BOOKE) {
        /* BookE-specific exception state. */
        cpu_fprintf(f, "CSRR0 " TARGET_FMT_lx " CSRR1 " TARGET_FMT_lx
                    " MCSRR0 " TARGET_FMT_lx " MCSRR1 " TARGET_FMT_lx "\n",
                    env->spr[SPR_BOOKE_CSRR0], env->spr[SPR_BOOKE_CSRR1],
                    env->spr[SPR_BOOKE_MCSRR0], env->spr[SPR_BOOKE_MCSRR1]);

        cpu_fprintf(f, " TCR " TARGET_FMT_lx " TSR " TARGET_FMT_lx
                    " ESR " TARGET_FMT_lx " DEAR " TARGET_FMT_lx "\n",
                    env->spr[SPR_BOOKE_TCR], env->spr[SPR_BOOKE_TSR],
                    env->spr[SPR_BOOKE_ESR],
                    env->spr[SPR_BOOKE_DEAR]);

        cpu_fprintf(f, " PIR " TARGET_FMT_lx " DECAR " TARGET_FMT_lx
                    " IVPR " TARGET_FMT_lx " EPCR " TARGET_FMT_lx "\n",
                    env->spr[SPR_BOOKE_PIR], env->spr[SPR_BOOKE_DECAR],
                    env->spr[SPR_BOOKE_IVPR], env->spr[SPR_BOOKE_EPCR]);

        cpu_fprintf(f, " MCSR " TARGET_FMT_lx " SPRG8 " TARGET_FMT_lx
                    " EPR " TARGET_FMT_lx "\n",
                    env->spr[SPR_BOOKE_MCSR], env->spr[SPR_BOOKE_SPRG8],
                    env->spr[SPR_BOOKE_EPR]);

        /* FSL-specific */
        cpu_fprintf(f, " MCAR " TARGET_FMT_lx " PID1 " TARGET_FMT_lx
                    " PID2 " TARGET_FMT_lx " SVR " TARGET_FMT_lx "\n",
                    env->spr[SPR_Exxx_MCAR], env->spr[SPR_BOOKE_PID1],
                    env->spr[SPR_BOOKE_PID2], env->spr[SPR_E500_SVR]);

        /*
         * IVORs are left out as they are large and do not change often --
         * they can be read with "p $ivor0", "p $ivor1", etc.
         */
    }

#if defined(TARGET_PPC64)
    if (env->flags & POWERPC_FLAG_CFAR) {
        cpu_fprintf(f, " CFAR " TARGET_FMT_lx"\n", env->cfar);
    }
#endif

    if (env->spr_cb[SPR_LPCR].name)
        cpu_fprintf(f, " LPCR " TARGET_FMT_lx "\n", env->spr[SPR_LPCR]);

    /* MMU-model-specific translation registers. */
    switch (env->mmu_model) {
    case POWERPC_MMU_32B:
    case POWERPC_MMU_601:
    case POWERPC_MMU_SOFT_6xx:
    case POWERPC_MMU_SOFT_74xx:
#if defined(TARGET_PPC64)
    case POWERPC_MMU_64B:
    case POWERPC_MMU_2_03:
    case POWERPC_MMU_2_06:
    case POWERPC_MMU_2_07:
    case POWERPC_MMU_3_00:
#endif
        if (env->spr_cb[SPR_SDR1].name) { /* SDR1 Exists */
            cpu_fprintf(f, " SDR1 " TARGET_FMT_lx " ", env->spr[SPR_SDR1]);
        }
        if (env->spr_cb[SPR_PTCR].name) { /* PTCR Exists */
            cpu_fprintf(f, " PTCR " TARGET_FMT_lx " ", env->spr[SPR_PTCR]);
        }
        cpu_fprintf(f, "  DAR " TARGET_FMT_lx "  DSISR " TARGET_FMT_lx "\n",
                    env->spr[SPR_DAR], env->spr[SPR_DSISR]);
        break;
    case POWERPC_MMU_BOOKE206:
        cpu_fprintf(f, " MAS0 " TARGET_FMT_lx " MAS1 " TARGET_FMT_lx
                    " MAS2 " TARGET_FMT_lx " MAS3 " TARGET_FMT_lx "\n",
                    env->spr[SPR_BOOKE_MAS0], env->spr[SPR_BOOKE_MAS1],
                    env->spr[SPR_BOOKE_MAS2], env->spr[SPR_BOOKE_MAS3]);

        cpu_fprintf(f, " MAS4 " TARGET_FMT_lx " MAS6 " TARGET_FMT_lx
                    " MAS7 " TARGET_FMT_lx " PID " TARGET_FMT_lx "\n",
                    env->spr[SPR_BOOKE_MAS4], env->spr[SPR_BOOKE_MAS6],
                    env->spr[SPR_BOOKE_MAS7], env->spr[SPR_BOOKE_PID]);

        cpu_fprintf(f, "MMUCFG " TARGET_FMT_lx " TLB0CFG " TARGET_FMT_lx
                    " TLB1CFG " TARGET_FMT_lx "\n",
                    env->spr[SPR_MMUCFG], env->spr[SPR_BOOKE_TLB0CFG],
                    env->spr[SPR_BOOKE_TLB1CFG]);
        break;
    default:
        break;
    }
#endif

#undef RGPL
#undef RFPL
}

/*
 * Dump per-opcode execution counters gathered when DO_PPC_STATISTICS is
 * enabled; a no-op otherwise.  Walks the (up to three-level) indirect
 * opcode tables and prints one line per handler with a non-zero count.
 */
void ppc_cpu_dump_statistics(CPUState *cs, FILE *f,
                             fprintf_function cpu_fprintf, int flags)
{
#if defined(DO_PPC_STATISTICS)
    PowerPCCPU *cpu = POWERPC_CPU(cs);
    opc_handler_t **t1, **t2, **t3, *handler;
    int op1, op2, op3;

    t1 = cpu->env.opcodes;
    for (op1 = 0; op1 < 64; op1++) {
        handler = t1[op1];
        if (is_indirect_opcode(handler)) {
            t2 = ind_table(handler);
            for (op2 = 0; op2 < 32; op2++) {
                handler = t2[op2];
                if (is_indirect_opcode(handler)) {
                    t3 = ind_table(handler);
                    for (op3 = 0; op3 < 32; op3++) {
                        handler = t3[op3];
                        if (handler->count == 0)
                            continue;
                        cpu_fprintf(f, "%02x %02x %02x (%02x %04d) %16s: "
                                    "%016" PRIx64 " %" PRId64 "\n",
                                    op1, op2, op3, op1, (op3 << 5) | op2,
                                    handler->oname,
                                    handler->count, handler->count);
                    }
                } else {
                    if (handler->count == 0)
                        continue;
                    cpu_fprintf(f, "%02x %02x    (%02x %04d) %16s: "
                                "%016" PRIx64 " %" PRId64 "\n",
                                op1, op2, op1, op2, handler->oname,
                                handler->count, handler->count);
                }
            }
        } else {
            if (handler->count == 0)
                continue;
            cpu_fprintf(f, "%02x       (%02x     ) %16s: %016" PRIx64
                        " %" PRId64 "\n",
                        op1, op1, handler->oname,
                        handler->count,
handler->count);
        }
    }
#endif
}

/*
 * TranslatorOps hook: initialize the per-TB DisasContext from the current
 * CPU state (MSR bits, MMU/insn-set flags, facility enables, single-step
 * configuration) before translating a new translation block.
 */
static void ppc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    CPUPPCState *env = cs->env_ptr;
    int bound;

    ctx->exception = POWERPC_EXCP_NONE;
    ctx->spr_cb = env->spr_cb;
    ctx->pr = msr_pr;
    ctx->mem_idx = env->dmmu_idx;
    ctx->dr = msr_dr;
#if !defined(CONFIG_USER_ONLY)
    /* CPUs without a hypervisor mode behave as if always in HV state. */
    ctx->hv = msr_hv || !env->has_hv_mode;
#endif
    ctx->insns_flags = env->insns_flags;
    ctx->insns_flags2 = env->insns_flags2;
    ctx->access_type = -1;
    ctx->need_access_type = !(env->mmu_model & POWERPC_MMU_64B);
    ctx->le_mode = !!(env->hflags & (1 << MSR_LE));
    ctx->default_tcg_memop_mask = ctx->le_mode ? MO_LE : MO_BE;
    ctx->flags = env->flags;
#if defined(TARGET_PPC64)
    ctx->sf_mode = msr_is_64bit(env, env->msr);
    ctx->has_cfar = !!(env->flags & POWERPC_FLAG_CFAR);
#endif
    ctx->lazy_tlb_flush = env->mmu_model == POWERPC_MMU_32B
        || env->mmu_model == POWERPC_MMU_601
        || (env->mmu_model & POWERPC_MMU_64B);

    /* Facility enables: require both the CPU flag and the MSR bit. */
    ctx->fpu_enabled = !!msr_fp;
    if ((env->flags & POWERPC_FLAG_SPE) && msr_spe)
        ctx->spe_enabled = !!msr_spe;
    else
        ctx->spe_enabled = false;
    if ((env->flags & POWERPC_FLAG_VRE) && msr_vr)
        ctx->altivec_enabled = !!msr_vr;
    else
        ctx->altivec_enabled = false;
    if ((env->flags & POWERPC_FLAG_VSX) && msr_vsx) {
        ctx->vsx_enabled = !!msr_vsx;
    } else {
        ctx->vsx_enabled = false;
    }
#if defined(TARGET_PPC64)
    if ((env->flags & POWERPC_FLAG_TM) && msr_tm) {
        ctx->tm_enabled = !!msr_tm;
    } else {
        ctx->tm_enabled = false;
    }
#endif
    ctx->gtse = !!(env->spr[SPR_LPCR] & LPCR_GTSE);
    if ((env->flags & POWERPC_FLAG_SE) && msr_se)
        ctx->singlestep_enabled = CPU_SINGLE_STEP;
    else
        ctx->singlestep_enabled = 0;
    if ((env->flags & POWERPC_FLAG_BE) && msr_be)
        ctx->singlestep_enabled |= CPU_BRANCH_STEP;
    if ((env->flags & POWERPC_FLAG_DE) && msr_de) {
        /*
         * BookE debug: DBCR0 supersedes the MSR[SE]/MSR[BE] bits set
         * above, so start again from zero.
         */
        ctx->singlestep_enabled = 0;
        target_ulong dbcr0 = env->spr[SPR_BOOKE_DBCR0];
        if (dbcr0 & DBCR0_ICMP) {
            ctx->singlestep_enabled |= CPU_SINGLE_STEP;
        }
        if (dbcr0 & DBCR0_BRT) {
            ctx->singlestep_enabled |= CPU_BRANCH_STEP;
        }

    }
    if (unlikely(ctx->base.singlestep_enabled)) {
        /* gdbstub-driven single stepping is tracked separately. */
        ctx->singlestep_enabled |= GDBSTUB_SINGLE_STEP;
    }
#if defined (DO_SINGLE_STEP) && 0
    /* Single step trace mode */
    msr_se = 1;
#endif

    /* Never cross a page boundary: bound the insn count to page end. */
    bound = -(ctx->base.pc_first | TARGET_PAGE_MASK) / 4;
    ctx->base.max_insns = MIN(ctx->base.max_insns, bound);
}

/* TranslatorOps hook: nothing to emit at TB start for PPC. */
static void ppc_tr_tb_start(DisasContextBase *db, CPUState *cs)
{
}

/* TranslatorOps hook: record the guest PC for this insn's start marker. */
static void ppc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
{
    tcg_gen_insn_start(dcbase->pc_next);
}

/*
 * TranslatorOps hook: a breakpoint was hit at the current PC; emit a
 * debug exception and end the TB.  Always returns true (handled).
 */
static bool ppc_tr_breakpoint_check(DisasContextBase *dcbase, CPUState *cs,
                                    const CPUBreakpoint *bp)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    gen_debug_exception(ctx);
    dcbase->is_jmp = DISAS_NORETURN;
    /* The address covered by the breakpoint must be included in
       [tb->pc, tb->pc + tb->size) in order to for it to be
       properly cleared -- thus we increment the PC here so that
       the logic setting tb->size below does the right thing.
     */
    ctx->base.pc_next += 4;
    return true;
}

/*
 * TranslatorOps hook: fetch, decode and translate one guest instruction.
 * Decode walks up to three levels of indirect opcode tables (opc1..opc4),
 * validates the instruction's reserved bits against the handler's inval
 * mask, invokes the handler to emit TCG ops, and finally checks for trace
 * (single-step) exceptions and TCG temporary leaks.
 */
static void ppc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    CPUPPCState *env = cs->env_ptr;
    opc_handler_t **table, *handler;

    LOG_DISAS("----------------\n");
    LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
              ctx->base.pc_next, ctx->mem_idx, (int)msr_ir);

    /* Fetch the opcode, byte-swapping if guest and host endian differ. */
    if (unlikely(need_byteswap(ctx))) {
        ctx->opcode = bswap32(cpu_ldl_code(env, ctx->base.pc_next));
    } else {
        ctx->opcode = cpu_ldl_code(env, ctx->base.pc_next);
    }
    LOG_DISAS("translate opcode %08x (%02x %02x %02x %02x) (%s)\n",
              ctx->opcode, opc1(ctx->opcode), opc2(ctx->opcode),
              opc3(ctx->opcode), opc4(ctx->opcode),
              ctx->le_mode ? "little" : "big");
    ctx->base.pc_next += 4;
    /* Resolve the handler through the (possibly indirect) opcode tables. */
    table = env->opcodes;
    handler = table[opc1(ctx->opcode)];
    if (is_indirect_opcode(handler)) {
        table = ind_table(handler);
        handler = table[opc2(ctx->opcode)];
        if (is_indirect_opcode(handler)) {
            table = ind_table(handler);
            handler = table[opc3(ctx->opcode)];
            if (is_indirect_opcode(handler)) {
                table = ind_table(handler);
                handler = table[opc4(ctx->opcode)];
            }
        }
    }
    /* Is opcode *REALLY* valid ? */
    if (unlikely(handler->handler == &gen_invalid)) {
        qemu_log_mask(LOG_GUEST_ERROR, "invalid/unsupported opcode: "
                      "%02x - %02x - %02x - %02x (%08x) "
                      TARGET_FMT_lx " %d\n",
                      opc1(ctx->opcode), opc2(ctx->opcode),
                      opc3(ctx->opcode), opc4(ctx->opcode),
                      ctx->opcode, ctx->base.pc_next - 4, (int)msr_ir);
    } else {
        uint32_t inval;

        /* SPE insns with Rc set use the alternate reserved-bits mask. */
        if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE)
                     && Rc(ctx->opcode))) {
            inval = handler->inval2;
        } else {
            inval = handler->inval1;
        }

        if (unlikely((ctx->opcode & inval) != 0)) {
            /* Reserved bits set: raise an invalid-instruction program
               exception instead of calling the handler. */
            qemu_log_mask(LOG_GUEST_ERROR, "invalid bits: %08x for opcode: "
                          "%02x - %02x - %02x - %02x (%08x) "
                          TARGET_FMT_lx "\n", ctx->opcode & inval,
                          opc1(ctx->opcode), opc2(ctx->opcode),
                          opc3(ctx->opcode), opc4(ctx->opcode),
                          ctx->opcode, ctx->base.pc_next - 4);
            gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
            ctx->base.is_jmp = DISAS_NORETURN;
            return;
        }
    }
    (*(handler->handler))(ctx);
#if defined(DO_PPC_STATISTICS)
    handler->count++;
#endif
    /* Check trace mode exceptions */
    if (unlikely(ctx->singlestep_enabled & CPU_SINGLE_STEP &&
                 (ctx->base.pc_next <= 0x100 || ctx->base.pc_next > 0xF00) &&
                 ctx->exception != POWERPC_SYSCALL &&
                 ctx->exception != POWERPC_EXCP_TRAP &&
                 ctx->exception != POWERPC_EXCP_BRANCH)) {
        uint32_t excp = gen_prep_dbgex(ctx);
        gen_exception_nip(ctx, excp, ctx->base.pc_next);
    }

    if (tcg_check_temp_count()) {
        qemu_log("Opcode %02x %02x %02x %02x (%08x) leaked "
                 "temporaries\n", opc1(ctx->opcode), opc2(ctx->opcode),
                 opc3(ctx->opcode), opc4(ctx->opcode), ctx->opcode);
    }

    ctx->base.is_jmp = ctx->exception == POWERPC_EXCP_NONE ?
        DISAS_NEXT : DISAS_NORETURN;
}

/*
 * TranslatorOps hook: close the TB.  Falls through with a goto_tb when no
 * exception was raised; otherwise (except for branches, which emit their
 * own exit) generate the TB return, raising a debug exception first when
 * the gdbstub is single-stepping.
 */
static void ppc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    if (ctx->exception == POWERPC_EXCP_NONE) {
        gen_goto_tb(ctx, 0, ctx->base.pc_next);
    } else if (ctx->exception != POWERPC_EXCP_BRANCH) {
        if (unlikely(ctx->base.singlestep_enabled)) {
            gen_debug_exception(ctx);
        }
        /* Generate the return instruction */
        tcg_gen_exit_tb(NULL, 0);
    }
}

/* TranslatorOps hook: log the disassembly of the just-translated TB. */
static void ppc_tr_disas_log(const DisasContextBase *dcbase, CPUState *cs)
{
    qemu_log("IN: %s\n", lookup_symbol(dcbase->pc_first));
    log_target_disas(cs, dcbase->pc_first, dcbase->tb->size);
}

/* Callback table consumed by the generic translator_loop(). */
static const TranslatorOps ppc_tr_ops = {
    .init_disas_context = ppc_tr_init_disas_context,
    .tb_start           = ppc_tr_tb_start,
    .insn_start         = ppc_tr_insn_start,
    .breakpoint_check   = ppc_tr_breakpoint_check,
    .translate_insn     = ppc_tr_translate_insn,
    .tb_stop            = ppc_tr_tb_stop,
    .disas_log          = ppc_tr_disas_log,
};

/* Entry point: translate one guest TB via the generic translator loop. */
void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb)
{
    DisasContext ctx;

    translator_loop(&ppc_tr_ops, &ctx.base, cs, tb);
}

/*
 * Restore CPU state from the insn-start data recorded at translation
 * time (data[0] is the guest PC stored by tcg_gen_insn_start above).
 */
void restore_state_to_opc(CPUPPCState *env, TranslationBlock *tb,
                          target_ulong *data)
{
    env->nip = data[0];
}