/*
 * PowerPC emulation for qemu: main translation routines.
 *
 * Copyright (c) 2003-2007 Jocelyn Mayer
 * Copyright (C) 2011 Freescale Semiconductor, Inc.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */

#include "qemu/osdep.h"
#include "cpu.h"
#include "internal.h"
#include "disas/disas.h"
#include "exec/exec-all.h"
#include "tcg-op.h"
#include "qemu/host-utils.h"
#include "exec/cpu_ldst.h"

#include "exec/helper-proto.h"
#include "exec/helper-gen.h"

#include "trace-tcg.h"
#include "exec/translator.h"
#include "exec/log.h"
#include "qemu/atomic128.h"


/* Flag bits kept in DisasContext::singlestep_enabled */
#define CPU_SINGLE_STEP 0x1
#define CPU_BRANCH_STEP 0x2
#define GDBSTUB_SINGLE_STEP 0x4

/* Include definitions for instructions classes and implementations flags */
//#define PPC_DEBUG_DISAS
//#define DO_PPC_STATISTICS

#ifdef PPC_DEBUG_DISAS
#  define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
#else
#  define LOG_DISAS(...) do { } while (0)
#endif
/*****************************************************************************/
/* Code translation helpers                                                  */

/*
 * Global register indexes.
 *
 * cpu_reg_names is a single static buffer holding the NUL-terminated
 * names handed to tcg_global_mem_new*(); TCG keeps only pointers into
 * it, so it must stay live for the lifetime of the process.  The size
 * expression accounts for each register class: e.g. "r0".."r9" take
 * 3 bytes each (including NUL) and "r10".."r31" take 4, hence
 * 10*3 + 22*4 for the GPRs.
 */
static char cpu_reg_names[10*3 + 22*4 /* GPR */
    + 10*4 + 22*5 /* SPE GPRh */
    + 10*4 + 22*5 /* FPR */
    + 2*(10*6 + 22*7) /* AVRh, AVRl */
    + 10*5 + 22*6 /* VSR */
    + 8*5 /* CRF */];
static TCGv cpu_gpr[32];
static TCGv cpu_gprh[32];
static TCGv_i64 cpu_fpr[32];
static TCGv_i64 cpu_avrh[32], cpu_avrl[32];
static TCGv_i64 cpu_vsr[32];
static TCGv_i32 cpu_crf[8];
static TCGv cpu_nip;
static TCGv cpu_msr;
static TCGv cpu_ctr;
static TCGv cpu_lr;
#if defined(TARGET_PPC64)
static TCGv cpu_cfar;
#endif
static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca, cpu_ov32, cpu_ca32;
static TCGv cpu_reserve;
static TCGv cpu_reserve_val;
static TCGv cpu_fpscr;
static TCGv_i32 cpu_access_type;

#include "exec/gen-icount.h"

/*
 * Register every CPUPPCState field the translator needs as a TCG
 * global.  Called once at translator initialization.  The p/"size"
 * pair walks through cpu_reg_names; the per-iteration increments
 * (3 or 4, 4 or 5, ...) must match the snprintf'd name lengths
 * including the trailing NUL.
 */
void ppc_translate_init(void)
{
    int i;
    char* p;
    size_t cpu_reg_names_size;

    p = cpu_reg_names;
    cpu_reg_names_size = sizeof(cpu_reg_names);

    /* Condition register fields crf0..crf7 (32-bit each) */
    for (i = 0; i < 8; i++) {
        snprintf(p, cpu_reg_names_size, "crf%d", i);
        cpu_crf[i] = tcg_global_mem_new_i32(cpu_env,
                                            offsetof(CPUPPCState, crf[i]), p);
        p += 5;
        cpu_reg_names_size -= 5;
    }

    for (i = 0; i < 32; i++) {
        snprintf(p, cpu_reg_names_size, "r%d", i);
        cpu_gpr[i] = tcg_global_mem_new(cpu_env,
                                        offsetof(CPUPPCState, gpr[i]), p);
        p += (i < 10) ? 3 : 4;
        cpu_reg_names_size -= (i < 10) ? 3 : 4;
        /* SPE upper halves of the GPRs */
        snprintf(p, cpu_reg_names_size, "r%dH", i);
        cpu_gprh[i] = tcg_global_mem_new(cpu_env,
                                         offsetof(CPUPPCState, gprh[i]), p);
        p += (i < 10) ? 4 : 5;
        cpu_reg_names_size -= (i < 10) ? 4 : 5;

        snprintf(p, cpu_reg_names_size, "fp%d", i);
        cpu_fpr[i] = tcg_global_mem_new_i64(cpu_env,
                                            offsetof(CPUPPCState, fpr[i]), p);
        p += (i < 10) ? 4 : 5;
        cpu_reg_names_size -= (i < 10) ? 4 : 5;

        /*
         * Altivec registers are stored as two 64-bit halves; which
         * u64 index is the "high" half depends on host endianness.
         */
        snprintf(p, cpu_reg_names_size, "avr%dH", i);
#ifdef HOST_WORDS_BIGENDIAN
        cpu_avrh[i] = tcg_global_mem_new_i64(cpu_env,
                                             offsetof(CPUPPCState, avr[i].u64[0]), p);
#else
        cpu_avrh[i] = tcg_global_mem_new_i64(cpu_env,
                                             offsetof(CPUPPCState, avr[i].u64[1]), p);
#endif
        p += (i < 10) ? 6 : 7;
        cpu_reg_names_size -= (i < 10) ? 6 : 7;

        snprintf(p, cpu_reg_names_size, "avr%dL", i);
#ifdef HOST_WORDS_BIGENDIAN
        cpu_avrl[i] = tcg_global_mem_new_i64(cpu_env,
                                             offsetof(CPUPPCState, avr[i].u64[1]), p);
#else
        cpu_avrl[i] = tcg_global_mem_new_i64(cpu_env,
                                             offsetof(CPUPPCState, avr[i].u64[0]), p);
#endif
        p += (i < 10) ? 6 : 7;
        cpu_reg_names_size -= (i < 10) ? 6 : 7;
        snprintf(p, cpu_reg_names_size, "vsr%d", i);
        cpu_vsr[i] = tcg_global_mem_new_i64(cpu_env,
                                            offsetof(CPUPPCState, vsr[i]), p);
        p += (i < 10) ? 5 : 6;
        cpu_reg_names_size -= (i < 10) ? 5 : 6;
    }

    cpu_nip = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, nip), "nip");

    cpu_msr = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, msr), "msr");

    cpu_ctr = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, ctr), "ctr");

    cpu_lr = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, lr), "lr");

#if defined(TARGET_PPC64)
    cpu_cfar = tcg_global_mem_new(cpu_env,
                                  offsetof(CPUPPCState, cfar), "cfar");
#endif

    cpu_xer = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, xer), "xer");
    cpu_so = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, so), "SO");
    cpu_ov = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, ov), "OV");
    cpu_ca = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, ca), "CA");
    cpu_ov32 = tcg_global_mem_new(cpu_env,
                                  offsetof(CPUPPCState, ov32), "OV32");
    cpu_ca32 = tcg_global_mem_new(cpu_env,
                                  offsetof(CPUPPCState, ca32), "CA32");

    cpu_reserve = tcg_global_mem_new(cpu_env,
                                     offsetof(CPUPPCState, reserve_addr),
                                     "reserve_addr");
    cpu_reserve_val = tcg_global_mem_new(cpu_env,
                                         offsetof(CPUPPCState, reserve_val),
                                         "reserve_val");

    cpu_fpscr = tcg_global_mem_new(cpu_env,
                                   offsetof(CPUPPCState, fpscr), "fpscr");

    cpu_access_type = tcg_global_mem_new_i32(cpu_env,
                                             offsetof(CPUPPCState, access_type), "access_type");
}

/* internal defines */
struct DisasContext {
    DisasContextBase base;
    uint32_t opcode;
    uint32_t exception;
    /* Routine used to access memory */
    bool pr, hv, dr, le_mode;   /* MSR.PR, MSR.HV, MSR.DR, little-endian */
    bool lazy_tlb_flush;
    bool need_access_type;
    int mem_idx;
    int access_type;            /* last value written to cpu_access_type */
    /* Translation flags */
    TCGMemOp default_tcg_memop_mask;
#if defined(TARGET_PPC64)
    bool sf_mode;               /* 64-bit (SF) mode */
    bool has_cfar;
#endif
    bool fpu_enabled;
    bool altivec_enabled;
    bool vsx_enabled;
    bool spe_enabled;
    bool tm_enabled;
    bool gtse;
    ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
    int singlestep_enabled;
    uint32_t flags;
    uint64_t insns_flags;
    uint64_t insns_flags2;
};

/* Return true iff byteswap is needed in a scalar memop */
static inline bool need_byteswap(const DisasContext *ctx)
{
#if defined(TARGET_WORDS_BIGENDIAN)
    return ctx->le_mode;
#else
    return !ctx->le_mode;
#endif
}

/* True when active word size < size of target_long. */
#ifdef TARGET_PPC64
#  define NARROW_MODE(C) (!(C)->sf_mode)
#else
#  define NARROW_MODE(C) 0
#endif

struct opc_handler_t {
    /* invalid bits for instruction 1 (Rc(opcode) == 0) */
    uint32_t inval1;
    /* invalid bits for instruction 2 (Rc(opcode) == 1) */
    uint32_t inval2;
    /* instruction type */
    uint64_t type;
    /* extended instruction type */
    uint64_t type2;
    /* handler */
    void (*handler)(DisasContext *ctx);
#if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU)
    const char *oname;
#endif
#if defined(DO_PPC_STATISTICS)
    uint64_t count;
#endif
};

/* SPR load/store helpers */
static inline void gen_load_spr(TCGv t, int reg)
{
    tcg_gen_ld_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
}

static inline void gen_store_spr(int reg, TCGv t)
{
    tcg_gen_st_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
}

/*
 * Record the kind of memory access being generated.  The value is
 * cached in ctx->access_type so that repeated accesses of the same
 * type within one TB emit only a single store.
 */
static inline void gen_set_access_type(DisasContext *ctx, int access_type)
{
    if (ctx->need_access_type && ctx->access_type != access_type) {
        tcg_gen_movi_i32(cpu_access_type, access_type);
        ctx->access_type = access_type;
    }
}

/* Store nip into the NIP global; truncated to 32 bits in narrow mode. */
static inline void gen_update_nip(DisasContext *ctx, target_ulong nip)
{
    if (NARROW_MODE(ctx)) {
        nip = (uint32_t)nip;
    }
    tcg_gen_movi_tl(cpu_nip, nip);
}

static void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error)
{
    TCGv_i32 t0, t1;

    /* These are all synchronous exceptions, we set the PC back to
     * the faulting instruction
     * (pc_next has already been advanced past the current 4-byte insn,
     * hence the -4).
     */
    if (ctx->exception == POWERPC_EXCP_NONE) {
        gen_update_nip(ctx, ctx->base.pc_next - 4);
    }
    t0 = tcg_const_i32(excp);
    t1 = tcg_const_i32(error);
    gen_helper_raise_exception_err(cpu_env, t0, t1);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    ctx->exception = (excp);
}

static void gen_exception(DisasContext *ctx, uint32_t excp)
{
    TCGv_i32 t0;

    /* These are all synchronous exceptions, we set the PC back to
     * the faulting instruction
     */
    if (ctx->exception == POWERPC_EXCP_NONE) {
        gen_update_nip(ctx, ctx->base.pc_next - 4);
    }
    t0 = tcg_const_i32(excp);
    gen_helper_raise_exception(cpu_env, t0);
    tcg_temp_free_i32(t0);
    ctx->exception = (excp);
}

/* Raise excp with NIP explicitly set to nip (rather than pc_next - 4). */
static void gen_exception_nip(DisasContext *ctx, uint32_t excp,
                              target_ulong nip)
{
    TCGv_i32 t0;

    gen_update_nip(ctx, nip);
    t0 = tcg_const_i32(excp);
    gen_helper_raise_exception(cpu_env, t0);
    tcg_temp_free_i32(t0);
    ctx->exception = (excp);
}

/* Translates the EXCP_TRACE/BRANCH exceptions used on most PowerPCs to
 * EXCP_DEBUG, if we are running on cores using the debug enable bit (e.g.
 * BookE).  In the BookE case the corresponding DBSR bit is also set.
 */
static uint32_t gen_prep_dbgex(DisasContext *ctx, uint32_t excp)
{
    if ((ctx->singlestep_enabled & CPU_SINGLE_STEP)
        && (excp == POWERPC_EXCP_BRANCH)) {
        /* Trace exception has priority over the branch exception */
        excp = POWERPC_EXCP_TRACE;
    }
    if (ctx->flags & POWERPC_FLAG_DE) {
        target_ulong dbsr = 0;
        switch (excp) {
        case POWERPC_EXCP_TRACE:
            dbsr = DBCR0_ICMP;
            break;
        case POWERPC_EXCP_BRANCH:
            dbsr = DBCR0_BRT;
            break;
        }
        TCGv t0 = tcg_temp_new();
        gen_load_spr(t0, SPR_BOOKE_DBSR);
        tcg_gen_ori_tl(t0, t0, dbsr);
        gen_store_spr(SPR_BOOKE_DBSR, t0);
        tcg_temp_free(t0);
        return POWERPC_EXCP_DEBUG;
    } else {
        return excp;
    }
}

static void gen_debug_exception(DisasContext *ctx)
{
    TCGv_i32 t0;

    /* These are all synchronous exceptions, we set the PC back to
     * the faulting instruction
     */
    if ((ctx->exception != POWERPC_EXCP_BRANCH) &&
        (ctx->exception != POWERPC_EXCP_SYNC)) {
        gen_update_nip(ctx, ctx->base.pc_next);
    }
    t0 = tcg_const_i32(EXCP_DEBUG);
    gen_helper_raise_exception(cpu_env, t0);
    tcg_temp_free_i32(t0);
}

static inline void gen_inval_exception(DisasContext *ctx, uint32_t error)
{
    /* Will be converted to program check if needed */
    gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_INVAL | error);
}

static inline void gen_priv_exception(DisasContext *ctx, uint32_t error)
{
    gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_PRIV | error);
}

static inline void gen_hvpriv_exception(DisasContext *ctx, uint32_t error)
{
    /* Will be converted to program check if needed */
    gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_PRIV | error);
}

/* Stop translation */
static inline void gen_stop_exception(DisasContext *ctx)
{
    gen_update_nip(ctx, ctx->base.pc_next);
    ctx->exception = POWERPC_EXCP_STOP;
}

#ifndef CONFIG_USER_ONLY
/* No need to update nip here, as execution flow will change */
static inline void gen_sync_exception(DisasContext *ctx)
{
    ctx->exception = POWERPC_EXCP_SYNC;
}
#endif

409 #define GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \ 410 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE) 411 412 #define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2) \ 413 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2) 414 415 #define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type) \ 416 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE) 417 418 #define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2) \ 419 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2) 420 421 #define GEN_HANDLER_E_2(name, opc1, opc2, opc3, opc4, inval, type, type2) \ 422 GEN_OPCODE3(name, opc1, opc2, opc3, opc4, inval, type, type2) 423 424 #define GEN_HANDLER2_E_2(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) \ 425 GEN_OPCODE4(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) 426 427 typedef struct opcode_t { 428 unsigned char opc1, opc2, opc3, opc4; 429 #if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */ 430 unsigned char pad[4]; 431 #endif 432 opc_handler_t handler; 433 const char *oname; 434 } opcode_t; 435 436 /* Helpers for priv. 
check */ 437 #define GEN_PRIV \ 438 do { \ 439 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); return; \ 440 } while (0) 441 442 #if defined(CONFIG_USER_ONLY) 443 #define CHK_HV GEN_PRIV 444 #define CHK_SV GEN_PRIV 445 #define CHK_HVRM GEN_PRIV 446 #else 447 #define CHK_HV \ 448 do { \ 449 if (unlikely(ctx->pr || !ctx->hv)) { \ 450 GEN_PRIV; \ 451 } \ 452 } while (0) 453 #define CHK_SV \ 454 do { \ 455 if (unlikely(ctx->pr)) { \ 456 GEN_PRIV; \ 457 } \ 458 } while (0) 459 #define CHK_HVRM \ 460 do { \ 461 if (unlikely(ctx->pr || !ctx->hv || ctx->dr)) { \ 462 GEN_PRIV; \ 463 } \ 464 } while (0) 465 #endif 466 467 #define CHK_NONE 468 469 /*****************************************************************************/ 470 /* PowerPC instructions table */ 471 472 #if defined(DO_PPC_STATISTICS) 473 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \ 474 { \ 475 .opc1 = op1, \ 476 .opc2 = op2, \ 477 .opc3 = op3, \ 478 .opc4 = 0xff, \ 479 .handler = { \ 480 .inval1 = invl, \ 481 .type = _typ, \ 482 .type2 = _typ2, \ 483 .handler = &gen_##name, \ 484 .oname = stringify(name), \ 485 }, \ 486 .oname = stringify(name), \ 487 } 488 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \ 489 { \ 490 .opc1 = op1, \ 491 .opc2 = op2, \ 492 .opc3 = op3, \ 493 .opc4 = 0xff, \ 494 .handler = { \ 495 .inval1 = invl1, \ 496 .inval2 = invl2, \ 497 .type = _typ, \ 498 .type2 = _typ2, \ 499 .handler = &gen_##name, \ 500 .oname = stringify(name), \ 501 }, \ 502 .oname = stringify(name), \ 503 } 504 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \ 505 { \ 506 .opc1 = op1, \ 507 .opc2 = op2, \ 508 .opc3 = op3, \ 509 .opc4 = 0xff, \ 510 .handler = { \ 511 .inval1 = invl, \ 512 .type = _typ, \ 513 .type2 = _typ2, \ 514 .handler = &gen_##name, \ 515 .oname = onam, \ 516 }, \ 517 .oname = onam, \ 518 } 519 #define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2) \ 520 { \ 521 .opc1 = op1, \ 522 .opc2 = op2, \ 523 .opc3 = op3, \ 524 .opc4 = 
op4, \ 525 .handler = { \ 526 .inval1 = invl, \ 527 .type = _typ, \ 528 .type2 = _typ2, \ 529 .handler = &gen_##name, \ 530 .oname = stringify(name), \ 531 }, \ 532 .oname = stringify(name), \ 533 } 534 #define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2) \ 535 { \ 536 .opc1 = op1, \ 537 .opc2 = op2, \ 538 .opc3 = op3, \ 539 .opc4 = op4, \ 540 .handler = { \ 541 .inval1 = invl, \ 542 .type = _typ, \ 543 .type2 = _typ2, \ 544 .handler = &gen_##name, \ 545 .oname = onam, \ 546 }, \ 547 .oname = onam, \ 548 } 549 #else 550 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \ 551 { \ 552 .opc1 = op1, \ 553 .opc2 = op2, \ 554 .opc3 = op3, \ 555 .opc4 = 0xff, \ 556 .handler = { \ 557 .inval1 = invl, \ 558 .type = _typ, \ 559 .type2 = _typ2, \ 560 .handler = &gen_##name, \ 561 }, \ 562 .oname = stringify(name), \ 563 } 564 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \ 565 { \ 566 .opc1 = op1, \ 567 .opc2 = op2, \ 568 .opc3 = op3, \ 569 .opc4 = 0xff, \ 570 .handler = { \ 571 .inval1 = invl1, \ 572 .inval2 = invl2, \ 573 .type = _typ, \ 574 .type2 = _typ2, \ 575 .handler = &gen_##name, \ 576 }, \ 577 .oname = stringify(name), \ 578 } 579 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \ 580 { \ 581 .opc1 = op1, \ 582 .opc2 = op2, \ 583 .opc3 = op3, \ 584 .opc4 = 0xff, \ 585 .handler = { \ 586 .inval1 = invl, \ 587 .type = _typ, \ 588 .type2 = _typ2, \ 589 .handler = &gen_##name, \ 590 }, \ 591 .oname = onam, \ 592 } 593 #define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2) \ 594 { \ 595 .opc1 = op1, \ 596 .opc2 = op2, \ 597 .opc3 = op3, \ 598 .opc4 = op4, \ 599 .handler = { \ 600 .inval1 = invl, \ 601 .type = _typ, \ 602 .type2 = _typ2, \ 603 .handler = &gen_##name, \ 604 }, \ 605 .oname = stringify(name), \ 606 } 607 #define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2) \ 608 { \ 609 .opc1 = op1, \ 610 .opc2 = op2, \ 611 .opc3 = op3, \ 612 .opc4 = op4, \ 613 .handler = { \ 
614 .inval1 = invl, \ 615 .type = _typ, \ 616 .type2 = _typ2, \ 617 .handler = &gen_##name, \ 618 }, \ 619 .oname = onam, \ 620 } 621 #endif 622 623 /* Invalid instruction */ 624 static void gen_invalid(DisasContext *ctx) 625 { 626 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 627 } 628 629 static opc_handler_t invalid_handler = { 630 .inval1 = 0xFFFFFFFF, 631 .inval2 = 0xFFFFFFFF, 632 .type = PPC_NONE, 633 .type2 = PPC_NONE, 634 .handler = gen_invalid, 635 }; 636 637 /*** Integer comparison ***/ 638 639 static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf) 640 { 641 TCGv t0 = tcg_temp_new(); 642 TCGv t1 = tcg_temp_new(); 643 TCGv_i32 t = tcg_temp_new_i32(); 644 645 tcg_gen_movi_tl(t0, CRF_EQ); 646 tcg_gen_movi_tl(t1, CRF_LT); 647 tcg_gen_movcond_tl((s ? TCG_COND_LT : TCG_COND_LTU), t0, arg0, arg1, t1, t0); 648 tcg_gen_movi_tl(t1, CRF_GT); 649 tcg_gen_movcond_tl((s ? TCG_COND_GT : TCG_COND_GTU), t0, arg0, arg1, t1, t0); 650 651 tcg_gen_trunc_tl_i32(t, t0); 652 tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so); 653 tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t); 654 655 tcg_temp_free(t0); 656 tcg_temp_free(t1); 657 tcg_temp_free_i32(t); 658 } 659 660 static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf) 661 { 662 TCGv t0 = tcg_const_tl(arg1); 663 gen_op_cmp(arg0, t0, s, crf); 664 tcg_temp_free(t0); 665 } 666 667 static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf) 668 { 669 TCGv t0, t1; 670 t0 = tcg_temp_new(); 671 t1 = tcg_temp_new(); 672 if (s) { 673 tcg_gen_ext32s_tl(t0, arg0); 674 tcg_gen_ext32s_tl(t1, arg1); 675 } else { 676 tcg_gen_ext32u_tl(t0, arg0); 677 tcg_gen_ext32u_tl(t1, arg1); 678 } 679 gen_op_cmp(t0, t1, s, crf); 680 tcg_temp_free(t1); 681 tcg_temp_free(t0); 682 } 683 684 static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf) 685 { 686 TCGv t0 = tcg_const_tl(arg1); 687 gen_op_cmp32(arg0, t0, s, crf); 688 tcg_temp_free(t0); 689 } 690 691 static inline void 
gen_set_Rc0(DisasContext *ctx, TCGv reg) 692 { 693 if (NARROW_MODE(ctx)) { 694 gen_op_cmpi32(reg, 0, 1, 0); 695 } else { 696 gen_op_cmpi(reg, 0, 1, 0); 697 } 698 } 699 700 /* cmp */ 701 static void gen_cmp(DisasContext *ctx) 702 { 703 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) { 704 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 705 1, crfD(ctx->opcode)); 706 } else { 707 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 708 1, crfD(ctx->opcode)); 709 } 710 } 711 712 /* cmpi */ 713 static void gen_cmpi(DisasContext *ctx) 714 { 715 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) { 716 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode), 717 1, crfD(ctx->opcode)); 718 } else { 719 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode), 720 1, crfD(ctx->opcode)); 721 } 722 } 723 724 /* cmpl */ 725 static void gen_cmpl(DisasContext *ctx) 726 { 727 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) { 728 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 729 0, crfD(ctx->opcode)); 730 } else { 731 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 732 0, crfD(ctx->opcode)); 733 } 734 } 735 736 /* cmpli */ 737 static void gen_cmpli(DisasContext *ctx) 738 { 739 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) { 740 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode), 741 0, crfD(ctx->opcode)); 742 } else { 743 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode), 744 0, crfD(ctx->opcode)); 745 } 746 } 747 748 /* cmprb - range comparison: isupper, isaplha, islower*/ 749 static void gen_cmprb(DisasContext *ctx) 750 { 751 TCGv_i32 src1 = tcg_temp_new_i32(); 752 TCGv_i32 src2 = tcg_temp_new_i32(); 753 TCGv_i32 src2lo = tcg_temp_new_i32(); 754 TCGv_i32 src2hi = tcg_temp_new_i32(); 755 TCGv_i32 crf = cpu_crf[crfD(ctx->opcode)]; 756 757 tcg_gen_trunc_tl_i32(src1, cpu_gpr[rA(ctx->opcode)]); 758 tcg_gen_trunc_tl_i32(src2, 
cpu_gpr[rB(ctx->opcode)]); 759 760 tcg_gen_andi_i32(src1, src1, 0xFF); 761 tcg_gen_ext8u_i32(src2lo, src2); 762 tcg_gen_shri_i32(src2, src2, 8); 763 tcg_gen_ext8u_i32(src2hi, src2); 764 765 tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1); 766 tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi); 767 tcg_gen_and_i32(crf, src2lo, src2hi); 768 769 if (ctx->opcode & 0x00200000) { 770 tcg_gen_shri_i32(src2, src2, 8); 771 tcg_gen_ext8u_i32(src2lo, src2); 772 tcg_gen_shri_i32(src2, src2, 8); 773 tcg_gen_ext8u_i32(src2hi, src2); 774 tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1); 775 tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi); 776 tcg_gen_and_i32(src2lo, src2lo, src2hi); 777 tcg_gen_or_i32(crf, crf, src2lo); 778 } 779 tcg_gen_shli_i32(crf, crf, CRF_GT_BIT); 780 tcg_temp_free_i32(src1); 781 tcg_temp_free_i32(src2); 782 tcg_temp_free_i32(src2lo); 783 tcg_temp_free_i32(src2hi); 784 } 785 786 #if defined(TARGET_PPC64) 787 /* cmpeqb */ 788 static void gen_cmpeqb(DisasContext *ctx) 789 { 790 gen_helper_cmpeqb(cpu_crf[crfD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 791 cpu_gpr[rB(ctx->opcode)]); 792 } 793 #endif 794 795 /* isel (PowerPC 2.03 specification) */ 796 static void gen_isel(DisasContext *ctx) 797 { 798 uint32_t bi = rC(ctx->opcode); 799 uint32_t mask = 0x08 >> (bi & 0x03); 800 TCGv t0 = tcg_temp_new(); 801 TCGv zr; 802 803 tcg_gen_extu_i32_tl(t0, cpu_crf[bi >> 2]); 804 tcg_gen_andi_tl(t0, t0, mask); 805 806 zr = tcg_const_tl(0); 807 tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rD(ctx->opcode)], t0, zr, 808 rA(ctx->opcode) ? 
cpu_gpr[rA(ctx->opcode)] : zr, 809 cpu_gpr[rB(ctx->opcode)]); 810 tcg_temp_free(zr); 811 tcg_temp_free(t0); 812 } 813 814 /* cmpb: PowerPC 2.05 specification */ 815 static void gen_cmpb(DisasContext *ctx) 816 { 817 gen_helper_cmpb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 818 cpu_gpr[rB(ctx->opcode)]); 819 } 820 821 /*** Integer arithmetic ***/ 822 823 static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0, 824 TCGv arg1, TCGv arg2, int sub) 825 { 826 TCGv t0 = tcg_temp_new(); 827 828 tcg_gen_xor_tl(cpu_ov, arg0, arg2); 829 tcg_gen_xor_tl(t0, arg1, arg2); 830 if (sub) { 831 tcg_gen_and_tl(cpu_ov, cpu_ov, t0); 832 } else { 833 tcg_gen_andc_tl(cpu_ov, cpu_ov, t0); 834 } 835 tcg_temp_free(t0); 836 if (NARROW_MODE(ctx)) { 837 tcg_gen_extract_tl(cpu_ov, cpu_ov, 31, 1); 838 if (is_isa300(ctx)) { 839 tcg_gen_mov_tl(cpu_ov32, cpu_ov); 840 } 841 } else { 842 if (is_isa300(ctx)) { 843 tcg_gen_extract_tl(cpu_ov32, cpu_ov, 31, 1); 844 } 845 tcg_gen_extract_tl(cpu_ov, cpu_ov, TARGET_LONG_BITS - 1, 1); 846 } 847 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 848 } 849 850 static inline void gen_op_arith_compute_ca32(DisasContext *ctx, 851 TCGv res, TCGv arg0, TCGv arg1, 852 TCGv ca32, int sub) 853 { 854 TCGv t0; 855 856 if (!is_isa300(ctx)) { 857 return; 858 } 859 860 t0 = tcg_temp_new(); 861 if (sub) { 862 tcg_gen_eqv_tl(t0, arg0, arg1); 863 } else { 864 tcg_gen_xor_tl(t0, arg0, arg1); 865 } 866 tcg_gen_xor_tl(t0, t0, res); 867 tcg_gen_extract_tl(ca32, t0, 32, 1); 868 tcg_temp_free(t0); 869 } 870 871 /* Common add function */ 872 static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1, 873 TCGv arg2, TCGv ca, TCGv ca32, 874 bool add_ca, bool compute_ca, 875 bool compute_ov, bool compute_rc0) 876 { 877 TCGv t0 = ret; 878 879 if (compute_ca || compute_ov) { 880 t0 = tcg_temp_new(); 881 } 882 883 if (compute_ca) { 884 if (NARROW_MODE(ctx)) { 885 /* Caution: a non-obvious corner case of the spec is that we 886 must produce the *entire* 64-bit 
addition, but produce the 887 carry into bit 32. */ 888 TCGv t1 = tcg_temp_new(); 889 tcg_gen_xor_tl(t1, arg1, arg2); /* add without carry */ 890 tcg_gen_add_tl(t0, arg1, arg2); 891 if (add_ca) { 892 tcg_gen_add_tl(t0, t0, ca); 893 } 894 tcg_gen_xor_tl(ca, t0, t1); /* bits changed w/ carry */ 895 tcg_temp_free(t1); 896 tcg_gen_extract_tl(ca, ca, 32, 1); 897 if (is_isa300(ctx)) { 898 tcg_gen_mov_tl(ca32, ca); 899 } 900 } else { 901 TCGv zero = tcg_const_tl(0); 902 if (add_ca) { 903 tcg_gen_add2_tl(t0, ca, arg1, zero, ca, zero); 904 tcg_gen_add2_tl(t0, ca, t0, ca, arg2, zero); 905 } else { 906 tcg_gen_add2_tl(t0, ca, arg1, zero, arg2, zero); 907 } 908 gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, ca32, 0); 909 tcg_temp_free(zero); 910 } 911 } else { 912 tcg_gen_add_tl(t0, arg1, arg2); 913 if (add_ca) { 914 tcg_gen_add_tl(t0, t0, ca); 915 } 916 } 917 918 if (compute_ov) { 919 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0); 920 } 921 if (unlikely(compute_rc0)) { 922 gen_set_Rc0(ctx, t0); 923 } 924 925 if (t0 != ret) { 926 tcg_gen_mov_tl(ret, t0); 927 tcg_temp_free(t0); 928 } 929 } 930 /* Add functions with two operands */ 931 #define GEN_INT_ARITH_ADD(name, opc3, ca, add_ca, compute_ca, compute_ov) \ 932 static void glue(gen_, name)(DisasContext *ctx) \ 933 { \ 934 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \ 935 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 936 ca, glue(ca, 32), \ 937 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 938 } 939 /* Add functions with one operand and one immediate */ 940 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, ca, \ 941 add_ca, compute_ca, compute_ov) \ 942 static void glue(gen_, name)(DisasContext *ctx) \ 943 { \ 944 TCGv t0 = tcg_const_tl(const_val); \ 945 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \ 946 cpu_gpr[rA(ctx->opcode)], t0, \ 947 ca, glue(ca, 32), \ 948 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 949 tcg_temp_free(t0); \ 950 } 951 952 /* add add. addo addo. 
*/ 953 GEN_INT_ARITH_ADD(add, 0x08, cpu_ca, 0, 0, 0) 954 GEN_INT_ARITH_ADD(addo, 0x18, cpu_ca, 0, 0, 1) 955 /* addc addc. addco addco. */ 956 GEN_INT_ARITH_ADD(addc, 0x00, cpu_ca, 0, 1, 0) 957 GEN_INT_ARITH_ADD(addco, 0x10, cpu_ca, 0, 1, 1) 958 /* adde adde. addeo addeo. */ 959 GEN_INT_ARITH_ADD(adde, 0x04, cpu_ca, 1, 1, 0) 960 GEN_INT_ARITH_ADD(addeo, 0x14, cpu_ca, 1, 1, 1) 961 /* addme addme. addmeo addmeo. */ 962 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, cpu_ca, 1, 1, 0) 963 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, cpu_ca, 1, 1, 1) 964 /* addex */ 965 GEN_INT_ARITH_ADD(addex, 0x05, cpu_ov, 1, 1, 0); 966 /* addze addze. addzeo addzeo.*/ 967 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, cpu_ca, 1, 1, 0) 968 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, cpu_ca, 1, 1, 1) 969 /* addi */ 970 static void gen_addi(DisasContext *ctx) 971 { 972 target_long simm = SIMM(ctx->opcode); 973 974 if (rA(ctx->opcode) == 0) { 975 /* li case */ 976 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm); 977 } else { 978 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], 979 cpu_gpr[rA(ctx->opcode)], simm); 980 } 981 } 982 /* addic addic.*/ 983 static inline void gen_op_addic(DisasContext *ctx, bool compute_rc0) 984 { 985 TCGv c = tcg_const_tl(SIMM(ctx->opcode)); 986 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 987 c, cpu_ca, cpu_ca32, 0, 1, 0, compute_rc0); 988 tcg_temp_free(c); 989 } 990 991 static void gen_addic(DisasContext *ctx) 992 { 993 gen_op_addic(ctx, 0); 994 } 995 996 static void gen_addic_(DisasContext *ctx) 997 { 998 gen_op_addic(ctx, 1); 999 } 1000 1001 /* addis */ 1002 static void gen_addis(DisasContext *ctx) 1003 { 1004 target_long simm = SIMM(ctx->opcode); 1005 1006 if (rA(ctx->opcode) == 0) { 1007 /* lis case */ 1008 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm << 16); 1009 } else { 1010 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], 1011 cpu_gpr[rA(ctx->opcode)], simm << 16); 1012 } 1013 } 1014 1015 /* addpcis */ 1016 static void gen_addpcis(DisasContext *ctx) 
1017 { 1018 target_long d = DX(ctx->opcode); 1019 1020 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], ctx->base.pc_next + (d << 16)); 1021 } 1022 1023 static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1, 1024 TCGv arg2, int sign, int compute_ov) 1025 { 1026 TCGv_i32 t0 = tcg_temp_new_i32(); 1027 TCGv_i32 t1 = tcg_temp_new_i32(); 1028 TCGv_i32 t2 = tcg_temp_new_i32(); 1029 TCGv_i32 t3 = tcg_temp_new_i32(); 1030 1031 tcg_gen_trunc_tl_i32(t0, arg1); 1032 tcg_gen_trunc_tl_i32(t1, arg2); 1033 if (sign) { 1034 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN); 1035 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1); 1036 tcg_gen_and_i32(t2, t2, t3); 1037 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0); 1038 tcg_gen_or_i32(t2, t2, t3); 1039 tcg_gen_movi_i32(t3, 0); 1040 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1); 1041 tcg_gen_div_i32(t3, t0, t1); 1042 tcg_gen_extu_i32_tl(ret, t3); 1043 } else { 1044 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t1, 0); 1045 tcg_gen_movi_i32(t3, 0); 1046 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1); 1047 tcg_gen_divu_i32(t3, t0, t1); 1048 tcg_gen_extu_i32_tl(ret, t3); 1049 } 1050 if (compute_ov) { 1051 tcg_gen_extu_i32_tl(cpu_ov, t2); 1052 if (is_isa300(ctx)) { 1053 tcg_gen_extu_i32_tl(cpu_ov32, t2); 1054 } 1055 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 1056 } 1057 tcg_temp_free_i32(t0); 1058 tcg_temp_free_i32(t1); 1059 tcg_temp_free_i32(t2); 1060 tcg_temp_free_i32(t3); 1061 1062 if (unlikely(Rc(ctx->opcode) != 0)) 1063 gen_set_Rc0(ctx, ret); 1064 } 1065 /* Div functions */ 1066 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \ 1067 static void glue(gen_, name)(DisasContext *ctx) \ 1068 { \ 1069 gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)], \ 1070 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 1071 sign, compute_ov); \ 1072 } 1073 /* divwu divwu. divwuo divwuo. */ 1074 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0); 1075 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1); 1076 /* divw divw. divwo divwo. 
 */
GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0);
GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1);

/* div[wd]eu[o][.] -- extended divides are done out of line in a helper,
 * which also handles OV when compute_ov (passed in t0) is set. */
#define GEN_DIVE(name, hlpr, compute_ov)                                      \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 t0 = tcg_const_i32(compute_ov);                                  \
    gen_helper_##hlpr(cpu_gpr[rD(ctx->opcode)], cpu_env,                      \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); \
    tcg_temp_free_i32(t0);                                                    \
    if (unlikely(Rc(ctx->opcode) != 0)) {                                     \
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);                           \
    }                                                                         \
}

GEN_DIVE(divweu, divweu, 0);
GEN_DIVE(divweuo, divweu, 1);
GEN_DIVE(divwe, divwe, 0);
GEN_DIVE(divweo, divwe, 1);

#if defined(TARGET_PPC64)
/*
 * Common 64-bit divide; same invalid-form handling as gen_op_arith_divw:
 * divisor is patched to a safe value (t2 == 1) for divide-by-zero and
 * INT64_MIN / -1, t2 then also feeds OV/OV32/SO when compute_ov is set.
 */
static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign, int compute_ov)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv_i64 t2 = tcg_temp_new_i64();
    TCGv_i64 t3 = tcg_temp_new_i64();

    tcg_gen_mov_i64(t0, arg1);
    tcg_gen_mov_i64(t1, arg2);
    if (sign) {
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i64(t2, t2, t3);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i64(t2, t2, t3);
        tcg_gen_movi_i64(t3, 0);
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_div_i64(ret, t0, t1);
    } else {
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t1, 0);
        tcg_gen_movi_i64(t3, 0);
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_divu_i64(ret, t0, t1);
    }
    if (compute_ov) {
        tcg_gen_mov_tl(cpu_ov, t2);
        if (is_isa300(ctx)) {
            tcg_gen_mov_tl(cpu_ov32, t2);
        }
        tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
    tcg_temp_free_i64(t3);

    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, ret);
}

#define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov)                      \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      sign, compute_ov);                                      \
}
/* divdu divdu. divduo divduo. */
GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
/* divd divd. divdo divdo. */
GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0);
GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1);

GEN_DIVE(divdeu, divdeu, 0);
GEN_DIVE(divdeuo, divdeu, 1);
GEN_DIVE(divde, divde, 0);
GEN_DIVE(divdeo, divde, 1);
#endif

/*
 * 32-bit modulo: ret = arg1 % arg2.  Invalid forms (divide by zero,
 * INT_MIN % -1 when signed) have the divisor patched to a safe value
 * before the host rem, giving an undefined (but non-trapping) result.
 * No condition/overflow flags are affected.
 */
static inline void gen_op_arith_modw(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg1);
    tcg_gen_trunc_tl_i32(t1, arg2);
    if (sign) {
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_i32 t3 = tcg_temp_new_i32();
        tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i32(t2, t2, t3);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i32(t2, t2, t3);
        tcg_gen_movi_i32(t3, 0);
        tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_rem_i32(t3, t0, t1);
        tcg_gen_ext_i32_tl(ret, t3);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        TCGv_i32 t2 = tcg_const_i32(1);
        TCGv_i32 t3 = tcg_const_i32(0);
        /* Divide by 1 instead of 0 so the host remu cannot fault */
        tcg_gen_movcond_i32(TCG_COND_EQ, t1, t1, t3, t2, t1);
        tcg_gen_remu_i32(t3, t0, t1);
        tcg_gen_extu_i32_tl(ret, t3);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    }
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
}

#define GEN_INT_ARITH_MODW(name, opc3, sign)                                  \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_modw(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      sign);                                                  \
}

GEN_INT_ARITH_MODW(moduw, 0x08, 0);
GEN_INT_ARITH_MODW(modsw, 0x18, 1);

#if defined(TARGET_PPC64)
/* 64-bit modulo; same invalid-form handling as gen_op_arith_modw. */
static inline void gen_op_arith_modd(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_mov_i64(t0, arg1);
    tcg_gen_mov_i64(t1, arg2);
    if (sign) {
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i64 t3 = tcg_temp_new_i64();
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i64(t2, t2, t3);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i64(t2, t2, t3);
        tcg_gen_movi_i64(t3, 0);
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_rem_i64(ret, t0, t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        TCGv_i64 t2 = tcg_const_i64(1);
        TCGv_i64 t3 = tcg_const_i64(0);
        tcg_gen_movcond_i64(TCG_COND_EQ, t1, t1, t3, t2, t1);
        tcg_gen_remu_i64(ret, t0, t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

#define GEN_INT_ARITH_MODD(name, opc3, sign)                                  \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_modd(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      sign);                                                  \
}

GEN_INT_ARITH_MODD(modud, 0x08, 0);
GEN_INT_ARITH_MODD(modsd, 0x18, 1);
#endif

/* mulhw mulhw.
 */
/* rD = high 32 bits of the 64-bit signed product rA * rB. */
static void gen_mulhw(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_muls2_i32(t0, t1, t0, t1);          /* t0 = low, t1 = high */
    tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* mulhwu mulhwu. -- rD = high 32 bits of the unsigned product rA * rB. */
static void gen_mulhwu(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mulu2_i32(t0, t1, t0, t1);          /* t0 = low, t1 = high */
    tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* mullw mullw. -- rD = low 32 bits of rA * rB.  On ppc64 the inputs are
 * sign-extended first so the full 64-bit register holds the signed
 * 64-bit product of the two 32-bit operands. */
static void gen_mullw(DisasContext *ctx)
{
#if defined(TARGET_PPC64)
    TCGv_i64 t0, t1;
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mul_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
#else
    tcg_gen_mul_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                    cpu_gpr[rB(ctx->opcode)]);
#endif
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* mullwo mullwo.
 */
/* mullwo: low 32 bits of signed product, setting OV if the product does
 * not fit in 32 bits (high word != sign extension of low word). */
static void gen_mullwo(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_muls2_i32(t0, t1, t0, t1);          /* t0 = low, t1 = high */
#if defined(TARGET_PPC64)
    tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
#else
    tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], t0);
#endif

    /* OV = (high word != sign bit of low word replicated) */
    tcg_gen_sari_i32(t0, t0, 31);
    tcg_gen_setcond_i32(TCG_COND_NE, t0, t0, t1);
    tcg_gen_extu_i32_tl(cpu_ov, t0);
    if (is_isa300(ctx)) {
        tcg_gen_mov_tl(cpu_ov32, cpu_ov);
    }
    tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);

    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* mulli -- rD = rA * SIMM (low-order bits of the product). */
static void gen_mulli(DisasContext *ctx)
{
    tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                    SIMM(ctx->opcode));
}

#if defined(TARGET_PPC64)
/* mulhd mulhd. -- rD = high 64 bits of the 128-bit signed product. */
static void gen_mulhd(DisasContext *ctx)
{
    TCGv lo = tcg_temp_new();
    tcg_gen_muls2_tl(lo, cpu_gpr[rD(ctx->opcode)],
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    tcg_temp_free(lo);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}

/* mulhdu mulhdu. -- rD = high 64 bits of the 128-bit unsigned product. */
static void gen_mulhdu(DisasContext *ctx)
{
    TCGv lo = tcg_temp_new();
    tcg_gen_mulu2_tl(lo, cpu_gpr[rD(ctx->opcode)],
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    tcg_temp_free(lo);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}

/* mulld mulld.
 */
/* rD = low 64 bits of rA * rB. */
static void gen_mulld(DisasContext *ctx)
{
    tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                   cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* mulldo mulldo. -- as mulld but sets OV when the signed 128-bit product
 * does not fit in 64 bits. */
static void gen_mulldo(DisasContext *ctx)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_muls2_i64(t0, t1, cpu_gpr[rA(ctx->opcode)],
                      cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], t0);

    /* OV = (high half != sign extension of low half) */
    tcg_gen_sari_i64(t0, t0, 63);
    tcg_gen_setcond_i64(TCG_COND_NE, cpu_ov, t0, t1);
    if (is_isa300(ctx)) {
        tcg_gen_mov_tl(cpu_ov32, cpu_ov);
    }
    tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}
#endif

/* Common subf function.
 *
 * Computes ret = ~arg1 + arg2 + (add_ca ? CA : 1), i.e. arg2 - arg1
 * (optionally with carry-in), the shared core of subf/subfc/subfe and
 * the *me/*ze/subfic/neg forms.  compute_ca selects CA (and CA32 on
 * ISA 3.00) computation, compute_ov selects OV/OV32/SO, compute_rc0
 * selects a CR0 update.  A scratch is used for the result whenever the
 * flag computation must read ret's operands after writing it.
 */
static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, bool add_ca, bool compute_ca,
                                     bool compute_ov, bool compute_rc0)
{
    TCGv t0 = ret;

    if (compute_ca || compute_ov) {
        t0 = tcg_temp_new();
    }

    if (compute_ca) {
        /* dest = ~arg1 + arg2 [+ ca].  */
        if (NARROW_MODE(ctx)) {
            /* Caution: a non-obvious corner case of the spec is that we
               must produce the *entire* 64-bit addition, but produce the
               carry into bit 32.  */
            TCGv inv1 = tcg_temp_new();
            TCGv t1 = tcg_temp_new();
            tcg_gen_not_tl(inv1, arg1);
            if (add_ca) {
                tcg_gen_add_tl(t0, arg2, cpu_ca);
            } else {
                tcg_gen_addi_tl(t0, arg2, 1);
            }
            tcg_gen_xor_tl(t1, arg2, inv1);         /* add without carry */
            tcg_gen_add_tl(t0, t0, inv1);
            tcg_temp_free(inv1);
            tcg_gen_xor_tl(cpu_ca, t0, t1);         /* bits changed w/ carry */
            tcg_temp_free(t1);
            tcg_gen_extract_tl(cpu_ca, cpu_ca, 32, 1);
            if (is_isa300(ctx)) {
                tcg_gen_mov_tl(cpu_ca32, cpu_ca);
            }
        } else if (add_ca) {
            TCGv zero, inv1 = tcg_temp_new();
            tcg_gen_not_tl(inv1, arg1);
            zero = tcg_const_tl(0);
            tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero);
            tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero);
            gen_op_arith_compute_ca32(ctx, t0, inv1, arg2, cpu_ca32, 0);
            tcg_temp_free(zero);
            tcg_temp_free(inv1);
        } else {
            /* No carry-in: CA = (arg2 >= arg1), i.e. no borrow */
            tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1);
            tcg_gen_sub_tl(t0, arg2, arg1);
            gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, cpu_ca32, 1);
        }
    } else if (add_ca) {
        /* Since we're ignoring carry-out, we can simplify the
           standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1.  */
        tcg_gen_sub_tl(t0, arg2, arg1);
        tcg_gen_add_tl(t0, t0, cpu_ca);
        tcg_gen_subi_tl(t0, t0, 1);
    } else {
        tcg_gen_sub_tl(t0, arg2, arg1);
    }

    if (compute_ov) {
        gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
    }
    if (unlikely(compute_rc0)) {
        gen_set_Rc0(ctx, t0);
    }

    if (t0 != ret) {
        tcg_gen_mov_tl(ret, t0);
        tcg_temp_free(t0);
    }
}
/* Sub functions with Two operands functions */
#define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov)        \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      add_ca, compute_ca, compute_ov, Rc(ctx->opcode));       \
}
/* Sub functions with one operand and one immediate */
#define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val,                       \
                                 add_ca, compute_ca, compute_ov)              \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    TCGv t0 = tcg_const_tl(const_val);                                        \
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], t0,                           \
                      add_ca, compute_ca, compute_ov, Rc(ctx->opcode));       \
    tcg_temp_free(t0);                                                        \
}
/* subf subf. subfo subfo. */
GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
/* subfc subfc. subfco subfco. */
GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
/* subfe subfe. subfeo subfo. */
GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
/* subfme subfme. subfmeo subfmeo. */
GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
/* subfze subfze.
   subfzeo subfzeo. */
GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)

/* subfic -- rD = SIMM - rA, always computing CA, never OV/CR0. */
static void gen_subfic(DisasContext *ctx)
{
    TCGv c = tcg_const_tl(SIMM(ctx->opcode));
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                      c, 0, 1, 0, 0);
    tcg_temp_free(c);
}

/* neg neg. nego nego. -- rD = 0 - rA via the common subf path. */
static inline void gen_op_arith_neg(DisasContext *ctx, bool compute_ov)
{
    TCGv zero = tcg_const_tl(0);
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                      zero, 0, 0, compute_ov, Rc(ctx->opcode));
    tcg_temp_free(zero);
}

/* neg takes the direct path since no OV is needed */
static void gen_neg(DisasContext *ctx)
{
    tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}

static void gen_nego(DisasContext *ctx)
{
    gen_op_arith_neg(ctx, 1);
}

/*** Integer logical                                                       ***/
/* Two-source logical op: rA = rS <op> rB, optional CR0 update. */
#define GEN_LOGICAL2(name, tcg_op, opc, type)                                 \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],                \
           cpu_gpr[rB(ctx->opcode)]);                                         \
    if (unlikely(Rc(ctx->opcode) != 0))                                       \
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);                           \
}

/* One-source logical op: rA = <op>(rS), optional CR0 update. */
#define GEN_LOGICAL1(name, tcg_op, opc, type)                                 \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);               \
    if (unlikely(Rc(ctx->opcode) != 0))                                       \
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);                           \
}

/* and & and. */
GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER);
/* andc & andc. */
GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER);

/* andi.
 */
/* andi. always sets CR0 (the record bit is implicit in the opcode). */
static void gen_andi_(DisasContext *ctx)
{
    tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                    UIMM(ctx->opcode));
    gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* andis. -- immediate shifted into the upper halfword; always sets CR0. */
static void gen_andis_(DisasContext *ctx)
{
    tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                    UIMM(ctx->opcode) << 16);
    gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* cntlzw -- count leading zeros of the low 32 bits; result 32 for zero. */
static void gen_cntlzw(DisasContext *ctx)
{
    TCGv_i32 t = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_clzi_i32(t, t, 32);
    tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t);
    tcg_temp_free_i32(t);

    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* cnttzw -- count trailing zeros of the low 32 bits; result 32 for zero. */
static void gen_cnttzw(DisasContext *ctx)
{
    TCGv_i32 t = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_ctzi_i32(t, t, 32);
    tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t);
    tcg_temp_free_i32(t);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* eqv & eqv. */
GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER);
/* extsb & extsb. */
GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER);
/* extsh & extsh. */
GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER);
/* nand & nand. */
GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER);
/* nor & nor.
 */
GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER);

#if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
/* Halt the vCPU and leave the TB so other CPUs get to run. */
static void gen_pause(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_const_i32(0);
    tcg_gen_st_i32(t0, cpu_env,
                   -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
    tcg_temp_free_i32(t0);

    /* Stop translation, this gives other CPUs a chance to run */
    gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
}
#endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */

/* or & or.
 *
 * Besides the plain OR, "or rx,rx,rx" encodings with rx != 0 are
 * architected no-op hints that set the thread priority in SPR_PPR
 * (smt_low/medium/high etc.); those are handled in the else branches.
 */
static void gen_or(DisasContext *ctx)
{
    int rs, ra, rb;

    rs = rS(ctx->opcode);
    ra = rA(ctx->opcode);
    rb = rB(ctx->opcode);
    /* Optimisation for mr. ri case */
    if (rs != ra || rs != rb) {
        if (rs != rb)
            tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
        else
            tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);
        if (unlikely(Rc(ctx->opcode) != 0))
            gen_set_Rc0(ctx, cpu_gpr[ra]);
    } else if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rs]);
#if defined(TARGET_PPC64)
    } else if (rs != 0) { /* 0 is nop */
        int prio = 0;

        switch (rs) {
        case 1:
            /* Set process priority to low */
            prio = 2;
            break;
        case 6:
            /* Set process priority to medium-low */
            prio = 3;
            break;
        case 2:
            /* Set process priority to normal */
            prio = 4;
            break;
#if !defined(CONFIG_USER_ONLY)
        case 31:
            if (!ctx->pr) {
                /* Set process priority to very low */
                prio = 1;
            }
            break;
        case 5:
            if (!ctx->pr) {
                /* Set process priority to medium-hight */
                prio = 5;
            }
            break;
        case 3:
            if (!ctx->pr) {
                /* Set process priority to high */
                prio = 6;
            }
            break;
        case 7:
            if (ctx->hv && !ctx->pr) {
                /* Set process priority to very high */
                prio = 7;
            }
            break;
#endif
        default:
            break;
        }
        if (prio) {
            TCGv t0 = tcg_temp_new();
            gen_load_spr(t0, SPR_PPR);
            tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
            tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
            gen_store_spr(SPR_PPR, t0);
            tcg_temp_free(t0);
        }
#if !defined(CONFIG_USER_ONLY)
        /* Pause out of TCG otherwise spin loops with smt_low eat too much
         * CPU and the kernel hangs. This applies to all encodings other
         * than no-op, e.g., miso(rs=26), yield(27), mdoio(29), mdoom(30),
         * and all currently undefined.
         */
        gen_pause(ctx);
#endif
#endif
    }
}
/* orc & orc. */
GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);

/* xor & xor. */
static void gen_xor(DisasContext *ctx)
{
    /* Optimisation for "set to zero" case */
    if (rS(ctx->opcode) != rB(ctx->opcode))
        tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                       cpu_gpr[rB(ctx->opcode)]);
    else
        tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* ori -- "ori 0,0,0" is the preferred no-op, skipped entirely. */
static void gen_ori(DisasContext *ctx)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        return;
    }
    tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
}

/* oris -- immediate shifted into the upper halfword. */
static void gen_oris(DisasContext *ctx)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        /* NOP */
        return;
    }
    tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                   uimm << 16);
}

/* xori */
static void gen_xori(DisasContext *ctx)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        /* NOP */
        return;
    }
    tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
}

/*
   xoris */
/* xoris -- immediate shifted into the upper halfword. */
static void gen_xoris(DisasContext *ctx)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        /* NOP */
        return;
    }
    tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                    uimm << 16);
}

/* popcntb : PowerPC 2.03 specification -- per-byte population count. */
static void gen_popcntb(DisasContext *ctx)
{
    gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
}

/* popcntw -- per-word population count (single word on 32-bit). */
static void gen_popcntw(DisasContext *ctx)
{
#if defined(TARGET_PPC64)
    gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
#else
    tcg_gen_ctpop_i32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
#endif
}

#if defined(TARGET_PPC64)
/* popcntd: PowerPC 2.06 specification */
static void gen_popcntd(DisasContext *ctx)
{
    tcg_gen_ctpop_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
}
#endif

/* prtyw: PowerPC 2.05 specification.
 * XOR-fold each 32-bit word down to its low byte's parity bit; the final
 * mask keeps bit 0 of each word (0x1_00000001 truncates to 1 on 32-bit).
 */
static void gen_prtyw(DisasContext *ctx)
{
    TCGv ra = cpu_gpr[rA(ctx->opcode)];
    TCGv rs = cpu_gpr[rS(ctx->opcode)];
    TCGv t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, rs, 16);
    tcg_gen_xor_tl(ra, rs, t0);
    tcg_gen_shri_tl(t0, ra, 8);
    tcg_gen_xor_tl(ra, ra, t0);
    tcg_gen_andi_tl(ra, ra, (target_ulong)0x100000001ULL);
    tcg_temp_free(t0);
}

#if defined(TARGET_PPC64)
/* prtyd: PowerPC 2.05 specification -- parity of the whole doubleword. */
static void gen_prtyd(DisasContext *ctx)
{
    TCGv ra = cpu_gpr[rA(ctx->opcode)];
    TCGv rs = cpu_gpr[rS(ctx->opcode)];
    TCGv t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, rs, 32);
    tcg_gen_xor_tl(ra, rs, t0);
    tcg_gen_shri_tl(t0, ra, 16);
    tcg_gen_xor_tl(ra, ra, t0);
    tcg_gen_shri_tl(t0, ra, 8);
    tcg_gen_xor_tl(ra, ra, t0);
    tcg_gen_andi_tl(ra, ra, 1);
    tcg_temp_free(t0);
}
#endif

#if defined(TARGET_PPC64)
/* bpermd -- bit permute doubleword, done out of line. */
static void gen_bpermd(DisasContext *ctx)
{
    gen_helper_bpermd(cpu_gpr[rA(ctx->opcode)],
                      cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
}
#endif

#if defined(TARGET_PPC64)
/* extsw & extsw. */
GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B);

/* cntlzd -- count leading zeros of the doubleword; 64 for a zero input. */
static void gen_cntlzd(DisasContext *ctx)
{
    tcg_gen_clzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* cnttzd -- count trailing zeros of the doubleword; 64 for a zero input. */
static void gen_cnttzd(DisasContext *ctx)
{
    tcg_gen_ctzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* darn -- deliver a random number; L selects width, L==3 (reserved)
 * returns -1 per the "no random number available" convention. */
static void gen_darn(DisasContext *ctx)
{
    int l = L(ctx->opcode);

    if (l == 0) {
        gen_helper_darn32(cpu_gpr[rD(ctx->opcode)]);
    } else if (l <= 2) {
        /* Return 64-bit random for both CRN and RRN */
        gen_helper_darn64(cpu_gpr[rD(ctx->opcode)]);
    } else {
        tcg_gen_movi_i64(cpu_gpr[rD(ctx->opcode)], -1);
    }
}
#endif

/*** Integer rotate                                                        ***/

/* rlwimi & rlwimi.
 */
/* Rotate left word immediate then mask insert: rotate rS left by sh,
 * insert under MASK(mb, me) into rA.  The common aligned case maps to a
 * single deposit; otherwise rotate the replicated word and merge by mask.
 */
static void gen_rlwimi(DisasContext *ctx)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    uint32_t sh = SH(ctx->opcode);
    uint32_t mb = MB(ctx->opcode);
    uint32_t me = ME(ctx->opcode);

    if (sh == (31 - me) && mb <= me) {
        /* Mask is exactly the rotated field: a plain deposit suffices */
        tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
    } else {
        target_ulong mask;
        TCGv t1;

#if defined(TARGET_PPC64)
        /* MASK() works on 64-bit positions; word masks live in bits 32-63 */
        mb += 32;
        me += 32;
#endif
        mask = MASK(mb, me);

        t1 = tcg_temp_new();
        if (mask <= 0xffffffffu) {
            /* Mask fits in the low word: 32-bit rotate is enough */
            TCGv_i32 t0 = tcg_temp_new_i32();
            tcg_gen_trunc_tl_i32(t0, t_rs);
            tcg_gen_rotli_i32(t0, t0, sh);
            tcg_gen_extu_i32_tl(t1, t0);
            tcg_temp_free_i32(t0);
        } else {
#if defined(TARGET_PPC64)
            /* Wrapping mask: replicate the word before rotating */
            tcg_gen_deposit_i64(t1, t_rs, t_rs, 32, 32);
            tcg_gen_rotli_i64(t1, t1, sh);
#else
            g_assert_not_reached();
#endif
        }

        tcg_gen_andi_tl(t1, t1, mask);
        tcg_gen_andi_tl(t_ra, t_ra, ~mask);
        tcg_gen_or_tl(t_ra, t_ra, t1);
        tcg_temp_free(t1);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}

/* rlwinm & rlwinm.
 */
/* Rotate left word immediate then AND with mask: rA = rot(rS, sh) & mask.
 * Shift-left and extract special cases avoid the rotate entirely.
 */
static void gen_rlwinm(DisasContext *ctx)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    int sh = SH(ctx->opcode);
    int mb = MB(ctx->opcode);
    int me = ME(ctx->opcode);
    int len = me - mb + 1;
    int rsh = (32 - sh) & 31;

    if (sh != 0 && len > 0 && me == (31 - sh)) {
        /* Pure shift left into a zeroed field */
        tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
    } else if (me == 31 && rsh + len <= 32) {
        /* Pure right-shift-and-mask: a field extract */
        tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
    } else {
        target_ulong mask;
#if defined(TARGET_PPC64)
        mb += 32;
        me += 32;
#endif
        mask = MASK(mb, me);
        if (sh == 0) {
            tcg_gen_andi_tl(t_ra, t_rs, mask);
        } else if (mask <= 0xffffffffu) {
            TCGv_i32 t0 = tcg_temp_new_i32();
            tcg_gen_trunc_tl_i32(t0, t_rs);
            tcg_gen_rotli_i32(t0, t0, sh);
            tcg_gen_andi_i32(t0, t0, mask);
            tcg_gen_extu_i32_tl(t_ra, t0);
            tcg_temp_free_i32(t0);
        } else {
#if defined(TARGET_PPC64)
            /* Wrapping mask: replicate the word before rotating */
            tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
            tcg_gen_rotli_i64(t_ra, t_ra, sh);
            tcg_gen_andi_i64(t_ra, t_ra, mask);
#else
            g_assert_not_reached();
#endif
        }
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}

/* rlwnm & rlwnm.
 */
/* Rotate left word then AND with mask; rotate amount from rB (mod 32). */
static void gen_rlwnm(DisasContext *ctx)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
    uint32_t mb = MB(ctx->opcode);
    uint32_t me = ME(ctx->opcode);
    target_ulong mask;

#if defined(TARGET_PPC64)
    mb += 32;
    me += 32;
#endif
    mask = MASK(mb, me);

    if (mask <= 0xffffffffu) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(t0, t_rb);
        tcg_gen_trunc_tl_i32(t1, t_rs);
        tcg_gen_andi_i32(t0, t0, 0x1f);
        tcg_gen_rotl_i32(t1, t1, t0);
        tcg_gen_extu_i32_tl(t_ra, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else {
#if defined(TARGET_PPC64)
        /* Wrapping mask: replicate the word before rotating */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_andi_i64(t0, t_rb, 0x1f);
        tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
        tcg_gen_rotl_i64(t_ra, t_ra, t0);
        tcg_temp_free_i64(t0);
#else
        g_assert_not_reached();
#endif
    }

    tcg_gen_andi_tl(t_ra, t_ra, mask);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}

#if defined(TARGET_PPC64)
/* Expand the two low opcode bits of the rld* forms into wrappers. */
#define GEN_PPC64_R2(name, opc1, opc2)                                        \
static void glue(gen_, name##0)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 0);                                                       \
}                                                                             \
                                                                              \
static void glue(gen_, name##1)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 1);                                                       \
}
#define GEN_PPC64_R4(name, opc1, opc2)                                        \
static void glue(gen_, name##0)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 0, 0);                                                    \
}                                                                             \
                                                                              \
static void glue(gen_, name##1)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 0, 1);                                                    \
}                                                                             \
                                                                              \
static void glue(gen_, name##2)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 1, 0);                                                    \
}                                                                             \
                                                                              \
static void glue(gen_, name##3)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 1, 1);                                                    \
}

/* Common rotate-left-doubleword-immediate-and-mask core, mirroring
 * gen_rlwinm's deposit/extract special cases at 64-bit width. */
static void gen_rldinm(DisasContext *ctx, int mb, int me, int sh)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    int len = me - mb + 1;
    int rsh = (64 - sh) & 63;

    if (sh != 0 && len > 0 && me == (63 - sh)) {
        tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
    } else if (me == 63 && rsh + len <= 64) {
        tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
    } else {
        tcg_gen_rotli_tl(t_ra, t_rs, sh);
        tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}

/* rldicl - rldicl. -- clear left: mask runs from mb to 63. */
static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn)
{
    uint32_t sh, mb;

    sh = SH(ctx->opcode) | (shn << 5);
    mb = MB(ctx->opcode) | (mbn << 5);
    gen_rldinm(ctx, mb, 63, sh);
}
GEN_PPC64_R4(rldicl, 0x1E, 0x00);

/* rldicr - rldicr. -- clear right: mask runs from 0 to me. */
static inline void gen_rldicr(DisasContext *ctx, int men, int shn)
{
    uint32_t sh, me;

    sh = SH(ctx->opcode) | (shn << 5);
    me = MB(ctx->opcode) | (men << 5);
    gen_rldinm(ctx, 0, me, sh);
}
GEN_PPC64_R4(rldicr, 0x1E, 0x02);

/* rldic - rldic.
 */
/* rldic: mask from mb to 63 - sh. */
static inline void gen_rldic(DisasContext *ctx, int mbn, int shn)
{
    uint32_t sh, mb;

    sh = SH(ctx->opcode) | (shn << 5);
    mb = MB(ctx->opcode) | (mbn << 5);
    gen_rldinm(ctx, mb, 63 - sh, sh);
}
GEN_PPC64_R4(rldic, 0x1E, 0x04);

/* Common rotate-left-doubleword-by-register-and-mask core; the rotate
 * amount comes from rB (mod 64). */
static void gen_rldnm(DisasContext *ctx, int mb, int me)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
    TCGv t0;

    t0 = tcg_temp_new();
    tcg_gen_andi_tl(t0, t_rb, 0x3f);
    tcg_gen_rotl_tl(t_ra, t_rs, t0);
    tcg_temp_free(t0);

    tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}

/* rldcl - rldcl. -- clear left. */
static inline void gen_rldcl(DisasContext *ctx, int mbn)
{
    uint32_t mb;

    mb = MB(ctx->opcode) | (mbn << 5);
    gen_rldnm(ctx, mb, 63);
}
GEN_PPC64_R2(rldcl, 0x1E, 0x08);

/* rldcr - rldcr. -- clear right. */
static inline void gen_rldcr(DisasContext *ctx, int men)
{
    uint32_t me;

    me = MB(ctx->opcode) | (men << 5);
    gen_rldnm(ctx, 0, me);
}
GEN_PPC64_R2(rldcr, 0x1E, 0x09);

/* rldimi - rldimi.
 */
/* Rotate left doubleword immediate then mask insert; the aligned case
 * (mb <= me = 63 - sh) collapses to a single deposit. */
static void gen_rldimi(DisasContext *ctx, int mbn, int shn)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    uint32_t sh = SH(ctx->opcode) | (shn << 5);
    uint32_t mb = MB(ctx->opcode) | (mbn << 5);
    uint32_t me = 63 - sh;

    if (mb <= me) {
        tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
    } else {
        target_ulong mask = MASK(mb, me);
        TCGv t1 = tcg_temp_new();

        tcg_gen_rotli_tl(t1, t_rs, sh);
        tcg_gen_andi_tl(t1, t1, mask);
        tcg_gen_andi_tl(t_ra, t_ra, ~mask);
        tcg_gen_or_tl(t_ra, t_ra, t1);
        tcg_temp_free(t1);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}
GEN_PPC64_R4(rldimi, 0x1E, 0x06);
#endif

/*** Integer shift                                                         ***/

/* slw & slw. -- shift left word; shift amounts >= 32 yield zero, which
 * the sign-replicated mask trick below implements without a branch. */
static void gen_slw(DisasContext *ctx)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    /* AND rS with a mask that is 0 when rB >= 0x20 */
#if defined(TARGET_PPC64)
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
    tcg_gen_sari_tl(t0, t0, 0x3f);
#else
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
    tcg_gen_sari_tl(t0, t0, 0x1f);
#endif
    tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
    tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* sraw & sraw. -- arithmetic right shift word; CA handling lives in the
 * helper. */
static void gen_sraw(DisasContext *ctx)
{
    gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], cpu_env,
                    cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* srawi & srawi.
 */
/* Shift right algebraic word immediate.  CA is set iff the result is
 * negative and any 1-bits were shifted out of the low sh positions. */
static void gen_srawi(DisasContext *ctx)
{
    int sh = SH(ctx->opcode);
    TCGv dst = cpu_gpr[rA(ctx->opcode)];
    TCGv src = cpu_gpr[rS(ctx->opcode)];
    if (sh == 0) {
        tcg_gen_ext32s_tl(dst, src);
        tcg_gen_movi_tl(cpu_ca, 0);
        if (is_isa300(ctx)) {
            tcg_gen_movi_tl(cpu_ca32, 0);
        }
    } else {
        TCGv t0;
        tcg_gen_ext32s_tl(dst, src);
        /* Bits about to be shifted out... */
        tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1);
        t0 = tcg_temp_new();
        /* ...only count when the (sign-extended) value is negative */
        tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1);
        tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
        tcg_temp_free(t0);
        tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
        if (is_isa300(ctx)) {
            tcg_gen_mov_tl(cpu_ca32, cpu_ca);
        }
        tcg_gen_sari_tl(dst, dst, sh);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, dst);
    }
}

/* srw & srw. -- shift right word; amounts >= 32 yield zero via the same
 * sign-replicated mask trick as slw. */
static void gen_srw(DisasContext *ctx)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    /* AND rS with a mask that is 0 when rB >= 0x20 */
#if defined(TARGET_PPC64)
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
    tcg_gen_sari_tl(t0, t0, 0x3f);
#else
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
    tcg_gen_sari_tl(t0, t0, 0x1f);
#endif
    tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    tcg_gen_ext32u_tl(t0, t0);
    t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
    tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

#if defined(TARGET_PPC64)
/* sld & sld.
 */
/* sld & sld. - shift left doubleword */
static void gen_sld(DisasContext *ctx)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    /* AND rS with a mask that is 0 when rB >= 0x40 */
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
    tcg_gen_sari_tl(t0, t0, 0x3f);
    tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
    tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* srad & srad. - shift right algebraic doubleword (helper computes CA). */
static void gen_srad(DisasContext *ctx)
{
    gen_helper_srad(cpu_gpr[rA(ctx->opcode)], cpu_env,
                    cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/*
 * sradi & sradi. - shift right algebraic doubleword immediate.
 * n supplies the high bit of the 6-bit shift count.  CA (and CA32 on
 * ISA 3.00) is set iff the source is negative and 1-bits are shifted out.
 */
static inline void gen_sradi(DisasContext *ctx, int n)
{
    int sh = SH(ctx->opcode) + (n << 5);
    TCGv dst = cpu_gpr[rA(ctx->opcode)];
    TCGv src = cpu_gpr[rS(ctx->opcode)];
    if (sh == 0) {
        /* Shift by zero is a plain move; no bits can be lost. */
        tcg_gen_mov_tl(dst, src);
        tcg_gen_movi_tl(cpu_ca, 0);
        if (is_isa300(ctx)) {
            tcg_gen_movi_tl(cpu_ca32, 0);
        }
    } else {
        TCGv t0;
        /* Bits about to be shifted out of the low end... */
        tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1);
        t0 = tcg_temp_new();
        /* ...count only when the source is negative. */
        tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1);
        tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
        tcg_temp_free(t0);
        /* Reduce to a 0/1 carry flag. */
        tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
        if (is_isa300(ctx)) {
            tcg_gen_mov_tl(cpu_ca32, cpu_ca);
        }
        tcg_gen_sari_tl(dst, src, sh);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, dst);
    }
}

/* sradi with shift-count high bit clear. */
static void gen_sradi0(DisasContext *ctx)
{
    gen_sradi(ctx, 0);
}

/* sradi with shift-count high bit set. */
static void gen_sradi1(DisasContext *ctx)
{
    gen_sradi(ctx, 1);
}

/* extswsli & extswsli.
 */
/*
 * extswsli & extswsli. - extend sign word and shift left immediate.
 * n supplies the high bit of the 6-bit shift count.
 */
static inline void gen_extswsli(DisasContext *ctx, int n)
{
    int sh = SH(ctx->opcode) + (n << 5);
    TCGv dst = cpu_gpr[rA(ctx->opcode)];
    TCGv src = cpu_gpr[rS(ctx->opcode)];

    tcg_gen_ext32s_tl(dst, src);
    tcg_gen_shli_tl(dst, dst, sh);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, dst);
    }
}

/* extswsli with shift-count high bit clear. */
static void gen_extswsli0(DisasContext *ctx)
{
    gen_extswsli(ctx, 0);
}

/* extswsli with shift-count high bit set. */
static void gen_extswsli1(DisasContext *ctx)
{
    gen_extswsli(ctx, 1);
}

/* srd & srd. - shift right doubleword (logical) */
static void gen_srd(DisasContext *ctx)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    /* AND rS with a mask that is 0 when rB >= 0x40 */
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
    tcg_gen_sari_tl(t0, t0, 0x3f);
    tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
    tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
#endif

/*** Addressing modes ***/
/* Register indirect with immediate index : EA = (rA|0) + SIMM */
static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA,
                                      target_long maskl)
{
    target_long simm = SIMM(ctx->opcode);

    /* maskl clears the low offset bits (DS/DQ-form instructions). */
    simm &= ~maskl;
    if (rA(ctx->opcode) == 0) {
        if (NARROW_MODE(ctx)) {
            simm = (uint32_t)simm;
        }
        tcg_gen_movi_tl(EA, simm);
    } else if (likely(simm != 0)) {
        tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
        if (NARROW_MODE(ctx)) {
            /* 32-bit mode: effective addresses wrap at 2^32. */
            tcg_gen_ext32u_tl(EA, EA);
        }
    } else {
        if (NARROW_MODE(ctx)) {
            tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
        } else {
            tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
        }
    }
}

/* Register indirect with index : EA = (rA|0) + rB */
static inline void gen_addr_reg_index(DisasContext
*ctx, TCGv EA)
{
    if (rA(ctx->opcode) == 0) {
        if (NARROW_MODE(ctx)) {
            tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]);
        } else {
            tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]);
        }
    } else {
        tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
        if (NARROW_MODE(ctx)) {
            /* 32-bit mode: effective addresses wrap at 2^32. */
            tcg_gen_ext32u_tl(EA, EA);
        }
    }
}

/* Register indirect only : EA = (rA|0) */
static inline void gen_addr_register(DisasContext *ctx, TCGv EA)
{
    if (rA(ctx->opcode) == 0) {
        tcg_gen_movi_tl(EA, 0);
    } else if (NARROW_MODE(ctx)) {
        tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
    } else {
        tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* ret = arg1 + val, truncated to 32 bits in narrow mode. */
static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1,
                                target_long val)
{
    tcg_gen_addi_tl(ret, arg1, val);
    if (NARROW_MODE(ctx)) {
        tcg_gen_ext32u_tl(ret, ret);
    }
}

/* Raise an alignment interrupt for an op without little-endian support. */
static inline void gen_align_no_le(DisasContext *ctx)
{
    gen_exception_err(ctx, POWERPC_EXCP_ALIGN,
                      (ctx->opcode & 0x03FF0000) | POWERPC_EXCP_ALIGN_LE);
}

/*** Integer load ***/
/* Memop with the guest's default byte order applied. */
#define DEF_MEMOP(op) ((op) | ctx->default_tcg_memop_mask)
/* Memop with the guest's default byte order inverted (byte-reversed ops). */
#define BSWAP_MEMOP(op) ((op) | (ctx->default_tcg_memop_mask ^ MO_BSWAP))

/* Emit a gen_qemu_<ldop> helper loading into a target-long register. */
#define GEN_QEMU_LOAD_TL(ldop, op)                                      \
static void glue(gen_qemu_, ldop)(DisasContext *ctx,                    \
                                  TCGv val,                             \
                                  TCGv addr)                            \
{                                                                       \
    tcg_gen_qemu_ld_tl(val, addr, ctx->mem_idx, op);                    \
}

GEN_QEMU_LOAD_TL(ld8u, DEF_MEMOP(MO_UB))
GEN_QEMU_LOAD_TL(ld16u, DEF_MEMOP(MO_UW))
GEN_QEMU_LOAD_TL(ld16s, DEF_MEMOP(MO_SW))
GEN_QEMU_LOAD_TL(ld32u, DEF_MEMOP(MO_UL))
GEN_QEMU_LOAD_TL(ld32s, DEF_MEMOP(MO_SL))

GEN_QEMU_LOAD_TL(ld16ur, BSWAP_MEMOP(MO_UW))
GEN_QEMU_LOAD_TL(ld32ur, BSWAP_MEMOP(MO_UL))

/* Emit a gen_qemu_<ldop>_i64 helper loading into a 64-bit register. */
#define GEN_QEMU_LOAD_64(ldop, op)                                      \
static void glue(gen_qemu_, glue(ldop, _i64))(DisasContext *ctx,        \
                                              TCGv_i64 val,             \
                                              TCGv addr)                \
2461 { \ 2462 tcg_gen_qemu_ld_i64(val, addr, ctx->mem_idx, op); \ 2463 } 2464 2465 GEN_QEMU_LOAD_64(ld8u, DEF_MEMOP(MO_UB)) 2466 GEN_QEMU_LOAD_64(ld16u, DEF_MEMOP(MO_UW)) 2467 GEN_QEMU_LOAD_64(ld32u, DEF_MEMOP(MO_UL)) 2468 GEN_QEMU_LOAD_64(ld32s, DEF_MEMOP(MO_SL)) 2469 GEN_QEMU_LOAD_64(ld64, DEF_MEMOP(MO_Q)) 2470 2471 #if defined(TARGET_PPC64) 2472 GEN_QEMU_LOAD_64(ld64ur, BSWAP_MEMOP(MO_Q)) 2473 #endif 2474 2475 #define GEN_QEMU_STORE_TL(stop, op) \ 2476 static void glue(gen_qemu_, stop)(DisasContext *ctx, \ 2477 TCGv val, \ 2478 TCGv addr) \ 2479 { \ 2480 tcg_gen_qemu_st_tl(val, addr, ctx->mem_idx, op); \ 2481 } 2482 2483 GEN_QEMU_STORE_TL(st8, DEF_MEMOP(MO_UB)) 2484 GEN_QEMU_STORE_TL(st16, DEF_MEMOP(MO_UW)) 2485 GEN_QEMU_STORE_TL(st32, DEF_MEMOP(MO_UL)) 2486 2487 GEN_QEMU_STORE_TL(st16r, BSWAP_MEMOP(MO_UW)) 2488 GEN_QEMU_STORE_TL(st32r, BSWAP_MEMOP(MO_UL)) 2489 2490 #define GEN_QEMU_STORE_64(stop, op) \ 2491 static void glue(gen_qemu_, glue(stop, _i64))(DisasContext *ctx, \ 2492 TCGv_i64 val, \ 2493 TCGv addr) \ 2494 { \ 2495 tcg_gen_qemu_st_i64(val, addr, ctx->mem_idx, op); \ 2496 } 2497 2498 GEN_QEMU_STORE_64(st8, DEF_MEMOP(MO_UB)) 2499 GEN_QEMU_STORE_64(st16, DEF_MEMOP(MO_UW)) 2500 GEN_QEMU_STORE_64(st32, DEF_MEMOP(MO_UL)) 2501 GEN_QEMU_STORE_64(st64, DEF_MEMOP(MO_Q)) 2502 2503 #if defined(TARGET_PPC64) 2504 GEN_QEMU_STORE_64(st64r, BSWAP_MEMOP(MO_Q)) 2505 #endif 2506 2507 #define GEN_LD(name, ldop, opc, type) \ 2508 static void glue(gen_, name)(DisasContext *ctx) \ 2509 { \ 2510 TCGv EA; \ 2511 gen_set_access_type(ctx, ACCESS_INT); \ 2512 EA = tcg_temp_new(); \ 2513 gen_addr_imm_index(ctx, EA, 0); \ 2514 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 2515 tcg_temp_free(EA); \ 2516 } 2517 2518 #define GEN_LDU(name, ldop, opc, type) \ 2519 static void glue(gen_, name##u)(DisasContext *ctx) \ 2520 { \ 2521 TCGv EA; \ 2522 if (unlikely(rA(ctx->opcode) == 0 || \ 2523 rA(ctx->opcode) == rD(ctx->opcode))) { \ 2524 gen_inval_exception(ctx, 
POWERPC_EXCP_INVAL_INVAL); \ 2525 return; \ 2526 } \ 2527 gen_set_access_type(ctx, ACCESS_INT); \ 2528 EA = tcg_temp_new(); \ 2529 if (type == PPC_64B) \ 2530 gen_addr_imm_index(ctx, EA, 0x03); \ 2531 else \ 2532 gen_addr_imm_index(ctx, EA, 0); \ 2533 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 2534 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 2535 tcg_temp_free(EA); \ 2536 } 2537 2538 #define GEN_LDUX(name, ldop, opc2, opc3, type) \ 2539 static void glue(gen_, name##ux)(DisasContext *ctx) \ 2540 { \ 2541 TCGv EA; \ 2542 if (unlikely(rA(ctx->opcode) == 0 || \ 2543 rA(ctx->opcode) == rD(ctx->opcode))) { \ 2544 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 2545 return; \ 2546 } \ 2547 gen_set_access_type(ctx, ACCESS_INT); \ 2548 EA = tcg_temp_new(); \ 2549 gen_addr_reg_index(ctx, EA); \ 2550 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 2551 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 2552 tcg_temp_free(EA); \ 2553 } 2554 2555 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \ 2556 static void glue(gen_, name##x)(DisasContext *ctx) \ 2557 { \ 2558 TCGv EA; \ 2559 chk; \ 2560 gen_set_access_type(ctx, ACCESS_INT); \ 2561 EA = tcg_temp_new(); \ 2562 gen_addr_reg_index(ctx, EA); \ 2563 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 2564 tcg_temp_free(EA); \ 2565 } 2566 2567 #define GEN_LDX(name, ldop, opc2, opc3, type) \ 2568 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_NONE) 2569 2570 #define GEN_LDX_HVRM(name, ldop, opc2, opc3, type) \ 2571 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_HVRM) 2572 2573 #define GEN_LDS(name, ldop, op, type) \ 2574 GEN_LD(name, ldop, op | 0x20, type); \ 2575 GEN_LDU(name, ldop, op | 0x21, type); \ 2576 GEN_LDUX(name, ldop, 0x17, op | 0x01, type); \ 2577 GEN_LDX(name, ldop, 0x17, op | 0x00, type) 2578 2579 /* lbz lbzu lbzux lbzx */ 2580 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER); 2581 /* lha lhau lhaux lhax */ 2582 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER); 2583 /* lhz lhzu lhzux 
lhzx */ 2584 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER); 2585 /* lwz lwzu lwzux lwzx */ 2586 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER); 2587 2588 #define GEN_LDEPX(name, ldop, opc2, opc3) \ 2589 static void glue(gen_, name##epx)(DisasContext *ctx) \ 2590 { \ 2591 TCGv EA; \ 2592 CHK_SV; \ 2593 gen_set_access_type(ctx, ACCESS_INT); \ 2594 EA = tcg_temp_new(); \ 2595 gen_addr_reg_index(ctx, EA); \ 2596 tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_LOAD, ldop);\ 2597 tcg_temp_free(EA); \ 2598 } 2599 2600 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02) 2601 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08) 2602 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00) 2603 #if defined(TARGET_PPC64) 2604 GEN_LDEPX(ld, DEF_MEMOP(MO_Q), 0x1D, 0x00) 2605 #endif 2606 2607 #if defined(TARGET_PPC64) 2608 /* lwaux */ 2609 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B); 2610 /* lwax */ 2611 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B); 2612 /* ldux */ 2613 GEN_LDUX(ld, ld64_i64, 0x15, 0x01, PPC_64B); 2614 /* ldx */ 2615 GEN_LDX(ld, ld64_i64, 0x15, 0x00, PPC_64B); 2616 2617 /* CI load/store variants */ 2618 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST) 2619 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x15, PPC_CILDST) 2620 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST) 2621 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST) 2622 2623 static void gen_ld(DisasContext *ctx) 2624 { 2625 TCGv EA; 2626 if (Rc(ctx->opcode)) { 2627 if (unlikely(rA(ctx->opcode) == 0 || 2628 rA(ctx->opcode) == rD(ctx->opcode))) { 2629 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2630 return; 2631 } 2632 } 2633 gen_set_access_type(ctx, ACCESS_INT); 2634 EA = tcg_temp_new(); 2635 gen_addr_imm_index(ctx, EA, 0x03); 2636 if (ctx->opcode & 0x02) { 2637 /* lwa (lwau is undefined) */ 2638 gen_qemu_ld32s(ctx, cpu_gpr[rD(ctx->opcode)], EA); 2639 } else { 2640 /* ld - ldu */ 2641 gen_qemu_ld64_i64(ctx, cpu_gpr[rD(ctx->opcode)], EA); 2642 } 2643 if (Rc(ctx->opcode)) 2644 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); 2645 
tcg_temp_free(EA); 2646 } 2647 2648 /* lq */ 2649 static void gen_lq(DisasContext *ctx) 2650 { 2651 int ra, rd; 2652 TCGv EA, hi, lo; 2653 2654 /* lq is a legal user mode instruction starting in ISA 2.07 */ 2655 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2656 bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2657 2658 if (!legal_in_user_mode && ctx->pr) { 2659 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); 2660 return; 2661 } 2662 2663 if (!le_is_supported && ctx->le_mode) { 2664 gen_align_no_le(ctx); 2665 return; 2666 } 2667 ra = rA(ctx->opcode); 2668 rd = rD(ctx->opcode); 2669 if (unlikely((rd & 1) || rd == ra)) { 2670 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2671 return; 2672 } 2673 2674 gen_set_access_type(ctx, ACCESS_INT); 2675 EA = tcg_temp_new(); 2676 gen_addr_imm_index(ctx, EA, 0x0F); 2677 2678 /* Note that the low part is always in RD+1, even in LE mode. */ 2679 lo = cpu_gpr[rd + 1]; 2680 hi = cpu_gpr[rd]; 2681 2682 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 2683 if (HAVE_ATOMIC128) { 2684 TCGv_i32 oi = tcg_temp_new_i32(); 2685 if (ctx->le_mode) { 2686 tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ, ctx->mem_idx)); 2687 gen_helper_lq_le_parallel(lo, cpu_env, EA, oi); 2688 } else { 2689 tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ, ctx->mem_idx)); 2690 gen_helper_lq_be_parallel(lo, cpu_env, EA, oi); 2691 } 2692 tcg_temp_free_i32(oi); 2693 tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh)); 2694 } else { 2695 /* Restart with exclusive lock. 
*/ 2696 gen_helper_exit_atomic(cpu_env); 2697 ctx->base.is_jmp = DISAS_NORETURN; 2698 } 2699 } else if (ctx->le_mode) { 2700 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEQ); 2701 gen_addr_add(ctx, EA, EA, 8); 2702 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEQ); 2703 } else { 2704 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEQ); 2705 gen_addr_add(ctx, EA, EA, 8); 2706 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEQ); 2707 } 2708 tcg_temp_free(EA); 2709 } 2710 #endif 2711 2712 /*** Integer store ***/ 2713 #define GEN_ST(name, stop, opc, type) \ 2714 static void glue(gen_, name)(DisasContext *ctx) \ 2715 { \ 2716 TCGv EA; \ 2717 gen_set_access_type(ctx, ACCESS_INT); \ 2718 EA = tcg_temp_new(); \ 2719 gen_addr_imm_index(ctx, EA, 0); \ 2720 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2721 tcg_temp_free(EA); \ 2722 } 2723 2724 #define GEN_STU(name, stop, opc, type) \ 2725 static void glue(gen_, stop##u)(DisasContext *ctx) \ 2726 { \ 2727 TCGv EA; \ 2728 if (unlikely(rA(ctx->opcode) == 0)) { \ 2729 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 2730 return; \ 2731 } \ 2732 gen_set_access_type(ctx, ACCESS_INT); \ 2733 EA = tcg_temp_new(); \ 2734 if (type == PPC_64B) \ 2735 gen_addr_imm_index(ctx, EA, 0x03); \ 2736 else \ 2737 gen_addr_imm_index(ctx, EA, 0); \ 2738 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2739 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 2740 tcg_temp_free(EA); \ 2741 } 2742 2743 #define GEN_STUX(name, stop, opc2, opc3, type) \ 2744 static void glue(gen_, name##ux)(DisasContext *ctx) \ 2745 { \ 2746 TCGv EA; \ 2747 if (unlikely(rA(ctx->opcode) == 0)) { \ 2748 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 2749 return; \ 2750 } \ 2751 gen_set_access_type(ctx, ACCESS_INT); \ 2752 EA = tcg_temp_new(); \ 2753 gen_addr_reg_index(ctx, EA); \ 2754 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2755 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 2756 tcg_temp_free(EA); \ 2757 } 2758 2759 #define 
GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \ 2760 static void glue(gen_, name##x)(DisasContext *ctx) \ 2761 { \ 2762 TCGv EA; \ 2763 chk; \ 2764 gen_set_access_type(ctx, ACCESS_INT); \ 2765 EA = tcg_temp_new(); \ 2766 gen_addr_reg_index(ctx, EA); \ 2767 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2768 tcg_temp_free(EA); \ 2769 } 2770 #define GEN_STX(name, stop, opc2, opc3, type) \ 2771 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_NONE) 2772 2773 #define GEN_STX_HVRM(name, stop, opc2, opc3, type) \ 2774 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_HVRM) 2775 2776 #define GEN_STS(name, stop, op, type) \ 2777 GEN_ST(name, stop, op | 0x20, type); \ 2778 GEN_STU(name, stop, op | 0x21, type); \ 2779 GEN_STUX(name, stop, 0x17, op | 0x01, type); \ 2780 GEN_STX(name, stop, 0x17, op | 0x00, type) 2781 2782 /* stb stbu stbux stbx */ 2783 GEN_STS(stb, st8, 0x06, PPC_INTEGER); 2784 /* sth sthu sthux sthx */ 2785 GEN_STS(sth, st16, 0x0C, PPC_INTEGER); 2786 /* stw stwu stwux stwx */ 2787 GEN_STS(stw, st32, 0x04, PPC_INTEGER); 2788 2789 #define GEN_STEPX(name, stop, opc2, opc3) \ 2790 static void glue(gen_, name##epx)(DisasContext *ctx) \ 2791 { \ 2792 TCGv EA; \ 2793 CHK_SV; \ 2794 gen_set_access_type(ctx, ACCESS_INT); \ 2795 EA = tcg_temp_new(); \ 2796 gen_addr_reg_index(ctx, EA); \ 2797 tcg_gen_qemu_st_tl( \ 2798 cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_STORE, stop); \ 2799 tcg_temp_free(EA); \ 2800 } 2801 2802 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06) 2803 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C) 2804 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04) 2805 #if defined(TARGET_PPC64) 2806 GEN_STEPX(std, DEF_MEMOP(MO_Q), 0x1d, 0x04) 2807 #endif 2808 2809 #if defined(TARGET_PPC64) 2810 GEN_STUX(std, st64_i64, 0x15, 0x05, PPC_64B); 2811 GEN_STX(std, st64_i64, 0x15, 0x04, PPC_64B); 2812 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST) 2813 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST) 2814 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, 
PPC_CILDST) 2815 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST) 2816 2817 static void gen_std(DisasContext *ctx) 2818 { 2819 int rs; 2820 TCGv EA; 2821 2822 rs = rS(ctx->opcode); 2823 if ((ctx->opcode & 0x3) == 0x2) { /* stq */ 2824 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2825 bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2826 TCGv hi, lo; 2827 2828 if (!(ctx->insns_flags & PPC_64BX)) { 2829 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2830 } 2831 2832 if (!legal_in_user_mode && ctx->pr) { 2833 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); 2834 return; 2835 } 2836 2837 if (!le_is_supported && ctx->le_mode) { 2838 gen_align_no_le(ctx); 2839 return; 2840 } 2841 2842 if (unlikely(rs & 1)) { 2843 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2844 return; 2845 } 2846 gen_set_access_type(ctx, ACCESS_INT); 2847 EA = tcg_temp_new(); 2848 gen_addr_imm_index(ctx, EA, 0x03); 2849 2850 /* Note that the low part is always in RS+1, even in LE mode. */ 2851 lo = cpu_gpr[rs + 1]; 2852 hi = cpu_gpr[rs]; 2853 2854 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 2855 if (HAVE_ATOMIC128) { 2856 TCGv_i32 oi = tcg_temp_new_i32(); 2857 if (ctx->le_mode) { 2858 tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ, ctx->mem_idx)); 2859 gen_helper_stq_le_parallel(cpu_env, EA, lo, hi, oi); 2860 } else { 2861 tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ, ctx->mem_idx)); 2862 gen_helper_stq_be_parallel(cpu_env, EA, lo, hi, oi); 2863 } 2864 tcg_temp_free_i32(oi); 2865 } else { 2866 /* Restart with exclusive lock. 
*/ 2867 gen_helper_exit_atomic(cpu_env); 2868 ctx->base.is_jmp = DISAS_NORETURN; 2869 } 2870 } else if (ctx->le_mode) { 2871 tcg_gen_qemu_st_i64(lo, EA, ctx->mem_idx, MO_LEQ); 2872 gen_addr_add(ctx, EA, EA, 8); 2873 tcg_gen_qemu_st_i64(hi, EA, ctx->mem_idx, MO_LEQ); 2874 } else { 2875 tcg_gen_qemu_st_i64(hi, EA, ctx->mem_idx, MO_BEQ); 2876 gen_addr_add(ctx, EA, EA, 8); 2877 tcg_gen_qemu_st_i64(lo, EA, ctx->mem_idx, MO_BEQ); 2878 } 2879 tcg_temp_free(EA); 2880 } else { 2881 /* std / stdu */ 2882 if (Rc(ctx->opcode)) { 2883 if (unlikely(rA(ctx->opcode) == 0)) { 2884 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2885 return; 2886 } 2887 } 2888 gen_set_access_type(ctx, ACCESS_INT); 2889 EA = tcg_temp_new(); 2890 gen_addr_imm_index(ctx, EA, 0x03); 2891 gen_qemu_st64_i64(ctx, cpu_gpr[rs], EA); 2892 if (Rc(ctx->opcode)) 2893 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); 2894 tcg_temp_free(EA); 2895 } 2896 } 2897 #endif 2898 /*** Integer load and store with byte reverse ***/ 2899 2900 /* lhbrx */ 2901 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER); 2902 2903 /* lwbrx */ 2904 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER); 2905 2906 #if defined(TARGET_PPC64) 2907 /* ldbrx */ 2908 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE); 2909 /* stdbrx */ 2910 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE); 2911 #endif /* TARGET_PPC64 */ 2912 2913 /* sthbrx */ 2914 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER); 2915 /* stwbrx */ 2916 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER); 2917 2918 /*** Integer load and store multiple ***/ 2919 2920 /* lmw */ 2921 static void gen_lmw(DisasContext *ctx) 2922 { 2923 TCGv t0; 2924 TCGv_i32 t1; 2925 2926 if (ctx->le_mode) { 2927 gen_align_no_le(ctx); 2928 return; 2929 } 2930 gen_set_access_type(ctx, ACCESS_INT); 2931 t0 = tcg_temp_new(); 2932 t1 = tcg_const_i32(rD(ctx->opcode)); 2933 gen_addr_imm_index(ctx, t0, 0); 2934 gen_helper_lmw(cpu_env, t0, t1); 2935 tcg_temp_free(t0); 2936 
    tcg_temp_free_i32(t1);
}

/* stmw - store multiple word; not available in little-endian mode. */
static void gen_stmw(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1;

    if (ctx->le_mode) {
        gen_align_no_le(ctx);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    t0 = tcg_temp_new();
    t1 = tcg_const_i32(rS(ctx->opcode));
    gen_addr_imm_index(ctx, t0, 0);
    gen_helper_stmw(cpu_env, t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}

/*** Integer load and store strings ***/

/* lswi */
/*
 * PowerPC32 specification says we must generate an exception if rA is
 * in the range of registers to be loaded.  On the other hand, IBM says
 * this is valid, but rA won't be loaded.  For now, I'll follow the spec...
 */
static void gen_lswi(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1, t2;
    int nb = NB(ctx->opcode);       /* byte count; 0 encodes 32 */
    int start = rD(ctx->opcode);    /* first target register */
    int ra = rA(ctx->opcode);
    int nr;                         /* number of registers touched */

    if (ctx->le_mode) {
        gen_align_no_le(ctx);
        return;
    }
    if (nb == 0)
        nb = 32;
    nr = DIV_ROUND_UP(nb, 4);
    if (unlikely(lsw_reg_in_range(start, nr, ra))) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    t0 = tcg_temp_new();
    gen_addr_register(ctx, t0);
    t1 = tcg_const_i32(nb);
    t2 = tcg_const_i32(start);
    gen_helper_lsw(cpu_env, t0, t1, t2);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
}

/* lswx - load string word indexed; the helper reads the count from XER. */
static void gen_lswx(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1, t2, t3;

    if (ctx->le_mode) {
        gen_align_no_le(ctx);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    t1 = tcg_const_i32(rD(ctx->opcode));
    t2 = tcg_const_i32(rA(ctx->opcode));
    t3 = tcg_const_i32(rB(ctx->opcode));
    gen_helper_lswx(cpu_env, t0, t1, t2, t3);
3014 tcg_temp_free(t0); 3015 tcg_temp_free_i32(t1); 3016 tcg_temp_free_i32(t2); 3017 tcg_temp_free_i32(t3); 3018 } 3019 3020 /* stswi */ 3021 static void gen_stswi(DisasContext *ctx) 3022 { 3023 TCGv t0; 3024 TCGv_i32 t1, t2; 3025 int nb = NB(ctx->opcode); 3026 3027 if (ctx->le_mode) { 3028 gen_align_no_le(ctx); 3029 return; 3030 } 3031 gen_set_access_type(ctx, ACCESS_INT); 3032 t0 = tcg_temp_new(); 3033 gen_addr_register(ctx, t0); 3034 if (nb == 0) 3035 nb = 32; 3036 t1 = tcg_const_i32(nb); 3037 t2 = tcg_const_i32(rS(ctx->opcode)); 3038 gen_helper_stsw(cpu_env, t0, t1, t2); 3039 tcg_temp_free(t0); 3040 tcg_temp_free_i32(t1); 3041 tcg_temp_free_i32(t2); 3042 } 3043 3044 /* stswx */ 3045 static void gen_stswx(DisasContext *ctx) 3046 { 3047 TCGv t0; 3048 TCGv_i32 t1, t2; 3049 3050 if (ctx->le_mode) { 3051 gen_align_no_le(ctx); 3052 return; 3053 } 3054 gen_set_access_type(ctx, ACCESS_INT); 3055 t0 = tcg_temp_new(); 3056 gen_addr_reg_index(ctx, t0); 3057 t1 = tcg_temp_new_i32(); 3058 tcg_gen_trunc_tl_i32(t1, cpu_xer); 3059 tcg_gen_andi_i32(t1, t1, 0x7F); 3060 t2 = tcg_const_i32(rS(ctx->opcode)); 3061 gen_helper_stsw(cpu_env, t0, t1, t2); 3062 tcg_temp_free(t0); 3063 tcg_temp_free_i32(t1); 3064 tcg_temp_free_i32(t2); 3065 } 3066 3067 /*** Memory synchronisation ***/ 3068 /* eieio */ 3069 static void gen_eieio(DisasContext *ctx) 3070 { 3071 TCGBar bar = TCG_MO_LD_ST; 3072 3073 /* 3074 * POWER9 has a eieio instruction variant using bit 6 as a hint to 3075 * tell the CPU it is a store-forwarding barrier. 3076 */ 3077 if (ctx->opcode & 0x2000000) { 3078 /* 3079 * ISA says that "Reserved fields in instructions are ignored 3080 * by the processor". So ignore the bit 6 on non-POWER9 CPU but 3081 * as this is not an instruction software should be using, 3082 * complain to the user. 
3083 */ 3084 if (!(ctx->insns_flags2 & PPC2_ISA300)) { 3085 qemu_log_mask(LOG_GUEST_ERROR, "invalid eieio using bit 6 at @" 3086 TARGET_FMT_lx "\n", ctx->base.pc_next - 4); 3087 } else { 3088 bar = TCG_MO_ST_LD; 3089 } 3090 } 3091 3092 tcg_gen_mb(bar | TCG_BAR_SC); 3093 } 3094 3095 #if !defined(CONFIG_USER_ONLY) 3096 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) 3097 { 3098 TCGv_i32 t; 3099 TCGLabel *l; 3100 3101 if (!ctx->lazy_tlb_flush) { 3102 return; 3103 } 3104 l = gen_new_label(); 3105 t = tcg_temp_new_i32(); 3106 tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, tlb_need_flush)); 3107 tcg_gen_brcondi_i32(TCG_COND_EQ, t, 0, l); 3108 if (global) { 3109 gen_helper_check_tlb_flush_global(cpu_env); 3110 } else { 3111 gen_helper_check_tlb_flush_local(cpu_env); 3112 } 3113 gen_set_label(l); 3114 tcg_temp_free_i32(t); 3115 } 3116 #else 3117 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) { } 3118 #endif 3119 3120 /* isync */ 3121 static void gen_isync(DisasContext *ctx) 3122 { 3123 /* 3124 * We need to check for a pending TLB flush. 
This can only happen in 3125 * kernel mode however so check MSR_PR 3126 */ 3127 if (!ctx->pr) { 3128 gen_check_tlb_flush(ctx, false); 3129 } 3130 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC); 3131 gen_stop_exception(ctx); 3132 } 3133 3134 #define MEMOP_GET_SIZE(x) (1 << ((x) & MO_SIZE)) 3135 3136 static void gen_load_locked(DisasContext *ctx, TCGMemOp memop) 3137 { 3138 TCGv gpr = cpu_gpr[rD(ctx->opcode)]; 3139 TCGv t0 = tcg_temp_new(); 3140 3141 gen_set_access_type(ctx, ACCESS_RES); 3142 gen_addr_reg_index(ctx, t0); 3143 tcg_gen_qemu_ld_tl(gpr, t0, ctx->mem_idx, memop | MO_ALIGN); 3144 tcg_gen_mov_tl(cpu_reserve, t0); 3145 tcg_gen_mov_tl(cpu_reserve_val, gpr); 3146 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_LDAQ); 3147 tcg_temp_free(t0); 3148 } 3149 3150 #define LARX(name, memop) \ 3151 static void gen_##name(DisasContext *ctx) \ 3152 { \ 3153 gen_load_locked(ctx, memop); \ 3154 } 3155 3156 /* lwarx */ 3157 LARX(lbarx, DEF_MEMOP(MO_UB)) 3158 LARX(lharx, DEF_MEMOP(MO_UW)) 3159 LARX(lwarx, DEF_MEMOP(MO_UL)) 3160 3161 static void gen_fetch_inc_conditional(DisasContext *ctx, TCGMemOp memop, 3162 TCGv EA, TCGCond cond, int addend) 3163 { 3164 TCGv t = tcg_temp_new(); 3165 TCGv t2 = tcg_temp_new(); 3166 TCGv u = tcg_temp_new(); 3167 3168 tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop); 3169 tcg_gen_addi_tl(t2, EA, MEMOP_GET_SIZE(memop)); 3170 tcg_gen_qemu_ld_tl(t2, t2, ctx->mem_idx, memop); 3171 tcg_gen_addi_tl(u, t, addend); 3172 3173 /* E.g. for fetch and increment bounded... */ 3174 /* mem(EA,s) = (t != t2 ? u = t + 1 : t) */ 3175 tcg_gen_movcond_tl(cond, u, t, t2, u, t); 3176 tcg_gen_qemu_st_tl(u, EA, ctx->mem_idx, memop); 3177 3178 /* RT = (t != t2 ? 
t : u = 1<<(s*8-1)) */ 3179 tcg_gen_movi_tl(u, 1 << (MEMOP_GET_SIZE(memop) * 8 - 1)); 3180 tcg_gen_movcond_tl(cond, cpu_gpr[rD(ctx->opcode)], t, t2, t, u); 3181 3182 tcg_temp_free(t); 3183 tcg_temp_free(t2); 3184 tcg_temp_free(u); 3185 } 3186 3187 static void gen_ld_atomic(DisasContext *ctx, TCGMemOp memop) 3188 { 3189 uint32_t gpr_FC = FC(ctx->opcode); 3190 TCGv EA = tcg_temp_new(); 3191 int rt = rD(ctx->opcode); 3192 bool need_serial; 3193 TCGv src, dst; 3194 3195 gen_addr_register(ctx, EA); 3196 dst = cpu_gpr[rt]; 3197 src = cpu_gpr[(rt + 1) & 31]; 3198 3199 need_serial = false; 3200 memop |= MO_ALIGN; 3201 switch (gpr_FC) { 3202 case 0: /* Fetch and add */ 3203 tcg_gen_atomic_fetch_add_tl(dst, EA, src, ctx->mem_idx, memop); 3204 break; 3205 case 1: /* Fetch and xor */ 3206 tcg_gen_atomic_fetch_xor_tl(dst, EA, src, ctx->mem_idx, memop); 3207 break; 3208 case 2: /* Fetch and or */ 3209 tcg_gen_atomic_fetch_or_tl(dst, EA, src, ctx->mem_idx, memop); 3210 break; 3211 case 3: /* Fetch and 'and' */ 3212 tcg_gen_atomic_fetch_and_tl(dst, EA, src, ctx->mem_idx, memop); 3213 break; 3214 case 4: /* Fetch and max unsigned */ 3215 tcg_gen_atomic_fetch_umax_tl(dst, EA, src, ctx->mem_idx, memop); 3216 break; 3217 case 5: /* Fetch and max signed */ 3218 tcg_gen_atomic_fetch_smax_tl(dst, EA, src, ctx->mem_idx, memop); 3219 break; 3220 case 6: /* Fetch and min unsigned */ 3221 tcg_gen_atomic_fetch_umin_tl(dst, EA, src, ctx->mem_idx, memop); 3222 break; 3223 case 7: /* Fetch and min signed */ 3224 tcg_gen_atomic_fetch_smin_tl(dst, EA, src, ctx->mem_idx, memop); 3225 break; 3226 case 8: /* Swap */ 3227 tcg_gen_atomic_xchg_tl(dst, EA, src, ctx->mem_idx, memop); 3228 break; 3229 3230 case 16: /* Compare and swap not equal */ 3231 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3232 need_serial = true; 3233 } else { 3234 TCGv t0 = tcg_temp_new(); 3235 TCGv t1 = tcg_temp_new(); 3236 3237 tcg_gen_qemu_ld_tl(t0, EA, ctx->mem_idx, memop); 3238 if ((memop & MO_SIZE) == MO_64 || 
TARGET_LONG_BITS == 32) { 3239 tcg_gen_mov_tl(t1, src); 3240 } else { 3241 tcg_gen_ext32u_tl(t1, src); 3242 } 3243 tcg_gen_movcond_tl(TCG_COND_NE, t1, t0, t1, 3244 cpu_gpr[(rt + 2) & 31], t0); 3245 tcg_gen_qemu_st_tl(t1, EA, ctx->mem_idx, memop); 3246 tcg_gen_mov_tl(dst, t0); 3247 3248 tcg_temp_free(t0); 3249 tcg_temp_free(t1); 3250 } 3251 break; 3252 3253 case 24: /* Fetch and increment bounded */ 3254 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3255 need_serial = true; 3256 } else { 3257 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, 1); 3258 } 3259 break; 3260 case 25: /* Fetch and increment equal */ 3261 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3262 need_serial = true; 3263 } else { 3264 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_EQ, 1); 3265 } 3266 break; 3267 case 28: /* Fetch and decrement bounded */ 3268 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3269 need_serial = true; 3270 } else { 3271 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, -1); 3272 } 3273 break; 3274 3275 default: 3276 /* invoke data storage error handler */ 3277 gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL); 3278 } 3279 tcg_temp_free(EA); 3280 3281 if (need_serial) { 3282 /* Restart with exclusive lock. 
 */
        gen_helper_exit_atomic(cpu_env);
        ctx->base.is_jmp = DISAS_NORETURN;
    }
}

/* lwat: word-sized Load Atomic (ISA 3.0). */
static void gen_lwat(DisasContext *ctx)
{
    gen_ld_atomic(ctx, DEF_MEMOP(MO_UL));
}

#ifdef TARGET_PPC64
/* ldat: doubleword-sized Load Atomic (ISA 3.0). */
static void gen_ldat(DisasContext *ctx)
{
    gen_ld_atomic(ctx, DEF_MEMOP(MO_Q));
}
#endif

/*
 * Store Atomic (stwat/stdat, ISA 3.0).  The FC function code selects
 * which read-modify-write operation is applied at EA with RS as the
 * operand; the value fetched from memory is discarded.
 */
static void gen_st_atomic(DisasContext *ctx, TCGMemOp memop)
{
    uint32_t gpr_FC = FC(ctx->opcode);
    TCGv EA = tcg_temp_new();
    TCGv src, discard;

    gen_addr_register(ctx, EA);
    src = cpu_gpr[rD(ctx->opcode)];
    /* The atomic *_fetch ops return the result, which st*at ignores. */
    discard = tcg_temp_new();

    memop |= MO_ALIGN;
    switch (gpr_FC) {
    case 0: /* add and Store */
        tcg_gen_atomic_add_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 1: /* xor and Store */
        tcg_gen_atomic_xor_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 2: /* Or and Store */
        tcg_gen_atomic_or_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 3: /* 'and' and Store */
        tcg_gen_atomic_and_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 4: /* Store max unsigned */
        tcg_gen_atomic_umax_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 5: /* Store max signed */
        tcg_gen_atomic_smax_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 6: /* Store min unsigned */
        tcg_gen_atomic_umin_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 7: /* Store min signed */
        tcg_gen_atomic_smin_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 24: /* Store twin */
        if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
            /* Restart with exclusive lock. */
            gen_helper_exit_atomic(cpu_env);
            ctx->base.is_jmp = DISAS_NORETURN;
        } else {
            /* If the two adjacent words at EA are equal, store RS to both. */
            TCGv t = tcg_temp_new();
            TCGv t2 = tcg_temp_new();
            TCGv s = tcg_temp_new();
            TCGv s2 = tcg_temp_new();
            TCGv ea_plus_s = tcg_temp_new();

            tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop);
            tcg_gen_addi_tl(ea_plus_s, EA, MEMOP_GET_SIZE(memop));
            tcg_gen_qemu_ld_tl(t2, ea_plus_s, ctx->mem_idx, memop);
            tcg_gen_movcond_tl(TCG_COND_EQ, s, t, t2, src, t);
            tcg_gen_movcond_tl(TCG_COND_EQ, s2, t, t2, src, t2);
            tcg_gen_qemu_st_tl(s, EA, ctx->mem_idx, memop);
            tcg_gen_qemu_st_tl(s2, ea_plus_s, ctx->mem_idx, memop);

            tcg_temp_free(ea_plus_s);
            tcg_temp_free(s2);
            tcg_temp_free(s);
            tcg_temp_free(t2);
            tcg_temp_free(t);
        }
        break;
    default:
        /* invoke data storage error handler */
        gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL);
    }
    tcg_temp_free(discard);
    tcg_temp_free(EA);
}

static void gen_stwat(DisasContext *ctx)
{
    gen_st_atomic(ctx, DEF_MEMOP(MO_UL));
}

#ifdef TARGET_PPC64
static void gen_stdat(DisasContext *ctx)
{
    gen_st_atomic(ctx, DEF_MEMOP(MO_Q));
}
#endif

/*
 * st[bhwd]cx.: store conditional.  Succeeds only if the reservation
 * established by a previous l*arx still matches EA; CR0 receives
 * SO and, on success, EQ.
 */
static void gen_conditional_store(DisasContext *ctx, TCGMemOp memop)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    int reg = rS(ctx->opcode);

    gen_set_access_type(ctx, ACCESS_RES);
    gen_addr_reg_index(ctx, t0);
    tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
    tcg_temp_free(t0);

    t0 = tcg_temp_new();
    /* cmpxchg leaves the old memory value in t0; equality with the
     * remembered reservation value means the store went through. */
    tcg_gen_atomic_cmpxchg_tl(t0, cpu_reserve, cpu_reserve_val,
                              cpu_gpr[reg], ctx->mem_idx,
                              DEF_MEMOP(memop) | MO_ALIGN);
    tcg_gen_setcond_tl(TCG_COND_EQ, t0, t0, cpu_reserve_val);
    tcg_gen_shli_tl(t0, t0, CRF_EQ_BIT);
    tcg_gen_or_tl(t0, t0, cpu_so);
    tcg_gen_trunc_tl_i32(cpu_crf[0], t0);
    tcg_temp_free(t0);
    tcg_gen_br(l2);

    gen_set_label(l1);

    /* Address mismatch implies failure. But we still need to provide the
       memory barrier semantics of the instruction. */
    tcg_gen_mb(TCG_MO_ALL | TCG_BAR_STRL);
    tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);

    gen_set_label(l2);
    /* The reservation is always lost once stcx. completes. */
    tcg_gen_movi_tl(cpu_reserve, -1);
}

/* Emit one store-conditional flavour on top of gen_conditional_store(). */
#define STCX(name, memop)                       \
static void gen_##name(DisasContext *ctx)       \
{                                               \
    gen_conditional_store(ctx, memop);          \
}

STCX(stbcx_, DEF_MEMOP(MO_UB))
STCX(sthcx_, DEF_MEMOP(MO_UW))
STCX(stwcx_, DEF_MEMOP(MO_UL))

#if defined(TARGET_PPC64)
/* ldarx */
LARX(ldarx, DEF_MEMOP(MO_Q))
/* stdcx. */
STCX(stdcx_, DEF_MEMOP(MO_Q))

/* lqarx: load quadword and establish a reservation. */
static void gen_lqarx(DisasContext *ctx)
{
    int rd = rD(ctx->opcode);
    TCGv EA, hi, lo;

    /* RD must be even and must not overlap the address operands. */
    if (unlikely((rd & 1) || (rd == rA(ctx->opcode)) ||
                 (rd == rB(ctx->opcode)))) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }

    gen_set_access_type(ctx, ACCESS_RES);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);

    /* Note that the low part is always in RD+1, even in LE mode. */
    lo = cpu_gpr[rd + 1];
    hi = cpu_gpr[rd];

    if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
        if (HAVE_ATOMIC128) {
            TCGv_i32 oi = tcg_temp_new_i32();
            if (ctx->le_mode) {
                tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ | MO_ALIGN_16,
                                                    ctx->mem_idx));
                gen_helper_lq_le_parallel(lo, cpu_env, EA, oi);
            } else {
                tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ | MO_ALIGN_16,
                                                    ctx->mem_idx));
                gen_helper_lq_be_parallel(lo, cpu_env, EA, oi);
            }
            tcg_temp_free_i32(oi);
            /* The helper returns the high half through env->retxh. */
            tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh));
        } else {
            /* Restart with exclusive lock.
 */
            gen_helper_exit_atomic(cpu_env);
            ctx->base.is_jmp = DISAS_NORETURN;
            tcg_temp_free(EA);
            return;
        }
    } else if (ctx->le_mode) {
        tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEQ | MO_ALIGN_16);
        tcg_gen_mov_tl(cpu_reserve, EA);
        gen_addr_add(ctx, EA, EA, 8);
        tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEQ);
    } else {
        tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEQ | MO_ALIGN_16);
        tcg_gen_mov_tl(cpu_reserve, EA);
        gen_addr_add(ctx, EA, EA, 8);
        tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);

    /* Remember both halves for the matching stqcx. */
    tcg_gen_st_tl(hi, cpu_env, offsetof(CPUPPCState, reserve_val));
    tcg_gen_st_tl(lo, cpu_env, offsetof(CPUPPCState, reserve_val2));
}

/* stqcx.: store quadword conditional. */
static void gen_stqcx_(DisasContext *ctx)
{
    int rs = rS(ctx->opcode);
    TCGv EA, hi, lo;

    /* RS must be an even register. */
    if (unlikely(rs & 1)) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }

    gen_set_access_type(ctx, ACCESS_RES);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);

    /* Note that the low part is always in RS+1, even in LE mode. */
    lo = cpu_gpr[rs + 1];
    hi = cpu_gpr[rs];

    if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
        if (HAVE_CMPXCHG128) {
            TCGv_i32 oi = tcg_const_i32(DEF_MEMOP(MO_Q) | MO_ALIGN_16);
            if (ctx->le_mode) {
                gen_helper_stqcx_le_parallel(cpu_crf[0], cpu_env,
                                             EA, lo, hi, oi);
            } else {
                gen_helper_stqcx_be_parallel(cpu_crf[0], cpu_env,
                                             EA, lo, hi, oi);
            }
            tcg_temp_free_i32(oi);
        } else {
            /* Restart with exclusive lock. */
            gen_helper_exit_atomic(cpu_env);
            ctx->base.is_jmp = DISAS_NORETURN;
        }
        tcg_temp_free(EA);
    } else {
        TCGLabel *lab_fail = gen_new_label();
        TCGLabel *lab_over = gen_new_label();
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        /* Fail if EA does not match the reservation address ... */
        tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, lab_fail);
        tcg_temp_free(EA);

        /* ... or if either half no longer holds the reserved value.  The
         * reserve_val/reserve_val2 pair is swapped in LE mode. */
        gen_qemu_ld64_i64(ctx, t0, cpu_reserve);
        tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode
                                     ? offsetof(CPUPPCState, reserve_val2)
                                     : offsetof(CPUPPCState, reserve_val)));
        tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail);

        tcg_gen_addi_i64(t0, cpu_reserve, 8);
        gen_qemu_ld64_i64(ctx, t0, t0);
        tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode
                                     ? offsetof(CPUPPCState, reserve_val)
                                     : offsetof(CPUPPCState, reserve_val2)));
        tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail);

        /* Success */
        gen_qemu_st64_i64(ctx, ctx->le_mode ? lo : hi, cpu_reserve);
        tcg_gen_addi_i64(t0, cpu_reserve, 8);
        gen_qemu_st64_i64(ctx, ctx->le_mode ? hi : lo, t0);

        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
        tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ);
        tcg_gen_br(lab_over);

        gen_set_label(lab_fail);
        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);

        gen_set_label(lab_over);
        /* The reservation is lost either way. */
        tcg_gen_movi_tl(cpu_reserve, -1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
#endif /* defined(TARGET_PPC64) */

/* sync */
static void gen_sync(DisasContext *ctx)
{
    uint32_t l = (ctx->opcode >> 21) & 3;

    /*
     * We may need to check for a pending TLB flush.
     *
     * We do this on ptesync (l == 2) on ppc64 and any sync on ppc32.
     *
     * Additionally, this can only happen in kernel mode however so
     * check MSR_PR as well.
3581 */ 3582 if (((l == 2) || !(ctx->insns_flags & PPC_64B)) && !ctx->pr) { 3583 gen_check_tlb_flush(ctx, true); 3584 } 3585 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC); 3586 } 3587 3588 /* wait */ 3589 static void gen_wait(DisasContext *ctx) 3590 { 3591 TCGv_i32 t0 = tcg_const_i32(1); 3592 tcg_gen_st_i32(t0, cpu_env, 3593 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted)); 3594 tcg_temp_free_i32(t0); 3595 /* Stop translation, as the CPU is supposed to sleep from now */ 3596 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 3597 } 3598 3599 #if defined(TARGET_PPC64) 3600 static void gen_doze(DisasContext *ctx) 3601 { 3602 #if defined(CONFIG_USER_ONLY) 3603 GEN_PRIV; 3604 #else 3605 TCGv_i32 t; 3606 3607 CHK_HV; 3608 t = tcg_const_i32(PPC_PM_DOZE); 3609 gen_helper_pminsn(cpu_env, t); 3610 tcg_temp_free_i32(t); 3611 gen_stop_exception(ctx); 3612 #endif /* defined(CONFIG_USER_ONLY) */ 3613 } 3614 3615 static void gen_nap(DisasContext *ctx) 3616 { 3617 #if defined(CONFIG_USER_ONLY) 3618 GEN_PRIV; 3619 #else 3620 TCGv_i32 t; 3621 3622 CHK_HV; 3623 t = tcg_const_i32(PPC_PM_NAP); 3624 gen_helper_pminsn(cpu_env, t); 3625 tcg_temp_free_i32(t); 3626 gen_stop_exception(ctx); 3627 #endif /* defined(CONFIG_USER_ONLY) */ 3628 } 3629 3630 static void gen_stop(DisasContext *ctx) 3631 { 3632 gen_nap(ctx); 3633 } 3634 3635 static void gen_sleep(DisasContext *ctx) 3636 { 3637 #if defined(CONFIG_USER_ONLY) 3638 GEN_PRIV; 3639 #else 3640 TCGv_i32 t; 3641 3642 CHK_HV; 3643 t = tcg_const_i32(PPC_PM_SLEEP); 3644 gen_helper_pminsn(cpu_env, t); 3645 tcg_temp_free_i32(t); 3646 gen_stop_exception(ctx); 3647 #endif /* defined(CONFIG_USER_ONLY) */ 3648 } 3649 3650 static void gen_rvwinkle(DisasContext *ctx) 3651 { 3652 #if defined(CONFIG_USER_ONLY) 3653 GEN_PRIV; 3654 #else 3655 TCGv_i32 t; 3656 3657 CHK_HV; 3658 t = tcg_const_i32(PPC_PM_RVWINKLE); 3659 gen_helper_pminsn(cpu_env, t); 3660 tcg_temp_free_i32(t); 3661 gen_stop_exception(ctx); 3662 #endif /* defined(CONFIG_USER_ONLY) */ 3663 } 
3664 #endif /* #if defined(TARGET_PPC64) */ 3665 3666 static inline void gen_update_cfar(DisasContext *ctx, target_ulong nip) 3667 { 3668 #if defined(TARGET_PPC64) 3669 if (ctx->has_cfar) 3670 tcg_gen_movi_tl(cpu_cfar, nip); 3671 #endif 3672 } 3673 3674 static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest) 3675 { 3676 if (unlikely(ctx->singlestep_enabled)) { 3677 return false; 3678 } 3679 3680 #ifndef CONFIG_USER_ONLY 3681 return (ctx->base.tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK); 3682 #else 3683 return true; 3684 #endif 3685 } 3686 3687 static void gen_lookup_and_goto_ptr(DisasContext *ctx) 3688 { 3689 int sse = ctx->singlestep_enabled; 3690 if (unlikely(sse)) { 3691 if (sse & GDBSTUB_SINGLE_STEP) { 3692 gen_debug_exception(ctx); 3693 } else if (sse & (CPU_SINGLE_STEP | CPU_BRANCH_STEP)) { 3694 uint32_t excp = gen_prep_dbgex(ctx, POWERPC_EXCP_BRANCH); 3695 if (excp != POWERPC_EXCP_NONE) { 3696 gen_exception(ctx, excp); 3697 } 3698 } 3699 tcg_gen_exit_tb(NULL, 0); 3700 } else { 3701 tcg_gen_lookup_and_goto_ptr(); 3702 } 3703 } 3704 3705 /*** Branch ***/ 3706 static void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest) 3707 { 3708 if (NARROW_MODE(ctx)) { 3709 dest = (uint32_t) dest; 3710 } 3711 if (use_goto_tb(ctx, dest)) { 3712 tcg_gen_goto_tb(n); 3713 tcg_gen_movi_tl(cpu_nip, dest & ~3); 3714 tcg_gen_exit_tb(ctx->base.tb, n); 3715 } else { 3716 tcg_gen_movi_tl(cpu_nip, dest & ~3); 3717 gen_lookup_and_goto_ptr(ctx); 3718 } 3719 } 3720 3721 static inline void gen_setlr(DisasContext *ctx, target_ulong nip) 3722 { 3723 if (NARROW_MODE(ctx)) { 3724 nip = (uint32_t)nip; 3725 } 3726 tcg_gen_movi_tl(cpu_lr, nip); 3727 } 3728 3729 /* b ba bl bla */ 3730 static void gen_b(DisasContext *ctx) 3731 { 3732 target_ulong li, target; 3733 3734 ctx->exception = POWERPC_EXCP_BRANCH; 3735 /* sign extend LI */ 3736 li = LI(ctx->opcode); 3737 li = (li ^ 0x02000000) - 0x02000000; 3738 if (likely(AA(ctx->opcode) == 0)) { 3739 target = 
ctx->base.pc_next + li - 4; 3740 } else { 3741 target = li; 3742 } 3743 if (LK(ctx->opcode)) { 3744 gen_setlr(ctx, ctx->base.pc_next); 3745 } 3746 gen_update_cfar(ctx, ctx->base.pc_next - 4); 3747 gen_goto_tb(ctx, 0, target); 3748 } 3749 3750 #define BCOND_IM 0 3751 #define BCOND_LR 1 3752 #define BCOND_CTR 2 3753 #define BCOND_TAR 3 3754 3755 static void gen_bcond(DisasContext *ctx, int type) 3756 { 3757 uint32_t bo = BO(ctx->opcode); 3758 TCGLabel *l1; 3759 TCGv target; 3760 ctx->exception = POWERPC_EXCP_BRANCH; 3761 3762 if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) { 3763 target = tcg_temp_local_new(); 3764 if (type == BCOND_CTR) 3765 tcg_gen_mov_tl(target, cpu_ctr); 3766 else if (type == BCOND_TAR) 3767 gen_load_spr(target, SPR_TAR); 3768 else 3769 tcg_gen_mov_tl(target, cpu_lr); 3770 } else { 3771 target = NULL; 3772 } 3773 if (LK(ctx->opcode)) 3774 gen_setlr(ctx, ctx->base.pc_next); 3775 l1 = gen_new_label(); 3776 if ((bo & 0x4) == 0) { 3777 /* Decrement and test CTR */ 3778 TCGv temp = tcg_temp_new(); 3779 if (unlikely(type == BCOND_CTR)) { 3780 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3781 return; 3782 } 3783 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1); 3784 if (NARROW_MODE(ctx)) { 3785 tcg_gen_ext32u_tl(temp, cpu_ctr); 3786 } else { 3787 tcg_gen_mov_tl(temp, cpu_ctr); 3788 } 3789 if (bo & 0x2) { 3790 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1); 3791 } else { 3792 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1); 3793 } 3794 tcg_temp_free(temp); 3795 } 3796 if ((bo & 0x10) == 0) { 3797 /* Test CR */ 3798 uint32_t bi = BI(ctx->opcode); 3799 uint32_t mask = 0x08 >> (bi & 0x03); 3800 TCGv_i32 temp = tcg_temp_new_i32(); 3801 3802 if (bo & 0x8) { 3803 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask); 3804 tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1); 3805 } else { 3806 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask); 3807 tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1); 3808 } 3809 tcg_temp_free_i32(temp); 3810 } 3811 gen_update_cfar(ctx, 
                    ctx->base.pc_next - 4);
    if (type == BCOND_IM) {
        target_ulong li = (target_long)((int16_t)(BD(ctx->opcode)));
        if (likely(AA(ctx->opcode) == 0)) {
            gen_goto_tb(ctx, 0, ctx->base.pc_next + li - 4);
        } else {
            gen_goto_tb(ctx, 0, li);
        }
    } else {
        /* Indirect branch: mask the two low bits off the target address. */
        if (NARROW_MODE(ctx)) {
            tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3);
        } else {
            tcg_gen_andi_tl(cpu_nip, target, ~3);
        }
        gen_lookup_and_goto_ptr(ctx);
        tcg_temp_free(target);
    }
    if ((bo & 0x14) != 0x14) {
        /* fallthrough case */
        gen_set_label(l1);
        gen_goto_tb(ctx, 1, ctx->base.pc_next);
    }
}

static void gen_bc(DisasContext *ctx)
{
    gen_bcond(ctx, BCOND_IM);
}

static void gen_bcctr(DisasContext *ctx)
{
    gen_bcond(ctx, BCOND_CTR);
}

static void gen_bclr(DisasContext *ctx)
{
    gen_bcond(ctx, BCOND_LR);
}

static void gen_bctar(DisasContext *ctx)
{
    gen_bcond(ctx, BCOND_TAR);
}

/*** Condition register logical ***/
/* Apply tcg_op to CR bits crbA and crbB, placing the result in bit crbD.
 * Each source field is shifted so the selected bit lines up with crbD's
 * bit position before the operation. */
#define GEN_CRLOGIC(name, tcg_op, opc)                                        \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    uint8_t bitmask;                                                          \
    int sh;                                                                   \
    TCGv_i32 t0, t1;                                                          \
    sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03);             \
    t0 = tcg_temp_new_i32();                                                  \
    if (sh > 0)                                                               \
        tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh);            \
    else if (sh < 0)                                                          \
        tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh);           \
    else                                                                      \
        tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]);                 \
    t1 = tcg_temp_new_i32();                                                  \
    sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03);             \
    if (sh > 0)                                                               \
        tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh);            \
    else if (sh < 0)                                                          \
        tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh);           \
    else                                                                      \
        tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]);                 \
    tcg_op(t0, t0, t1);                                                       \
    bitmask = 0x08 >> (crbD(ctx->opcode) & 0x03);                             \
    tcg_gen_andi_i32(t0, t0, bitmask);                                        \
    tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask);          \
    tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1);                  \
    tcg_temp_free_i32(t0);                                                    \
    tcg_temp_free_i32(t1);                                                    \
}

/* crand */
GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08);
/* crandc */
GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04);
/* creqv */
GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09);
/* crnand */
GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07);
/* crnor */
GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01);
/* cror */
GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E);
/* crorc */
GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D);
/* crxor */
GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06);

/* mcrf: copy one CR field to another. */
static void gen_mcrf(DisasContext *ctx)
{
    tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]);
}

/*** System linkage ***/

/* rfi (supervisor only) */
static void gen_rfi(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    /* This instruction doesn't exist anymore on 64-bit server
     * processors compliant with arch 2.x
     */
    if (ctx->insns_flags & PPC_SEGMENT_64B) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }
    /* Restore CPU state */
    CHK_SV;
    if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
        gen_io_start();
    }
    gen_update_cfar(ctx, ctx->base.pc_next - 4);
    gen_helper_rfi(cpu_env);
    gen_sync_exception(ctx);
    if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
        gen_io_end();
    }
#endif
}

#if defined(TARGET_PPC64)
static void gen_rfid(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    /* Restore CPU state */
    CHK_SV;
    if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
        gen_io_start();
    }
    gen_update_cfar(ctx, ctx->base.pc_next - 4);
    gen_helper_rfid(cpu_env);
    gen_sync_exception(ctx);
    if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
        gen_io_end();
    }
#endif
}

static void gen_hrfid(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    /* Restore CPU state */
    CHK_HV;
    gen_helper_hrfid(cpu_env);
    gen_sync_exception(ctx);
#endif
}
#endif

/* sc */
#if defined(CONFIG_USER_ONLY)
#define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
#else
#define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
#endif
static void gen_sc(DisasContext *ctx)
{
    uint32_t lev;

    /* The LEV field selects the system-call flavour. */
    lev = (ctx->opcode >> 5) & 0x7F;
    gen_exception_err(ctx, POWERPC_SYSCALL, lev);
}

/*** Trap ***/

/* Check for unconditional traps (always or never) */
static bool check_unconditional_trap(DisasContext *ctx)
{
    /* Trap never */
    if (TO(ctx->opcode) == 0) {
        return true;
    }
    /* Trap always */
    if (TO(ctx->opcode) == 31) {
        gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_TRAP);
        return true;
    }
    return false;
}

/* tw */
static void gen_tw(DisasContext *ctx)
{
    TCGv_i32 t0;

    if (check_unconditional_trap(ctx)) {
        return;
    }
    t0 = tcg_const_i32(TO(ctx->opcode));
    gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                  t0);
    tcg_temp_free_i32(t0);
}

/* twi */
static void gen_twi(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1;

    if (check_unconditional_trap(ctx)) {
        return;
    }
    t0 = tcg_const_tl(SIMM(ctx->opcode));
    t1 = tcg_const_i32(TO(ctx->opcode));
    gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}

#if defined(TARGET_PPC64)
/* td */
static void gen_td(DisasContext *ctx)
{
    TCGv_i32 t0;

    if (check_unconditional_trap(ctx)) {
        return;
    }
    t0 = tcg_const_i32(TO(ctx->opcode));
    gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                  t0);
    tcg_temp_free_i32(t0);
}

/* tdi */
static void gen_tdi(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1;

    if (check_unconditional_trap(ctx)) {
        return;
    }
    t0 = tcg_const_tl(SIMM(ctx->opcode));
    t1 = tcg_const_i32(TO(ctx->opcode));
    gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}
#endif

/*** Processor control ***/

/* Assemble the architected XER image from its split-out flag fields. */
static void gen_read_xer(DisasContext *ctx, TCGv dst)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    tcg_gen_mov_tl(dst, cpu_xer);
    tcg_gen_shli_tl(t0, cpu_so, XER_SO);
    tcg_gen_shli_tl(t1, cpu_ov, XER_OV);
    tcg_gen_shli_tl(t2, cpu_ca, XER_CA);
    tcg_gen_or_tl(t0, t0, t1);
    tcg_gen_or_tl(dst, dst, t2);
    tcg_gen_or_tl(dst, dst, t0);
    if (is_isa300(ctx)) {
        tcg_gen_shli_tl(t0, cpu_ov32, XER_OV32);
        tcg_gen_or_tl(dst, dst, t0);
        tcg_gen_shli_tl(t0, cpu_ca32, XER_CA32);
        tcg_gen_or_tl(dst, dst, t0);
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}

/* Scatter an XER image back into the split-out flag fields. */
static void gen_write_xer(TCGv src)
{
    /* Write all flags, while reading back check for isa300 */
    tcg_gen_andi_tl(cpu_xer, src,
                    ~((1u << XER_SO) |
                      (1u << XER_OV) | (1u << XER_OV32) |
                      (1u << XER_CA) | (1u << XER_CA32)));
    tcg_gen_extract_tl(cpu_ov32, src, XER_OV32, 1);
    tcg_gen_extract_tl(cpu_ca32, src, XER_CA32, 1);
    tcg_gen_extract_tl(cpu_so, src, XER_SO, 1);
    tcg_gen_extract_tl(cpu_ov, src, XER_OV, 1);
    tcg_gen_extract_tl(cpu_ca, src, XER_CA, 1);
}

/* mcrxr */
static void gen_mcrxr(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();
    TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];

    /* Pack SO/OV/CA into CR field bits 3..1, then clear them in XER. */
    tcg_gen_trunc_tl_i32(t0, cpu_so);
    tcg_gen_trunc_tl_i32(t1, cpu_ov);
    tcg_gen_trunc_tl_i32(dst, cpu_ca);
    tcg_gen_shli_i32(t0, t0, 3);
    tcg_gen_shli_i32(t1, t1, 2);
    tcg_gen_shli_i32(dst, dst, 1);
    tcg_gen_or_i32(dst, dst, t0);
    tcg_gen_or_i32(dst, dst, t1);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);

    tcg_gen_movi_tl(cpu_so, 0);
    tcg_gen_movi_tl(cpu_ov, 0);
    tcg_gen_movi_tl(cpu_ca, 0);
}

#ifdef TARGET_PPC64
/* mcrxrx: copy OV/OV32/CA/CA32 into a CR field without clearing them. */
static void gen_mcrxrx(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];

    /* copy OV and OV32 */
    tcg_gen_shli_tl(t0, cpu_ov, 1);
    tcg_gen_or_tl(t0, t0, cpu_ov32);
    tcg_gen_shli_tl(t0, t0, 2);
    /* copy CA and CA32 */
    tcg_gen_shli_tl(t1, cpu_ca, 1);
    tcg_gen_or_tl(t1, t1, cpu_ca32);
    tcg_gen_or_tl(t0, t0, t1);
    tcg_gen_trunc_tl_i32(dst, t0);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
#endif

/* mfcr mfocrf */
static void gen_mfcr(DisasContext *ctx)
{
    uint32_t crm, crn;

    if (likely(ctx->opcode & 0x00100000)) {
        /* mfocrf: fetch a single CR field selected by a one-hot CRM. */
        crm = CRM(ctx->opcode);
        if (likely(crm && ((crm & (crm - 1)) == 0))) {
            crn = ctz32 (crm);
            tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]);
            tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)],
                            cpu_gpr[rD(ctx->opcode)], crn * 4);
        }
    } else {
        /* mfcr: concatenate all eight CR fields, CR0 in the high nibble. */
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_mov_i32(t0, cpu_crf[0]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[1]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[2]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[3]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[4]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[5]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[6]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[7]);
        tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
        tcg_temp_free_i32(t0);
    }
}

/* mfmsr */
static void gen_mfmsr(DisasContext *ctx)
{
    CHK_SV;
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr);
}

/* Callback used to mark an SPR slot as deliberately inaccessible. */
static void spr_noaccess(DisasContext *ctx, int gprn, int sprn)
{
#if 0
    sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
    printf("ERROR: try to access SPR %d !\n", sprn);
#endif
}
#define SPR_NOACCESS (&spr_noaccess)

/* mfspr */
static inline void gen_op_mfspr(DisasContext *ctx)
{
    void (*read_cb)(DisasContext *ctx, int gprn, int sprn);
    uint32_t sprn = SPR(ctx->opcode);

    /* Pick the read callback for the current privilege level. */
#if defined(CONFIG_USER_ONLY)
    read_cb = ctx->spr_cb[sprn].uea_read;
#else
    if (ctx->pr) {
        read_cb = ctx->spr_cb[sprn].uea_read;
    } else if (ctx->hv) {
        read_cb = ctx->spr_cb[sprn].hea_read;
    } else {
        read_cb = ctx->spr_cb[sprn].oea_read;
    }
#endif
    if (likely(read_cb != NULL)) {
        if (likely(read_cb != SPR_NOACCESS)) {
            (*read_cb)(ctx, rD(ctx->opcode), sprn);
        } else {
            /* Privilege exception */
            /* This is a hack to avoid warnings when running Linux:
             * this OS breaks the PowerPC virtualisation model,
             * allowing userland application to read the PVR
             */
            if (sprn != SPR_PVR) {
                qemu_log_mask(LOG_GUEST_ERROR, "Trying to read privileged spr "
                              "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
                              ctx->base.pc_next - 4);
            }
            gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
        }
    } else {
        /* ISA 2.07 defines these as no-ops */
        if ((ctx->insns_flags2 & PPC2_ISA207S) &&
            (sprn >= 808 && sprn <= 811)) {
            /* This is a nop
             */
            return;
        }
        /* Not defined */
        qemu_log_mask(LOG_GUEST_ERROR,
                      "Trying to read invalid spr %d (0x%03x) at "
                      TARGET_FMT_lx "\n", sprn, sprn, ctx->base.pc_next - 4);

        /* The behaviour depends on MSR:PR and SPR# bit 0x10,
         * it can generate a priv, a hv emu or a no-op
         */
        if (sprn & 0x10) {
            if (ctx->pr) {
                gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
            }
        } else {
            if (ctx->pr || sprn == 0 || sprn == 4 || sprn == 5 || sprn == 6) {
                gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
            }
        }
    }
}

static void gen_mfspr(DisasContext *ctx)
{
    gen_op_mfspr(ctx);
}

/* mftb: the time base is reached through the SPR read path. */
static void gen_mftb(DisasContext *ctx)
{
    gen_op_mfspr(ctx);
}

/* mtcrf mtocrf*/
static void gen_mtcrf(DisasContext *ctx)
{
    uint32_t crm, crn;

    crm = CRM(ctx->opcode);
    if (likely((ctx->opcode & 0x00100000))) {
        /* mtocrf: write one CR field when CRM is a one-hot mask. */
        if (crm && ((crm & (crm - 1)) == 0)) {
            TCGv_i32 temp = tcg_temp_new_i32();
            crn = ctz32 (crm);
            tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
            tcg_gen_shri_i32(temp, temp, crn * 4);
            tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf);
            tcg_temp_free_i32(temp);
        }
    } else {
        /* mtcrf: write every CR field whose CRM bit is set. */
        TCGv_i32 temp = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
        for (crn = 0 ; crn < 8 ; crn++) {
            if (crm & (1 << crn)) {
                tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4);
                tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf);
            }
        }
        tcg_temp_free_i32(temp);
    }
}

/* mtmsr */
#if defined(TARGET_PPC64)
static void gen_mtmsrd(DisasContext *ctx)
{
    CHK_SV;

#if !defined(CONFIG_USER_ONLY)
    if (ctx->opcode & 0x00010000) {
        /* Special form that does not need any synchronisation */
        TCGv t0 = tcg_temp_new();
        tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)],
                        (1 << MSR_RI) | (1 << MSR_EE));
        tcg_gen_andi_tl(cpu_msr, cpu_msr,
                        ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE)));
        tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
        tcg_temp_free(t0);
    } else {
        /* XXX: we need to update nip before the store
         * if we enter power saving mode, we will exit the loop
         * directly from ppc_store_msr
         */
        if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
            gen_io_start();
        }
        gen_update_nip(ctx, ctx->base.pc_next);
        gen_helper_store_msr(cpu_env, cpu_gpr[rS(ctx->opcode)]);
        /* Must stop the translation as machine state (may have) changed */
        /* Note that mtmsr is not always defined as context-synchronizing */
        gen_stop_exception(ctx);
        if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
            gen_io_end();
        }
    }
#endif /* !defined(CONFIG_USER_ONLY) */
}
#endif /* defined(TARGET_PPC64) */

static void gen_mtmsr(DisasContext *ctx)
{
    CHK_SV;

#if !defined(CONFIG_USER_ONLY)
    if (ctx->opcode & 0x00010000) {
        /* Special form that does not need any synchronisation */
        TCGv t0 = tcg_temp_new();
        tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)],
                        (1 << MSR_RI) | (1 << MSR_EE));
        tcg_gen_andi_tl(cpu_msr, cpu_msr,
                        ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE)));
        tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
        tcg_temp_free(t0);
    } else {
        TCGv msr = tcg_temp_new();

        /* XXX: we need to update nip before the store
         * if we enter power saving mode, we will exit the loop
         * directly from ppc_store_msr
         */
        if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
            gen_io_start();
        }
        gen_update_nip(ctx, ctx->base.pc_next);
#if defined(TARGET_PPC64)
        /* On 64-bit, mtmsr only replaces the low 32 bits of the MSR. */
        tcg_gen_deposit_tl(msr, cpu_msr, cpu_gpr[rS(ctx->opcode)], 0, 32);
#else
        tcg_gen_mov_tl(msr, cpu_gpr[rS(ctx->opcode)]);
#endif
        gen_helper_store_msr(cpu_env, msr);
        if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
            gen_io_end();
        }
        tcg_temp_free(msr);
        /* Must stop the translation as machine state (may have) changed */
        /* Note that mtmsr is not always defined as context-synchronizing */
        gen_stop_exception(ctx);
    }
#endif
}

/* mtspr */
static void gen_mtspr(DisasContext *ctx)
{
    void (*write_cb)(DisasContext *ctx, int sprn, int gprn);
    uint32_t sprn = SPR(ctx->opcode);

    /* Pick the write callback for the current privilege level. */
#if defined(CONFIG_USER_ONLY)
    write_cb = ctx->spr_cb[sprn].uea_write;
#else
    if (ctx->pr) {
        write_cb = ctx->spr_cb[sprn].uea_write;
    } else if (ctx->hv) {
        write_cb = ctx->spr_cb[sprn].hea_write;
    } else {
        write_cb = ctx->spr_cb[sprn].oea_write;
    }
#endif
    if (likely(write_cb != NULL)) {
        if (likely(write_cb != SPR_NOACCESS)) {
            (*write_cb)(ctx, sprn, rS(ctx->opcode));
        } else {
            /* Privilege exception */
            qemu_log_mask(LOG_GUEST_ERROR, "Trying to write privileged spr "
                          "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
                          ctx->base.pc_next - 4);
            gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
        }
    } else {
        /* ISA 2.07 defines these as no-ops */
        if ((ctx->insns_flags2 & PPC2_ISA207S) &&
            (sprn >= 808 && sprn <= 811)) {
            /* This is a nop */
            return;
        }

        /* Not defined */
        qemu_log_mask(LOG_GUEST_ERROR,
                      "Trying to write invalid spr %d (0x%03x) at "
                      TARGET_FMT_lx "\n", sprn, sprn, ctx->base.pc_next - 4);


        /* The behaviour depends on MSR:PR and SPR# bit 0x10,
         * it can generate a priv, a hv emu or a no-op
         */
        if (sprn & 0x10) {
            if (ctx->pr) {
                gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
            }
        } else {
            if (ctx->pr || sprn == 0) {
                gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
            }
        }
    }
}

#if defined(TARGET_PPC64)
/* setb */
static void gen_setb(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t8 =
tcg_temp_new_i32(); 4436 TCGv_i32 tm1 = tcg_temp_new_i32(); 4437 int crf = crfS(ctx->opcode); 4438 4439 tcg_gen_setcondi_i32(TCG_COND_GEU, t0, cpu_crf[crf], 4); 4440 tcg_gen_movi_i32(t8, 8); 4441 tcg_gen_movi_i32(tm1, -1); 4442 tcg_gen_movcond_i32(TCG_COND_GEU, t0, cpu_crf[crf], t8, tm1, t0); 4443 tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0); 4444 4445 tcg_temp_free_i32(t0); 4446 tcg_temp_free_i32(t8); 4447 tcg_temp_free_i32(tm1); 4448 } 4449 #endif 4450 4451 /*** Cache management ***/ 4452 4453 /* dcbf */ 4454 static void gen_dcbf(DisasContext *ctx) 4455 { 4456 /* XXX: specification says this is treated as a load by the MMU */ 4457 TCGv t0; 4458 gen_set_access_type(ctx, ACCESS_CACHE); 4459 t0 = tcg_temp_new(); 4460 gen_addr_reg_index(ctx, t0); 4461 gen_qemu_ld8u(ctx, t0, t0); 4462 tcg_temp_free(t0); 4463 } 4464 4465 /* dcbfep (external PID dcbf) */ 4466 static void gen_dcbfep(DisasContext *ctx) 4467 { 4468 /* XXX: specification says this is treated as a load by the MMU */ 4469 TCGv t0; 4470 CHK_SV; 4471 gen_set_access_type(ctx, ACCESS_CACHE); 4472 t0 = tcg_temp_new(); 4473 gen_addr_reg_index(ctx, t0); 4474 tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB)); 4475 tcg_temp_free(t0); 4476 } 4477 4478 /* dcbi (Supervisor only) */ 4479 static void gen_dcbi(DisasContext *ctx) 4480 { 4481 #if defined(CONFIG_USER_ONLY) 4482 GEN_PRIV; 4483 #else 4484 TCGv EA, val; 4485 4486 CHK_SV; 4487 EA = tcg_temp_new(); 4488 gen_set_access_type(ctx, ACCESS_CACHE); 4489 gen_addr_reg_index(ctx, EA); 4490 val = tcg_temp_new(); 4491 /* XXX: specification says this should be treated as a store by the MMU */ 4492 gen_qemu_ld8u(ctx, val, EA); 4493 gen_qemu_st8(ctx, val, EA); 4494 tcg_temp_free(val); 4495 tcg_temp_free(EA); 4496 #endif /* defined(CONFIG_USER_ONLY) */ 4497 } 4498 4499 /* dcdst */ 4500 static void gen_dcbst(DisasContext *ctx) 4501 { 4502 /* XXX: specification say this is treated as a load by the MMU */ 4503 TCGv t0; 4504 gen_set_access_type(ctx, ACCESS_CACHE); 
/* dcbstep (dcbstep External PID version) - dcbst through the
 * external-PID translation; modelled as a discarded EPID byte load.
 */
static void gen_dcbstep(DisasContext *ctx)
{
    /* XXX: specification say this is treated as a load by the MMU */
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_CACHE);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB));
    tcg_temp_free(t0);
}

/* dcbt - Data Cache Block Touch: a prefetch hint, no emulated effect. */
static void gen_dcbt(DisasContext *ctx)
{
    /* interpreted as no-op */
    /* XXX: specification say this is treated as a load by the MMU
     * but does not generate any exception
     */
}

/* dcbtep - external-PID dcbt: same hint, same no-op treatment. */
static void gen_dcbtep(DisasContext *ctx)
{
    /* interpreted as no-op */
    /* XXX: specification say this is treated as a load by the MMU
     * but does not generate any exception
     */
}

/* dcbtst - Data Cache Block Touch for Store: prefetch hint, no-op. */
static void gen_dcbtst(DisasContext *ctx)
{
    /* interpreted as no-op */
    /* XXX: specification say this is treated as a load by the MMU
     * but does not generate any exception
     */
}

/* dcbtstep - external-PID dcbtst: prefetch hint, no-op. */
static void gen_dcbtstep(DisasContext *ctx)
{
    /* interpreted as no-op */
    /* XXX: specification say this is treated as a load by the MMU
     * but does not generate any exception
     */
}

/* dcbtls - Data Cache Block Touch and Lock Set.
 * Cache locking is not emulated: report failure by setting the
 * "unable to lock" bit in L1CSR0, which the architecture permits.
 */
static void gen_dcbtls(DisasContext *ctx)
{
    /* Always fails locking the cache */
    TCGv t0 = tcg_temp_new();
    gen_load_spr(t0, SPR_Exxx_L1CSR0);
    tcg_gen_ori_tl(t0, t0, L1CSR0_CUL);
    gen_store_spr(SPR_Exxx_L1CSR0, t0);
    tcg_temp_free(t0);
}
/* dcbzep - external-PID dcbz: zero a cache block via the EPID
 * translation; the opcode bits are passed so the helper can pick the
 * block size variant.
 */
static void gen_dcbzep(DisasContext *ctx)
{
    TCGv tcgv_addr;
    TCGv_i32 tcgv_op;

    gen_set_access_type(ctx, ACCESS_CACHE);
    tcgv_addr = tcg_temp_new();
    tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000);
    gen_addr_reg_index(ctx, tcgv_addr);
    gen_helper_dcbzep(cpu_env, tcgv_addr, tcgv_op);
    tcg_temp_free(tcgv_addr);
    tcg_temp_free_i32(tcgv_op);
}

/* dst / dstt - data stream touch: a hint; only rA == 0 is invalid. */
static void gen_dst(DisasContext *ctx)
{
    if (rA(ctx->opcode) == 0) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
    } else {
        /* interpreted as no-op */
    }
}

/* dstst / dststt - data stream touch for store: hint; rA == 0 invalid. */
static void gen_dstst(DisasContext *ctx)
{
    if (rA(ctx->opcode) == 0) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
    } else {
        /* interpreted as no-op */
    }

}

/* dss / dssall - data stream stop: hint only. */
static void gen_dss(DisasContext *ctx)
{
    /* interpreted as no-op */
}

/* icbi - Instruction Cache Block Invalidate: handled by a helper so
 * cached translations covering the block can be dropped.
 */
static void gen_icbi(DisasContext *ctx)
{
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_CACHE);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_icbi(cpu_env, t0);
    tcg_temp_free(t0);
}

/* icbiep - external-PID icbi. */
static void gen_icbiep(DisasContext *ctx)
{
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_CACHE);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_icbiep(cpu_env, t0);
    tcg_temp_free(t0);
}

/* Optional: */
/* dcba - Data Cache Block Allocate: a hint, no emulated effect. */
static void gen_dcba(DisasContext *ctx)
{
    /* interpreted as no-op */
    /* XXX: specification say this is treated as a store by the MMU
     * but does not generate any exception
     */
}
/*** Segment register manipulation ***/
/* Supervisor only: */

/* mfsr - Move From Segment Register: rD = SR[SR field of the opcode]. */
static void gen_mfsr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_const_tl(SR(ctx->opcode));
    gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mfsrin - Move From Segment Register Indirect: the SR number is taken
 * from bits 28:31 (the top nibble) of rB.
 */
static void gen_mfsrin(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
    gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mtsr - Move To Segment Register: SR[opcode SR field] = rS. */
static void gen_mtsr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_const_tl(SR(ctx->opcode));
    gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mtsrin - Move To Segment Register Indirect: SR number from the top
 * nibble of rB, value from the rD/rS register field.
 * NOTE(review): this uses rD() while gen_mtsr() uses rS(); both macros
 * extract the same instruction bits so the behaviour is identical —
 * confirm before "normalising" one to match the other.
 */
static void gen_mtsrin(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;
    CHK_SV;

    t0 = tcg_temp_new();
    tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
    gen_helper_store_sr(cpu_env, t0, cpu_gpr[rD(ctx->opcode)]);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}
/* mfsrin - "bridge" mfsrin for 64-bit CPUs emulating 32-bit segment
 * registers on top of the SLB; SR number from the top nibble of rB.
 */
static void gen_mfsrin_64b(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
    gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mtsr - "bridge" mtsr for 64-bit CPUs using the SLB. */
static void gen_mtsr_64b(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_const_tl(SR(ctx->opcode));
    gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mtsrin - "bridge" mtsrin for 64-bit CPUs using the SLB. */
static void gen_mtsrin_64b(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
    gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* slbmte - SLB Move To Entry: write the SLB entry selected by rB with
 * the data in rS (supervisor only).
 */
static void gen_slbmte(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    gen_helper_store_slb(cpu_env, cpu_gpr[rB(ctx->opcode)],
                         cpu_gpr[rS(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* slbmfee - SLB Move From Entry ESID: rS = ESID of the SLB entry
 * indexed by rB (supervisor only).
 */
static void gen_slbmfee(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    gen_helper_load_slb_esid(cpu_gpr[rS(ctx->opcode)], cpu_env,
                             cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}
/* slbfee. - SLB Find Entry ESID (Rc=1 form).
 * Looks up the SLB for the ESID in rB; on a hit rS receives the VSID
 * data and CR0.EQ is set, on a miss rS is cleared. CR0.SO mirrors the
 * XER SO bit. The helper signals "not found" by returning -1.
 */
static void gen_slbfee_(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    TCGLabel *l1, *l2;

    if (unlikely(ctx->pr)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    gen_helper_find_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env,
                             cpu_gpr[rB(ctx->opcode)]);
    l1 = gen_new_label();
    l2 = gen_new_label();
    /* CR0 = SO bit; EQ is OR-ed in below on a hit */
    tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
    /* -1 from the helper means "no matching entry" */
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rS(ctx->opcode)], -1, l1);
    tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ);
    tcg_gen_br(l2);
    gen_set_label(l1);
    /* miss: architected result is 0 */
    tcg_gen_movi_tl(cpu_gpr[rS(ctx->opcode)], 0);
    gen_set_label(l2);
#endif
}
#endif /* defined(TARGET_PPC64) */

/*** Lookaside buffer management ***/
/* Optional & supervisor only: */

/* tlbia - TLB Invalidate All (hypervisor privileged). */
static void gen_tlbia(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_HV;

    gen_helper_tlbia(cpu_env);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbiel - TLB Invalidate Entry Local: same underlying helper as tlbie,
 * but only supervisor (not hypervisor) privilege is required.
 */
static void gen_tlbiel(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}
/* tlbsync - wait for TLB invalidations to complete.
 * Privilege depends on HFSCR/LPCR GTSE: supervisor when guest
 * translation-shootdown is enabled, hypervisor otherwise.
 */
static void gen_tlbsync(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else

    if (ctx->gtse) {
        CHK_SV; /* If gtse is set then tlbsync is supervisor privileged */
    } else {
        CHK_HV; /* Else hypervisor privileged */
    }

    /* BookS does both ptesync and tlbsync make tlbsync a nop for server */
    if (ctx->insns_flags & PPC_BOOKE) {
        gen_check_tlb_flush(ctx, true);
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

#if defined(TARGET_PPC64)
/* slbia - SLB Invalidate All (supervisor only). */
static void gen_slbia(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    gen_helper_slbia(cpu_env);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* slbie - SLB Invalidate Entry: invalidate the entry matching rB. */
static void gen_slbie(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    gen_helper_slbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* slbieg - SLB Invalidate Entry Global. */
static void gen_slbieg(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    gen_helper_slbieg(cpu_env, cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* slbsync - wait for SLB invalidations; modelled as a flush check. */
static void gen_slbsync(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_check_tlb_flush(ctx, true);
#endif /* defined(CONFIG_USER_ONLY) */
}

#endif  /* defined(TARGET_PPC64) */
/*** External control ***/
/* Optional: */

/* eciwx - External Control In Word Indexed: aligned word load through
 * the external-access mechanism into rD.
 */
static void gen_eciwx(DisasContext *ctx)
{
    TCGv t0;
    /* Should check EAR[E] ! */
    gen_set_access_type(ctx, ACCESS_EXT);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
                       DEF_MEMOP(MO_UL | MO_ALIGN));
    tcg_temp_free(t0);
}

/* ecowx - External Control Out Word Indexed: aligned word store of rS
 * (rD and rS are the same field) through the external-access mechanism.
 */
static void gen_ecowx(DisasContext *ctx)
{
    TCGv t0;
    /* Should check EAR[E] ! */
    gen_set_access_type(ctx, ACCESS_EXT);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    tcg_gen_qemu_st_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
                       DEF_MEMOP(MO_UL | MO_ALIGN));
    tcg_temp_free(t0);
}

/* PowerPC 601 specific instructions */

/* abs - abs. : rD = |rA|; negative values take the fall-through
 * (negate) path, non-negative values are copied unchanged.
 */
static void gen_abs(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l1);
    tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    gen_set_label(l2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
/* abso - abso. : abs with overflow detection; only |INT_MIN|
 * (0x80000000, not representable) overflows, in which case the value
 * passes through unchanged and OV/SO are set.
 */
static void gen_abso(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    TCGLabel *l3 = gen_new_label();
    /* Start with XER OV disabled, the most likely case */
    tcg_gen_movi_tl(cpu_ov, 0);
    tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l2);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[rA(ctx->opcode)], 0x80000000, l1);
    tcg_gen_movi_tl(cpu_ov, 1);
    tcg_gen_movi_tl(cpu_so, 1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_br(l3);
    gen_set_label(l2);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    gen_set_label(l3);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* clcs - Cache Line Compute Size: helper returns the line size for the
 * cache identified by the rA field.
 */
static void gen_clcs(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_const_i32(rA(ctx->opcode));
    gen_helper_clcs(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free_i32(t0);
    /* Rc=1 sets CR0 to an undefined state */
}

/* div - div. : 601 divide using the MQ register (helper-implemented). */
static void gen_div(DisasContext *ctx)
{
    gen_helper_div(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)],
                   cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* divo - divo. : div with overflow reporting. */
static void gen_divo(DisasContext *ctx)
{
    gen_helper_divo(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)],
                    cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* divs - divs. : 601 divide short (helper-implemented). */
static void gen_divs(DisasContext *ctx)
{
    gen_helper_divs(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)],
                    cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
*/ 5085 static void gen_divso(DisasContext *ctx) 5086 { 5087 gen_helper_divso(cpu_gpr[rD(ctx->opcode)], cpu_env, 5088 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 5089 if (unlikely(Rc(ctx->opcode) != 0)) 5090 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5091 } 5092 5093 /* doz - doz. */ 5094 static void gen_doz(DisasContext *ctx) 5095 { 5096 TCGLabel *l1 = gen_new_label(); 5097 TCGLabel *l2 = gen_new_label(); 5098 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1); 5099 tcg_gen_sub_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5100 tcg_gen_br(l2); 5101 gen_set_label(l1); 5102 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 5103 gen_set_label(l2); 5104 if (unlikely(Rc(ctx->opcode) != 0)) 5105 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5106 } 5107 5108 /* dozo - dozo. */ 5109 static void gen_dozo(DisasContext *ctx) 5110 { 5111 TCGLabel *l1 = gen_new_label(); 5112 TCGLabel *l2 = gen_new_label(); 5113 TCGv t0 = tcg_temp_new(); 5114 TCGv t1 = tcg_temp_new(); 5115 TCGv t2 = tcg_temp_new(); 5116 /* Start with XER OV disabled, the most likely case */ 5117 tcg_gen_movi_tl(cpu_ov, 0); 5118 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1); 5119 tcg_gen_sub_tl(t0, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5120 tcg_gen_xor_tl(t1, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5121 tcg_gen_xor_tl(t2, cpu_gpr[rA(ctx->opcode)], t0); 5122 tcg_gen_andc_tl(t1, t1, t2); 5123 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0); 5124 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2); 5125 tcg_gen_movi_tl(cpu_ov, 1); 5126 tcg_gen_movi_tl(cpu_so, 1); 5127 tcg_gen_br(l2); 5128 gen_set_label(l1); 5129 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 5130 gen_set_label(l2); 5131 tcg_temp_free(t0); 5132 tcg_temp_free(t1); 5133 tcg_temp_free(t2); 5134 if (unlikely(Rc(ctx->opcode) != 0)) 5135 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5136 } 5137 5138 /* dozi */ 5139 static void 
/* dozi - difference-or-zero immediate.
 * rD = SIMM - rA when rA < SIMM, otherwise 0.
 * BUG FIX: the branch arms were swapped — the old code stored the
 * (non-positive) difference when rA >= SIMM and 0 when rA < SIMM,
 * the exact inverse of the architected behaviour.
 */
static void gen_dozi(DisasContext *ctx)
{
    target_long simm = SIMM(ctx->opcode);
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    /* rA < SIMM: take l1 and compute SIMM - rA */
    tcg_gen_brcondi_tl(TCG_COND_LT, cpu_gpr[rA(ctx->opcode)], simm, l1);
    tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_subfi_tl(cpu_gpr[rD(ctx->opcode)], simm, cpu_gpr[rA(ctx->opcode)]);
    gen_set_label(l2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* lscbx - lscbx. : Load String and Compare Byte Indexed.
 * Helper copies bytes until a match with XER[CMP] or the count runs
 * out; the number of bytes transferred replaces XER bits 25:31.
 */
static void gen_lscbx(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode));
    TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode));
    TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode));

    gen_addr_reg_index(ctx, t0);
    gen_helper_lscbx(t0, cpu_env, t0, t1, t2, t3);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
    tcg_temp_free_i32(t3);
    /* merge the helper's byte count into the low 7 bits of XER */
    tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F);
    tcg_gen_or_tl(cpu_xer, cpu_xer, t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, t0);
    tcg_temp_free(t0);
}
/* maskg - maskg. : generate a mask from the bit position in rS to the
 * bit position in rB; when start > end the mask wraps, handled by the
 * final conditional negate of the XOR of the two partial masks.
 */
static void gen_maskg(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    TCGv t3 = tcg_temp_new();
    tcg_gen_movi_tl(t3, 0xFFFFFFFF);
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_andi_tl(t1, cpu_gpr[rS(ctx->opcode)], 0x1F);
    tcg_gen_addi_tl(t2, t0, 1);
    tcg_gen_shr_tl(t2, t3, t2);
    tcg_gen_shr_tl(t3, t3, t1);
    tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], t2, t3);
    tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
    /* wrapped mask: complement via negate of the xor result */
    tcg_gen_neg_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    gen_set_label(l1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    tcg_temp_free(t3);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* maskir - maskir. : masked insert, rA = (rS & rB) | (rA & ~rB) —
 * rB selects which bits of rS are inserted into rA.
 */
static void gen_maskir(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_and_tl(t0, cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* mul - mul. : 601 multiply — 32x32 -> 64-bit product; the low half
 * goes to the MQ register, the high half to rD.
 */
static void gen_mul(DisasContext *ctx)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv t2 = tcg_temp_new();
    tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mul_i64(t0, t0, t1);
    tcg_gen_trunc_i64_tl(t2, t0);
    gen_store_spr(SPR_MQ, t2);
    tcg_gen_shri_i64(t1, t0, 32);
    tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* mulo - mulo. : mul with overflow detection — OV/SO are set when the
 * 64-bit product does not fit the sign-extended low 32 bits.
 */
static void gen_mulo(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv t2 = tcg_temp_new();
    /* Start with XER OV disabled, the most likely case */
    tcg_gen_movi_tl(cpu_ov, 0);
    tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mul_i64(t0, t0, t1);
    tcg_gen_trunc_i64_tl(t2, t0);
    gen_store_spr(SPR_MQ, t2);
    tcg_gen_shri_i64(t1, t0, 32);
    tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
    /* overflow iff product != sext32(product) */
    tcg_gen_ext32s_i64(t1, t0);
    tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1);
    tcg_gen_movi_tl(cpu_ov, 1);
    tcg_gen_movi_tl(cpu_so, 1);
    gen_set_label(l1);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
/* nabs - nabs. : negative absolute value, rD = -|rA|; positive inputs
 * are negated, non-positive inputs pass through.
 */
static void gen_nabs(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    gen_set_label(l2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* nabso - nabso. : nabs with OV explicitly cleared — -|x| is always
 * representable, so overflow can never occur.
 */
static void gen_nabso(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    gen_set_label(l2);
    /* nabs never overflows */
    tcg_gen_movi_tl(cpu_ov, 0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* rlmi - rlmi. : rotate left by rB[27:31] then insert under the
 * MB..ME mask into rA (bits outside the mask keep rA's old value).
 */
static void gen_rlmi(DisasContext *ctx)
{
    uint32_t mb = MB(ctx->opcode);
    uint32_t me = ME(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    tcg_gen_andi_tl(t0, t0, MASK(mb, me));
    tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~MASK(mb, me));
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* rrib - rrib. : rotate right and insert bit — bit 0 of rS, rotated
 * right by rB[27:31], replaces the corresponding bit of rA.
 */
static void gen_rrib(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
    /* t1 = single-bit mask at the rotated position */
    tcg_gen_movi_tl(t1, 0x80000000);
    tcg_gen_shr_tl(t1, t1, t0);
    tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    tcg_gen_and_tl(t0, t0, t1);
    tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], t1);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* sle - sle. : shift left extended — rA = rS << n; MQ receives the
 * left rotation (shifted-out bits wrap into the low end).
 */
static void gen_sle(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_subfi_tl(t1, 32, t1);
    tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_or_tl(t1, t0, t1);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    gen_store_spr(SPR_MQ, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* sleq - sleq. : shift left extended with MQ — rotated rS is merged
 * with the previous MQ under the shift mask; MQ gets the rotation.
 */
static void gen_sleq(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_movi_tl(t2, 0xFFFFFFFF);
    tcg_gen_shl_tl(t2, t2, t0);
    tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    gen_load_spr(t1, SPR_MQ);
    gen_store_spr(SPR_MQ, t0);
    tcg_gen_and_tl(t0, t0, t2);
    tcg_gen_andc_tl(t1, t1, t2);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* sliq - sliq. : shift left immediate with MQ — rA = rS << sh; MQ
 * receives the left rotation of rS by sh.
 */
static void gen_sliq(DisasContext *ctx)
{
    int sh = SH(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_shli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
    tcg_gen_shri_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
    tcg_gen_or_tl(t1, t0, t1);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    gen_store_spr(SPR_MQ, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* slliq - slliq. : shift left long immediate with MQ — the rotation of
 * rS is merged with the old MQ under the shift mask; MQ is updated with
 * the rotation.
 */
static void gen_slliq(DisasContext *ctx)
{
    int sh = SH(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
    gen_load_spr(t1, SPR_MQ);
    gen_store_spr(SPR_MQ, t0);
    tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU << sh));
    tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU << sh));
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* sllq - sllq. : shift left long with MQ — when rB bit 0x20 is set the
 * shift amount is >= 32 and the result comes entirely from MQ under the
 * mask; otherwise the shifted rS is merged with MQ outside the mask.
 * Local temps so values survive the branch.
 */
static void gen_sllq(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    TCGv t0 = tcg_temp_local_new();
    TCGv t1 = tcg_temp_local_new();
    TCGv t2 = tcg_temp_local_new();
    tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_movi_tl(t1, 0xFFFFFFFF);
    tcg_gen_shl_tl(t1, t1, t2);
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
    /* shift amount >= 32: result is MQ masked by the shift mask */
    gen_load_spr(t0, SPR_MQ);
    tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
    gen_load_spr(t2, SPR_MQ);
    tcg_gen_andc_tl(t1, t2, t1);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    gen_set_label(l2);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* slq - slq. : shift left with MQ — MQ gets the left rotation; the
 * result is zeroed when rB bit 0x20 (shift >= 32) is set, which the
 * branch on t1 implements.
 */
static void gen_slq(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_subfi_tl(t1, 32, t1);
    tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_or_tl(t1, t0, t1);
    gen_store_spr(SPR_MQ, t1);
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    /* rB bit 0x20 clear: keep the shifted value, else fall through to 0 */
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
    tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
    gen_set_label(l1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* sraiq - sraiq. : shift right algebraic immediate with MQ.
 * MQ receives the right rotation of rS by sh; CA is set only when the
 * value is negative AND non-zero bits were shifted out (t1 != 0).
 */
static void gen_sraiq(DisasContext *ctx)
{
    int sh = SH(ctx->opcode);
    TCGLabel *l1 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
    /* t1 = the bits shifted out of the low end */
    tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
    tcg_gen_or_tl(t0, t0, t1);
    gen_store_spr(SPR_MQ, t0);
    tcg_gen_movi_tl(cpu_ca, 0);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
    tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1);
    tcg_gen_movi_tl(cpu_ca, 1);
    gen_set_label(l1);
    tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* sre - sre. : shift right extended — rA = rS >> n; MQ receives the
 * right rotation (bits shifted out wrap into the high end).
 */
static void gen_sre(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_subfi_tl(t1, 32, t1);
    tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_or_tl(t1, t0, t1);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    gen_store_spr(SPR_MQ, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* srea - srea. : shift right extended algebraic — arithmetic shift of
 * rS into rA, with MQ receiving the right rotation.
 */
static void gen_srea(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
    gen_store_spr(SPR_MQ, t0);
    tcg_gen_sar_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* sreq : shift right extended with MQ — the rotated rS is merged with
 * the previous MQ outside the shift mask; MQ gets the rotation.
 */
static void gen_sreq(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_movi_tl(t1, 0xFFFFFFFF);
    tcg_gen_shr_tl(t1, t1, t0);
    tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    gen_load_spr(t2, SPR_MQ);
    gen_store_spr(SPR_MQ, t0);
    tcg_gen_and_tl(t0, t0, t1);
    tcg_gen_andc_tl(t2, t2, t1);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
= tcg_temp_new(); 5572 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5573 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 5574 tcg_gen_or_tl(t1, t0, t1); 5575 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5576 gen_store_spr(SPR_MQ, t1); 5577 tcg_temp_free(t0); 5578 tcg_temp_free(t1); 5579 if (unlikely(Rc(ctx->opcode) != 0)) 5580 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5581 } 5582 5583 /* srliq */ 5584 static void gen_srliq(DisasContext *ctx) 5585 { 5586 int sh = SH(ctx->opcode); 5587 TCGv t0 = tcg_temp_new(); 5588 TCGv t1 = tcg_temp_new(); 5589 tcg_gen_rotri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5590 gen_load_spr(t1, SPR_MQ); 5591 gen_store_spr(SPR_MQ, t0); 5592 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU >> sh)); 5593 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU >> sh)); 5594 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5595 tcg_temp_free(t0); 5596 tcg_temp_free(t1); 5597 if (unlikely(Rc(ctx->opcode) != 0)) 5598 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5599 } 5600 5601 /* srlq */ 5602 static void gen_srlq(DisasContext *ctx) 5603 { 5604 TCGLabel *l1 = gen_new_label(); 5605 TCGLabel *l2 = gen_new_label(); 5606 TCGv t0 = tcg_temp_local_new(); 5607 TCGv t1 = tcg_temp_local_new(); 5608 TCGv t2 = tcg_temp_local_new(); 5609 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 5610 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 5611 tcg_gen_shr_tl(t2, t1, t2); 5612 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 5613 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 5614 gen_load_spr(t0, SPR_MQ); 5615 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t2); 5616 tcg_gen_br(l2); 5617 gen_set_label(l1); 5618 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 5619 tcg_gen_and_tl(t0, t0, t2); 5620 gen_load_spr(t1, SPR_MQ); 5621 tcg_gen_andc_tl(t1, t1, t2); 5622 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5623 gen_set_label(l2); 5624 tcg_temp_free(t0); 5625 tcg_temp_free(t1); 5626 tcg_temp_free(t2); 5627 if (unlikely(Rc(ctx->opcode) != 0)) 5628 gen_set_Rc0(ctx, 
cpu_gpr[rA(ctx->opcode)]); 5629 } 5630 5631 /* srq */ 5632 static void gen_srq(DisasContext *ctx) 5633 { 5634 TCGLabel *l1 = gen_new_label(); 5635 TCGv t0 = tcg_temp_new(); 5636 TCGv t1 = tcg_temp_new(); 5637 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5638 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5639 tcg_gen_subfi_tl(t1, 32, t1); 5640 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 5641 tcg_gen_or_tl(t1, t0, t1); 5642 gen_store_spr(SPR_MQ, t1); 5643 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20); 5644 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5645 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 5646 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 5647 gen_set_label(l1); 5648 tcg_temp_free(t0); 5649 tcg_temp_free(t1); 5650 if (unlikely(Rc(ctx->opcode) != 0)) 5651 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5652 } 5653 5654 /* PowerPC 602 specific instructions */ 5655 5656 /* dsa */ 5657 static void gen_dsa(DisasContext *ctx) 5658 { 5659 /* XXX: TODO */ 5660 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5661 } 5662 5663 /* esa */ 5664 static void gen_esa(DisasContext *ctx) 5665 { 5666 /* XXX: TODO */ 5667 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5668 } 5669 5670 /* mfrom */ 5671 static void gen_mfrom(DisasContext *ctx) 5672 { 5673 #if defined(CONFIG_USER_ONLY) 5674 GEN_PRIV; 5675 #else 5676 CHK_SV; 5677 gen_helper_602_mfrom(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5678 #endif /* defined(CONFIG_USER_ONLY) */ 5679 } 5680 5681 /* 602 - 603 - G2 TLB management */ 5682 5683 /* tlbld */ 5684 static void gen_tlbld_6xx(DisasContext *ctx) 5685 { 5686 #if defined(CONFIG_USER_ONLY) 5687 GEN_PRIV; 5688 #else 5689 CHK_SV; 5690 gen_helper_6xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5691 #endif /* defined(CONFIG_USER_ONLY) */ 5692 } 5693 5694 /* tlbli */ 5695 static void gen_tlbli_6xx(DisasContext *ctx) 5696 { 5697 #if defined(CONFIG_USER_ONLY) 5698 GEN_PRIV; 5699 #else 5700 CHK_SV; 5701 gen_helper_6xx_tlbi(cpu_env, 
cpu_gpr[rB(ctx->opcode)]); 5702 #endif /* defined(CONFIG_USER_ONLY) */ 5703 } 5704 5705 /* 74xx TLB management */ 5706 5707 /* tlbld */ 5708 static void gen_tlbld_74xx(DisasContext *ctx) 5709 { 5710 #if defined(CONFIG_USER_ONLY) 5711 GEN_PRIV; 5712 #else 5713 CHK_SV; 5714 gen_helper_74xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5715 #endif /* defined(CONFIG_USER_ONLY) */ 5716 } 5717 5718 /* tlbli */ 5719 static void gen_tlbli_74xx(DisasContext *ctx) 5720 { 5721 #if defined(CONFIG_USER_ONLY) 5722 GEN_PRIV; 5723 #else 5724 CHK_SV; 5725 gen_helper_74xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5726 #endif /* defined(CONFIG_USER_ONLY) */ 5727 } 5728 5729 /* POWER instructions not in PowerPC 601 */ 5730 5731 /* clf */ 5732 static void gen_clf(DisasContext *ctx) 5733 { 5734 /* Cache line flush: implemented as no-op */ 5735 } 5736 5737 /* cli */ 5738 static void gen_cli(DisasContext *ctx) 5739 { 5740 #if defined(CONFIG_USER_ONLY) 5741 GEN_PRIV; 5742 #else 5743 /* Cache line invalidate: privileged and treated as no-op */ 5744 CHK_SV; 5745 #endif /* defined(CONFIG_USER_ONLY) */ 5746 } 5747 5748 /* dclst */ 5749 static void gen_dclst(DisasContext *ctx) 5750 { 5751 /* Data cache line store: treated as no-op */ 5752 } 5753 5754 static void gen_mfsri(DisasContext *ctx) 5755 { 5756 #if defined(CONFIG_USER_ONLY) 5757 GEN_PRIV; 5758 #else 5759 int ra = rA(ctx->opcode); 5760 int rd = rD(ctx->opcode); 5761 TCGv t0; 5762 5763 CHK_SV; 5764 t0 = tcg_temp_new(); 5765 gen_addr_reg_index(ctx, t0); 5766 tcg_gen_extract_tl(t0, t0, 28, 4); 5767 gen_helper_load_sr(cpu_gpr[rd], cpu_env, t0); 5768 tcg_temp_free(t0); 5769 if (ra != 0 && ra != rd) 5770 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rd]); 5771 #endif /* defined(CONFIG_USER_ONLY) */ 5772 } 5773 5774 static void gen_rac(DisasContext *ctx) 5775 { 5776 #if defined(CONFIG_USER_ONLY) 5777 GEN_PRIV; 5778 #else 5779 TCGv t0; 5780 5781 CHK_SV; 5782 t0 = tcg_temp_new(); 5783 gen_addr_reg_index(ctx, t0); 5784 gen_helper_rac(cpu_gpr[rD(ctx->opcode)], 
cpu_env, t0); 5785 tcg_temp_free(t0); 5786 #endif /* defined(CONFIG_USER_ONLY) */ 5787 } 5788 5789 static void gen_rfsvc(DisasContext *ctx) 5790 { 5791 #if defined(CONFIG_USER_ONLY) 5792 GEN_PRIV; 5793 #else 5794 CHK_SV; 5795 5796 gen_helper_rfsvc(cpu_env); 5797 gen_sync_exception(ctx); 5798 #endif /* defined(CONFIG_USER_ONLY) */ 5799 } 5800 5801 /* svc is not implemented for now */ 5802 5803 /* BookE specific instructions */ 5804 5805 /* XXX: not implemented on 440 ? */ 5806 static void gen_mfapidi(DisasContext *ctx) 5807 { 5808 /* XXX: TODO */ 5809 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5810 } 5811 5812 /* XXX: not implemented on 440 ? */ 5813 static void gen_tlbiva(DisasContext *ctx) 5814 { 5815 #if defined(CONFIG_USER_ONLY) 5816 GEN_PRIV; 5817 #else 5818 TCGv t0; 5819 5820 CHK_SV; 5821 t0 = tcg_temp_new(); 5822 gen_addr_reg_index(ctx, t0); 5823 gen_helper_tlbiva(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5824 tcg_temp_free(t0); 5825 #endif /* defined(CONFIG_USER_ONLY) */ 5826 } 5827 5828 /* All 405 MAC instructions are translated here */ 5829 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3, 5830 int ra, int rb, int rt, int Rc) 5831 { 5832 TCGv t0, t1; 5833 5834 t0 = tcg_temp_local_new(); 5835 t1 = tcg_temp_local_new(); 5836 5837 switch (opc3 & 0x0D) { 5838 case 0x05: 5839 /* macchw - macchw. - macchwo - macchwo. */ 5840 /* macchws - macchws. - macchwso - macchwso. */ 5841 /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */ 5842 /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */ 5843 /* mulchw - mulchw. */ 5844 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]); 5845 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16); 5846 tcg_gen_ext16s_tl(t1, t1); 5847 break; 5848 case 0x04: 5849 /* macchwu - macchwu. - macchwuo - macchwuo. */ 5850 /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */ 5851 /* mulchwu - mulchwu. 
*/ 5852 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]); 5853 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16); 5854 tcg_gen_ext16u_tl(t1, t1); 5855 break; 5856 case 0x01: 5857 /* machhw - machhw. - machhwo - machhwo. */ 5858 /* machhws - machhws. - machhwso - machhwso. */ 5859 /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */ 5860 /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */ 5861 /* mulhhw - mulhhw. */ 5862 tcg_gen_sari_tl(t0, cpu_gpr[ra], 16); 5863 tcg_gen_ext16s_tl(t0, t0); 5864 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16); 5865 tcg_gen_ext16s_tl(t1, t1); 5866 break; 5867 case 0x00: 5868 /* machhwu - machhwu. - machhwuo - machhwuo. */ 5869 /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */ 5870 /* mulhhwu - mulhhwu. */ 5871 tcg_gen_shri_tl(t0, cpu_gpr[ra], 16); 5872 tcg_gen_ext16u_tl(t0, t0); 5873 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16); 5874 tcg_gen_ext16u_tl(t1, t1); 5875 break; 5876 case 0x0D: 5877 /* maclhw - maclhw. - maclhwo - maclhwo. */ 5878 /* maclhws - maclhws. - maclhwso - maclhwso. */ 5879 /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */ 5880 /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */ 5881 /* mullhw - mullhw. */ 5882 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]); 5883 tcg_gen_ext16s_tl(t1, cpu_gpr[rb]); 5884 break; 5885 case 0x0C: 5886 /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */ 5887 /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */ 5888 /* mullhwu - mullhwu. 
*/ 5889 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]); 5890 tcg_gen_ext16u_tl(t1, cpu_gpr[rb]); 5891 break; 5892 } 5893 if (opc2 & 0x04) { 5894 /* (n)multiply-and-accumulate (0x0C / 0x0E) */ 5895 tcg_gen_mul_tl(t1, t0, t1); 5896 if (opc2 & 0x02) { 5897 /* nmultiply-and-accumulate (0x0E) */ 5898 tcg_gen_sub_tl(t0, cpu_gpr[rt], t1); 5899 } else { 5900 /* multiply-and-accumulate (0x0C) */ 5901 tcg_gen_add_tl(t0, cpu_gpr[rt], t1); 5902 } 5903 5904 if (opc3 & 0x12) { 5905 /* Check overflow and/or saturate */ 5906 TCGLabel *l1 = gen_new_label(); 5907 5908 if (opc3 & 0x10) { 5909 /* Start with XER OV disabled, the most likely case */ 5910 tcg_gen_movi_tl(cpu_ov, 0); 5911 } 5912 if (opc3 & 0x01) { 5913 /* Signed */ 5914 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1); 5915 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1); 5916 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0); 5917 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1); 5918 if (opc3 & 0x02) { 5919 /* Saturate */ 5920 tcg_gen_sari_tl(t0, cpu_gpr[rt], 31); 5921 tcg_gen_xori_tl(t0, t0, 0x7fffffff); 5922 } 5923 } else { 5924 /* Unsigned */ 5925 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1); 5926 if (opc3 & 0x02) { 5927 /* Saturate */ 5928 tcg_gen_movi_tl(t0, UINT32_MAX); 5929 } 5930 } 5931 if (opc3 & 0x10) { 5932 /* Check overflow */ 5933 tcg_gen_movi_tl(cpu_ov, 1); 5934 tcg_gen_movi_tl(cpu_so, 1); 5935 } 5936 gen_set_label(l1); 5937 tcg_gen_mov_tl(cpu_gpr[rt], t0); 5938 } 5939 } else { 5940 tcg_gen_mul_tl(cpu_gpr[rt], t0, t1); 5941 } 5942 tcg_temp_free(t0); 5943 tcg_temp_free(t1); 5944 if (unlikely(Rc) != 0) { 5945 /* Update Rc0 */ 5946 gen_set_Rc0(ctx, cpu_gpr[rt]); 5947 } 5948 } 5949 5950 #define GEN_MAC_HANDLER(name, opc2, opc3) \ 5951 static void glue(gen_, name)(DisasContext *ctx) \ 5952 { \ 5953 gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode), \ 5954 rD(ctx->opcode), Rc(ctx->opcode)); \ 5955 } 5956 5957 /* macchw - macchw. */ 5958 GEN_MAC_HANDLER(macchw, 0x0C, 0x05); 5959 /* macchwo - macchwo. 
 */
GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
/* macchws - macchws. */
GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
/* macchwso - macchwso. */
GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
/* macchwsu - macchwsu. */
GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
/* macchwsuo - macchwsuo. */
GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
/* macchwu - macchwu. */
GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
/* macchwuo - macchwuo. */
GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
/* machhw - machhw. */
GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
/* machhwo - machhwo. */
GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
/* machhws - machhws. */
GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
/* machhwso - machhwso. */
GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
/* machhwsu - machhwsu. */
GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
/* machhwsuo - machhwsuo. */
GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
/* machhwu - machhwu. */
GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
/* machhwuo - machhwuo. */
GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
/* maclhw - maclhw. */
GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
/* maclhwo - maclhwo. */
GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
/* maclhws - maclhws. */
GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
/* maclhwso - maclhwso. */
GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
/* maclhwu - maclhwu. */
GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
/* maclhwuo - maclhwuo. */
GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
/* maclhwsu - maclhwsu. */
GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
/* maclhwsuo - maclhwsuo. */
GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
/* nmacchw - nmacchw. */
GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
/* nmacchwo - nmacchwo. */
GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
/* nmacchws - nmacchws. */
GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
/* nmacchwso - nmacchwso. */
GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
/* nmachhw - nmachhw. */
GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
/* nmachhwo - nmachhwo. */
GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
/* nmachhws - nmachhws. */
GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
/* nmachhwso - nmachhwso. */
GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
/* nmaclhw - nmaclhw. */
GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
/* nmaclhwo - nmaclhwo. */
GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
/* nmaclhws - nmaclhws. */
GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
/* nmaclhwso - nmaclhwso. */
GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);

/* mulchw - mulchw. */
GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
/* mulchwu - mulchwu. */
GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
/* mulhhw - mulhhw. */
GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
/* mulhhwu - mulhhwu. */
GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
/* mullhw - mullhw. */
GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
/* mullhwu - mullhwu. */
GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);

/* mfdcr: privileged read of a Device Control Register (40x/BookE) */
static void gen_mfdcr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv dcrn;

    CHK_SV;
    dcrn = tcg_const_tl(SPR(ctx->opcode));
    gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, dcrn);
    tcg_temp_free(dcrn);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mtdcr: privileged write of a Device Control Register */
static void gen_mtdcr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv dcrn;

    CHK_SV;
    dcrn = tcg_const_tl(SPR(ctx->opcode));
    gen_helper_store_dcr(cpu_env, dcrn, cpu_gpr[rS(ctx->opcode)]);
    tcg_temp_free(dcrn);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mfdcrx: DCR read with the DCR number taken from rA */
/* XXX: not implemented on 440 ? */
static void gen_mfdcrx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
                        cpu_gpr[rA(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mtdcrx: DCR write with the DCR number taken from rA */
/* XXX: not implemented on 440 ? */
static void gen_mtdcrx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
                         cpu_gpr[rS(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mfdcrux (PPC 460) : user-mode access to DCR */
static void gen_mfdcrux(DisasContext *ctx)
{
    gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
                        cpu_gpr[rA(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
}

/* mtdcrux (PPC 460) : user-mode access to DCR */
static void gen_mtdcrux(DisasContext *ctx)
{
    gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
                         cpu_gpr[rS(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
}

/* dccci */
static void gen_dccci(DisasContext *ctx)
{
    CHK_SV;
    /* interpreted as no-op */
}

/* dcread: returns the EA in rD; the loaded value itself is discarded */
static void gen_dcread(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv EA, val;

    CHK_SV;
    gen_set_access_type(ctx, ACCESS_CACHE);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    val = tcg_temp_new();
    gen_qemu_ld32u(ctx, val, EA);
    tcg_temp_free(val);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
    tcg_temp_free(EA);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* icbt */
static void gen_icbt_40x(DisasContext *ctx)
{
    /* interpreted as no-op */
    /* XXX: specification say this is treated as a load by the MMU
     * but does not generate any exception
     */
}

/* iccci */
static void gen_iccci(DisasContext *ctx)
{
    CHK_SV;
    /* interpreted as no-op */
}

/* icread */
static void gen_icread(DisasContext *ctx)
{
    CHK_SV;
    /* interpreted as no-op */
}

/* rfci (supervisor only) */
static void gen_rfci_40x(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* Restore CPU state */
    gen_helper_40x_rfci(cpu_env);
    gen_sync_exception(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}

static void gen_rfci(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* Restore CPU state */
    gen_helper_rfci(cpu_env);
    gen_sync_exception(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* BookE specific */

/* XXX: not implemented on 440 ? */
static void gen_rfdi(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* Restore CPU state */
    gen_helper_rfdi(cpu_env);
    gen_sync_exception(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* XXX: not implemented on 440 ?
*/ 6208 static void gen_rfmci(DisasContext *ctx) 6209 { 6210 #if defined(CONFIG_USER_ONLY) 6211 GEN_PRIV; 6212 #else 6213 CHK_SV; 6214 /* Restore CPU state */ 6215 gen_helper_rfmci(cpu_env); 6216 gen_sync_exception(ctx); 6217 #endif /* defined(CONFIG_USER_ONLY) */ 6218 } 6219 6220 /* TLB management - PowerPC 405 implementation */ 6221 6222 /* tlbre */ 6223 static void gen_tlbre_40x(DisasContext *ctx) 6224 { 6225 #if defined(CONFIG_USER_ONLY) 6226 GEN_PRIV; 6227 #else 6228 CHK_SV; 6229 switch (rB(ctx->opcode)) { 6230 case 0: 6231 gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_env, 6232 cpu_gpr[rA(ctx->opcode)]); 6233 break; 6234 case 1: 6235 gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_env, 6236 cpu_gpr[rA(ctx->opcode)]); 6237 break; 6238 default: 6239 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6240 break; 6241 } 6242 #endif /* defined(CONFIG_USER_ONLY) */ 6243 } 6244 6245 /* tlbsx - tlbsx. */ 6246 static void gen_tlbsx_40x(DisasContext *ctx) 6247 { 6248 #if defined(CONFIG_USER_ONLY) 6249 GEN_PRIV; 6250 #else 6251 TCGv t0; 6252 6253 CHK_SV; 6254 t0 = tcg_temp_new(); 6255 gen_addr_reg_index(ctx, t0); 6256 gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 6257 tcg_temp_free(t0); 6258 if (Rc(ctx->opcode)) { 6259 TCGLabel *l1 = gen_new_label(); 6260 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 6261 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1); 6262 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02); 6263 gen_set_label(l1); 6264 } 6265 #endif /* defined(CONFIG_USER_ONLY) */ 6266 } 6267 6268 /* tlbwe */ 6269 static void gen_tlbwe_40x(DisasContext *ctx) 6270 { 6271 #if defined(CONFIG_USER_ONLY) 6272 GEN_PRIV; 6273 #else 6274 CHK_SV; 6275 6276 switch (rB(ctx->opcode)) { 6277 case 0: 6278 gen_helper_4xx_tlbwe_hi(cpu_env, cpu_gpr[rA(ctx->opcode)], 6279 cpu_gpr[rS(ctx->opcode)]); 6280 break; 6281 case 1: 6282 gen_helper_4xx_tlbwe_lo(cpu_env, cpu_gpr[rA(ctx->opcode)], 6283 cpu_gpr[rS(ctx->opcode)]); 6284 break; 6285 default: 6286 
gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6287 break; 6288 } 6289 #endif /* defined(CONFIG_USER_ONLY) */ 6290 } 6291 6292 /* TLB management - PowerPC 440 implementation */ 6293 6294 /* tlbre */ 6295 static void gen_tlbre_440(DisasContext *ctx) 6296 { 6297 #if defined(CONFIG_USER_ONLY) 6298 GEN_PRIV; 6299 #else 6300 CHK_SV; 6301 6302 switch (rB(ctx->opcode)) { 6303 case 0: 6304 case 1: 6305 case 2: 6306 { 6307 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode)); 6308 gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], cpu_env, 6309 t0, cpu_gpr[rA(ctx->opcode)]); 6310 tcg_temp_free_i32(t0); 6311 } 6312 break; 6313 default: 6314 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6315 break; 6316 } 6317 #endif /* defined(CONFIG_USER_ONLY) */ 6318 } 6319 6320 /* tlbsx - tlbsx. */ 6321 static void gen_tlbsx_440(DisasContext *ctx) 6322 { 6323 #if defined(CONFIG_USER_ONLY) 6324 GEN_PRIV; 6325 #else 6326 TCGv t0; 6327 6328 CHK_SV; 6329 t0 = tcg_temp_new(); 6330 gen_addr_reg_index(ctx, t0); 6331 gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 6332 tcg_temp_free(t0); 6333 if (Rc(ctx->opcode)) { 6334 TCGLabel *l1 = gen_new_label(); 6335 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 6336 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1); 6337 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02); 6338 gen_set_label(l1); 6339 } 6340 #endif /* defined(CONFIG_USER_ONLY) */ 6341 } 6342 6343 /* tlbwe */ 6344 static void gen_tlbwe_440(DisasContext *ctx) 6345 { 6346 #if defined(CONFIG_USER_ONLY) 6347 GEN_PRIV; 6348 #else 6349 CHK_SV; 6350 switch (rB(ctx->opcode)) { 6351 case 0: 6352 case 1: 6353 case 2: 6354 { 6355 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode)); 6356 gen_helper_440_tlbwe(cpu_env, t0, cpu_gpr[rA(ctx->opcode)], 6357 cpu_gpr[rS(ctx->opcode)]); 6358 tcg_temp_free_i32(t0); 6359 } 6360 break; 6361 default: 6362 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6363 break; 6364 } 6365 #endif /* defined(CONFIG_USER_ONLY) */ 6366 } 6367 6368 /* TLB management - 
PowerPC BookE 2.06 implementation */ 6369 6370 /* tlbre */ 6371 static void gen_tlbre_booke206(DisasContext *ctx) 6372 { 6373 #if defined(CONFIG_USER_ONLY) 6374 GEN_PRIV; 6375 #else 6376 CHK_SV; 6377 gen_helper_booke206_tlbre(cpu_env); 6378 #endif /* defined(CONFIG_USER_ONLY) */ 6379 } 6380 6381 /* tlbsx - tlbsx. */ 6382 static void gen_tlbsx_booke206(DisasContext *ctx) 6383 { 6384 #if defined(CONFIG_USER_ONLY) 6385 GEN_PRIV; 6386 #else 6387 TCGv t0; 6388 6389 CHK_SV; 6390 if (rA(ctx->opcode)) { 6391 t0 = tcg_temp_new(); 6392 tcg_gen_mov_tl(t0, cpu_gpr[rD(ctx->opcode)]); 6393 } else { 6394 t0 = tcg_const_tl(0); 6395 } 6396 6397 tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]); 6398 gen_helper_booke206_tlbsx(cpu_env, t0); 6399 tcg_temp_free(t0); 6400 #endif /* defined(CONFIG_USER_ONLY) */ 6401 } 6402 6403 /* tlbwe */ 6404 static void gen_tlbwe_booke206(DisasContext *ctx) 6405 { 6406 #if defined(CONFIG_USER_ONLY) 6407 GEN_PRIV; 6408 #else 6409 CHK_SV; 6410 gen_helper_booke206_tlbwe(cpu_env); 6411 #endif /* defined(CONFIG_USER_ONLY) */ 6412 } 6413 6414 static void gen_tlbivax_booke206(DisasContext *ctx) 6415 { 6416 #if defined(CONFIG_USER_ONLY) 6417 GEN_PRIV; 6418 #else 6419 TCGv t0; 6420 6421 CHK_SV; 6422 t0 = tcg_temp_new(); 6423 gen_addr_reg_index(ctx, t0); 6424 gen_helper_booke206_tlbivax(cpu_env, t0); 6425 tcg_temp_free(t0); 6426 #endif /* defined(CONFIG_USER_ONLY) */ 6427 } 6428 6429 static void gen_tlbilx_booke206(DisasContext *ctx) 6430 { 6431 #if defined(CONFIG_USER_ONLY) 6432 GEN_PRIV; 6433 #else 6434 TCGv t0; 6435 6436 CHK_SV; 6437 t0 = tcg_temp_new(); 6438 gen_addr_reg_index(ctx, t0); 6439 6440 switch((ctx->opcode >> 21) & 0x3) { 6441 case 0: 6442 gen_helper_booke206_tlbilx0(cpu_env, t0); 6443 break; 6444 case 1: 6445 gen_helper_booke206_tlbilx1(cpu_env, t0); 6446 break; 6447 case 3: 6448 gen_helper_booke206_tlbilx3(cpu_env, t0); 6449 break; 6450 default: 6451 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6452 break; 6453 } 6454 6455 
tcg_temp_free(t0); 6456 #endif /* defined(CONFIG_USER_ONLY) */ 6457 } 6458 6459 6460 /* wrtee */ 6461 static void gen_wrtee(DisasContext *ctx) 6462 { 6463 #if defined(CONFIG_USER_ONLY) 6464 GEN_PRIV; 6465 #else 6466 TCGv t0; 6467 6468 CHK_SV; 6469 t0 = tcg_temp_new(); 6470 tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE)); 6471 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE)); 6472 tcg_gen_or_tl(cpu_msr, cpu_msr, t0); 6473 tcg_temp_free(t0); 6474 /* Stop translation to have a chance to raise an exception 6475 * if we just set msr_ee to 1 6476 */ 6477 gen_stop_exception(ctx); 6478 #endif /* defined(CONFIG_USER_ONLY) */ 6479 } 6480 6481 /* wrteei */ 6482 static void gen_wrteei(DisasContext *ctx) 6483 { 6484 #if defined(CONFIG_USER_ONLY) 6485 GEN_PRIV; 6486 #else 6487 CHK_SV; 6488 if (ctx->opcode & 0x00008000) { 6489 tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE)); 6490 /* Stop translation to have a chance to raise an exception */ 6491 gen_stop_exception(ctx); 6492 } else { 6493 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE)); 6494 } 6495 #endif /* defined(CONFIG_USER_ONLY) */ 6496 } 6497 6498 /* PowerPC 440 specific instructions */ 6499 6500 /* dlmzb */ 6501 static void gen_dlmzb(DisasContext *ctx) 6502 { 6503 TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode)); 6504 gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_env, 6505 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); 6506 tcg_temp_free_i32(t0); 6507 } 6508 6509 /* mbar replaces eieio on 440 */ 6510 static void gen_mbar(DisasContext *ctx) 6511 { 6512 /* interpreted as no-op */ 6513 } 6514 6515 /* msync replaces sync on 440 */ 6516 static void gen_msync_4xx(DisasContext *ctx) 6517 { 6518 /* interpreted as no-op */ 6519 } 6520 6521 /* icbt */ 6522 static void gen_icbt_440(DisasContext *ctx) 6523 { 6524 /* interpreted as no-op */ 6525 /* XXX: specification say this is treated as a load by the MMU 6526 * but does not generate any exception 6527 */ 6528 } 6529 6530 /* Embedded.Processor Control */ 6531 
6532 static void gen_msgclr(DisasContext *ctx) 6533 { 6534 #if defined(CONFIG_USER_ONLY) 6535 GEN_PRIV; 6536 #else 6537 CHK_HV; 6538 /* 64-bit server processors compliant with arch 2.x */ 6539 if (ctx->insns_flags & PPC_SEGMENT_64B) { 6540 gen_helper_book3s_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]); 6541 } else { 6542 gen_helper_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]); 6543 } 6544 #endif /* defined(CONFIG_USER_ONLY) */ 6545 } 6546 6547 static void gen_msgsnd(DisasContext *ctx) 6548 { 6549 #if defined(CONFIG_USER_ONLY) 6550 GEN_PRIV; 6551 #else 6552 CHK_HV; 6553 /* 64-bit server processors compliant with arch 2.x */ 6554 if (ctx->insns_flags & PPC_SEGMENT_64B) { 6555 gen_helper_book3s_msgsnd(cpu_gpr[rB(ctx->opcode)]); 6556 } else { 6557 gen_helper_msgsnd(cpu_gpr[rB(ctx->opcode)]); 6558 } 6559 #endif /* defined(CONFIG_USER_ONLY) */ 6560 } 6561 6562 static void gen_msgsync(DisasContext *ctx) 6563 { 6564 #if defined(CONFIG_USER_ONLY) 6565 GEN_PRIV; 6566 #else 6567 CHK_HV; 6568 #endif /* defined(CONFIG_USER_ONLY) */ 6569 /* interpreted as no-op */ 6570 } 6571 6572 #if defined(TARGET_PPC64) 6573 static void gen_maddld(DisasContext *ctx) 6574 { 6575 TCGv_i64 t1 = tcg_temp_new_i64(); 6576 6577 tcg_gen_mul_i64(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 6578 tcg_gen_add_i64(cpu_gpr[rD(ctx->opcode)], t1, cpu_gpr[rC(ctx->opcode)]); 6579 tcg_temp_free_i64(t1); 6580 } 6581 6582 /* maddhd maddhdu */ 6583 static void gen_maddhd_maddhdu(DisasContext *ctx) 6584 { 6585 TCGv_i64 lo = tcg_temp_new_i64(); 6586 TCGv_i64 hi = tcg_temp_new_i64(); 6587 TCGv_i64 t1 = tcg_temp_new_i64(); 6588 6589 if (Rc(ctx->opcode)) { 6590 tcg_gen_mulu2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)], 6591 cpu_gpr[rB(ctx->opcode)]); 6592 tcg_gen_movi_i64(t1, 0); 6593 } else { 6594 tcg_gen_muls2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)], 6595 cpu_gpr[rB(ctx->opcode)]); 6596 tcg_gen_sari_i64(t1, cpu_gpr[rC(ctx->opcode)], 63); 6597 } 6598 tcg_gen_add2_i64(t1, cpu_gpr[rD(ctx->opcode)], lo, hi, 6599 
cpu_gpr[rC(ctx->opcode)], t1); 6600 tcg_temp_free_i64(lo); 6601 tcg_temp_free_i64(hi); 6602 tcg_temp_free_i64(t1); 6603 } 6604 #endif /* defined(TARGET_PPC64) */ 6605 6606 static void gen_tbegin(DisasContext *ctx) 6607 { 6608 if (unlikely(!ctx->tm_enabled)) { 6609 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); 6610 return; 6611 } 6612 gen_helper_tbegin(cpu_env); 6613 } 6614 6615 #define GEN_TM_NOOP(name) \ 6616 static inline void gen_##name(DisasContext *ctx) \ 6617 { \ 6618 if (unlikely(!ctx->tm_enabled)) { \ 6619 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \ 6620 return; \ 6621 } \ 6622 /* Because tbegin always fails in QEMU, these user \ 6623 * space instructions all have a simple implementation: \ 6624 * \ 6625 * CR[0] = 0b0 || MSR[TS] || 0b0 \ 6626 * = 0b0 || 0b00 || 0b0 \ 6627 */ \ 6628 tcg_gen_movi_i32(cpu_crf[0], 0); \ 6629 } 6630 6631 GEN_TM_NOOP(tend); 6632 GEN_TM_NOOP(tabort); 6633 GEN_TM_NOOP(tabortwc); 6634 GEN_TM_NOOP(tabortwci); 6635 GEN_TM_NOOP(tabortdc); 6636 GEN_TM_NOOP(tabortdci); 6637 GEN_TM_NOOP(tsr); 6638 static inline void gen_cp_abort(DisasContext *ctx) 6639 { 6640 // Do Nothing 6641 } 6642 6643 #define GEN_CP_PASTE_NOOP(name) \ 6644 static inline void gen_##name(DisasContext *ctx) \ 6645 { \ 6646 /* Generate invalid exception until \ 6647 * we have an implementation of the copy \ 6648 * paste facility \ 6649 */ \ 6650 gen_invalid(ctx); \ 6651 } 6652 6653 GEN_CP_PASTE_NOOP(copy) 6654 GEN_CP_PASTE_NOOP(paste) 6655 6656 static void gen_tcheck(DisasContext *ctx) 6657 { 6658 if (unlikely(!ctx->tm_enabled)) { 6659 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); 6660 return; 6661 } 6662 /* Because tbegin always fails, the tcheck implementation 6663 * is simple: 6664 * 6665 * CR[CRF] = TDOOMED || MSR[TS] || 0b0 6666 * = 0b1 || 0b00 || 0b0 6667 */ 6668 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0x8); 6669 } 6670 6671 #if defined(CONFIG_USER_ONLY) 6672 #define GEN_TM_PRIV_NOOP(name) \ 6673 static inline void 
gen_##name(DisasContext *ctx) \ 6674 { \ 6675 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); \ 6676 } 6677 6678 #else 6679 6680 #define GEN_TM_PRIV_NOOP(name) \ 6681 static inline void gen_##name(DisasContext *ctx) \ 6682 { \ 6683 CHK_SV; \ 6684 if (unlikely(!ctx->tm_enabled)) { \ 6685 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \ 6686 return; \ 6687 } \ 6688 /* Because tbegin always fails, the implementation is \ 6689 * simple: \ 6690 * \ 6691 * CR[0] = 0b0 || MSR[TS] || 0b0 \ 6692 * = 0b0 || 0b00 | 0b0 \ 6693 */ \ 6694 tcg_gen_movi_i32(cpu_crf[0], 0); \ 6695 } 6696 6697 #endif 6698 6699 GEN_TM_PRIV_NOOP(treclaim); 6700 GEN_TM_PRIV_NOOP(trechkpt); 6701 6702 #include "translate/fp-impl.inc.c" 6703 6704 #include "translate/vmx-impl.inc.c" 6705 6706 #include "translate/vsx-impl.inc.c" 6707 6708 #include "translate/dfp-impl.inc.c" 6709 6710 #include "translate/spe-impl.inc.c" 6711 6712 /* Handles lfdp, lxsd, lxssp */ 6713 static void gen_dform39(DisasContext *ctx) 6714 { 6715 switch (ctx->opcode & 0x3) { 6716 case 0: /* lfdp */ 6717 if (ctx->insns_flags2 & PPC2_ISA205) { 6718 return gen_lfdp(ctx); 6719 } 6720 break; 6721 case 2: /* lxsd */ 6722 if (ctx->insns_flags2 & PPC2_ISA300) { 6723 return gen_lxsd(ctx); 6724 } 6725 break; 6726 case 3: /* lxssp */ 6727 if (ctx->insns_flags2 & PPC2_ISA300) { 6728 return gen_lxssp(ctx); 6729 } 6730 break; 6731 } 6732 return gen_invalid(ctx); 6733 } 6734 6735 /* handles stfdp, lxv, stxsd, stxssp lxvx */ 6736 static void gen_dform3D(DisasContext *ctx) 6737 { 6738 if ((ctx->opcode & 3) == 1) { /* DQ-FORM */ 6739 switch (ctx->opcode & 0x7) { 6740 case 1: /* lxv */ 6741 if (ctx->insns_flags2 & PPC2_ISA300) { 6742 return gen_lxv(ctx); 6743 } 6744 break; 6745 case 5: /* stxv */ 6746 if (ctx->insns_flags2 & PPC2_ISA300) { 6747 return gen_stxv(ctx); 6748 } 6749 break; 6750 } 6751 } else { /* DS-FORM */ 6752 switch (ctx->opcode & 0x3) { 6753 case 0: /* stfdp */ 6754 if (ctx->insns_flags2 & PPC2_ISA205) { 6755 return gen_stfdp(ctx); 
            }
            break;
        case 2: /* stxsd */
            if (ctx->insns_flags2 & PPC2_ISA300) {
                return gen_stxsd(ctx);
            }
            break;
        case 3: /* stxssp */
            if (ctx->insns_flags2 & PPC2_ISA300) {
                return gen_stxssp(ctx);
            }
            break;
        }
    }
    return gen_invalid(ctx);
}

/*
 * Master opcode table: one entry per instruction handler giving the
 * handler, primary/secondary/tertiary opcode fields, the "invalid bits"
 * mask, and the insn-flags feature bit(s) gating the instruction.
 */
static opcode_t opcodes[] = {
GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE),
/* Integer comparison */
GEN_HANDLER(cmp, 0x1F, 0x00, 0x00, 0x00400000, PPC_INTEGER),
GEN_HANDLER(cmpi, 0x0B, 0xFF, 0xFF, 0x00400000, PPC_INTEGER),
GEN_HANDLER(cmpl, 0x1F, 0x00, 0x01, 0x00400001, PPC_INTEGER),
GEN_HANDLER(cmpli, 0x0A, 0xFF, 0xFF, 0x00400000, PPC_INTEGER),
#if defined(TARGET_PPC64)
GEN_HANDLER_E(cmpeqb, 0x1F, 0x00, 0x07, 0x00600000, PPC_NONE, PPC2_ISA300),
#endif
GEN_HANDLER_E(cmpb, 0x1F, 0x1C, 0x0F, 0x00000001, PPC_NONE, PPC2_ISA205),
GEN_HANDLER_E(cmprb, 0x1F, 0x00, 0x06, 0x00400001, PPC_NONE, PPC2_ISA300),
GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL),
/* Integer arithmetic */
GEN_HANDLER(addi, 0x0E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER(addis, 0x0F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER_E(addpcis, 0x13, 0x2, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300),
GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER),
GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER),
GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER),
GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER),
GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
#if defined(TARGET_PPC64)
GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B),
#endif
GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER),
GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER),
GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
/* Integer logical */
GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER),
GEN_HANDLER_E(cnttzw, 0x1F, 0x1A, 0x10, 0x00000000, PPC_NONE, PPC2_ISA300),
GEN_HANDLER_E(copy, 0x1F, 0x06, 0x18, 0x03C00001, PPC_NONE, PPC2_ISA300),
GEN_HANDLER_E(cp_abort, 0x1F, 0x06, 0x1A, 0x03FFF801, PPC_NONE, PPC2_ISA300),
GEN_HANDLER_E(paste, 0x1F, 0x06, 0x1C, 0x03C00000, PPC_NONE, PPC2_ISA300),
GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER),
GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER),
GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER(popcntb, 0x1F, 0x1A, 0x03, 0x0000F801, PPC_POPCNTB),
GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD),
GEN_HANDLER_E(prtyw, 0x1F, 0x1A, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA205),
#if defined(TARGET_PPC64)
GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD),
GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B),
GEN_HANDLER_E(cnttzd, 0x1F, 0x1A, 0x11, 0x00000000, PPC_NONE, PPC2_ISA300),
GEN_HANDLER_E(darn, 0x1F, 0x13, 0x17, 0x001CF801, PPC_NONE, PPC2_ISA300),
GEN_HANDLER_E(prtyd, 0x1F, 0x1A, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA205),
GEN_HANDLER_E(bpermd, 0x1F, 0x1C, 0x07, 0x00000001, PPC_NONE, PPC2_PERM_ISA206),
#endif
/* Integer rotate and shift */
GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER),
GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER),
GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER),
GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER),
#if defined(TARGET_PPC64)
GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B),
GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B),
GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B),
GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B),
GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B),
GEN_HANDLER2_E(extswsli0, "extswsli", 0x1F, 0x1A, 0x1B, 0x00000000,
               PPC_NONE, PPC2_ISA300),
GEN_HANDLER2_E(extswsli1, "extswsli", 0x1F, 0x1B, 0x1B, 0x00000000,
               PPC_NONE, PPC2_ISA300),
#endif
/* Loads and stores */
#if defined(TARGET_PPC64)
GEN_HANDLER(ld, 0x3A, 0xFF, 0xFF, 0x00000000, PPC_64B),
GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX),
GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B),
#endif
/* handles lfdp, lxsd, lxssp */
GEN_HANDLER_E(dform39, 0x39, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205),
/* handles stfdp, lxv, stxsd, stxssp, stxv */
GEN_HANDLER_E(dform3D, 0x3D, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205),
GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING),
GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING),
GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING),
GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING),
/* Memory synchronisation and reservations */
GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x01FFF801, PPC_MEM_EIEIO),
GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM),
GEN_HANDLER_E(lbarx, 0x1F, 0x14, 0x01, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
GEN_HANDLER_E(lharx, 0x1F, 0x14, 0x03, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES),
GEN_HANDLER_E(lwat, 0x1F, 0x06, 0x12, 0x00000001, PPC_NONE, PPC2_ISA300),
GEN_HANDLER_E(stwat, 0x1F, 0x06, 0x16, 0x00000001, PPC_NONE, PPC2_ISA300),
GEN_HANDLER_E(stbcx_, 0x1F, 0x16, 0x15, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
GEN_HANDLER_E(sthcx_, 0x1F, 0x16, 0x16, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES),
#if defined(TARGET_PPC64)
GEN_HANDLER_E(ldat, 0x1F, 0x06, 0x13, 0x00000001, PPC_NONE, PPC2_ISA300),
GEN_HANDLER_E(stdat, 0x1F, 0x06, 0x17, 0x00000001, PPC_NONE, PPC2_ISA300),
GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B),
GEN_HANDLER_E(lqarx, 0x1F, 0x14, 0x08, 0, PPC_NONE, PPC2_LSQ_ISA207),
GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B),
GEN_HANDLER_E(stqcx_, 0x1F, 0x16, 0x05, 0, PPC_NONE, PPC2_LSQ_ISA207),
#endif
GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC),
/* Two "wait" entries: legacy encoding and the ISA 3.0 one */
GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT),
GEN_HANDLER_E(wait, 0x1F, 0x1E, 0x00, 0x039FF801, PPC_NONE, PPC2_ISA300),
/* Branches and flow control */
GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW),
GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW),
GEN_HANDLER_E(bctar, 0x13, 0x10, 0x11, 0x0000E000, PPC_NONE, PPC2_BCTAR_ISA207),
GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER),
GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW),
#if defined(TARGET_PPC64)
GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B),
GEN_HANDLER_E(stop, 0x13, 0x12, 0x0b, 0x03FFF801, PPC_NONE, PPC2_ISA300),
GEN_HANDLER_E(doze, 0x13, 0x12, 0x0c, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
GEN_HANDLER_E(nap, 0x13, 0x12, 0x0d, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
GEN_HANDLER_E(sleep, 0x13, 0x12, 0x0e, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
GEN_HANDLER_E(rvwinkle, 0x13, 0x12, 0x0f, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H),
#endif
GEN_HANDLER(sc, 0x11, 0xFF, 0xFF, 0x03FFF01D, PPC_FLOW),
GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW),
GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
#if defined(TARGET_PPC64)
GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B),
GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B),
#endif
/* Processor control */
GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC),
GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC),
GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC),
GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC),
GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB),
GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC),
#if defined(TARGET_PPC64)
GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B),
GEN_HANDLER_E(setb, 0x1F, 0x00, 0x04, 0x0003F801, PPC_NONE, PPC2_ISA300),
GEN_HANDLER_E(mcrxrx, 0x1F, 0x00, 0x12, 0x007FF801, PPC_NONE, PPC2_ISA300),
#endif
GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001EF801, PPC_MISC),
GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000000, PPC_MISC),
/* Cache management */
GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE),
GEN_HANDLER_E(dcbfep, 0x1F, 0x1F, 0x03, 0x03C00001, PPC_NONE, PPC2_BOOKE206),
GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE),
GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE),
GEN_HANDLER_E(dcbstep, 0x1F, 0x1F, 0x01, 0x03E00001, PPC_NONE, PPC2_BOOKE206),
GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x00000001, PPC_CACHE),
GEN_HANDLER_E(dcbtep, 0x1F, 0x1F, 0x09, 0x00000001, PPC_NONE, PPC2_BOOKE206),
GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x00000001, PPC_CACHE),
GEN_HANDLER_E(dcbtstep, 0x1F, 0x1F, 0x07, 0x00000001, PPC_NONE, PPC2_BOOKE206),
GEN_HANDLER_E(dcbtls, 0x1F, 0x06, 0x05, 0x02000001, PPC_BOOKE, PPC2_BOOKE206),
GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ),
GEN_HANDLER_E(dcbzep, 0x1F, 0x1F, 0x1F, 0x03C00001, PPC_NONE, PPC2_BOOKE206),
GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC),
GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x01800001, PPC_ALTIVEC),
GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC),
GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI),
GEN_HANDLER_E(icbiep, 0x1F, 0x1F, 0x1E, 0x03E00001, PPC_NONE, PPC2_BOOKE206),
GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA),
/* Segment register manipulation */
GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT),
GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT),
GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT),
GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT),
#if defined(TARGET_PPC64)
GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B),
GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001,
             PPC_SEGMENT_64B),
GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B),
GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001,
             PPC_SEGMENT_64B),
GEN_HANDLER2(slbmte, "slbmte", 0x1F, 0x12, 0x0C, 0x001F0001, PPC_SEGMENT_64B),
GEN_HANDLER2(slbmfee, "slbmfee", 0x1F, 0x13, 0x1C, 0x001F0001, PPC_SEGMENT_64B),
GEN_HANDLER2(slbmfev, "slbmfev", 0x1F, 0x13, 0x1A, 0x001F0001, PPC_SEGMENT_64B),
GEN_HANDLER2(slbfee_, "slbfee.", 0x1F, 0x13, 0x1E, 0x001F0000, PPC_SEGMENT_64B),
#endif
/* TLB management */
GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA),
/* XXX Those instructions will need to be handled differently for
 * different ISA versions */
GEN_HANDLER(tlbiel, 0x1F, 0x12, 0x08, 0x001F0001, PPC_MEM_TLBIE),
GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x001F0001, PPC_MEM_TLBIE),
GEN_HANDLER_E(tlbiel, 0x1F, 0x12, 0x08, 0x00100001, PPC_NONE, PPC2_ISA300),
GEN_HANDLER_E(tlbie, 0x1F, 0x12, 0x09, 0x00100001, PPC_NONE, PPC2_ISA300),
GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC),
#if defined(TARGET_PPC64)
GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x031FFC01, PPC_SLBI),
GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI),
GEN_HANDLER_E(slbieg, 0x1F, 0x12, 0x0E, 0x001F0001, PPC_NONE, PPC2_ISA300),
GEN_HANDLER_E(slbsync, 0x1F, 0x12, 0x0A, 0x03FFF801, PPC_NONE, PPC2_ISA300),
#endif
/* External control */
GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN),
GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN),
/* Legacy POWER bridge instructions */
GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR),
GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR),
GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR),
GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR),
/* 602 specific */
GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC),
GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC),
GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC),
GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB),
GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB),
GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB),
GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB),
GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER),
GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER),
GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER),
GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER),
GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER),
/* NOTE(review): inval mask below has 9 hex digits (0x03FFF0001); it does not
 * fit in 32 bits and is silently truncated to 0x3FFF0001 — confirm the
 * intended mask. */
GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER),
GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2),
GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2),
GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2),
GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2),
/* Embedded (4xx / BookE) */
GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI),
GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA),
GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR),
GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR),
GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX),
GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX),
GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX),
GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX),
GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON),
GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON),
GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT),
GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON),
GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON),
GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP),
GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206),
GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI),
GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI),
GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB),
GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB),
GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB),
GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE),
GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE),
GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE),
GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001,
               PPC_NONE, PPC2_BOOKE206),
GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000,
               PPC_NONE, PPC2_BOOKE206),
GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001,
               PPC_NONE, PPC2_BOOKE206),
GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001,
               PPC_NONE, PPC2_BOOKE206),
GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001,
               PPC_NONE, PPC2_BOOKE206),
GEN_HANDLER2_E(msgsnd, "msgsnd", 0x1F, 0x0E, 0x06, 0x03ff0001,
               PPC_NONE, PPC2_PRCNTL),
GEN_HANDLER2_E(msgclr, "msgclr", 0x1F, 0x0E, 0x07, 0x03ff0001,
               PPC_NONE, PPC2_PRCNTL),
GEN_HANDLER2_E(msgsync, "msgsync", 0x1F, 0x16, 0x1B, 0x00000000,
               PPC_NONE, PPC2_PRCNTL),
GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE),
GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE),
GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC),
GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801,
              PPC_BOOKE, PPC2_BOOKE206),
GEN_HANDLER(msync_4xx, 0x1F, 0x16, 0x12, 0x03FFF801, PPC_BOOKE),
GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001,
               PPC_BOOKE, PPC2_BOOKE206),
GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x06, 0x08, 0x03E00001,
             PPC_440_SPEC),
/* AltiVec */
GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC),
GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC),
GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC),
GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC),
GEN_HANDLER(vmladduhm, 0x04, 0x11, 0xFF, 0x00000000, PPC_ALTIVEC),
#if defined(TARGET_PPC64)
GEN_HANDLER_E(maddhd_maddhdu, 0x04, 0x18, 0xFF, 0x00000000, PPC_NONE,
              PPC2_ISA300),
GEN_HANDLER_E(maddld, 0x04, 0x19, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300),
#endif

/* Below: helper macros redefined to expand into table entries. */
#undef GEN_INT_ARITH_ADD
#undef GEN_INT_ARITH_ADD_CONST
#define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov)         \
GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER),
#define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val,                        \
                                add_ca, compute_ca, compute_ov)               \
GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER),
GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
GEN_HANDLER_E(addex, 0x1F, 0x0A, 0x05, 0x00000000, PPC_NONE, PPC2_ISA300),
GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)

#undef GEN_INT_ARITH_DIVW
#define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov)                      \
GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER)
GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0),
GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1),
GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0),
GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1),
GEN_HANDLER_E(divwe, 0x1F, 0x0B, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206),
GEN_HANDLER_E(divweo, 0x1F, 0x0B, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206),
GEN_HANDLER_E(divweu, 0x1F, 0x0B, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206),
GEN_HANDLER_E(divweuo, 0x1F, 0x0B, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206),
GEN_HANDLER_E(modsw, 0x1F, 0x0B, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300),
GEN_HANDLER_E(moduw, 0x1F, 0x0B, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300),

#if defined(TARGET_PPC64)
#undef GEN_INT_ARITH_DIVD
#define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov)                      \
GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0),
GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1),
GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0),
GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1),

GEN_HANDLER_E(divdeu, 0x1F, 0x09, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206),
GEN_HANDLER_E(divdeuo, 0x1F, 0x09, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206),
GEN_HANDLER_E(divde, 0x1F, 0x09, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206),
GEN_HANDLER_E(divdeo, 0x1F, 0x09, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206),
GEN_HANDLER_E(modsd, 0x1F, 0x09, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300),
GEN_HANDLER_E(modud, 0x1F, 0x09, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300),

#undef GEN_INT_ARITH_MUL_HELPER
#define GEN_INT_ARITH_MUL_HELPER(name, opc3)                                  \
GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00),
GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02),
GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17),
#endif

#undef GEN_INT_ARITH_SUBF
#undef GEN_INT_ARITH_SUBF_CONST
#define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov)        \
GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER),
#define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val,                       \
                                 add_ca, compute_ca, compute_ov)              \
GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER),
GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)

#undef GEN_LOGICAL1
#undef GEN_LOGICAL2
#define GEN_LOGICAL2(name, tcg_op, opc, type)                                 \
GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type)
#define GEN_LOGICAL1(name, tcg_op, opc, type)                                 \
GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type)
GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER),
GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER),
GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER),
GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER),
GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER),
GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER),
GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER),
GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER),
#if defined(TARGET_PPC64)
GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B),
#endif

#if defined(TARGET_PPC64)
#undef GEN_PPC64_R2
#undef GEN_PPC64_R4
#define GEN_PPC64_R2(name, opc1, opc2)                                        \
GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000,   \
             PPC_64B)
#define GEN_PPC64_R4(name, opc1, opc2)                                        \
GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000,   \
             PPC_64B),                                                        \
GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000,   \
             PPC_64B),                                                        \
GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000,   \
             PPC_64B)
GEN_PPC64_R4(rldicl, 0x1E, 0x00),
GEN_PPC64_R4(rldicr, 0x1E, 0x02),
GEN_PPC64_R4(rldic, 0x1E, 0x04),
GEN_PPC64_R2(rldcl, 0x1E, 0x08),
GEN_PPC64_R2(rldcr, 0x1E, 0x09),
GEN_PPC64_R4(rldimi, 0x1E, 0x06),
#endif

#undef GEN_LD
#undef GEN_LDU
#undef GEN_LDUX
#undef GEN_LDX_E
#undef GEN_LDS
#define GEN_LD(name, ldop, opc, type)                                         \
GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
#define GEN_LDU(name, ldop, opc, type)                                        \
GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
#define GEN_LDUX(name, ldop, opc2, opc3, type)                                \
GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type),
#define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk)                   \
GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2),
#define GEN_LDS(name, ldop, op, type)                                         \
GEN_LD(name, ldop, op | 0x20, type)                                           \
GEN_LDU(name, ldop, op | 0x21, type)                                          \
GEN_LDUX(name, ldop, 0x17, op | 0x01, type)                                   \
GEN_LDX(name, ldop, 0x17, op | 0x00, type)

GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER)
GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER)
GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER)
GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER)
#if defined(TARGET_PPC64)
GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B)
GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B)
GEN_LDUX(ld, ld64_i64, 0x15, 0x01, PPC_64B)
GEN_LDX(ld, ld64_i64, 0x15, 0x00, PPC_64B)
GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE)

/* HV/P7 and later only */
GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x18, PPC_CILDST)
GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
#endif
GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER)
GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER)

/* External PID based load */
#undef GEN_LDEPX
#define GEN_LDEPX(name, ldop, opc2, opc3)                                     \
GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3,                                    \
              0x00000001, PPC_NONE, PPC2_BOOKE206),

GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02)
GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08)
GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00)
#if defined(TARGET_PPC64)
GEN_LDEPX(ld, DEF_MEMOP(MO_Q), 0x1D, 0x00)
#endif

#undef GEN_ST
#undef GEN_STU
#undef GEN_STUX
#undef GEN_STX_E
#undef GEN_STS
#define GEN_ST(name, stop, opc, type)                                         \
GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
#define GEN_STU(name, stop, opc, type)                                        \
GEN_HANDLER(stop##u, opc, 0xFF, 0xFF, 0x00000000, type),
#define GEN_STUX(name, stop, opc2, opc3, type)                                \
GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type),
#define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk)                   \
GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000000, type, type2),
#define GEN_STS(name, stop, op, type)                                         \
GEN_ST(name, stop, op | 0x20, type)                                           \
GEN_STU(name, stop, op | 0x21, type)                                          \
GEN_STUX(name, stop, 0x17, op | 0x01, type)                                   \
GEN_STX(name, stop, 0x17, op | 0x00, type)

GEN_STS(stb, st8, 0x06, PPC_INTEGER)
GEN_STS(sth, st16, 0x0C, PPC_INTEGER)
GEN_STS(stw, st32, 0x04, PPC_INTEGER)
#if defined(TARGET_PPC64)
GEN_STUX(std, st64_i64, 0x15, 0x05, PPC_64B)
GEN_STX(std, st64_i64, 0x15, 0x04, PPC_64B)
GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE)
GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
#endif
GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER)
GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER)

#undef GEN_STEPX
#define GEN_STEPX(name, ldop, opc2, opc3)                                     \
GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3,                                    \
              0x00000001, PPC_NONE, PPC2_BOOKE206),

GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06)
GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C)
GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04)
#if defined(TARGET_PPC64)
GEN_STEPX(std, DEF_MEMOP(MO_Q), 0x1D, 0x04)
#endif

#undef GEN_CRLOGIC
#define GEN_CRLOGIC(name, tcg_op, opc)                                        \
GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER)
GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08),
GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04),
GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09),
GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07),
GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01),
GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E),
GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D),
GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06),

/* PowerPC 405 multiply-accumulate family */
#undef GEN_MAC_HANDLER
#define GEN_MAC_HANDLER(name, opc2, opc3)                                     \
GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC)
GEN_MAC_HANDLER(macchw, 0x0C, 0x05),
GEN_MAC_HANDLER(macchwo, 0x0C, 0x15),
GEN_MAC_HANDLER(macchws, 0x0C, 0x07),
GEN_MAC_HANDLER(macchwso, 0x0C, 0x17),
GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06),
GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16),
GEN_MAC_HANDLER(macchwu, 0x0C, 0x04),
GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14),
GEN_MAC_HANDLER(machhw, 0x0C, 0x01),
GEN_MAC_HANDLER(machhwo, 0x0C, 0x11),
GEN_MAC_HANDLER(machhws, 0x0C, 0x03),
GEN_MAC_HANDLER(machhwso, 0x0C, 0x13),
GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02),
GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12),
GEN_MAC_HANDLER(machhwu, 0x0C, 0x00),
GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10),
GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D),
GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D),
GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F),
GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F),
GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C),
GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C),
GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E),
GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E),
GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05),
GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15),
GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07),
GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17),
GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01),
GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11),
GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03),
GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13),
GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D),
GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D),
GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F),
GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F),
GEN_MAC_HANDLER(mulchw, 0x08, 0x05),
GEN_MAC_HANDLER(mulchwu, 0x08, 0x04),
GEN_MAC_HANDLER(mulhhw, 0x08, 0x01),
GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00),
GEN_MAC_HANDLER(mullhw, 0x08, 0x0D),
GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C),

/* Transactional memory */
GEN_HANDLER2_E(tbegin, "tbegin", 0x1F, 0x0E, 0x14, 0x01DFF800, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tend, "tend", 0x1F, 0x0E, 0x15, 0x01FFF800, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tabort, "tabort", 0x1F, 0x0E, 0x1C, 0x03E0F800, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tabortwc, "tabortwc", 0x1F, 0x0E, 0x18, 0x00000000, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tabortwci, "tabortwci", 0x1F, 0x0E, 0x1A, 0x00000000, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tabortdc, "tabortdc", 0x1F, 0x0E, 0x19, 0x00000000, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tabortdci, "tabortdci", 0x1F, 0x0E, 0x1B, 0x00000000, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tsr, "tsr", 0x1F, 0x0E, 0x17, 0x03DFF800, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tcheck, "tcheck", 0x1F, 0x0E, 0x16, 0x007FF800, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(treclaim, "treclaim", 0x1F, 0x0E, 0x1D, 0x03E0F800, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(trechkpt, "trechkpt", 0x1F, 0x0E, 0x1F, 0x03FFF800, \
               PPC_NONE, PPC2_TM),

#include "translate/fp-ops.inc.c"

#include "translate/vmx-ops.inc.c"

#include "translate/vsx-ops.inc.c"

#include "translate/dfp-ops.inc.c"

#include "translate/spe-ops.inc.c"
};

#include "helper_regs.h"
#include "translate_init.inc.c"

/*****************************************************************************/
/* Misc PowerPC helpers */
void ppc_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf,
                        int flags)
{
/* Registers printed per output row (GPRs / FPRs respectively) */
#define RGPL  4
#define RFPL  4

    PowerPCCPU *cpu = POWERPC_CPU(cs);
    CPUPPCState *env = &cpu->env;
    int i;

    /* Core control-flow registers. */
    cpu_fprintf(f, "NIP " TARGET_FMT_lx " LR " TARGET_FMT_lx " CTR "
                TARGET_FMT_lx " XER " TARGET_FMT_lx " CPU#%d\n",
                env->nip, env->lr, env->ctr, cpu_read_xer(env),
                cs->cpu_index);
    cpu_fprintf(f, "MSR " TARGET_FMT_lx " HID0 " TARGET_FMT_lx " HF "
                TARGET_FMT_lx " iidx %d didx %d\n",
                env->msr, env->spr[SPR_HID0],
                env->hflags, env->immu_idx, env->dmmu_idx);
#if !defined(NO_TIMER_DUMP)
    /* Time base (and decrementer in system-mode builds).  Note the
     * argument list is assembled under the same #if conditions as the
     * format string so they stay in sync. */
    cpu_fprintf(f, "TB %08" PRIu32 " %08" PRIu64
#if !defined(CONFIG_USER_ONLY)
                " DECR %08" PRIu32
#endif
                "\n",
                cpu_ppc_load_tbu(env), cpu_ppc_load_tbl(env)
#if !defined(CONFIG_USER_ONLY)
                , cpu_ppc_load_decr(env)
#endif
                );
#endif
    /* General-purpose registers, RGPL per row. */
    for (i = 0; i < 32; i++) {
        if ((i & (RGPL - 1)) == 0)
            cpu_fprintf(f, "GPR%02d", i);
        cpu_fprintf(f, " %016" PRIx64, ppc_dump_gpr(env, i));
        if ((i & (RGPL - 1)) == (RGPL - 1))
            cpu_fprintf(f, "\n");
    }
    /* Condition register: raw hex nibbles, then a decoded L/G/E/O view
     * per CR field. */
    cpu_fprintf(f, "CR ");
    for (i = 0; i < 8; i++)
        cpu_fprintf(f, "%01x", env->crf[i]);
    cpu_fprintf(f, " [");
    for (i = 0; i < 8; i++) {
        char a = '-';
        if (env->crf[i] & 0x08)
            a = 'L';
        else if (env->crf[i] & 0x04)
            a = 'G';
        else if (env->crf[i] & 0x02)
            a = 'E';
        cpu_fprintf(f, " %c%c", a, env->crf[i] & 0x01 ? 'O' : ' ');
    }
    cpu_fprintf(f, " ] RES " TARGET_FMT_lx "\n",
                env->reserve_addr);

    if (flags & CPU_DUMP_FPU) {
        /* Floating-point registers as raw 64-bit images, RFPL per row. */
        for (i = 0; i < 32; i++) {
            if ((i & (RFPL - 1)) == 0) {
                cpu_fprintf(f, "FPR%02d", i);
            }
            cpu_fprintf(f, " %016" PRIx64, *((uint64_t *)&env->fpr[i]));
            if ((i & (RFPL - 1)) == (RFPL - 1)) {
                cpu_fprintf(f, "\n");
            }
        }
        cpu_fprintf(f, "FPSCR " TARGET_FMT_lx "\n", env->fpscr);
    }

#if !defined(CONFIG_USER_ONLY)
    /* Privileged SPRs are only meaningful in system-mode emulation. */
    cpu_fprintf(f, " SRR0 " TARGET_FMT_lx " SRR1 " TARGET_FMT_lx
                " PVR " TARGET_FMT_lx " VRSAVE " TARGET_FMT_lx "\n",
                env->spr[SPR_SRR0], env->spr[SPR_SRR1],
                env->spr[SPR_PVR], env->spr[SPR_VRSAVE]);

    cpu_fprintf(f, "SPRG0 " TARGET_FMT_lx " SPRG1 " TARGET_FMT_lx
                " SPRG2 " TARGET_FMT_lx " SPRG3 " TARGET_FMT_lx "\n",
                env->spr[SPR_SPRG0], env->spr[SPR_SPRG1],
                env->spr[SPR_SPRG2], env->spr[SPR_SPRG3]);

    cpu_fprintf(f, "SPRG4 " TARGET_FMT_lx " SPRG5 " TARGET_FMT_lx
                " SPRG6 " TARGET_FMT_lx " SPRG7 " TARGET_FMT_lx "\n",
                env->spr[SPR_SPRG4], env->spr[SPR_SPRG5],
                env->spr[SPR_SPRG6], env->spr[SPR_SPRG7]);

#if defined(TARGET_PPC64)
    if (env->excp_model == POWERPC_EXCP_POWER7 ||
        env->excp_model == POWERPC_EXCP_POWER8) {
        cpu_fprintf(f, "HSRR0 " TARGET_FMT_lx " HSRR1 " TARGET_FMT_lx "\n",
                    env->spr[SPR_HSRR0], env->spr[SPR_HSRR1]);
    }
#endif
    if (env->excp_model == POWERPC_EXCP_BOOKE) {
        /* BookE-specific exception/debug SPRs. */
        cpu_fprintf(f, "CSRR0 " TARGET_FMT_lx " CSRR1 " TARGET_FMT_lx
                    " MCSRR0 " TARGET_FMT_lx " MCSRR1 " TARGET_FMT_lx "\n",
                    env->spr[SPR_BOOKE_CSRR0], env->spr[SPR_BOOKE_CSRR1],
                    env->spr[SPR_BOOKE_MCSRR0], env->spr[SPR_BOOKE_MCSRR1]);

        cpu_fprintf(f, " TCR " TARGET_FMT_lx " TSR " TARGET_FMT_lx
                    " ESR " TARGET_FMT_lx " DEAR " TARGET_FMT_lx "\n",
                    env->spr[SPR_BOOKE_TCR], env->spr[SPR_BOOKE_TSR],
                    env->spr[SPR_BOOKE_ESR], env->spr[SPR_BOOKE_DEAR]);

        cpu_fprintf(f, " PIR " TARGET_FMT_lx " DECAR " TARGET_FMT_lx
                    " IVPR " TARGET_FMT_lx " EPCR " TARGET_FMT_lx "\n",
                    env->spr[SPR_BOOKE_PIR], env->spr[SPR_BOOKE_DECAR],
                    env->spr[SPR_BOOKE_IVPR], env->spr[SPR_BOOKE_EPCR]);

        cpu_fprintf(f, " MCSR " TARGET_FMT_lx " SPRG8 " TARGET_FMT_lx
                    " EPR " TARGET_FMT_lx "\n",
                    env->spr[SPR_BOOKE_MCSR], env->spr[SPR_BOOKE_SPRG8],
                    env->spr[SPR_BOOKE_EPR]);

        /* FSL-specific */
        cpu_fprintf(f, " MCAR " TARGET_FMT_lx " PID1 " TARGET_FMT_lx
                    " PID2 " TARGET_FMT_lx " SVR " TARGET_FMT_lx "\n",
                    env->spr[SPR_Exxx_MCAR], env->spr[SPR_BOOKE_PID1],
                    env->spr[SPR_BOOKE_PID2], env->spr[SPR_E500_SVR]);

        /*
         * IVORs are left out as they are large and do not change often --
         * they can be read with "p $ivor0", "p $ivor1", etc.
         */
    }

#if defined(TARGET_PPC64)
    if (env->flags & POWERPC_FLAG_CFAR) {
        cpu_fprintf(f, " CFAR " TARGET_FMT_lx"\n", env->cfar);
    }
#endif

    if (env->spr_cb[SPR_LPCR].name)
        cpu_fprintf(f, " LPCR " TARGET_FMT_lx "\n", env->spr[SPR_LPCR]);

    /* MMU-model-specific registers. */
    switch (env->mmu_model) {
    case POWERPC_MMU_32B:
    case POWERPC_MMU_601:
    case POWERPC_MMU_SOFT_6xx:
    case POWERPC_MMU_SOFT_74xx:
#if defined(TARGET_PPC64)
    case POWERPC_MMU_64B:
    case POWERPC_MMU_2_03:
    case POWERPC_MMU_2_06:
    case POWERPC_MMU_2_07:
    case POWERPC_MMU_3_00:
#endif
        if (env->spr_cb[SPR_SDR1].name) { /* SDR1 Exists */
            cpu_fprintf(f, " SDR1 " TARGET_FMT_lx " ", env->spr[SPR_SDR1]);
        }
        if (env->spr_cb[SPR_PTCR].name) { /* PTCR Exists */
            cpu_fprintf(f, " PTCR " TARGET_FMT_lx " ", env->spr[SPR_PTCR]);
        }
        cpu_fprintf(f, " DAR " TARGET_FMT_lx " DSISR " TARGET_FMT_lx "\n",
                    env->spr[SPR_DAR], env->spr[SPR_DSISR]);
        break;
    case POWERPC_MMU_BOOKE206:
        cpu_fprintf(f, " MAS0 " TARGET_FMT_lx " MAS1 " TARGET_FMT_lx
                    " MAS2 " TARGET_FMT_lx " MAS3 " TARGET_FMT_lx "\n",
                    env->spr[SPR_BOOKE_MAS0], env->spr[SPR_BOOKE_MAS1],
                    env->spr[SPR_BOOKE_MAS2], env->spr[SPR_BOOKE_MAS3]);

        cpu_fprintf(f, " MAS4 " TARGET_FMT_lx " MAS6 " TARGET_FMT_lx
                    " MAS7 " TARGET_FMT_lx " PID " TARGET_FMT_lx "\n",
                    env->spr[SPR_BOOKE_MAS4], env->spr[SPR_BOOKE_MAS6],
                    env->spr[SPR_BOOKE_MAS7], env->spr[SPR_BOOKE_PID]);

        cpu_fprintf(f, "MMUCFG " TARGET_FMT_lx " TLB0CFG " TARGET_FMT_lx
                    " TLB1CFG " TARGET_FMT_lx "\n",
                    env->spr[SPR_MMUCFG], env->spr[SPR_BOOKE_TLB0CFG],
                    env->spr[SPR_BOOKE_TLB1CFG]);
        break;
    default:
        break;
    }
#endif

#undef RGPL
#undef RFPL
}

/*
 * Dump per-opcode execution counters.  Compiled to a no-op unless
 * DO_PPC_STATISTICS is defined (see the commented-out define at the
 * top of this file).  Walks the 3-level opcode dispatch table.
 */
void ppc_cpu_dump_statistics(CPUState *cs, FILE*f,
                             fprintf_function cpu_fprintf, int flags)
{
#if defined(DO_PPC_STATISTICS)
    PowerPCCPU *cpu = POWERPC_CPU(cs);
    opc_handler_t **t1, **t2, **t3, *handler;
    int op1, op2, op3;

    t1 = cpu->env.opcodes;
    for (op1 = 0; op1 < 64; op1++) {
        handler = t1[op1];
        if (is_indirect_opcode(handler)) {
            /* Primary opcode dispatches to a second-level table. */
            t2 = ind_table(handler);
            for (op2 = 0; op2 < 32; op2++) {
                handler = t2[op2];
                if (is_indirect_opcode(handler)) {
                    /* Third-level table (opc3). */
                    t3 = ind_table(handler);
                    for (op3 = 0; op3 < 32; op3++) {
                        handler = t3[op3];
                        if (handler->count == 0)
                            continue;
                        cpu_fprintf(f, "%02x %02x %02x (%02x %04d) %16s: "
                                    "%016" PRIx64 " %" PRId64 "\n",
                                    op1, op2, op3, op1, (op3 << 5) | op2,
                                    handler->oname,
                                    handler->count, handler->count);
                    }
                } else {
                    if (handler->count == 0)
                        continue;
                    cpu_fprintf(f, "%02x %02x (%02x %04d) %16s: "
                                "%016" PRIx64 " %" PRId64 "\n",
                                op1, op2, op1, op2, handler->oname,
                                handler->count, handler->count);
                }
            }
        } else {
            if (handler->count == 0)
                continue;
            cpu_fprintf(f, "%02x (%02x ) %16s: %016" PRIx64
                        " %" PRId64 "\n",
                        op1, op1, handler->oname,
                        handler->count, handler->count);
        }
    }
#endif 7615 } 7616 7617 static void ppc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs) 7618 { 7619 DisasContext *ctx = container_of(dcbase, DisasContext, base); 7620 CPUPPCState *env = cs->env_ptr; 7621 int bound; 7622 7623 ctx->exception = POWERPC_EXCP_NONE; 7624 ctx->spr_cb = env->spr_cb; 7625 ctx->pr = msr_pr; 7626 ctx->mem_idx = env->dmmu_idx; 7627 ctx->dr = msr_dr; 7628 #if !defined(CONFIG_USER_ONLY) 7629 ctx->hv = msr_hv || !env->has_hv_mode; 7630 #endif 7631 ctx->insns_flags = env->insns_flags; 7632 ctx->insns_flags2 = env->insns_flags2; 7633 ctx->access_type = -1; 7634 ctx->need_access_type = !(env->mmu_model & POWERPC_MMU_64B); 7635 ctx->le_mode = !!(env->hflags & (1 << MSR_LE)); 7636 ctx->default_tcg_memop_mask = ctx->le_mode ? MO_LE : MO_BE; 7637 ctx->flags = env->flags; 7638 #if defined(TARGET_PPC64) 7639 ctx->sf_mode = msr_is_64bit(env, env->msr); 7640 ctx->has_cfar = !!(env->flags & POWERPC_FLAG_CFAR); 7641 #endif 7642 ctx->lazy_tlb_flush = env->mmu_model == POWERPC_MMU_32B 7643 || env->mmu_model == POWERPC_MMU_601 7644 || (env->mmu_model & POWERPC_MMU_64B); 7645 7646 ctx->fpu_enabled = !!msr_fp; 7647 if ((env->flags & POWERPC_FLAG_SPE) && msr_spe) 7648 ctx->spe_enabled = !!msr_spe; 7649 else 7650 ctx->spe_enabled = false; 7651 if ((env->flags & POWERPC_FLAG_VRE) && msr_vr) 7652 ctx->altivec_enabled = !!msr_vr; 7653 else 7654 ctx->altivec_enabled = false; 7655 if ((env->flags & POWERPC_FLAG_VSX) && msr_vsx) { 7656 ctx->vsx_enabled = !!msr_vsx; 7657 } else { 7658 ctx->vsx_enabled = false; 7659 } 7660 #if defined(TARGET_PPC64) 7661 if ((env->flags & POWERPC_FLAG_TM) && msr_tm) { 7662 ctx->tm_enabled = !!msr_tm; 7663 } else { 7664 ctx->tm_enabled = false; 7665 } 7666 #endif 7667 ctx->gtse = !!(env->spr[SPR_LPCR] & LPCR_GTSE); 7668 if ((env->flags & POWERPC_FLAG_SE) && msr_se) 7669 ctx->singlestep_enabled = CPU_SINGLE_STEP; 7670 else 7671 ctx->singlestep_enabled = 0; 7672 if ((env->flags & POWERPC_FLAG_BE) && msr_be) 7673 
ctx->singlestep_enabled |= CPU_BRANCH_STEP; 7674 if ((env->flags & POWERPC_FLAG_DE) && msr_de) { 7675 ctx->singlestep_enabled = 0; 7676 target_ulong dbcr0 = env->spr[SPR_BOOKE_DBCR0]; 7677 if (dbcr0 & DBCR0_ICMP) { 7678 ctx->singlestep_enabled |= CPU_SINGLE_STEP; 7679 } 7680 if (dbcr0 & DBCR0_BRT) { 7681 ctx->singlestep_enabled |= CPU_BRANCH_STEP; 7682 } 7683 7684 } 7685 if (unlikely(ctx->base.singlestep_enabled)) { 7686 ctx->singlestep_enabled |= GDBSTUB_SINGLE_STEP; 7687 } 7688 #if defined (DO_SINGLE_STEP) && 0 7689 /* Single step trace mode */ 7690 msr_se = 1; 7691 #endif 7692 7693 bound = -(ctx->base.pc_first | TARGET_PAGE_MASK) / 4; 7694 ctx->base.max_insns = MIN(ctx->base.max_insns, bound); 7695 } 7696 7697 static void ppc_tr_tb_start(DisasContextBase *db, CPUState *cs) 7698 { 7699 } 7700 7701 static void ppc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs) 7702 { 7703 tcg_gen_insn_start(dcbase->pc_next); 7704 } 7705 7706 static bool ppc_tr_breakpoint_check(DisasContextBase *dcbase, CPUState *cs, 7707 const CPUBreakpoint *bp) 7708 { 7709 DisasContext *ctx = container_of(dcbase, DisasContext, base); 7710 7711 gen_debug_exception(ctx); 7712 dcbase->is_jmp = DISAS_NORETURN; 7713 /* The address covered by the breakpoint must be included in 7714 [tb->pc, tb->pc + tb->size) in order to for it to be 7715 properly cleared -- thus we increment the PC here so that 7716 the logic setting tb->size below does the right thing. 
*/ 7717 ctx->base.pc_next += 4; 7718 return true; 7719 } 7720 7721 static void ppc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs) 7722 { 7723 DisasContext *ctx = container_of(dcbase, DisasContext, base); 7724 CPUPPCState *env = cs->env_ptr; 7725 opc_handler_t **table, *handler; 7726 7727 LOG_DISAS("----------------\n"); 7728 LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n", 7729 ctx->base.pc_next, ctx->mem_idx, (int)msr_ir); 7730 7731 if (unlikely(need_byteswap(ctx))) { 7732 ctx->opcode = bswap32(cpu_ldl_code(env, ctx->base.pc_next)); 7733 } else { 7734 ctx->opcode = cpu_ldl_code(env, ctx->base.pc_next); 7735 } 7736 LOG_DISAS("translate opcode %08x (%02x %02x %02x %02x) (%s)\n", 7737 ctx->opcode, opc1(ctx->opcode), opc2(ctx->opcode), 7738 opc3(ctx->opcode), opc4(ctx->opcode), 7739 ctx->le_mode ? "little" : "big"); 7740 ctx->base.pc_next += 4; 7741 table = env->opcodes; 7742 handler = table[opc1(ctx->opcode)]; 7743 if (is_indirect_opcode(handler)) { 7744 table = ind_table(handler); 7745 handler = table[opc2(ctx->opcode)]; 7746 if (is_indirect_opcode(handler)) { 7747 table = ind_table(handler); 7748 handler = table[opc3(ctx->opcode)]; 7749 if (is_indirect_opcode(handler)) { 7750 table = ind_table(handler); 7751 handler = table[opc4(ctx->opcode)]; 7752 } 7753 } 7754 } 7755 /* Is opcode *REALLY* valid ? 
*/ 7756 if (unlikely(handler->handler == &gen_invalid)) { 7757 qemu_log_mask(LOG_GUEST_ERROR, "invalid/unsupported opcode: " 7758 "%02x - %02x - %02x - %02x (%08x) " 7759 TARGET_FMT_lx " %d\n", 7760 opc1(ctx->opcode), opc2(ctx->opcode), 7761 opc3(ctx->opcode), opc4(ctx->opcode), 7762 ctx->opcode, ctx->base.pc_next - 4, (int)msr_ir); 7763 } else { 7764 uint32_t inval; 7765 7766 if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE) 7767 && Rc(ctx->opcode))) { 7768 inval = handler->inval2; 7769 } else { 7770 inval = handler->inval1; 7771 } 7772 7773 if (unlikely((ctx->opcode & inval) != 0)) { 7774 qemu_log_mask(LOG_GUEST_ERROR, "invalid bits: %08x for opcode: " 7775 "%02x - %02x - %02x - %02x (%08x) " 7776 TARGET_FMT_lx "\n", ctx->opcode & inval, 7777 opc1(ctx->opcode), opc2(ctx->opcode), 7778 opc3(ctx->opcode), opc4(ctx->opcode), 7779 ctx->opcode, ctx->base.pc_next - 4); 7780 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 7781 ctx->base.is_jmp = DISAS_NORETURN; 7782 return; 7783 } 7784 } 7785 (*(handler->handler))(ctx); 7786 #if defined(DO_PPC_STATISTICS) 7787 handler->count++; 7788 #endif 7789 /* Check trace mode exceptions */ 7790 if (unlikely(ctx->singlestep_enabled & CPU_SINGLE_STEP && 7791 (ctx->base.pc_next <= 0x100 || ctx->base.pc_next > 0xF00) && 7792 ctx->exception != POWERPC_SYSCALL && 7793 ctx->exception != POWERPC_EXCP_TRAP && 7794 ctx->exception != POWERPC_EXCP_BRANCH)) { 7795 uint32_t excp = gen_prep_dbgex(ctx, POWERPC_EXCP_TRACE); 7796 if (excp != POWERPC_EXCP_NONE) 7797 gen_exception_nip(ctx, excp, ctx->base.pc_next); 7798 } 7799 7800 if (tcg_check_temp_count()) { 7801 qemu_log("Opcode %02x %02x %02x %02x (%08x) leaked " 7802 "temporaries\n", opc1(ctx->opcode), opc2(ctx->opcode), 7803 opc3(ctx->opcode), opc4(ctx->opcode), ctx->opcode); 7804 } 7805 7806 ctx->base.is_jmp = ctx->exception == POWERPC_EXCP_NONE ? 
7807 DISAS_NEXT : DISAS_NORETURN; 7808 } 7809 7810 static void ppc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs) 7811 { 7812 DisasContext *ctx = container_of(dcbase, DisasContext, base); 7813 7814 if (ctx->exception == POWERPC_EXCP_NONE) { 7815 gen_goto_tb(ctx, 0, ctx->base.pc_next); 7816 } else if (ctx->exception != POWERPC_EXCP_BRANCH) { 7817 if (unlikely(ctx->base.singlestep_enabled)) { 7818 gen_debug_exception(ctx); 7819 } 7820 /* Generate the return instruction */ 7821 tcg_gen_exit_tb(NULL, 0); 7822 } 7823 } 7824 7825 static void ppc_tr_disas_log(const DisasContextBase *dcbase, CPUState *cs) 7826 { 7827 qemu_log("IN: %s\n", lookup_symbol(dcbase->pc_first)); 7828 log_target_disas(cs, dcbase->pc_first, dcbase->tb->size); 7829 } 7830 7831 static const TranslatorOps ppc_tr_ops = { 7832 .init_disas_context = ppc_tr_init_disas_context, 7833 .tb_start = ppc_tr_tb_start, 7834 .insn_start = ppc_tr_insn_start, 7835 .breakpoint_check = ppc_tr_breakpoint_check, 7836 .translate_insn = ppc_tr_translate_insn, 7837 .tb_stop = ppc_tr_tb_stop, 7838 .disas_log = ppc_tr_disas_log, 7839 }; 7840 7841 void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb) 7842 { 7843 DisasContext ctx; 7844 7845 translator_loop(&ppc_tr_ops, &ctx.base, cs, tb); 7846 } 7847 7848 void restore_state_to_opc(CPUPPCState *env, TranslationBlock *tb, 7849 target_ulong *data) 7850 { 7851 env->nip = data[0]; 7852 } 7853