1 /* 2 * PowerPC emulation for qemu: main translation routines. 3 * 4 * Copyright (c) 2003-2007 Jocelyn Mayer 5 * Copyright (C) 2011 Freescale Semiconductor, Inc. 6 * 7 * This library is free software; you can redistribute it and/or 8 * modify it under the terms of the GNU Lesser General Public 9 * License as published by the Free Software Foundation; either 10 * version 2 of the License, or (at your option) any later version. 11 * 12 * This library is distributed in the hope that it will be useful, 13 * but WITHOUT ANY WARRANTY; without even the implied warranty of 14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 15 * Lesser General Public License for more details. 16 * 17 * You should have received a copy of the GNU Lesser General Public 18 * License along with this library; if not, see <http://www.gnu.org/licenses/>. 19 */ 20 21 #include "qemu/osdep.h" 22 #include "cpu.h" 23 #include "internal.h" 24 #include "disas/disas.h" 25 #include "exec/exec-all.h" 26 #include "tcg-op.h" 27 #include "qemu/host-utils.h" 28 #include "exec/cpu_ldst.h" 29 30 #include "exec/helper-proto.h" 31 #include "exec/helper-gen.h" 32 33 #include "trace-tcg.h" 34 #include "exec/translator.h" 35 #include "exec/log.h" 36 #include "qemu/atomic128.h" 37 38 39 #define CPU_SINGLE_STEP 0x1 40 #define CPU_BRANCH_STEP 0x2 41 #define GDBSTUB_SINGLE_STEP 0x4 42 43 /* Include definitions for instructions classes and implementations flags */ 44 //#define PPC_DEBUG_DISAS 45 //#define DO_PPC_STATISTICS 46 47 #ifdef PPC_DEBUG_DISAS 48 # define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__) 49 #else 50 # define LOG_DISAS(...) do { } while (0) 51 #endif 52 /*****************************************************************************/ 53 /* Code translation helpers */ 54 55 /* global register indexes */ 56 static char cpu_reg_names[10*3 + 22*4 /* GPR */ 57 + 10*4 + 22*5 /* SPE GPRh */ 58 + 10*4 + 22*5 /* FPR */ 59 + 2*(10*6 + 22*7) /* AVRh, AVRl */ 60 + 10*5 + 22*6 /* VSR */ 61 + 8*5 /* CRF */]; 62 static TCGv cpu_gpr[32]; 63 static TCGv cpu_gprh[32]; 64 static TCGv_i64 cpu_fpr[32]; 65 static TCGv_i64 cpu_avrh[32], cpu_avrl[32]; 66 static TCGv_i64 cpu_vsr[32]; 67 static TCGv_i32 cpu_crf[8]; 68 static TCGv cpu_nip; 69 static TCGv cpu_msr; 70 static TCGv cpu_ctr; 71 static TCGv cpu_lr; 72 #if defined(TARGET_PPC64) 73 static TCGv cpu_cfar; 74 #endif 75 static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca, cpu_ov32, cpu_ca32; 76 static TCGv cpu_reserve; 77 static TCGv cpu_reserve_val; 78 static TCGv cpu_fpscr; 79 static TCGv_i32 cpu_access_type; 80 81 #include "exec/gen-icount.h" 82 83 void ppc_translate_init(void) 84 { 85 int i; 86 char* p; 87 size_t cpu_reg_names_size; 88 89 p = cpu_reg_names; 90 cpu_reg_names_size = sizeof(cpu_reg_names); 91 92 for (i = 0; i < 8; i++) { 93 snprintf(p, cpu_reg_names_size, "crf%d", i); 94 cpu_crf[i] = tcg_global_mem_new_i32(cpu_env, 95 offsetof(CPUPPCState, crf[i]), p); 96 p += 5; 97 cpu_reg_names_size -= 5; 98 } 99 100 for (i = 0; i < 32; i++) { 101 snprintf(p, cpu_reg_names_size, "r%d", i); 102 cpu_gpr[i] = tcg_global_mem_new(cpu_env, 103 offsetof(CPUPPCState, gpr[i]), p); 104 p += (i < 10) ? 3 : 4; 105 cpu_reg_names_size -= (i < 10) ? 3 : 4; 106 snprintf(p, cpu_reg_names_size, "r%dH", i); 107 cpu_gprh[i] = tcg_global_mem_new(cpu_env, 108 offsetof(CPUPPCState, gprh[i]), p); 109 p += (i < 10) ? 4 : 5; 110 cpu_reg_names_size -= (i < 10) ? 
4 : 5; 111 112 snprintf(p, cpu_reg_names_size, "fp%d", i); 113 cpu_fpr[i] = tcg_global_mem_new_i64(cpu_env, 114 offsetof(CPUPPCState, fpr[i]), p); 115 p += (i < 10) ? 4 : 5; 116 cpu_reg_names_size -= (i < 10) ? 4 : 5; 117 118 snprintf(p, cpu_reg_names_size, "avr%dH", i); 119 #ifdef HOST_WORDS_BIGENDIAN 120 cpu_avrh[i] = tcg_global_mem_new_i64(cpu_env, 121 offsetof(CPUPPCState, avr[i].u64[0]), p); 122 #else 123 cpu_avrh[i] = tcg_global_mem_new_i64(cpu_env, 124 offsetof(CPUPPCState, avr[i].u64[1]), p); 125 #endif 126 p += (i < 10) ? 6 : 7; 127 cpu_reg_names_size -= (i < 10) ? 6 : 7; 128 129 snprintf(p, cpu_reg_names_size, "avr%dL", i); 130 #ifdef HOST_WORDS_BIGENDIAN 131 cpu_avrl[i] = tcg_global_mem_new_i64(cpu_env, 132 offsetof(CPUPPCState, avr[i].u64[1]), p); 133 #else 134 cpu_avrl[i] = tcg_global_mem_new_i64(cpu_env, 135 offsetof(CPUPPCState, avr[i].u64[0]), p); 136 #endif 137 p += (i < 10) ? 6 : 7; 138 cpu_reg_names_size -= (i < 10) ? 6 : 7; 139 snprintf(p, cpu_reg_names_size, "vsr%d", i); 140 cpu_vsr[i] = tcg_global_mem_new_i64(cpu_env, 141 offsetof(CPUPPCState, vsr[i]), p); 142 p += (i < 10) ? 5 : 6; 143 cpu_reg_names_size -= (i < 10) ? 5 : 6; 144 } 145 146 cpu_nip = tcg_global_mem_new(cpu_env, 147 offsetof(CPUPPCState, nip), "nip"); 148 149 cpu_msr = tcg_global_mem_new(cpu_env, 150 offsetof(CPUPPCState, msr), "msr"); 151 152 cpu_ctr = tcg_global_mem_new(cpu_env, 153 offsetof(CPUPPCState, ctr), "ctr"); 154 155 cpu_lr = tcg_global_mem_new(cpu_env, 156 offsetof(CPUPPCState, lr), "lr"); 157 158 #if defined(TARGET_PPC64) 159 cpu_cfar = tcg_global_mem_new(cpu_env, 160 offsetof(CPUPPCState, cfar), "cfar"); 161 #endif 162 163 cpu_xer = tcg_global_mem_new(cpu_env, 164 offsetof(CPUPPCState, xer), "xer"); 165 cpu_so = tcg_global_mem_new(cpu_env, 166 offsetof(CPUPPCState, so), "SO"); 167 cpu_ov = tcg_global_mem_new(cpu_env, 168 offsetof(CPUPPCState, ov), "OV"); 169 cpu_ca = tcg_global_mem_new(cpu_env, 170 offsetof(CPUPPCState, ca), "CA"); 171 cpu_ov32 = tcg_global_mem_new(cpu_env, 172 offsetof(CPUPPCState, ov32), "OV32"); 173 cpu_ca32 = tcg_global_mem_new(cpu_env, 174 offsetof(CPUPPCState, ca32), "CA32"); 175 176 cpu_reserve = tcg_global_mem_new(cpu_env, 177 offsetof(CPUPPCState, reserve_addr), 178 "reserve_addr"); 179 cpu_reserve_val = tcg_global_mem_new(cpu_env, 180 offsetof(CPUPPCState, reserve_val), 181 "reserve_val"); 182 183 cpu_fpscr = tcg_global_mem_new(cpu_env, 184 offsetof(CPUPPCState, fpscr), "fpscr"); 185 186 cpu_access_type = tcg_global_mem_new_i32(cpu_env, 187 offsetof(CPUPPCState, access_type), "access_type"); 188 } 189 190 /* internal defines */ 191 struct DisasContext { 192 DisasContextBase base; 193 uint32_t opcode; 194 uint32_t exception; 195 /* Routine used to access memory */ 196 bool pr, hv, dr, le_mode; 197 bool lazy_tlb_flush; 198 bool need_access_type; 199 int mem_idx; 200 int access_type; 201 /* Translation flags */ 202 TCGMemOp default_tcg_memop_mask; 203 #if defined(TARGET_PPC64) 204 bool sf_mode; 205 bool has_cfar; 206 #endif 207 bool fpu_enabled; 208 bool altivec_enabled; 209 bool vsx_enabled; 210 bool spe_enabled; 211 bool tm_enabled; 212 bool gtse; 213 ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */ 214 int singlestep_enabled; 215 uint32_t flags; 216 uint64_t insns_flags; 217 uint64_t insns_flags2; 218 }; 219 220 /* Return true iff byteswap is needed in a scalar memop */ 221 static inline bool need_byteswap(const DisasContext *ctx) 222 { 223 #if defined(TARGET_WORDS_BIGENDIAN) 224 return ctx->le_mode; 225 #else 226 return !ctx->le_mode; 227 #endif 
228 } 229 230 /* True when active word size < size of target_long. */ 231 #ifdef TARGET_PPC64 232 # define NARROW_MODE(C) (!(C)->sf_mode) 233 #else 234 # define NARROW_MODE(C) 0 235 #endif 236 237 struct opc_handler_t { 238 /* invalid bits for instruction 1 (Rc(opcode) == 0) */ 239 uint32_t inval1; 240 /* invalid bits for instruction 2 (Rc(opcode) == 1) */ 241 uint32_t inval2; 242 /* instruction type */ 243 uint64_t type; 244 /* extended instruction type */ 245 uint64_t type2; 246 /* handler */ 247 void (*handler)(DisasContext *ctx); 248 #if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU) 249 const char *oname; 250 #endif 251 #if defined(DO_PPC_STATISTICS) 252 uint64_t count; 253 #endif 254 }; 255 256 /* SPR load/store helpers */ 257 static inline void gen_load_spr(TCGv t, int reg) 258 { 259 tcg_gen_ld_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg])); 260 } 261 262 static inline void gen_store_spr(int reg, TCGv t) 263 { 264 tcg_gen_st_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg])); 265 } 266 267 static inline void gen_set_access_type(DisasContext *ctx, int access_type) 268 { 269 if (ctx->need_access_type && ctx->access_type != access_type) { 270 tcg_gen_movi_i32(cpu_access_type, access_type); 271 ctx->access_type = access_type; 272 } 273 } 274 275 static inline void gen_update_nip(DisasContext *ctx, target_ulong nip) 276 { 277 if (NARROW_MODE(ctx)) { 278 nip = (uint32_t)nip; 279 } 280 tcg_gen_movi_tl(cpu_nip, nip); 281 } 282 283 static void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error) 284 { 285 TCGv_i32 t0, t1; 286 287 /* These are all synchronous exceptions, we set the PC back to 288 * the faulting instruction 289 */ 290 if (ctx->exception == POWERPC_EXCP_NONE) { 291 gen_update_nip(ctx, ctx->base.pc_next - 4); 292 } 293 t0 = tcg_const_i32(excp); 294 t1 = tcg_const_i32(error); 295 gen_helper_raise_exception_err(cpu_env, t0, t1); 296 tcg_temp_free_i32(t0); 297 tcg_temp_free_i32(t1); 298 ctx->exception = (excp); 299 } 300 301 static void gen_exception(DisasContext *ctx, uint32_t excp) 302 { 303 TCGv_i32 t0; 304 305 /* These are all synchronous exceptions, we set the PC back to 306 * the faulting instruction 307 */ 308 if (ctx->exception == POWERPC_EXCP_NONE) { 309 gen_update_nip(ctx, ctx->base.pc_next - 4); 310 } 311 t0 = tcg_const_i32(excp); 312 gen_helper_raise_exception(cpu_env, t0); 313 tcg_temp_free_i32(t0); 314 ctx->exception = (excp); 315 } 316 317 static void gen_exception_nip(DisasContext *ctx, uint32_t excp, 318 target_ulong nip) 319 { 320 TCGv_i32 t0; 321 322 gen_update_nip(ctx, nip); 323 t0 = tcg_const_i32(excp); 324 gen_helper_raise_exception(cpu_env, t0); 325 tcg_temp_free_i32(t0); 326 ctx->exception = (excp); 327 } 328 329 /* Translates the EXCP_TRACE/BRANCH exceptions used on most PowerPCs to 330 * EXCP_DEBUG, if we are running on cores using the debug enable bit (e.g. 331 * BookE). 332 */ 333 static uint32_t gen_prep_dbgex(DisasContext *ctx, uint32_t excp) 334 { 335 if ((ctx->singlestep_enabled & CPU_SINGLE_STEP) 336 && (excp == POWERPC_EXCP_BRANCH)) { 337 /* Trace excpt. 
has priority */ 338 excp = POWERPC_EXCP_TRACE; 339 } 340 if (ctx->flags & POWERPC_FLAG_DE) { 341 target_ulong dbsr = 0; 342 switch (excp) { 343 case POWERPC_EXCP_TRACE: 344 dbsr = DBCR0_ICMP; 345 break; 346 case POWERPC_EXCP_BRANCH: 347 dbsr = DBCR0_BRT; 348 break; 349 } 350 TCGv t0 = tcg_temp_new(); 351 gen_load_spr(t0, SPR_BOOKE_DBSR); 352 tcg_gen_ori_tl(t0, t0, dbsr); 353 gen_store_spr(SPR_BOOKE_DBSR, t0); 354 tcg_temp_free(t0); 355 return POWERPC_EXCP_DEBUG; 356 } else { 357 return excp; 358 } 359 } 360 361 static void gen_debug_exception(DisasContext *ctx) 362 { 363 TCGv_i32 t0; 364 365 /* These are all synchronous exceptions, we set the PC back to 366 * the faulting instruction 367 */ 368 if ((ctx->exception != POWERPC_EXCP_BRANCH) && 369 (ctx->exception != POWERPC_EXCP_SYNC)) { 370 gen_update_nip(ctx, ctx->base.pc_next); 371 } 372 t0 = tcg_const_i32(EXCP_DEBUG); 373 gen_helper_raise_exception(cpu_env, t0); 374 tcg_temp_free_i32(t0); 375 } 376 377 static inline void gen_inval_exception(DisasContext *ctx, uint32_t error) 378 { 379 /* Will be converted to program check if needed */ 380 gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_INVAL | error); 381 } 382 383 static inline void gen_priv_exception(DisasContext *ctx, uint32_t error) 384 { 385 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_PRIV | error); 386 } 387 388 static inline void gen_hvpriv_exception(DisasContext *ctx, uint32_t error) 389 { 390 /* Will be converted to program check if needed */ 391 gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_PRIV | error); 392 } 393 394 /* Stop translation */ 395 static inline void gen_stop_exception(DisasContext *ctx) 396 { 397 gen_update_nip(ctx, ctx->base.pc_next); 398 ctx->exception = POWERPC_EXCP_STOP; 399 } 400 401 #ifndef CONFIG_USER_ONLY 402 /* No need to update nip here, as execution flow will change */ 403 static inline void gen_sync_exception(DisasContext *ctx) 404 { 405 ctx->exception = POWERPC_EXCP_SYNC; 406 } 407 #endif 408 409 #define GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \ 410 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE) 411 412 #define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2) \ 413 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2) 414 415 #define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type) \ 416 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE) 417 418 #define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2) \ 419 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2) 420 421 #define GEN_HANDLER_E_2(name, opc1, opc2, opc3, opc4, inval, type, type2) \ 422 GEN_OPCODE3(name, opc1, opc2, opc3, opc4, inval, type, type2) 423 424 #define GEN_HANDLER2_E_2(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) \ 425 GEN_OPCODE4(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) 426 427 typedef struct opcode_t { 428 unsigned char opc1, opc2, opc3, opc4; 429 #if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */ 430 unsigned char pad[4]; 431 #endif 432 opc_handler_t handler; 433 const char *oname; 434 } opcode_t; 435 436 /* Helpers for priv. 
check */ 437 #define GEN_PRIV \ 438 do { \ 439 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); return; \ 440 } while (0) 441 442 #if defined(CONFIG_USER_ONLY) 443 #define CHK_HV GEN_PRIV 444 #define CHK_SV GEN_PRIV 445 #define CHK_HVRM GEN_PRIV 446 #else 447 #define CHK_HV \ 448 do { \ 449 if (unlikely(ctx->pr || !ctx->hv)) { \ 450 GEN_PRIV; \ 451 } \ 452 } while (0) 453 #define CHK_SV \ 454 do { \ 455 if (unlikely(ctx->pr)) { \ 456 GEN_PRIV; \ 457 } \ 458 } while (0) 459 #define CHK_HVRM \ 460 do { \ 461 if (unlikely(ctx->pr || !ctx->hv || ctx->dr)) { \ 462 GEN_PRIV; \ 463 } \ 464 } while (0) 465 #endif 466 467 #define CHK_NONE 468 469 /*****************************************************************************/ 470 /* PowerPC instructions table */ 471 472 #if defined(DO_PPC_STATISTICS) 473 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \ 474 { \ 475 .opc1 = op1, \ 476 .opc2 = op2, \ 477 .opc3 = op3, \ 478 .opc4 = 0xff, \ 479 .handler = { \ 480 .inval1 = invl, \ 481 .type = _typ, \ 482 .type2 = _typ2, \ 483 .handler = &gen_##name, \ 484 .oname = stringify(name), \ 485 }, \ 486 .oname = stringify(name), \ 487 } 488 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \ 489 { \ 490 .opc1 = op1, \ 491 .opc2 = op2, \ 492 .opc3 = op3, \ 493 .opc4 = 0xff, \ 494 .handler = { \ 495 .inval1 = invl1, \ 496 .inval2 = invl2, \ 497 .type = _typ, \ 498 .type2 = _typ2, \ 499 .handler = &gen_##name, \ 500 .oname = stringify(name), \ 501 }, \ 502 .oname = stringify(name), \ 503 } 504 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \ 505 { \ 506 .opc1 = op1, \ 507 .opc2 = op2, \ 508 .opc3 = op3, \ 509 .opc4 = 0xff, \ 510 .handler = { \ 511 .inval1 = invl, \ 512 .type = _typ, \ 513 .type2 = _typ2, \ 514 .handler = &gen_##name, \ 515 .oname = onam, \ 516 }, \ 517 .oname = onam, \ 518 } 519 #define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2) \ 520 { \ 521 .opc1 = op1, \ 522 .opc2 = op2, \ 523 .opc3 = op3, \ 524 .opc4 = op4, \ 525 .handler = { \ 526 .inval1 = invl, \ 527 .type = _typ, \ 528 .type2 = _typ2, \ 529 .handler = &gen_##name, \ 530 .oname = stringify(name), \ 531 }, \ 532 .oname = stringify(name), \ 533 } 534 #define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2) \ 535 { \ 536 .opc1 = op1, \ 537 .opc2 = op2, \ 538 .opc3 = op3, \ 539 .opc4 = op4, \ 540 .handler = { \ 541 .inval1 = invl, \ 542 .type = _typ, \ 543 .type2 = _typ2, \ 544 .handler = &gen_##name, \ 545 .oname = onam, \ 546 }, \ 547 .oname = onam, \ 548 } 549 #else 550 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \ 551 { \ 552 .opc1 = op1, \ 553 .opc2 = op2, \ 554 .opc3 = op3, \ 555 .opc4 = 0xff, \ 556 .handler = { \ 557 .inval1 = invl, \ 558 .type = _typ, \ 559 .type2 = _typ2, \ 560 .handler = &gen_##name, \ 561 }, \ 562 .oname = stringify(name), \ 563 } 564 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \ 565 { \ 566 .opc1 = op1, \ 567 .opc2 = op2, \ 568 .opc3 = op3, \ 569 .opc4 = 0xff, \ 570 .handler = { \ 571 .inval1 = invl1, \ 572 .inval2 = invl2, \ 573 .type = _typ, \ 574 .type2 = _typ2, \ 575 .handler = &gen_##name, \ 576 }, \ 577 .oname = stringify(name), \ 578 } 579 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \ 580 { \ 581 .opc1 = op1, \ 582 .opc2 = op2, \ 583 .opc3 = op3, \ 584 .opc4 = 0xff, \ 585 .handler = { \ 586 .inval1 = invl, \ 587 .type = _typ, \ 588 .type2 = _typ2, \ 589 .handler = &gen_##name, \ 590 }, \ 591 .oname = onam, \ 592 } 593 #define GEN_OPCODE3(name, op1, op2, op3, op4, 
invl, _typ, _typ2) \ 594 { \ 595 .opc1 = op1, \ 596 .opc2 = op2, \ 597 .opc3 = op3, \ 598 .opc4 = op4, \ 599 .handler = { \ 600 .inval1 = invl, \ 601 .type = _typ, \ 602 .type2 = _typ2, \ 603 .handler = &gen_##name, \ 604 }, \ 605 .oname = stringify(name), \ 606 } 607 #define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2) \ 608 { \ 609 .opc1 = op1, \ 610 .opc2 = op2, \ 611 .opc3 = op3, \ 612 .opc4 = op4, \ 613 .handler = { \ 614 .inval1 = invl, \ 615 .type = _typ, \ 616 .type2 = _typ2, \ 617 .handler = &gen_##name, \ 618 }, \ 619 .oname = onam, \ 620 } 621 #endif 622 623 /* Invalid instruction */ 624 static void gen_invalid(DisasContext *ctx) 625 { 626 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 627 } 628 629 static opc_handler_t invalid_handler = { 630 .inval1 = 0xFFFFFFFF, 631 .inval2 = 0xFFFFFFFF, 632 .type = PPC_NONE, 633 .type2 = PPC_NONE, 634 .handler = gen_invalid, 635 }; 636 637 /*** Integer comparison ***/ 638 639 static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf) 640 { 641 TCGv t0 = tcg_temp_new(); 642 TCGv t1 = tcg_temp_new(); 643 TCGv_i32 t = tcg_temp_new_i32(); 644 645 tcg_gen_movi_tl(t0, CRF_EQ); 646 tcg_gen_movi_tl(t1, CRF_LT); 647 tcg_gen_movcond_tl((s ? TCG_COND_LT : TCG_COND_LTU), t0, arg0, arg1, t1, t0); 648 tcg_gen_movi_tl(t1, CRF_GT); 649 tcg_gen_movcond_tl((s ? TCG_COND_GT : TCG_COND_GTU), t0, arg0, arg1, t1, t0); 650 651 tcg_gen_trunc_tl_i32(t, t0); 652 tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so); 653 tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t); 654 655 tcg_temp_free(t0); 656 tcg_temp_free(t1); 657 tcg_temp_free_i32(t); 658 } 659 660 static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf) 661 { 662 TCGv t0 = tcg_const_tl(arg1); 663 gen_op_cmp(arg0, t0, s, crf); 664 tcg_temp_free(t0); 665 } 666 667 static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf) 668 { 669 TCGv t0, t1; 670 t0 = tcg_temp_new(); 671 t1 = tcg_temp_new(); 672 if (s) { 673 tcg_gen_ext32s_tl(t0, arg0); 674 tcg_gen_ext32s_tl(t1, arg1); 675 } else { 676 tcg_gen_ext32u_tl(t0, arg0); 677 tcg_gen_ext32u_tl(t1, arg1); 678 } 679 gen_op_cmp(t0, t1, s, crf); 680 tcg_temp_free(t1); 681 tcg_temp_free(t0); 682 } 683 684 static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf) 685 { 686 TCGv t0 = tcg_const_tl(arg1); 687 gen_op_cmp32(arg0, t0, s, crf); 688 tcg_temp_free(t0); 689 } 690 691 static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg) 692 { 693 if (NARROW_MODE(ctx)) { 694 gen_op_cmpi32(reg, 0, 1, 0); 695 } else { 696 gen_op_cmpi(reg, 0, 1, 0); 697 } 698 } 699 700 /* cmp */ 701 static void gen_cmp(DisasContext *ctx) 702 { 703 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) { 704 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 705 1, crfD(ctx->opcode)); 706 } else { 707 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 708 1, crfD(ctx->opcode)); 709 } 710 } 711 712 /* cmpi */ 713 static void gen_cmpi(DisasContext *ctx) 714 { 715 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) { 716 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode), 717 1, crfD(ctx->opcode)); 718 } else { 719 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode), 720 1, crfD(ctx->opcode)); 721 } 722 } 723 724 /* cmpl */ 725 static void gen_cmpl(DisasContext *ctx) 726 { 727 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) { 728 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 729 0, crfD(ctx->opcode)); 730 } else { 731 
gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 732 0, crfD(ctx->opcode)); 733 } 734 } 735 736 /* cmpli */ 737 static void gen_cmpli(DisasContext *ctx) 738 { 739 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) { 740 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode), 741 0, crfD(ctx->opcode)); 742 } else { 743 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode), 744 0, crfD(ctx->opcode)); 745 } 746 } 747 748 /* cmprb - range comparison: isupper, isaplha, islower*/ 749 static void gen_cmprb(DisasContext *ctx) 750 { 751 TCGv_i32 src1 = tcg_temp_new_i32(); 752 TCGv_i32 src2 = tcg_temp_new_i32(); 753 TCGv_i32 src2lo = tcg_temp_new_i32(); 754 TCGv_i32 src2hi = tcg_temp_new_i32(); 755 TCGv_i32 crf = cpu_crf[crfD(ctx->opcode)]; 756 757 tcg_gen_trunc_tl_i32(src1, cpu_gpr[rA(ctx->opcode)]); 758 tcg_gen_trunc_tl_i32(src2, cpu_gpr[rB(ctx->opcode)]); 759 760 tcg_gen_andi_i32(src1, src1, 0xFF); 761 tcg_gen_ext8u_i32(src2lo, src2); 762 tcg_gen_shri_i32(src2, src2, 8); 763 tcg_gen_ext8u_i32(src2hi, src2); 764 765 tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1); 766 tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi); 767 tcg_gen_and_i32(crf, src2lo, src2hi); 768 769 if (ctx->opcode & 0x00200000) { 770 tcg_gen_shri_i32(src2, src2, 8); 771 tcg_gen_ext8u_i32(src2lo, src2); 772 tcg_gen_shri_i32(src2, src2, 8); 773 tcg_gen_ext8u_i32(src2hi, src2); 774 tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1); 775 tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi); 776 tcg_gen_and_i32(src2lo, src2lo, src2hi); 777 tcg_gen_or_i32(crf, crf, src2lo); 778 } 779 tcg_gen_shli_i32(crf, crf, CRF_GT_BIT); 780 tcg_temp_free_i32(src1); 781 tcg_temp_free_i32(src2); 782 tcg_temp_free_i32(src2lo); 783 tcg_temp_free_i32(src2hi); 784 } 785 786 #if defined(TARGET_PPC64) 787 /* cmpeqb */ 788 static void gen_cmpeqb(DisasContext *ctx) 789 { 790 gen_helper_cmpeqb(cpu_crf[crfD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 791 cpu_gpr[rB(ctx->opcode)]); 792 } 793 #endif 794 795 /* isel (PowerPC 2.03 specification) */ 796 static void gen_isel(DisasContext *ctx) 797 { 798 uint32_t bi = rC(ctx->opcode); 799 uint32_t mask = 0x08 >> (bi & 0x03); 800 TCGv t0 = tcg_temp_new(); 801 TCGv zr; 802 803 tcg_gen_extu_i32_tl(t0, cpu_crf[bi >> 2]); 804 tcg_gen_andi_tl(t0, t0, mask); 805 806 zr = tcg_const_tl(0); 807 tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rD(ctx->opcode)], t0, zr, 808 rA(ctx->opcode) ? 
cpu_gpr[rA(ctx->opcode)] : zr, 809 cpu_gpr[rB(ctx->opcode)]); 810 tcg_temp_free(zr); 811 tcg_temp_free(t0); 812 } 813 814 /* cmpb: PowerPC 2.05 specification */ 815 static void gen_cmpb(DisasContext *ctx) 816 { 817 gen_helper_cmpb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 818 cpu_gpr[rB(ctx->opcode)]); 819 } 820 821 /*** Integer arithmetic ***/ 822 823 static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0, 824 TCGv arg1, TCGv arg2, int sub) 825 { 826 TCGv t0 = tcg_temp_new(); 827 828 tcg_gen_xor_tl(cpu_ov, arg0, arg2); 829 tcg_gen_xor_tl(t0, arg1, arg2); 830 if (sub) { 831 tcg_gen_and_tl(cpu_ov, cpu_ov, t0); 832 } else { 833 tcg_gen_andc_tl(cpu_ov, cpu_ov, t0); 834 } 835 tcg_temp_free(t0); 836 if (NARROW_MODE(ctx)) { 837 tcg_gen_extract_tl(cpu_ov, cpu_ov, 31, 1); 838 if (is_isa300(ctx)) { 839 tcg_gen_mov_tl(cpu_ov32, cpu_ov); 840 } 841 } else { 842 if (is_isa300(ctx)) { 843 tcg_gen_extract_tl(cpu_ov32, cpu_ov, 31, 1); 844 } 845 tcg_gen_extract_tl(cpu_ov, cpu_ov, TARGET_LONG_BITS - 1, 1); 846 } 847 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 848 } 849 850 static inline void gen_op_arith_compute_ca32(DisasContext *ctx, 851 TCGv res, TCGv arg0, TCGv arg1, 852 int sub) 853 { 854 TCGv t0; 855 856 if (!is_isa300(ctx)) { 857 return; 858 } 859 860 t0 = tcg_temp_new(); 861 if (sub) { 862 tcg_gen_eqv_tl(t0, arg0, arg1); 863 } else { 864 tcg_gen_xor_tl(t0, arg0, arg1); 865 } 866 tcg_gen_xor_tl(t0, t0, res); 867 tcg_gen_extract_tl(cpu_ca32, t0, 32, 1); 868 tcg_temp_free(t0); 869 } 870 871 /* Common add function */ 872 static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1, 873 TCGv arg2, bool add_ca, bool compute_ca, 874 bool compute_ov, bool compute_rc0) 875 { 876 TCGv t0 = ret; 877 878 if (compute_ca || compute_ov) { 879 t0 = tcg_temp_new(); 880 } 881 882 if (compute_ca) { 883 if (NARROW_MODE(ctx)) { 884 /* Caution: a non-obvious corner case of the spec is that we 885 must produce the *entire* 64-bit addition, but produce the 886 carry into bit 32. 
*/ 887 TCGv t1 = tcg_temp_new(); 888 tcg_gen_xor_tl(t1, arg1, arg2); /* add without carry */ 889 tcg_gen_add_tl(t0, arg1, arg2); 890 if (add_ca) { 891 tcg_gen_add_tl(t0, t0, cpu_ca); 892 } 893 tcg_gen_xor_tl(cpu_ca, t0, t1); /* bits changed w/ carry */ 894 tcg_temp_free(t1); 895 tcg_gen_extract_tl(cpu_ca, cpu_ca, 32, 1); 896 if (is_isa300(ctx)) { 897 tcg_gen_mov_tl(cpu_ca32, cpu_ca); 898 } 899 } else { 900 TCGv zero = tcg_const_tl(0); 901 if (add_ca) { 902 tcg_gen_add2_tl(t0, cpu_ca, arg1, zero, cpu_ca, zero); 903 tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, arg2, zero); 904 } else { 905 tcg_gen_add2_tl(t0, cpu_ca, arg1, zero, arg2, zero); 906 } 907 gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, 0); 908 tcg_temp_free(zero); 909 } 910 } else { 911 tcg_gen_add_tl(t0, arg1, arg2); 912 if (add_ca) { 913 tcg_gen_add_tl(t0, t0, cpu_ca); 914 } 915 } 916 917 if (compute_ov) { 918 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0); 919 } 920 if (unlikely(compute_rc0)) { 921 gen_set_Rc0(ctx, t0); 922 } 923 924 if (t0 != ret) { 925 tcg_gen_mov_tl(ret, t0); 926 tcg_temp_free(t0); 927 } 928 } 929 /* Add functions with two operands */ 930 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \ 931 static void glue(gen_, name)(DisasContext *ctx) \ 932 { \ 933 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \ 934 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 935 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 936 } 937 /* Add functions with one operand and one immediate */ 938 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \ 939 add_ca, compute_ca, compute_ov) \ 940 static void glue(gen_, name)(DisasContext *ctx) \ 941 { \ 942 TCGv t0 = tcg_const_tl(const_val); \ 943 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \ 944 cpu_gpr[rA(ctx->opcode)], t0, \ 945 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 946 tcg_temp_free(t0); \ 947 } 948 949 /* add add. addo addo. */ 950 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0) 951 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1) 952 /* addc addc. addco addco. */ 953 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0) 954 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1) 955 /* adde adde. addeo addeo. */ 956 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0) 957 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1) 958 /* addme addme. addmeo addmeo. */ 959 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0) 960 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1) 961 /* addze addze. 
addzeo addzeo.*/ 962 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0) 963 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1) 964 /* addi */ 965 static void gen_addi(DisasContext *ctx) 966 { 967 target_long simm = SIMM(ctx->opcode); 968 969 if (rA(ctx->opcode) == 0) { 970 /* li case */ 971 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm); 972 } else { 973 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], 974 cpu_gpr[rA(ctx->opcode)], simm); 975 } 976 } 977 /* addic addic.*/ 978 static inline void gen_op_addic(DisasContext *ctx, bool compute_rc0) 979 { 980 TCGv c = tcg_const_tl(SIMM(ctx->opcode)); 981 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 982 c, 0, 1, 0, compute_rc0); 983 tcg_temp_free(c); 984 } 985 986 static void gen_addic(DisasContext *ctx) 987 { 988 gen_op_addic(ctx, 0); 989 } 990 991 static void gen_addic_(DisasContext *ctx) 992 { 993 gen_op_addic(ctx, 1); 994 } 995 996 /* addis */ 997 static void gen_addis(DisasContext *ctx) 998 { 999 target_long simm = SIMM(ctx->opcode); 1000 1001 if (rA(ctx->opcode) == 0) { 1002 /* lis case */ 1003 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm << 16); 1004 } else { 1005 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], 1006 cpu_gpr[rA(ctx->opcode)], simm << 16); 1007 } 1008 } 1009 1010 /* addpcis */ 1011 static void gen_addpcis(DisasContext *ctx) 1012 { 1013 target_long d = DX(ctx->opcode); 1014 1015 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], ctx->base.pc_next + (d << 16)); 1016 } 1017 1018 static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1, 1019 TCGv arg2, int sign, int compute_ov) 1020 { 1021 TCGv_i32 t0 = tcg_temp_new_i32(); 1022 TCGv_i32 t1 = tcg_temp_new_i32(); 1023 TCGv_i32 t2 = tcg_temp_new_i32(); 1024 TCGv_i32 t3 = tcg_temp_new_i32(); 1025 1026 tcg_gen_trunc_tl_i32(t0, arg1); 1027 tcg_gen_trunc_tl_i32(t1, arg2); 1028 if (sign) { 1029 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN); 1030 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1); 1031 tcg_gen_and_i32(t2, t2, t3); 1032 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0); 1033 tcg_gen_or_i32(t2, t2, t3); 1034 tcg_gen_movi_i32(t3, 0); 1035 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1); 1036 tcg_gen_div_i32(t3, t0, t1); 1037 tcg_gen_extu_i32_tl(ret, t3); 1038 } else { 1039 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t1, 0); 1040 tcg_gen_movi_i32(t3, 0); 1041 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1); 1042 tcg_gen_divu_i32(t3, t0, t1); 1043 tcg_gen_extu_i32_tl(ret, t3); 1044 } 1045 if (compute_ov) { 1046 tcg_gen_extu_i32_tl(cpu_ov, t2); 1047 if (is_isa300(ctx)) { 1048 tcg_gen_extu_i32_tl(cpu_ov32, t2); 1049 } 1050 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 1051 } 1052 tcg_temp_free_i32(t0); 1053 tcg_temp_free_i32(t1); 1054 tcg_temp_free_i32(t2); 1055 tcg_temp_free_i32(t3); 1056 1057 if (unlikely(Rc(ctx->opcode) != 0)) 1058 gen_set_Rc0(ctx, ret); 1059 } 1060 /* Div functions */ 1061 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \ 1062 static void glue(gen_, name)(DisasContext *ctx) \ 1063 { \ 1064 gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)], \ 1065 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 1066 sign, compute_ov); \ 1067 } 1068 /* divwu divwu. divwuo divwuo. */ 1069 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0); 1070 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1); 1071 /* divw divw. divwo divwo. */ 1072 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0); 1073 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1); 1074 1075 /* div[wd]eu[o][.] 
*/ 1076 #define GEN_DIVE(name, hlpr, compute_ov) \ 1077 static void gen_##name(DisasContext *ctx) \ 1078 { \ 1079 TCGv_i32 t0 = tcg_const_i32(compute_ov); \ 1080 gen_helper_##hlpr(cpu_gpr[rD(ctx->opcode)], cpu_env, \ 1081 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); \ 1082 tcg_temp_free_i32(t0); \ 1083 if (unlikely(Rc(ctx->opcode) != 0)) { \ 1084 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); \ 1085 } \ 1086 } 1087 1088 GEN_DIVE(divweu, divweu, 0); 1089 GEN_DIVE(divweuo, divweu, 1); 1090 GEN_DIVE(divwe, divwe, 0); 1091 GEN_DIVE(divweo, divwe, 1); 1092 1093 #if defined(TARGET_PPC64) 1094 static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1, 1095 TCGv arg2, int sign, int compute_ov) 1096 { 1097 TCGv_i64 t0 = tcg_temp_new_i64(); 1098 TCGv_i64 t1 = tcg_temp_new_i64(); 1099 TCGv_i64 t2 = tcg_temp_new_i64(); 1100 TCGv_i64 t3 = tcg_temp_new_i64(); 1101 1102 tcg_gen_mov_i64(t0, arg1); 1103 tcg_gen_mov_i64(t1, arg2); 1104 if (sign) { 1105 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN); 1106 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1); 1107 tcg_gen_and_i64(t2, t2, t3); 1108 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0); 1109 tcg_gen_or_i64(t2, t2, t3); 1110 tcg_gen_movi_i64(t3, 0); 1111 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1); 1112 tcg_gen_div_i64(ret, t0, t1); 1113 } else { 1114 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t1, 0); 1115 tcg_gen_movi_i64(t3, 0); 1116 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1); 1117 tcg_gen_divu_i64(ret, t0, t1); 1118 } 1119 if (compute_ov) { 1120 tcg_gen_mov_tl(cpu_ov, t2); 1121 if (is_isa300(ctx)) { 1122 tcg_gen_mov_tl(cpu_ov32, t2); 1123 } 1124 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 1125 } 1126 tcg_temp_free_i64(t0); 1127 tcg_temp_free_i64(t1); 1128 tcg_temp_free_i64(t2); 1129 tcg_temp_free_i64(t3); 1130 1131 if (unlikely(Rc(ctx->opcode) != 0)) 1132 gen_set_Rc0(ctx, ret); 1133 } 1134 1135 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \ 1136 static void glue(gen_, name)(DisasContext *ctx) \ 1137 { \ 1138 gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)], \ 1139 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 1140 sign, compute_ov); \ 1141 } 1142 /* divdu divdu. divduo divduo. */ 1143 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0); 1144 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1); 1145 /* divd divd. divdo divdo. 
*/ 1146 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0); 1147 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1); 1148 1149 GEN_DIVE(divdeu, divdeu, 0); 1150 GEN_DIVE(divdeuo, divdeu, 1); 1151 GEN_DIVE(divde, divde, 0); 1152 GEN_DIVE(divdeo, divde, 1); 1153 #endif 1154 1155 static inline void gen_op_arith_modw(DisasContext *ctx, TCGv ret, TCGv arg1, 1156 TCGv arg2, int sign) 1157 { 1158 TCGv_i32 t0 = tcg_temp_new_i32(); 1159 TCGv_i32 t1 = tcg_temp_new_i32(); 1160 1161 tcg_gen_trunc_tl_i32(t0, arg1); 1162 tcg_gen_trunc_tl_i32(t1, arg2); 1163 if (sign) { 1164 TCGv_i32 t2 = tcg_temp_new_i32(); 1165 TCGv_i32 t3 = tcg_temp_new_i32(); 1166 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN); 1167 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1); 1168 tcg_gen_and_i32(t2, t2, t3); 1169 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0); 1170 tcg_gen_or_i32(t2, t2, t3); 1171 tcg_gen_movi_i32(t3, 0); 1172 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1); 1173 tcg_gen_rem_i32(t3, t0, t1); 1174 tcg_gen_ext_i32_tl(ret, t3); 1175 tcg_temp_free_i32(t2); 1176 tcg_temp_free_i32(t3); 1177 } else { 1178 TCGv_i32 t2 = tcg_const_i32(1); 1179 TCGv_i32 t3 = tcg_const_i32(0); 1180 tcg_gen_movcond_i32(TCG_COND_EQ, t1, t1, t3, t2, t1); 1181 tcg_gen_remu_i32(t3, t0, t1); 1182 tcg_gen_extu_i32_tl(ret, t3); 1183 tcg_temp_free_i32(t2); 1184 tcg_temp_free_i32(t3); 1185 } 1186 tcg_temp_free_i32(t0); 1187 tcg_temp_free_i32(t1); 1188 } 1189 1190 #define GEN_INT_ARITH_MODW(name, opc3, sign) \ 1191 static void glue(gen_, name)(DisasContext *ctx) \ 1192 { \ 1193 gen_op_arith_modw(ctx, cpu_gpr[rD(ctx->opcode)], \ 1194 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 1195 sign); \ 1196 } 1197 1198 GEN_INT_ARITH_MODW(moduw, 0x08, 0); 1199 GEN_INT_ARITH_MODW(modsw, 0x18, 1); 1200 1201 #if defined(TARGET_PPC64) 1202 static inline void gen_op_arith_modd(DisasContext *ctx, TCGv ret, TCGv arg1, 1203 TCGv arg2, int sign) 1204 { 1205 TCGv_i64 t0 = tcg_temp_new_i64(); 1206 TCGv_i64 t1 = tcg_temp_new_i64(); 1207 1208 tcg_gen_mov_i64(t0, arg1); 1209 tcg_gen_mov_i64(t1, arg2); 1210 if (sign) { 1211 TCGv_i64 t2 = tcg_temp_new_i64(); 1212 TCGv_i64 t3 = tcg_temp_new_i64(); 1213 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN); 1214 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1); 1215 tcg_gen_and_i64(t2, t2, t3); 1216 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0); 1217 tcg_gen_or_i64(t2, t2, t3); 1218 tcg_gen_movi_i64(t3, 0); 1219 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1); 1220 tcg_gen_rem_i64(ret, t0, t1); 1221 tcg_temp_free_i64(t2); 1222 tcg_temp_free_i64(t3); 1223 } else { 1224 TCGv_i64 t2 = tcg_const_i64(1); 1225 TCGv_i64 t3 = tcg_const_i64(0); 1226 tcg_gen_movcond_i64(TCG_COND_EQ, t1, t1, t3, t2, t1); 1227 tcg_gen_remu_i64(ret, t0, t1); 1228 tcg_temp_free_i64(t2); 1229 tcg_temp_free_i64(t3); 1230 } 1231 tcg_temp_free_i64(t0); 1232 tcg_temp_free_i64(t1); 1233 } 1234 1235 #define GEN_INT_ARITH_MODD(name, opc3, sign) \ 1236 static void glue(gen_, name)(DisasContext *ctx) \ 1237 { \ 1238 gen_op_arith_modd(ctx, cpu_gpr[rD(ctx->opcode)], \ 1239 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 1240 sign); \ 1241 } 1242 1243 GEN_INT_ARITH_MODD(modud, 0x08, 0); 1244 GEN_INT_ARITH_MODD(modsd, 0x18, 1); 1245 #endif 1246 1247 /* mulhw mulhw. 
*/ 1248 static void gen_mulhw(DisasContext *ctx) 1249 { 1250 TCGv_i32 t0 = tcg_temp_new_i32(); 1251 TCGv_i32 t1 = tcg_temp_new_i32(); 1252 1253 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); 1254 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); 1255 tcg_gen_muls2_i32(t0, t1, t0, t1); 1256 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1); 1257 tcg_temp_free_i32(t0); 1258 tcg_temp_free_i32(t1); 1259 if (unlikely(Rc(ctx->opcode) != 0)) 1260 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1261 } 1262 1263 /* mulhwu mulhwu. */ 1264 static void gen_mulhwu(DisasContext *ctx) 1265 { 1266 TCGv_i32 t0 = tcg_temp_new_i32(); 1267 TCGv_i32 t1 = tcg_temp_new_i32(); 1268 1269 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); 1270 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); 1271 tcg_gen_mulu2_i32(t0, t1, t0, t1); 1272 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1); 1273 tcg_temp_free_i32(t0); 1274 tcg_temp_free_i32(t1); 1275 if (unlikely(Rc(ctx->opcode) != 0)) 1276 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1277 } 1278 1279 /* mullw mullw. */ 1280 static void gen_mullw(DisasContext *ctx) 1281 { 1282 #if defined(TARGET_PPC64) 1283 TCGv_i64 t0, t1; 1284 t0 = tcg_temp_new_i64(); 1285 t1 = tcg_temp_new_i64(); 1286 tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]); 1287 tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]); 1288 tcg_gen_mul_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); 1289 tcg_temp_free(t0); 1290 tcg_temp_free(t1); 1291 #else 1292 tcg_gen_mul_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1293 cpu_gpr[rB(ctx->opcode)]); 1294 #endif 1295 if (unlikely(Rc(ctx->opcode) != 0)) 1296 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1297 } 1298 1299 /* mullwo mullwo. */ 1300 static void gen_mullwo(DisasContext *ctx) 1301 { 1302 TCGv_i32 t0 = tcg_temp_new_i32(); 1303 TCGv_i32 t1 = tcg_temp_new_i32(); 1304 1305 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); 1306 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); 1307 tcg_gen_muls2_i32(t0, t1, t0, t1); 1308 #if defined(TARGET_PPC64) 1309 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); 1310 #else 1311 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], t0); 1312 #endif 1313 1314 tcg_gen_sari_i32(t0, t0, 31); 1315 tcg_gen_setcond_i32(TCG_COND_NE, t0, t0, t1); 1316 tcg_gen_extu_i32_tl(cpu_ov, t0); 1317 if (is_isa300(ctx)) { 1318 tcg_gen_mov_tl(cpu_ov32, cpu_ov); 1319 } 1320 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 1321 1322 tcg_temp_free_i32(t0); 1323 tcg_temp_free_i32(t1); 1324 if (unlikely(Rc(ctx->opcode) != 0)) 1325 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1326 } 1327 1328 /* mulli */ 1329 static void gen_mulli(DisasContext *ctx) 1330 { 1331 tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1332 SIMM(ctx->opcode)); 1333 } 1334 1335 #if defined(TARGET_PPC64) 1336 /* mulhd mulhd. */ 1337 static void gen_mulhd(DisasContext *ctx) 1338 { 1339 TCGv lo = tcg_temp_new(); 1340 tcg_gen_muls2_tl(lo, cpu_gpr[rD(ctx->opcode)], 1341 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 1342 tcg_temp_free(lo); 1343 if (unlikely(Rc(ctx->opcode) != 0)) { 1344 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1345 } 1346 } 1347 1348 /* mulhdu mulhdu. */ 1349 static void gen_mulhdu(DisasContext *ctx) 1350 { 1351 TCGv lo = tcg_temp_new(); 1352 tcg_gen_mulu2_tl(lo, cpu_gpr[rD(ctx->opcode)], 1353 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 1354 tcg_temp_free(lo); 1355 if (unlikely(Rc(ctx->opcode) != 0)) { 1356 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1357 } 1358 } 1359 1360 /* mulld mulld. 
*/ 1361 static void gen_mulld(DisasContext *ctx) 1362 { 1363 tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1364 cpu_gpr[rB(ctx->opcode)]); 1365 if (unlikely(Rc(ctx->opcode) != 0)) 1366 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1367 } 1368 1369 /* mulldo mulldo. */ 1370 static void gen_mulldo(DisasContext *ctx) 1371 { 1372 TCGv_i64 t0 = tcg_temp_new_i64(); 1373 TCGv_i64 t1 = tcg_temp_new_i64(); 1374 1375 tcg_gen_muls2_i64(t0, t1, cpu_gpr[rA(ctx->opcode)], 1376 cpu_gpr[rB(ctx->opcode)]); 1377 tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], t0); 1378 1379 tcg_gen_sari_i64(t0, t0, 63); 1380 tcg_gen_setcond_i64(TCG_COND_NE, cpu_ov, t0, t1); 1381 if (is_isa300(ctx)) { 1382 tcg_gen_mov_tl(cpu_ov32, cpu_ov); 1383 } 1384 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 1385 1386 tcg_temp_free_i64(t0); 1387 tcg_temp_free_i64(t1); 1388 1389 if (unlikely(Rc(ctx->opcode) != 0)) { 1390 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1391 } 1392 } 1393 #endif 1394 1395 /* Common subf function */ 1396 static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1, 1397 TCGv arg2, bool add_ca, bool compute_ca, 1398 bool compute_ov, bool compute_rc0) 1399 { 1400 TCGv t0 = ret; 1401 1402 if (compute_ca || compute_ov) { 1403 t0 = tcg_temp_new(); 1404 } 1405 1406 if (compute_ca) { 1407 /* dest = ~arg1 + arg2 [+ ca]. */ 1408 if (NARROW_MODE(ctx)) { 1409 /* Caution: a non-obvious corner case of the spec is that we 1410 must produce the *entire* 64-bit addition, but produce the 1411 carry into bit 32. */ 1412 TCGv inv1 = tcg_temp_new(); 1413 TCGv t1 = tcg_temp_new(); 1414 tcg_gen_not_tl(inv1, arg1); 1415 if (add_ca) { 1416 tcg_gen_add_tl(t0, arg2, cpu_ca); 1417 } else { 1418 tcg_gen_addi_tl(t0, arg2, 1); 1419 } 1420 tcg_gen_xor_tl(t1, arg2, inv1); /* add without carry */ 1421 tcg_gen_add_tl(t0, t0, inv1); 1422 tcg_temp_free(inv1); 1423 tcg_gen_xor_tl(cpu_ca, t0, t1); /* bits changes w/ carry */ 1424 tcg_temp_free(t1); 1425 tcg_gen_extract_tl(cpu_ca, cpu_ca, 32, 1); 1426 if (is_isa300(ctx)) { 1427 tcg_gen_mov_tl(cpu_ca32, cpu_ca); 1428 } 1429 } else if (add_ca) { 1430 TCGv zero, inv1 = tcg_temp_new(); 1431 tcg_gen_not_tl(inv1, arg1); 1432 zero = tcg_const_tl(0); 1433 tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero); 1434 tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero); 1435 gen_op_arith_compute_ca32(ctx, t0, inv1, arg2, 0); 1436 tcg_temp_free(zero); 1437 tcg_temp_free(inv1); 1438 } else { 1439 tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1); 1440 tcg_gen_sub_tl(t0, arg2, arg1); 1441 gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, 1); 1442 } 1443 } else if (add_ca) { 1444 /* Since we're ignoring carry-out, we can simplify the 1445 standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1. 
*/ 1446 tcg_gen_sub_tl(t0, arg2, arg1); 1447 tcg_gen_add_tl(t0, t0, cpu_ca); 1448 tcg_gen_subi_tl(t0, t0, 1); 1449 } else { 1450 tcg_gen_sub_tl(t0, arg2, arg1); 1451 } 1452 1453 if (compute_ov) { 1454 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1); 1455 } 1456 if (unlikely(compute_rc0)) { 1457 gen_set_Rc0(ctx, t0); 1458 } 1459 1460 if (t0 != ret) { 1461 tcg_gen_mov_tl(ret, t0); 1462 tcg_temp_free(t0); 1463 } 1464 } 1465 /* Sub functions with Two operands functions */ 1466 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \ 1467 static void glue(gen_, name)(DisasContext *ctx) \ 1468 { \ 1469 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \ 1470 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 1471 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 1472 } 1473 /* Sub functions with one operand and one immediate */ 1474 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \ 1475 add_ca, compute_ca, compute_ov) \ 1476 static void glue(gen_, name)(DisasContext *ctx) \ 1477 { \ 1478 TCGv t0 = tcg_const_tl(const_val); \ 1479 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \ 1480 cpu_gpr[rA(ctx->opcode)], t0, \ 1481 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 1482 tcg_temp_free(t0); \ 1483 } 1484 /* subf subf. subfo subfo. */ 1485 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0) 1486 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1) 1487 /* subfc subfc. subfco subfco. */ 1488 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0) 1489 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1) 1490 /* subfe subfe. subfeo subfo. */ 1491 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0) 1492 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1) 1493 /* subfme subfme. subfmeo subfmeo. */ 1494 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0) 1495 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1) 1496 /* subfze subfze. subfzeo subfzeo.*/ 1497 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0) 1498 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1) 1499 1500 /* subfic */ 1501 static void gen_subfic(DisasContext *ctx) 1502 { 1503 TCGv c = tcg_const_tl(SIMM(ctx->opcode)); 1504 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1505 c, 0, 1, 0, 0); 1506 tcg_temp_free(c); 1507 } 1508 1509 /* neg neg. nego nego. */ 1510 static inline void gen_op_arith_neg(DisasContext *ctx, bool compute_ov) 1511 { 1512 TCGv zero = tcg_const_tl(0); 1513 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1514 zero, 0, 0, compute_ov, Rc(ctx->opcode)); 1515 tcg_temp_free(zero); 1516 } 1517 1518 static void gen_neg(DisasContext *ctx) 1519 { 1520 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 1521 if (unlikely(Rc(ctx->opcode))) { 1522 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1523 } 1524 } 1525 1526 static void gen_nego(DisasContext *ctx) 1527 { 1528 gen_op_arith_neg(ctx, 1); 1529 } 1530 1531 /*** Integer logical ***/ 1532 #define GEN_LOGICAL2(name, tcg_op, opc, type) \ 1533 static void glue(gen_, name)(DisasContext *ctx) \ 1534 { \ 1535 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], \ 1536 cpu_gpr[rB(ctx->opcode)]); \ 1537 if (unlikely(Rc(ctx->opcode) != 0)) \ 1538 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \ 1539 } 1540 1541 #define GEN_LOGICAL1(name, tcg_op, opc, type) \ 1542 static void glue(gen_, name)(DisasContext *ctx) \ 1543 { \ 1544 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); \ 1545 if (unlikely(Rc(ctx->opcode) != 0)) \ 1546 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \ 1547 } 1548 1549 /* and & and. 
*/ 1550 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER); 1551 /* andc & andc. */ 1552 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER); 1553 1554 /* andi. */ 1555 static void gen_andi_(DisasContext *ctx) 1556 { 1557 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode)); 1558 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 1559 } 1560 1561 /* andis. */ 1562 static void gen_andis_(DisasContext *ctx) 1563 { 1564 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode) << 16); 1565 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 1566 } 1567 1568 /* cntlzw */ 1569 static void gen_cntlzw(DisasContext *ctx) 1570 { 1571 TCGv_i32 t = tcg_temp_new_i32(); 1572 1573 tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]); 1574 tcg_gen_clzi_i32(t, t, 32); 1575 tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t); 1576 tcg_temp_free_i32(t); 1577 1578 if (unlikely(Rc(ctx->opcode) != 0)) 1579 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 1580 } 1581 1582 /* cnttzw */ 1583 static void gen_cnttzw(DisasContext *ctx) 1584 { 1585 TCGv_i32 t = tcg_temp_new_i32(); 1586 1587 tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]); 1588 tcg_gen_ctzi_i32(t, t, 32); 1589 tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t); 1590 tcg_temp_free_i32(t); 1591 1592 if (unlikely(Rc(ctx->opcode) != 0)) { 1593 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 1594 } 1595 } 1596 1597 /* eqv & eqv. */ 1598 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER); 1599 /* extsb & extsb. */ 1600 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER); 1601 /* extsh & extsh. */ 1602 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER); 1603 /* nand & nand. */ 1604 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER); 1605 /* nor & nor. */ 1606 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER); 1607 1608 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) 1609 static void gen_pause(DisasContext *ctx) 1610 { 1611 TCGv_i32 t0 = tcg_const_i32(0); 1612 tcg_gen_st_i32(t0, cpu_env, 1613 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted)); 1614 tcg_temp_free_i32(t0); 1615 1616 /* Stop translation, this gives other CPUs a chance to run */ 1617 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 1618 } 1619 #endif /* defined(TARGET_PPC64) */ 1620 1621 /* or & or. */ 1622 static void gen_or(DisasContext *ctx) 1623 { 1624 int rs, ra, rb; 1625 1626 rs = rS(ctx->opcode); 1627 ra = rA(ctx->opcode); 1628 rb = rB(ctx->opcode); 1629 /* Optimisation for mr. 
ri case */ 1630 if (rs != ra || rs != rb) { 1631 if (rs != rb) 1632 tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]); 1633 else 1634 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]); 1635 if (unlikely(Rc(ctx->opcode) != 0)) 1636 gen_set_Rc0(ctx, cpu_gpr[ra]); 1637 } else if (unlikely(Rc(ctx->opcode) != 0)) { 1638 gen_set_Rc0(ctx, cpu_gpr[rs]); 1639 #if defined(TARGET_PPC64) 1640 } else if (rs != 0) { /* 0 is nop */ 1641 int prio = 0; 1642 1643 switch (rs) { 1644 case 1: 1645 /* Set process priority to low */ 1646 prio = 2; 1647 break; 1648 case 6: 1649 /* Set process priority to medium-low */ 1650 prio = 3; 1651 break; 1652 case 2: 1653 /* Set process priority to normal */ 1654 prio = 4; 1655 break; 1656 #if !defined(CONFIG_USER_ONLY) 1657 case 31: 1658 if (!ctx->pr) { 1659 /* Set process priority to very low */ 1660 prio = 1; 1661 } 1662 break; 1663 case 5: 1664 if (!ctx->pr) { 1665 /* Set process priority to medium-hight */ 1666 prio = 5; 1667 } 1668 break; 1669 case 3: 1670 if (!ctx->pr) { 1671 /* Set process priority to high */ 1672 prio = 6; 1673 } 1674 break; 1675 case 7: 1676 if (ctx->hv && !ctx->pr) { 1677 /* Set process priority to very high */ 1678 prio = 7; 1679 } 1680 break; 1681 #endif 1682 default: 1683 break; 1684 } 1685 if (prio) { 1686 TCGv t0 = tcg_temp_new(); 1687 gen_load_spr(t0, SPR_PPR); 1688 tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL); 1689 tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50); 1690 gen_store_spr(SPR_PPR, t0); 1691 tcg_temp_free(t0); 1692 } 1693 #if !defined(CONFIG_USER_ONLY) 1694 /* Pause out of TCG otherwise spin loops with smt_low eat too much 1695 * CPU and the kernel hangs. This applies to all encodings other 1696 * than no-op, e.g., miso(rs=26), yield(27), mdoio(29), mdoom(30), 1697 * and all currently undefined. 1698 */ 1699 gen_pause(ctx); 1700 #endif 1701 #endif 1702 } 1703 } 1704 /* orc & orc. */ 1705 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER); 1706 1707 /* xor & xor. 
*/ 1708 static void gen_xor(DisasContext *ctx) 1709 { 1710 /* Optimisation for "set to zero" case */ 1711 if (rS(ctx->opcode) != rB(ctx->opcode)) 1712 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 1713 else 1714 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 1715 if (unlikely(Rc(ctx->opcode) != 0)) 1716 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 1717 } 1718 1719 /* ori */ 1720 static void gen_ori(DisasContext *ctx) 1721 { 1722 target_ulong uimm = UIMM(ctx->opcode); 1723 1724 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 1725 return; 1726 } 1727 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm); 1728 } 1729 1730 /* oris */ 1731 static void gen_oris(DisasContext *ctx) 1732 { 1733 target_ulong uimm = UIMM(ctx->opcode); 1734 1735 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 1736 /* NOP */ 1737 return; 1738 } 1739 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16); 1740 } 1741 1742 /* xori */ 1743 static void gen_xori(DisasContext *ctx) 1744 { 1745 target_ulong uimm = UIMM(ctx->opcode); 1746 1747 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 1748 /* NOP */ 1749 return; 1750 } 1751 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm); 1752 } 1753 1754 /* xoris */ 1755 static void gen_xoris(DisasContext *ctx) 1756 { 1757 target_ulong uimm = UIMM(ctx->opcode); 1758 1759 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 1760 /* NOP */ 1761 return; 1762 } 1763 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16); 1764 } 1765 1766 /* popcntb : PowerPC 2.03 specification */ 1767 static void gen_popcntb(DisasContext *ctx) 1768 { 1769 gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 1770 } 1771 1772 static void gen_popcntw(DisasContext *ctx) 1773 { 1774 #if defined(TARGET_PPC64) 1775 gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 1776 #else 1777 tcg_gen_ctpop_i32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 1778 #endif 1779 } 1780 1781 #if defined(TARGET_PPC64) 1782 /* popcntd: PowerPC 2.06 specification */ 1783 static void gen_popcntd(DisasContext *ctx) 1784 { 1785 tcg_gen_ctpop_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 1786 } 1787 #endif 1788 1789 /* prtyw: PowerPC 2.05 specification */ 1790 static void gen_prtyw(DisasContext *ctx) 1791 { 1792 TCGv ra = cpu_gpr[rA(ctx->opcode)]; 1793 TCGv rs = cpu_gpr[rS(ctx->opcode)]; 1794 TCGv t0 = tcg_temp_new(); 1795 tcg_gen_shri_tl(t0, rs, 16); 1796 tcg_gen_xor_tl(ra, rs, t0); 1797 tcg_gen_shri_tl(t0, ra, 8); 1798 tcg_gen_xor_tl(ra, ra, t0); 1799 tcg_gen_andi_tl(ra, ra, (target_ulong)0x100000001ULL); 1800 tcg_temp_free(t0); 1801 } 1802 1803 #if defined(TARGET_PPC64) 1804 /* prtyd: PowerPC 2.05 specification */ 1805 static void gen_prtyd(DisasContext *ctx) 1806 { 1807 TCGv ra = cpu_gpr[rA(ctx->opcode)]; 1808 TCGv rs = cpu_gpr[rS(ctx->opcode)]; 1809 TCGv t0 = tcg_temp_new(); 1810 tcg_gen_shri_tl(t0, rs, 32); 1811 tcg_gen_xor_tl(ra, rs, t0); 1812 tcg_gen_shri_tl(t0, ra, 16); 1813 tcg_gen_xor_tl(ra, ra, t0); 1814 tcg_gen_shri_tl(t0, ra, 8); 1815 tcg_gen_xor_tl(ra, ra, t0); 1816 tcg_gen_andi_tl(ra, ra, 1); 1817 tcg_temp_free(t0); 1818 } 1819 #endif 1820 1821 #if defined(TARGET_PPC64) 1822 /* bpermd */ 1823 static void gen_bpermd(DisasContext *ctx) 1824 { 1825 gen_helper_bpermd(cpu_gpr[rA(ctx->opcode)], 1826 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 1827 } 1828 #endif 1829 1830 #if defined(TARGET_PPC64) 1831 /* 
extsw & extsw. */ 1832 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B); 1833 1834 /* cntlzd */ 1835 static void gen_cntlzd(DisasContext *ctx) 1836 { 1837 tcg_gen_clzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64); 1838 if (unlikely(Rc(ctx->opcode) != 0)) 1839 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 1840 } 1841 1842 /* cnttzd */ 1843 static void gen_cnttzd(DisasContext *ctx) 1844 { 1845 tcg_gen_ctzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64); 1846 if (unlikely(Rc(ctx->opcode) != 0)) { 1847 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 1848 } 1849 } 1850 1851 /* darn */ 1852 static void gen_darn(DisasContext *ctx) 1853 { 1854 int l = L(ctx->opcode); 1855 1856 if (l == 0) { 1857 gen_helper_darn32(cpu_gpr[rD(ctx->opcode)]); 1858 } else if (l <= 2) { 1859 /* Return 64-bit random for both CRN and RRN */ 1860 gen_helper_darn64(cpu_gpr[rD(ctx->opcode)]); 1861 } else { 1862 tcg_gen_movi_i64(cpu_gpr[rD(ctx->opcode)], -1); 1863 } 1864 } 1865 #endif 1866 1867 /*** Integer rotate ***/ 1868 1869 /* rlwimi & rlwimi. */ 1870 static void gen_rlwimi(DisasContext *ctx) 1871 { 1872 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 1873 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 1874 uint32_t sh = SH(ctx->opcode); 1875 uint32_t mb = MB(ctx->opcode); 1876 uint32_t me = ME(ctx->opcode); 1877 1878 if (sh == (31-me) && mb <= me) { 1879 tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1); 1880 } else { 1881 target_ulong mask; 1882 TCGv t1; 1883 1884 #if defined(TARGET_PPC64) 1885 mb += 32; 1886 me += 32; 1887 #endif 1888 mask = MASK(mb, me); 1889 1890 t1 = tcg_temp_new(); 1891 if (mask <= 0xffffffffu) { 1892 TCGv_i32 t0 = tcg_temp_new_i32(); 1893 tcg_gen_trunc_tl_i32(t0, t_rs); 1894 tcg_gen_rotli_i32(t0, t0, sh); 1895 tcg_gen_extu_i32_tl(t1, t0); 1896 tcg_temp_free_i32(t0); 1897 } else { 1898 #if defined(TARGET_PPC64) 1899 tcg_gen_deposit_i64(t1, t_rs, t_rs, 32, 32); 1900 tcg_gen_rotli_i64(t1, t1, sh); 1901 #else 1902 g_assert_not_reached(); 1903 #endif 1904 } 1905 1906 tcg_gen_andi_tl(t1, t1, mask); 1907 tcg_gen_andi_tl(t_ra, t_ra, ~mask); 1908 tcg_gen_or_tl(t_ra, t_ra, t1); 1909 tcg_temp_free(t1); 1910 } 1911 if (unlikely(Rc(ctx->opcode) != 0)) { 1912 gen_set_Rc0(ctx, t_ra); 1913 } 1914 } 1915 1916 /* rlwinm & rlwinm. 
*/ 1917 static void gen_rlwinm(DisasContext *ctx) 1918 { 1919 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 1920 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 1921 int sh = SH(ctx->opcode); 1922 int mb = MB(ctx->opcode); 1923 int me = ME(ctx->opcode); 1924 int len = me - mb + 1; 1925 int rsh = (32 - sh) & 31; 1926 1927 if (sh != 0 && len > 0 && me == (31 - sh)) { 1928 tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len); 1929 } else if (me == 31 && rsh + len <= 32) { 1930 tcg_gen_extract_tl(t_ra, t_rs, rsh, len); 1931 } else { 1932 target_ulong mask; 1933 #if defined(TARGET_PPC64) 1934 mb += 32; 1935 me += 32; 1936 #endif 1937 mask = MASK(mb, me); 1938 if (sh == 0) { 1939 tcg_gen_andi_tl(t_ra, t_rs, mask); 1940 } else if (mask <= 0xffffffffu) { 1941 TCGv_i32 t0 = tcg_temp_new_i32(); 1942 tcg_gen_trunc_tl_i32(t0, t_rs); 1943 tcg_gen_rotli_i32(t0, t0, sh); 1944 tcg_gen_andi_i32(t0, t0, mask); 1945 tcg_gen_extu_i32_tl(t_ra, t0); 1946 tcg_temp_free_i32(t0); 1947 } else { 1948 #if defined(TARGET_PPC64) 1949 tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32); 1950 tcg_gen_rotli_i64(t_ra, t_ra, sh); 1951 tcg_gen_andi_i64(t_ra, t_ra, mask); 1952 #else 1953 g_assert_not_reached(); 1954 #endif 1955 } 1956 } 1957 if (unlikely(Rc(ctx->opcode) != 0)) { 1958 gen_set_Rc0(ctx, t_ra); 1959 } 1960 } 1961 1962 /* rlwnm & rlwnm. */ 1963 static void gen_rlwnm(DisasContext *ctx) 1964 { 1965 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 1966 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 1967 TCGv t_rb = cpu_gpr[rB(ctx->opcode)]; 1968 uint32_t mb = MB(ctx->opcode); 1969 uint32_t me = ME(ctx->opcode); 1970 target_ulong mask; 1971 1972 #if defined(TARGET_PPC64) 1973 mb += 32; 1974 me += 32; 1975 #endif 1976 mask = MASK(mb, me); 1977 1978 if (mask <= 0xffffffffu) { 1979 TCGv_i32 t0 = tcg_temp_new_i32(); 1980 TCGv_i32 t1 = tcg_temp_new_i32(); 1981 tcg_gen_trunc_tl_i32(t0, t_rb); 1982 tcg_gen_trunc_tl_i32(t1, t_rs); 1983 tcg_gen_andi_i32(t0, t0, 0x1f); 1984 tcg_gen_rotl_i32(t1, t1, t0); 1985 tcg_gen_extu_i32_tl(t_ra, t1); 1986 tcg_temp_free_i32(t0); 1987 tcg_temp_free_i32(t1); 1988 } else { 1989 #if defined(TARGET_PPC64) 1990 TCGv_i64 t0 = tcg_temp_new_i64(); 1991 tcg_gen_andi_i64(t0, t_rb, 0x1f); 1992 tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32); 1993 tcg_gen_rotl_i64(t_ra, t_ra, t0); 1994 tcg_temp_free_i64(t0); 1995 #else 1996 g_assert_not_reached(); 1997 #endif 1998 } 1999 2000 tcg_gen_andi_tl(t_ra, t_ra, mask); 2001 2002 if (unlikely(Rc(ctx->opcode) != 0)) { 2003 gen_set_Rc0(ctx, t_ra); 2004 } 2005 } 2006 2007 #if defined(TARGET_PPC64) 2008 #define GEN_PPC64_R2(name, opc1, opc2) \ 2009 static void glue(gen_, name##0)(DisasContext *ctx) \ 2010 { \ 2011 gen_##name(ctx, 0); \ 2012 } \ 2013 \ 2014 static void glue(gen_, name##1)(DisasContext *ctx) \ 2015 { \ 2016 gen_##name(ctx, 1); \ 2017 } 2018 #define GEN_PPC64_R4(name, opc1, opc2) \ 2019 static void glue(gen_, name##0)(DisasContext *ctx) \ 2020 { \ 2021 gen_##name(ctx, 0, 0); \ 2022 } \ 2023 \ 2024 static void glue(gen_, name##1)(DisasContext *ctx) \ 2025 { \ 2026 gen_##name(ctx, 0, 1); \ 2027 } \ 2028 \ 2029 static void glue(gen_, name##2)(DisasContext *ctx) \ 2030 { \ 2031 gen_##name(ctx, 1, 0); \ 2032 } \ 2033 \ 2034 static void glue(gen_, name##3)(DisasContext *ctx) \ 2035 { \ 2036 gen_##name(ctx, 1, 1); \ 2037 } 2038 2039 static void gen_rldinm(DisasContext *ctx, int mb, int me, int sh) 2040 { 2041 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2042 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2043 int len = me - mb + 1; 2044 int rsh = (64 - sh) & 63; 2045 2046 if (sh != 0 && len > 0 && me == (63 - sh)) { 2047 
tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len); 2048 } else if (me == 63 && rsh + len <= 64) { 2049 tcg_gen_extract_tl(t_ra, t_rs, rsh, len); 2050 } else { 2051 tcg_gen_rotli_tl(t_ra, t_rs, sh); 2052 tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me)); 2053 } 2054 if (unlikely(Rc(ctx->opcode) != 0)) { 2055 gen_set_Rc0(ctx, t_ra); 2056 } 2057 } 2058 2059 /* rldicl - rldicl. */ 2060 static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn) 2061 { 2062 uint32_t sh, mb; 2063 2064 sh = SH(ctx->opcode) | (shn << 5); 2065 mb = MB(ctx->opcode) | (mbn << 5); 2066 gen_rldinm(ctx, mb, 63, sh); 2067 } 2068 GEN_PPC64_R4(rldicl, 0x1E, 0x00); 2069 2070 /* rldicr - rldicr. */ 2071 static inline void gen_rldicr(DisasContext *ctx, int men, int shn) 2072 { 2073 uint32_t sh, me; 2074 2075 sh = SH(ctx->opcode) | (shn << 5); 2076 me = MB(ctx->opcode) | (men << 5); 2077 gen_rldinm(ctx, 0, me, sh); 2078 } 2079 GEN_PPC64_R4(rldicr, 0x1E, 0x02); 2080 2081 /* rldic - rldic. */ 2082 static inline void gen_rldic(DisasContext *ctx, int mbn, int shn) 2083 { 2084 uint32_t sh, mb; 2085 2086 sh = SH(ctx->opcode) | (shn << 5); 2087 mb = MB(ctx->opcode) | (mbn << 5); 2088 gen_rldinm(ctx, mb, 63 - sh, sh); 2089 } 2090 GEN_PPC64_R4(rldic, 0x1E, 0x04); 2091 2092 static void gen_rldnm(DisasContext *ctx, int mb, int me) 2093 { 2094 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2095 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2096 TCGv t_rb = cpu_gpr[rB(ctx->opcode)]; 2097 TCGv t0; 2098 2099 t0 = tcg_temp_new(); 2100 tcg_gen_andi_tl(t0, t_rb, 0x3f); 2101 tcg_gen_rotl_tl(t_ra, t_rs, t0); 2102 tcg_temp_free(t0); 2103 2104 tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me)); 2105 if (unlikely(Rc(ctx->opcode) != 0)) { 2106 gen_set_Rc0(ctx, t_ra); 2107 } 2108 } 2109 2110 /* rldcl - rldcl. */ 2111 static inline void gen_rldcl(DisasContext *ctx, int mbn) 2112 { 2113 uint32_t mb; 2114 2115 mb = MB(ctx->opcode) | (mbn << 5); 2116 gen_rldnm(ctx, mb, 63); 2117 } 2118 GEN_PPC64_R2(rldcl, 0x1E, 0x08); 2119 2120 /* rldcr - rldcr. */ 2121 static inline void gen_rldcr(DisasContext *ctx, int men) 2122 { 2123 uint32_t me; 2124 2125 me = MB(ctx->opcode) | (men << 5); 2126 gen_rldnm(ctx, 0, me); 2127 } 2128 GEN_PPC64_R2(rldcr, 0x1E, 0x09); 2129 2130 /* rldimi - rldimi. */ 2131 static void gen_rldimi(DisasContext *ctx, int mbn, int shn) 2132 { 2133 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2134 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2135 uint32_t sh = SH(ctx->opcode) | (shn << 5); 2136 uint32_t mb = MB(ctx->opcode) | (mbn << 5); 2137 uint32_t me = 63 - sh; 2138 2139 if (mb <= me) { 2140 tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1); 2141 } else { 2142 target_ulong mask = MASK(mb, me); 2143 TCGv t1 = tcg_temp_new(); 2144 2145 tcg_gen_rotli_tl(t1, t_rs, sh); 2146 tcg_gen_andi_tl(t1, t1, mask); 2147 tcg_gen_andi_tl(t_ra, t_ra, ~mask); 2148 tcg_gen_or_tl(t_ra, t_ra, t1); 2149 tcg_temp_free(t1); 2150 } 2151 if (unlikely(Rc(ctx->opcode) != 0)) { 2152 gen_set_Rc0(ctx, t_ra); 2153 } 2154 } 2155 GEN_PPC64_R4(rldimi, 0x1E, 0x06); 2156 #endif 2157 2158 /*** Integer shift ***/ 2159 2160 /* slw & slw. 
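 *
 * The code below uses a branchless trick: shifting rB left so that its
 * 0x20 bit lands in the sign bit and then shifting arithmetically right
 * yields an all-ones mask exactly when the 6-bit shift amount is 32 or
 * more, e.g. for rB = 0x23 the andc clears rS and the result is 0, as
 * the architecture requires.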
*/ 2161 static void gen_slw(DisasContext *ctx) 2162 { 2163 TCGv t0, t1; 2164 2165 t0 = tcg_temp_new(); 2166 /* AND rS with a mask that is 0 when rB >= 0x20 */ 2167 #if defined(TARGET_PPC64) 2168 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a); 2169 tcg_gen_sari_tl(t0, t0, 0x3f); 2170 #else 2171 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a); 2172 tcg_gen_sari_tl(t0, t0, 0x1f); 2173 #endif 2174 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 2175 t1 = tcg_temp_new(); 2176 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f); 2177 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 2178 tcg_temp_free(t1); 2179 tcg_temp_free(t0); 2180 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 2181 if (unlikely(Rc(ctx->opcode) != 0)) 2182 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2183 } 2184 2185 /* sraw & sraw. */ 2186 static void gen_sraw(DisasContext *ctx) 2187 { 2188 gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], cpu_env, 2189 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2190 if (unlikely(Rc(ctx->opcode) != 0)) 2191 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2192 } 2193 2194 /* srawi & srawi. */ 2195 static void gen_srawi(DisasContext *ctx) 2196 { 2197 int sh = SH(ctx->opcode); 2198 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 2199 TCGv src = cpu_gpr[rS(ctx->opcode)]; 2200 if (sh == 0) { 2201 tcg_gen_ext32s_tl(dst, src); 2202 tcg_gen_movi_tl(cpu_ca, 0); 2203 if (is_isa300(ctx)) { 2204 tcg_gen_movi_tl(cpu_ca32, 0); 2205 } 2206 } else { 2207 TCGv t0; 2208 tcg_gen_ext32s_tl(dst, src); 2209 tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1); 2210 t0 = tcg_temp_new(); 2211 tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1); 2212 tcg_gen_and_tl(cpu_ca, cpu_ca, t0); 2213 tcg_temp_free(t0); 2214 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0); 2215 if (is_isa300(ctx)) { 2216 tcg_gen_mov_tl(cpu_ca32, cpu_ca); 2217 } 2218 tcg_gen_sari_tl(dst, dst, sh); 2219 } 2220 if (unlikely(Rc(ctx->opcode) != 0)) { 2221 gen_set_Rc0(ctx, dst); 2222 } 2223 } 2224 2225 /* srw & srw. */ 2226 static void gen_srw(DisasContext *ctx) 2227 { 2228 TCGv t0, t1; 2229 2230 t0 = tcg_temp_new(); 2231 /* AND rS with a mask that is 0 when rB >= 0x20 */ 2232 #if defined(TARGET_PPC64) 2233 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a); 2234 tcg_gen_sari_tl(t0, t0, 0x3f); 2235 #else 2236 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a); 2237 tcg_gen_sari_tl(t0, t0, 0x1f); 2238 #endif 2239 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 2240 tcg_gen_ext32u_tl(t0, t0); 2241 t1 = tcg_temp_new(); 2242 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f); 2243 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 2244 tcg_temp_free(t1); 2245 tcg_temp_free(t0); 2246 if (unlikely(Rc(ctx->opcode) != 0)) 2247 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2248 } 2249 2250 #if defined(TARGET_PPC64) 2251 /* sld & sld. */ 2252 static void gen_sld(DisasContext *ctx) 2253 { 2254 TCGv t0, t1; 2255 2256 t0 = tcg_temp_new(); 2257 /* AND rS with a mask that is 0 when rB >= 0x40 */ 2258 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39); 2259 tcg_gen_sari_tl(t0, t0, 0x3f); 2260 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 2261 t1 = tcg_temp_new(); 2262 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f); 2263 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 2264 tcg_temp_free(t1); 2265 tcg_temp_free(t0); 2266 if (unlikely(Rc(ctx->opcode) != 0)) 2267 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2268 } 2269 2270 /* srad & srad. 
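 *
 * srad is left to a helper because XER.CA must be set iff the source is
 * negative and any 1 bits are shifted out, which here depends on a
 * run-time shift count; sradi below open-codes the same CA computation
 * for the immediate form.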
*/ 2271 static void gen_srad(DisasContext *ctx) 2272 { 2273 gen_helper_srad(cpu_gpr[rA(ctx->opcode)], cpu_env, 2274 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2275 if (unlikely(Rc(ctx->opcode) != 0)) 2276 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2277 } 2278 /* sradi & sradi. */ 2279 static inline void gen_sradi(DisasContext *ctx, int n) 2280 { 2281 int sh = SH(ctx->opcode) + (n << 5); 2282 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 2283 TCGv src = cpu_gpr[rS(ctx->opcode)]; 2284 if (sh == 0) { 2285 tcg_gen_mov_tl(dst, src); 2286 tcg_gen_movi_tl(cpu_ca, 0); 2287 if (is_isa300(ctx)) { 2288 tcg_gen_movi_tl(cpu_ca32, 0); 2289 } 2290 } else { 2291 TCGv t0; 2292 tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1); 2293 t0 = tcg_temp_new(); 2294 tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1); 2295 tcg_gen_and_tl(cpu_ca, cpu_ca, t0); 2296 tcg_temp_free(t0); 2297 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0); 2298 if (is_isa300(ctx)) { 2299 tcg_gen_mov_tl(cpu_ca32, cpu_ca); 2300 } 2301 tcg_gen_sari_tl(dst, src, sh); 2302 } 2303 if (unlikely(Rc(ctx->opcode) != 0)) { 2304 gen_set_Rc0(ctx, dst); 2305 } 2306 } 2307 2308 static void gen_sradi0(DisasContext *ctx) 2309 { 2310 gen_sradi(ctx, 0); 2311 } 2312 2313 static void gen_sradi1(DisasContext *ctx) 2314 { 2315 gen_sradi(ctx, 1); 2316 } 2317 2318 /* extswsli & extswsli. */ 2319 static inline void gen_extswsli(DisasContext *ctx, int n) 2320 { 2321 int sh = SH(ctx->opcode) + (n << 5); 2322 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 2323 TCGv src = cpu_gpr[rS(ctx->opcode)]; 2324 2325 tcg_gen_ext32s_tl(dst, src); 2326 tcg_gen_shli_tl(dst, dst, sh); 2327 if (unlikely(Rc(ctx->opcode) != 0)) { 2328 gen_set_Rc0(ctx, dst); 2329 } 2330 } 2331 2332 static void gen_extswsli0(DisasContext *ctx) 2333 { 2334 gen_extswsli(ctx, 0); 2335 } 2336 2337 static void gen_extswsli1(DisasContext *ctx) 2338 { 2339 gen_extswsli(ctx, 1); 2340 } 2341 2342 /* srd & srd. 
*/ 2343 static void gen_srd(DisasContext *ctx) 2344 { 2345 TCGv t0, t1; 2346 2347 t0 = tcg_temp_new(); 2348 /* AND rS with a mask that is 0 when rB >= 0x40 */ 2349 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39); 2350 tcg_gen_sari_tl(t0, t0, 0x3f); 2351 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 2352 t1 = tcg_temp_new(); 2353 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f); 2354 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 2355 tcg_temp_free(t1); 2356 tcg_temp_free(t0); 2357 if (unlikely(Rc(ctx->opcode) != 0)) 2358 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2359 } 2360 #endif 2361 2362 /*** Addressing modes ***/ 2363 /* Register indirect with immediate index : EA = (rA|0) + SIMM */ 2364 static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA, 2365 target_long maskl) 2366 { 2367 target_long simm = SIMM(ctx->opcode); 2368 2369 simm &= ~maskl; 2370 if (rA(ctx->opcode) == 0) { 2371 if (NARROW_MODE(ctx)) { 2372 simm = (uint32_t)simm; 2373 } 2374 tcg_gen_movi_tl(EA, simm); 2375 } else if (likely(simm != 0)) { 2376 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm); 2377 if (NARROW_MODE(ctx)) { 2378 tcg_gen_ext32u_tl(EA, EA); 2379 } 2380 } else { 2381 if (NARROW_MODE(ctx)) { 2382 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]); 2383 } else { 2384 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]); 2385 } 2386 } 2387 } 2388 2389 static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA) 2390 { 2391 if (rA(ctx->opcode) == 0) { 2392 if (NARROW_MODE(ctx)) { 2393 tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]); 2394 } else { 2395 tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]); 2396 } 2397 } else { 2398 tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2399 if (NARROW_MODE(ctx)) { 2400 tcg_gen_ext32u_tl(EA, EA); 2401 } 2402 } 2403 } 2404 2405 static inline void gen_addr_register(DisasContext *ctx, TCGv EA) 2406 { 2407 if (rA(ctx->opcode) == 0) { 2408 tcg_gen_movi_tl(EA, 0); 2409 } else if (NARROW_MODE(ctx)) { 2410 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]); 2411 } else { 2412 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]); 2413 } 2414 } 2415 2416 static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1, 2417 target_long val) 2418 { 2419 tcg_gen_addi_tl(ret, arg1, val); 2420 if (NARROW_MODE(ctx)) { 2421 tcg_gen_ext32u_tl(ret, ret); 2422 } 2423 } 2424 2425 static inline void gen_align_no_le(DisasContext *ctx) 2426 { 2427 gen_exception_err(ctx, POWERPC_EXCP_ALIGN, 2428 (ctx->opcode & 0x03FF0000) | POWERPC_EXCP_ALIGN_LE); 2429 } 2430 2431 /*** Integer load ***/ 2432 #define DEF_MEMOP(op) ((op) | ctx->default_tcg_memop_mask) 2433 #define BSWAP_MEMOP(op) ((op) | (ctx->default_tcg_memop_mask ^ MO_BSWAP)) 2434 2435 #define GEN_QEMU_LOAD_TL(ldop, op) \ 2436 static void glue(gen_qemu_, ldop)(DisasContext *ctx, \ 2437 TCGv val, \ 2438 TCGv addr) \ 2439 { \ 2440 tcg_gen_qemu_ld_tl(val, addr, ctx->mem_idx, op); \ 2441 } 2442 2443 GEN_QEMU_LOAD_TL(ld8u, DEF_MEMOP(MO_UB)) 2444 GEN_QEMU_LOAD_TL(ld16u, DEF_MEMOP(MO_UW)) 2445 GEN_QEMU_LOAD_TL(ld16s, DEF_MEMOP(MO_SW)) 2446 GEN_QEMU_LOAD_TL(ld32u, DEF_MEMOP(MO_UL)) 2447 GEN_QEMU_LOAD_TL(ld32s, DEF_MEMOP(MO_SL)) 2448 2449 GEN_QEMU_LOAD_TL(ld16ur, BSWAP_MEMOP(MO_UW)) 2450 GEN_QEMU_LOAD_TL(ld32ur, BSWAP_MEMOP(MO_UL)) 2451 2452 #define GEN_QEMU_LOAD_64(ldop, op) \ 2453 static void glue(gen_qemu_, glue(ldop, _i64))(DisasContext *ctx, \ 2454 TCGv_i64 val, \ 2455 TCGv addr) \ 2456 { \ 2457 tcg_gen_qemu_ld_i64(val, addr, ctx->mem_idx, op); \ 2458 } 2459 2460 GEN_QEMU_LOAD_64(ld8u, DEF_MEMOP(MO_UB)) 2461 
GEN_QEMU_LOAD_64(ld16u, DEF_MEMOP(MO_UW)) 2462 GEN_QEMU_LOAD_64(ld32u, DEF_MEMOP(MO_UL)) 2463 GEN_QEMU_LOAD_64(ld32s, DEF_MEMOP(MO_SL)) 2464 GEN_QEMU_LOAD_64(ld64, DEF_MEMOP(MO_Q)) 2465 2466 #if defined(TARGET_PPC64) 2467 GEN_QEMU_LOAD_64(ld64ur, BSWAP_MEMOP(MO_Q)) 2468 #endif 2469 2470 #define GEN_QEMU_STORE_TL(stop, op) \ 2471 static void glue(gen_qemu_, stop)(DisasContext *ctx, \ 2472 TCGv val, \ 2473 TCGv addr) \ 2474 { \ 2475 tcg_gen_qemu_st_tl(val, addr, ctx->mem_idx, op); \ 2476 } 2477 2478 GEN_QEMU_STORE_TL(st8, DEF_MEMOP(MO_UB)) 2479 GEN_QEMU_STORE_TL(st16, DEF_MEMOP(MO_UW)) 2480 GEN_QEMU_STORE_TL(st32, DEF_MEMOP(MO_UL)) 2481 2482 GEN_QEMU_STORE_TL(st16r, BSWAP_MEMOP(MO_UW)) 2483 GEN_QEMU_STORE_TL(st32r, BSWAP_MEMOP(MO_UL)) 2484 2485 #define GEN_QEMU_STORE_64(stop, op) \ 2486 static void glue(gen_qemu_, glue(stop, _i64))(DisasContext *ctx, \ 2487 TCGv_i64 val, \ 2488 TCGv addr) \ 2489 { \ 2490 tcg_gen_qemu_st_i64(val, addr, ctx->mem_idx, op); \ 2491 } 2492 2493 GEN_QEMU_STORE_64(st8, DEF_MEMOP(MO_UB)) 2494 GEN_QEMU_STORE_64(st16, DEF_MEMOP(MO_UW)) 2495 GEN_QEMU_STORE_64(st32, DEF_MEMOP(MO_UL)) 2496 GEN_QEMU_STORE_64(st64, DEF_MEMOP(MO_Q)) 2497 2498 #if defined(TARGET_PPC64) 2499 GEN_QEMU_STORE_64(st64r, BSWAP_MEMOP(MO_Q)) 2500 #endif 2501 2502 #define GEN_LD(name, ldop, opc, type) \ 2503 static void glue(gen_, name)(DisasContext *ctx) \ 2504 { \ 2505 TCGv EA; \ 2506 gen_set_access_type(ctx, ACCESS_INT); \ 2507 EA = tcg_temp_new(); \ 2508 gen_addr_imm_index(ctx, EA, 0); \ 2509 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 2510 tcg_temp_free(EA); \ 2511 } 2512 2513 #define GEN_LDU(name, ldop, opc, type) \ 2514 static void glue(gen_, name##u)(DisasContext *ctx) \ 2515 { \ 2516 TCGv EA; \ 2517 if (unlikely(rA(ctx->opcode) == 0 || \ 2518 rA(ctx->opcode) == rD(ctx->opcode))) { \ 2519 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 2520 return; \ 2521 } \ 2522 gen_set_access_type(ctx, ACCESS_INT); \ 2523 EA = tcg_temp_new(); \ 2524 if (type == PPC_64B) \ 2525 gen_addr_imm_index(ctx, EA, 0x03); \ 2526 else \ 2527 gen_addr_imm_index(ctx, EA, 0); \ 2528 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 2529 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 2530 tcg_temp_free(EA); \ 2531 } 2532 2533 #define GEN_LDUX(name, ldop, opc2, opc3, type) \ 2534 static void glue(gen_, name##ux)(DisasContext *ctx) \ 2535 { \ 2536 TCGv EA; \ 2537 if (unlikely(rA(ctx->opcode) == 0 || \ 2538 rA(ctx->opcode) == rD(ctx->opcode))) { \ 2539 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 2540 return; \ 2541 } \ 2542 gen_set_access_type(ctx, ACCESS_INT); \ 2543 EA = tcg_temp_new(); \ 2544 gen_addr_reg_index(ctx, EA); \ 2545 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 2546 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 2547 tcg_temp_free(EA); \ 2548 } 2549 2550 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \ 2551 static void glue(gen_, name##x)(DisasContext *ctx) \ 2552 { \ 2553 TCGv EA; \ 2554 chk; \ 2555 gen_set_access_type(ctx, ACCESS_INT); \ 2556 EA = tcg_temp_new(); \ 2557 gen_addr_reg_index(ctx, EA); \ 2558 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 2559 tcg_temp_free(EA); \ 2560 } 2561 2562 #define GEN_LDX(name, ldop, opc2, opc3, type) \ 2563 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_NONE) 2564 2565 #define GEN_LDX_HVRM(name, ldop, opc2, opc3, type) \ 2566 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_HVRM) 2567 2568 #define GEN_LDS(name, ldop, op, type) \ 2569 GEN_LD(name, ldop, op | 0x20, type); \ 2570 GEN_LDU(name, ldop, op 
| 0x21, type); \ 2571 GEN_LDUX(name, ldop, 0x17, op | 0x01, type); \ 2572 GEN_LDX(name, ldop, 0x17, op | 0x00, type) 2573 2574 /* lbz lbzu lbzux lbzx */ 2575 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER); 2576 /* lha lhau lhaux lhax */ 2577 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER); 2578 /* lhz lhzu lhzux lhzx */ 2579 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER); 2580 /* lwz lwzu lwzux lwzx */ 2581 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER); 2582 2583 #define GEN_LDEPX(name, ldop, opc2, opc3) \ 2584 static void glue(gen_, name##epx)(DisasContext *ctx) \ 2585 { \ 2586 TCGv EA; \ 2587 CHK_SV; \ 2588 gen_set_access_type(ctx, ACCESS_INT); \ 2589 EA = tcg_temp_new(); \ 2590 gen_addr_reg_index(ctx, EA); \ 2591 tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_LOAD, ldop);\ 2592 tcg_temp_free(EA); \ 2593 } 2594 2595 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02) 2596 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08) 2597 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00) 2598 #if defined(TARGET_PPC64) 2599 GEN_LDEPX(ld, DEF_MEMOP(MO_Q), 0x1D, 0x00) 2600 #endif 2601 2602 #if defined(TARGET_PPC64) 2603 /* lwaux */ 2604 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B); 2605 /* lwax */ 2606 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B); 2607 /* ldux */ 2608 GEN_LDUX(ld, ld64_i64, 0x15, 0x01, PPC_64B); 2609 /* ldx */ 2610 GEN_LDX(ld, ld64_i64, 0x15, 0x00, PPC_64B); 2611 2612 /* CI load/store variants */ 2613 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST) 2614 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x15, PPC_CILDST) 2615 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST) 2616 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST) 2617 2618 static void gen_ld(DisasContext *ctx) 2619 { 2620 TCGv EA; 2621 if (Rc(ctx->opcode)) { 2622 if (unlikely(rA(ctx->opcode) == 0 || 2623 rA(ctx->opcode) == rD(ctx->opcode))) { 2624 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2625 return; 2626 } 2627 } 2628 gen_set_access_type(ctx, ACCESS_INT); 2629 EA = tcg_temp_new(); 2630 gen_addr_imm_index(ctx, EA, 0x03); 2631 if (ctx->opcode & 0x02) { 2632 /* lwa (lwau is undefined) */ 2633 gen_qemu_ld32s(ctx, cpu_gpr[rD(ctx->opcode)], EA); 2634 } else { 2635 /* ld - ldu */ 2636 gen_qemu_ld64_i64(ctx, cpu_gpr[rD(ctx->opcode)], EA); 2637 } 2638 if (Rc(ctx->opcode)) 2639 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); 2640 tcg_temp_free(EA); 2641 } 2642 2643 /* lq */ 2644 static void gen_lq(DisasContext *ctx) 2645 { 2646 int ra, rd; 2647 TCGv EA, hi, lo; 2648 2649 /* lq is a legal user mode instruction starting in ISA 2.07 */ 2650 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2651 bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2652 2653 if (!legal_in_user_mode && ctx->pr) { 2654 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); 2655 return; 2656 } 2657 2658 if (!le_is_supported && ctx->le_mode) { 2659 gen_align_no_le(ctx); 2660 return; 2661 } 2662 ra = rA(ctx->opcode); 2663 rd = rD(ctx->opcode); 2664 if (unlikely((rd & 1) || rd == ra)) { 2665 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2666 return; 2667 } 2668 2669 gen_set_access_type(ctx, ACCESS_INT); 2670 EA = tcg_temp_new(); 2671 gen_addr_imm_index(ctx, EA, 0x0F); 2672 2673 /* Note that the low part is always in RD+1, even in LE mode. 
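     * For example, "lq r4,0(r3)" always ends up with the most-significant
     * doubleword of the quadword in r4 and the least-significant one in
     * r5; only the order of the two 8-byte accesses below depends on the
     * current endianness.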
*/ 2674 lo = cpu_gpr[rd + 1]; 2675 hi = cpu_gpr[rd]; 2676 2677 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 2678 if (HAVE_ATOMIC128) { 2679 TCGv_i32 oi = tcg_temp_new_i32(); 2680 if (ctx->le_mode) { 2681 tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ, ctx->mem_idx)); 2682 gen_helper_lq_le_parallel(lo, cpu_env, EA, oi); 2683 } else { 2684 tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ, ctx->mem_idx)); 2685 gen_helper_lq_be_parallel(lo, cpu_env, EA, oi); 2686 } 2687 tcg_temp_free_i32(oi); 2688 tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh)); 2689 } else { 2690 /* Restart with exclusive lock. */ 2691 gen_helper_exit_atomic(cpu_env); 2692 ctx->base.is_jmp = DISAS_NORETURN; 2693 } 2694 } else if (ctx->le_mode) { 2695 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEQ); 2696 gen_addr_add(ctx, EA, EA, 8); 2697 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEQ); 2698 } else { 2699 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEQ); 2700 gen_addr_add(ctx, EA, EA, 8); 2701 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEQ); 2702 } 2703 tcg_temp_free(EA); 2704 } 2705 #endif 2706 2707 /*** Integer store ***/ 2708 #define GEN_ST(name, stop, opc, type) \ 2709 static void glue(gen_, name)(DisasContext *ctx) \ 2710 { \ 2711 TCGv EA; \ 2712 gen_set_access_type(ctx, ACCESS_INT); \ 2713 EA = tcg_temp_new(); \ 2714 gen_addr_imm_index(ctx, EA, 0); \ 2715 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2716 tcg_temp_free(EA); \ 2717 } 2718 2719 #define GEN_STU(name, stop, opc, type) \ 2720 static void glue(gen_, stop##u)(DisasContext *ctx) \ 2721 { \ 2722 TCGv EA; \ 2723 if (unlikely(rA(ctx->opcode) == 0)) { \ 2724 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 2725 return; \ 2726 } \ 2727 gen_set_access_type(ctx, ACCESS_INT); \ 2728 EA = tcg_temp_new(); \ 2729 if (type == PPC_64B) \ 2730 gen_addr_imm_index(ctx, EA, 0x03); \ 2731 else \ 2732 gen_addr_imm_index(ctx, EA, 0); \ 2733 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2734 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 2735 tcg_temp_free(EA); \ 2736 } 2737 2738 #define GEN_STUX(name, stop, opc2, opc3, type) \ 2739 static void glue(gen_, name##ux)(DisasContext *ctx) \ 2740 { \ 2741 TCGv EA; \ 2742 if (unlikely(rA(ctx->opcode) == 0)) { \ 2743 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 2744 return; \ 2745 } \ 2746 gen_set_access_type(ctx, ACCESS_INT); \ 2747 EA = tcg_temp_new(); \ 2748 gen_addr_reg_index(ctx, EA); \ 2749 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2750 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 2751 tcg_temp_free(EA); \ 2752 } 2753 2754 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \ 2755 static void glue(gen_, name##x)(DisasContext *ctx) \ 2756 { \ 2757 TCGv EA; \ 2758 chk; \ 2759 gen_set_access_type(ctx, ACCESS_INT); \ 2760 EA = tcg_temp_new(); \ 2761 gen_addr_reg_index(ctx, EA); \ 2762 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2763 tcg_temp_free(EA); \ 2764 } 2765 #define GEN_STX(name, stop, opc2, opc3, type) \ 2766 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_NONE) 2767 2768 #define GEN_STX_HVRM(name, stop, opc2, opc3, type) \ 2769 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_HVRM) 2770 2771 #define GEN_STS(name, stop, op, type) \ 2772 GEN_ST(name, stop, op | 0x20, type); \ 2773 GEN_STU(name, stop, op | 0x21, type); \ 2774 GEN_STUX(name, stop, 0x17, op | 0x01, type); \ 2775 GEN_STX(name, stop, 0x17, op | 0x00, type) 2776 2777 /* stb stbu stbux stbx */ 2778 GEN_STS(stb, st8, 0x06, PPC_INTEGER); 2779 /* sth sthu sthux sthx */ 2780 GEN_STS(sth, st16, 
0x0C, PPC_INTEGER); 2781 /* stw stwu stwux stwx */ 2782 GEN_STS(stw, st32, 0x04, PPC_INTEGER); 2783 2784 #define GEN_STEPX(name, stop, opc2, opc3) \ 2785 static void glue(gen_, name##epx)(DisasContext *ctx) \ 2786 { \ 2787 TCGv EA; \ 2788 CHK_SV; \ 2789 gen_set_access_type(ctx, ACCESS_INT); \ 2790 EA = tcg_temp_new(); \ 2791 gen_addr_reg_index(ctx, EA); \ 2792 tcg_gen_qemu_st_tl( \ 2793 cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_STORE, stop); \ 2794 tcg_temp_free(EA); \ 2795 } 2796 2797 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06) 2798 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C) 2799 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04) 2800 #if defined(TARGET_PPC64) 2801 GEN_STEPX(std, DEF_MEMOP(MO_Q), 0x1d, 0x04) 2802 #endif 2803 2804 #if defined(TARGET_PPC64) 2805 GEN_STUX(std, st64_i64, 0x15, 0x05, PPC_64B); 2806 GEN_STX(std, st64_i64, 0x15, 0x04, PPC_64B); 2807 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST) 2808 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST) 2809 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST) 2810 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST) 2811 2812 static void gen_std(DisasContext *ctx) 2813 { 2814 int rs; 2815 TCGv EA; 2816 2817 rs = rS(ctx->opcode); 2818 if ((ctx->opcode & 0x3) == 0x2) { /* stq */ 2819 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2820 bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2821 TCGv hi, lo; 2822 2823 if (!(ctx->insns_flags & PPC_64BX)) { 2824 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2825 } 2826 2827 if (!legal_in_user_mode && ctx->pr) { 2828 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); 2829 return; 2830 } 2831 2832 if (!le_is_supported && ctx->le_mode) { 2833 gen_align_no_le(ctx); 2834 return; 2835 } 2836 2837 if (unlikely(rs & 1)) { 2838 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2839 return; 2840 } 2841 gen_set_access_type(ctx, ACCESS_INT); 2842 EA = tcg_temp_new(); 2843 gen_addr_imm_index(ctx, EA, 0x03); 2844 2845 /* Note that the low part is always in RS+1, even in LE mode. */ 2846 lo = cpu_gpr[rs + 1]; 2847 hi = cpu_gpr[rs]; 2848 2849 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 2850 if (HAVE_ATOMIC128) { 2851 TCGv_i32 oi = tcg_temp_new_i32(); 2852 if (ctx->le_mode) { 2853 tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ, ctx->mem_idx)); 2854 gen_helper_stq_le_parallel(cpu_env, EA, lo, hi, oi); 2855 } else { 2856 tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ, ctx->mem_idx)); 2857 gen_helper_stq_be_parallel(cpu_env, EA, lo, hi, oi); 2858 } 2859 tcg_temp_free_i32(oi); 2860 } else { 2861 /* Restart with exclusive lock. 
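                 * (HAVE_ATOMIC128 is false, so the host cannot perform
                 * the 16-byte store atomically; raising EXCP_ATOMIC makes
                 * the execution loop replay this one instruction with all
                 * other vCPUs stopped.)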
                 */
                gen_helper_exit_atomic(cpu_env);
                ctx->base.is_jmp = DISAS_NORETURN;
            }
        } else if (ctx->le_mode) {
            tcg_gen_qemu_st_i64(lo, EA, ctx->mem_idx, MO_LEQ);
            gen_addr_add(ctx, EA, EA, 8);
            tcg_gen_qemu_st_i64(hi, EA, ctx->mem_idx, MO_LEQ);
        } else {
            tcg_gen_qemu_st_i64(hi, EA, ctx->mem_idx, MO_BEQ);
            gen_addr_add(ctx, EA, EA, 8);
            tcg_gen_qemu_st_i64(lo, EA, ctx->mem_idx, MO_BEQ);
        }
        tcg_temp_free(EA);
    } else {
        /* std / stdu */
        if (Rc(ctx->opcode)) {
            if (unlikely(rA(ctx->opcode) == 0)) {
                gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
                return;
            }
        }
        gen_set_access_type(ctx, ACCESS_INT);
        EA = tcg_temp_new();
        gen_addr_imm_index(ctx, EA, 0x03);
        gen_qemu_st64_i64(ctx, cpu_gpr[rs], EA);
        if (Rc(ctx->opcode)) {
            tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
        }
        tcg_temp_free(EA);
    }
}
#endif
/*** Integer load and store with byte reverse ***/

/* lhbrx */
GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER);

/* lwbrx */
GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER);

#if defined(TARGET_PPC64)
/* ldbrx */
GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE);
/* stdbrx */
GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE);
#endif /* TARGET_PPC64 */

/* sthbrx */
GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER);
/* stwbrx */
GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER);

/*** Integer load and store multiple ***/

/* lmw */
static void gen_lmw(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1;

    if (ctx->le_mode) {
        gen_align_no_le(ctx);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    t0 = tcg_temp_new();
    t1 = tcg_const_i32(rD(ctx->opcode));
    gen_addr_imm_index(ctx, t0, 0);
    gen_helper_lmw(cpu_env, t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}

/* stmw */
static void gen_stmw(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1;

    if (ctx->le_mode) {
        gen_align_no_le(ctx);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    t0 = tcg_temp_new();
    t1 = tcg_const_i32(rS(ctx->opcode));
    gen_addr_imm_index(ctx, t0, 0);
    gen_helper_stmw(cpu_env, t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}

/*** Integer load and store strings ***/

/* lswi */
/*
 * The PowerPC32 specification says we must generate an exception if rA
 * is in the range of registers to be loaded.  On the other hand, IBM
 * says this is valid, but rA won't be loaded.  For now, I'll follow
 * the spec...
 */
static void gen_lswi(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1, t2;
    int nb = NB(ctx->opcode);
    int start = rD(ctx->opcode);
    int ra = rA(ctx->opcode);
    int nr;

    if (ctx->le_mode) {
        gen_align_no_le(ctx);
        return;
    }
    if (nb == 0) {
        nb = 32;
    }
    nr = DIV_ROUND_UP(nb, 4);
    if (unlikely(lsw_reg_in_range(start, nr, ra))) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    t0 = tcg_temp_new();
    gen_addr_register(ctx, t0);
    t1 = tcg_const_i32(nb);
    t2 = tcg_const_i32(start);
    gen_helper_lsw(cpu_env, t0, t1, t2);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
}

/* lswx */
static void gen_lswx(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1, t2, t3;

    if (ctx->le_mode) {
        gen_align_no_le(ctx);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    t1 = tcg_const_i32(rD(ctx->opcode));
    t2 = tcg_const_i32(rA(ctx->opcode));
    t3 = tcg_const_i32(rB(ctx->opcode));
    gen_helper_lswx(cpu_env, t0, t1, t2, t3);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
    tcg_temp_free_i32(t3);
}

/* stswi */
static void gen_stswi(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1, t2;
    int nb = NB(ctx->opcode);

    if (ctx->le_mode) {
        gen_align_no_le(ctx);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    t0 = tcg_temp_new();
    gen_addr_register(ctx, t0);
    if (nb == 0) {
        nb = 32;
    }
    t1 = tcg_const_i32(nb);
    t2 = tcg_const_i32(rS(ctx->opcode));
    gen_helper_stsw(cpu_env, t0, t1, t2);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
}

/* stswx */
static void gen_stswx(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1, t2;

    if (ctx->le_mode) {
        gen_align_no_le(ctx);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    t1 = tcg_temp_new_i32();
    tcg_gen_trunc_tl_i32(t1, cpu_xer);
    tcg_gen_andi_i32(t1, t1, 0x7F);
    t2 = tcg_const_i32(rS(ctx->opcode));
    gen_helper_stsw(cpu_env, t0, t1, t2);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
}

/*** Memory synchronisation ***/
/* eieio */
static void gen_eieio(DisasContext *ctx)
{
    TCGBar bar = TCG_MO_LD_ST;

    /*
     * POWER9 has an eieio instruction variant using bit 6 as a hint to
     * tell the CPU it is a store-forwarding barrier.
     */
    if (ctx->opcode & 0x2000000) {
        /*
         * ISA says that "Reserved fields in instructions are ignored
         * by the processor".  So bit 6 is ignored on non-POWER9 CPUs,
         * but as this is not an instruction form software should be
         * using, complain to the user.
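         * (Bit 6 is numbered from the most-significant end of the
         * 32-bit instruction word, so it corresponds to the 0x2000000
         * mask tested above.)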
3078 */ 3079 if (!(ctx->insns_flags2 & PPC2_ISA300)) { 3080 qemu_log_mask(LOG_GUEST_ERROR, "invalid eieio using bit 6 at @" 3081 TARGET_FMT_lx "\n", ctx->base.pc_next - 4); 3082 } else { 3083 bar = TCG_MO_ST_LD; 3084 } 3085 } 3086 3087 tcg_gen_mb(bar | TCG_BAR_SC); 3088 } 3089 3090 #if !defined(CONFIG_USER_ONLY) 3091 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) 3092 { 3093 TCGv_i32 t; 3094 TCGLabel *l; 3095 3096 if (!ctx->lazy_tlb_flush) { 3097 return; 3098 } 3099 l = gen_new_label(); 3100 t = tcg_temp_new_i32(); 3101 tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, tlb_need_flush)); 3102 tcg_gen_brcondi_i32(TCG_COND_EQ, t, 0, l); 3103 if (global) { 3104 gen_helper_check_tlb_flush_global(cpu_env); 3105 } else { 3106 gen_helper_check_tlb_flush_local(cpu_env); 3107 } 3108 gen_set_label(l); 3109 tcg_temp_free_i32(t); 3110 } 3111 #else 3112 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) { } 3113 #endif 3114 3115 /* isync */ 3116 static void gen_isync(DisasContext *ctx) 3117 { 3118 /* 3119 * We need to check for a pending TLB flush. This can only happen in 3120 * kernel mode however so check MSR_PR 3121 */ 3122 if (!ctx->pr) { 3123 gen_check_tlb_flush(ctx, false); 3124 } 3125 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC); 3126 gen_stop_exception(ctx); 3127 } 3128 3129 #define MEMOP_GET_SIZE(x) (1 << ((x) & MO_SIZE)) 3130 3131 static void gen_load_locked(DisasContext *ctx, TCGMemOp memop) 3132 { 3133 TCGv gpr = cpu_gpr[rD(ctx->opcode)]; 3134 TCGv t0 = tcg_temp_new(); 3135 3136 gen_set_access_type(ctx, ACCESS_RES); 3137 gen_addr_reg_index(ctx, t0); 3138 tcg_gen_qemu_ld_tl(gpr, t0, ctx->mem_idx, memop | MO_ALIGN); 3139 tcg_gen_mov_tl(cpu_reserve, t0); 3140 tcg_gen_mov_tl(cpu_reserve_val, gpr); 3141 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_LDAQ); 3142 tcg_temp_free(t0); 3143 } 3144 3145 #define LARX(name, memop) \ 3146 static void gen_##name(DisasContext *ctx) \ 3147 { \ 3148 gen_load_locked(ctx, memop); \ 3149 } 3150 3151 /* lwarx */ 3152 LARX(lbarx, DEF_MEMOP(MO_UB)) 3153 LARX(lharx, DEF_MEMOP(MO_UW)) 3154 LARX(lwarx, DEF_MEMOP(MO_UL)) 3155 3156 static void gen_fetch_inc_conditional(DisasContext *ctx, TCGMemOp memop, 3157 TCGv EA, TCGCond cond, int addend) 3158 { 3159 TCGv t = tcg_temp_new(); 3160 TCGv t2 = tcg_temp_new(); 3161 TCGv u = tcg_temp_new(); 3162 3163 tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop); 3164 tcg_gen_addi_tl(t2, EA, MEMOP_GET_SIZE(memop)); 3165 tcg_gen_qemu_ld_tl(t2, t2, ctx->mem_idx, memop); 3166 tcg_gen_addi_tl(u, t, addend); 3167 3168 /* E.g. for fetch and increment bounded... */ 3169 /* mem(EA,s) = (t != t2 ? u = t + 1 : t) */ 3170 tcg_gen_movcond_tl(cond, u, t, t2, u, t); 3171 tcg_gen_qemu_st_tl(u, EA, ctx->mem_idx, memop); 3172 3173 /* RT = (t != t2 ? 
t : u = 1<<(s*8-1)) */ 3174 tcg_gen_movi_tl(u, 1 << (MEMOP_GET_SIZE(memop) * 8 - 1)); 3175 tcg_gen_movcond_tl(cond, cpu_gpr[rD(ctx->opcode)], t, t2, t, u); 3176 3177 tcg_temp_free(t); 3178 tcg_temp_free(t2); 3179 tcg_temp_free(u); 3180 } 3181 3182 static void gen_ld_atomic(DisasContext *ctx, TCGMemOp memop) 3183 { 3184 uint32_t gpr_FC = FC(ctx->opcode); 3185 TCGv EA = tcg_temp_new(); 3186 int rt = rD(ctx->opcode); 3187 bool need_serial; 3188 TCGv src, dst; 3189 3190 gen_addr_register(ctx, EA); 3191 dst = cpu_gpr[rt]; 3192 src = cpu_gpr[(rt + 1) & 31]; 3193 3194 need_serial = false; 3195 memop |= MO_ALIGN; 3196 switch (gpr_FC) { 3197 case 0: /* Fetch and add */ 3198 tcg_gen_atomic_fetch_add_tl(dst, EA, src, ctx->mem_idx, memop); 3199 break; 3200 case 1: /* Fetch and xor */ 3201 tcg_gen_atomic_fetch_xor_tl(dst, EA, src, ctx->mem_idx, memop); 3202 break; 3203 case 2: /* Fetch and or */ 3204 tcg_gen_atomic_fetch_or_tl(dst, EA, src, ctx->mem_idx, memop); 3205 break; 3206 case 3: /* Fetch and 'and' */ 3207 tcg_gen_atomic_fetch_and_tl(dst, EA, src, ctx->mem_idx, memop); 3208 break; 3209 case 4: /* Fetch and max unsigned */ 3210 tcg_gen_atomic_fetch_umax_tl(dst, EA, src, ctx->mem_idx, memop); 3211 break; 3212 case 5: /* Fetch and max signed */ 3213 tcg_gen_atomic_fetch_smax_tl(dst, EA, src, ctx->mem_idx, memop); 3214 break; 3215 case 6: /* Fetch and min unsigned */ 3216 tcg_gen_atomic_fetch_umin_tl(dst, EA, src, ctx->mem_idx, memop); 3217 break; 3218 case 7: /* Fetch and min signed */ 3219 tcg_gen_atomic_fetch_smin_tl(dst, EA, src, ctx->mem_idx, memop); 3220 break; 3221 case 8: /* Swap */ 3222 tcg_gen_atomic_xchg_tl(dst, EA, src, ctx->mem_idx, memop); 3223 break; 3224 3225 case 16: /* Compare and swap not equal */ 3226 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3227 need_serial = true; 3228 } else { 3229 TCGv t0 = tcg_temp_new(); 3230 TCGv t1 = tcg_temp_new(); 3231 3232 tcg_gen_qemu_ld_tl(t0, EA, ctx->mem_idx, memop); 3233 if ((memop & MO_SIZE) == MO_64 || TARGET_LONG_BITS == 32) { 3234 tcg_gen_mov_tl(t1, src); 3235 } else { 3236 tcg_gen_ext32u_tl(t1, src); 3237 } 3238 tcg_gen_movcond_tl(TCG_COND_NE, t1, t0, t1, 3239 cpu_gpr[(rt + 2) & 31], t0); 3240 tcg_gen_qemu_st_tl(t1, EA, ctx->mem_idx, memop); 3241 tcg_gen_mov_tl(dst, t0); 3242 3243 tcg_temp_free(t0); 3244 tcg_temp_free(t1); 3245 } 3246 break; 3247 3248 case 24: /* Fetch and increment bounded */ 3249 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3250 need_serial = true; 3251 } else { 3252 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, 1); 3253 } 3254 break; 3255 case 25: /* Fetch and increment equal */ 3256 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3257 need_serial = true; 3258 } else { 3259 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_EQ, 1); 3260 } 3261 break; 3262 case 28: /* Fetch and decrement bounded */ 3263 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3264 need_serial = true; 3265 } else { 3266 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, -1); 3267 } 3268 break; 3269 3270 default: 3271 /* invoke data storage error handler */ 3272 gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL); 3273 } 3274 tcg_temp_free(EA); 3275 3276 if (need_serial) { 3277 /* Restart with exclusive lock. 
*/ 3278 gen_helper_exit_atomic(cpu_env); 3279 ctx->base.is_jmp = DISAS_NORETURN; 3280 } 3281 } 3282 3283 static void gen_lwat(DisasContext *ctx) 3284 { 3285 gen_ld_atomic(ctx, DEF_MEMOP(MO_UL)); 3286 } 3287 3288 #ifdef TARGET_PPC64 3289 static void gen_ldat(DisasContext *ctx) 3290 { 3291 gen_ld_atomic(ctx, DEF_MEMOP(MO_Q)); 3292 } 3293 #endif 3294 3295 static void gen_st_atomic(DisasContext *ctx, TCGMemOp memop) 3296 { 3297 uint32_t gpr_FC = FC(ctx->opcode); 3298 TCGv EA = tcg_temp_new(); 3299 TCGv src, discard; 3300 3301 gen_addr_register(ctx, EA); 3302 src = cpu_gpr[rD(ctx->opcode)]; 3303 discard = tcg_temp_new(); 3304 3305 memop |= MO_ALIGN; 3306 switch (gpr_FC) { 3307 case 0: /* add and Store */ 3308 tcg_gen_atomic_add_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3309 break; 3310 case 1: /* xor and Store */ 3311 tcg_gen_atomic_xor_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3312 break; 3313 case 2: /* Or and Store */ 3314 tcg_gen_atomic_or_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3315 break; 3316 case 3: /* 'and' and Store */ 3317 tcg_gen_atomic_and_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3318 break; 3319 case 4: /* Store max unsigned */ 3320 tcg_gen_atomic_umax_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3321 break; 3322 case 5: /* Store max signed */ 3323 tcg_gen_atomic_smax_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3324 break; 3325 case 6: /* Store min unsigned */ 3326 tcg_gen_atomic_umin_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3327 break; 3328 case 7: /* Store min signed */ 3329 tcg_gen_atomic_smin_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3330 break; 3331 case 24: /* Store twin */ 3332 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3333 /* Restart with exclusive lock. */ 3334 gen_helper_exit_atomic(cpu_env); 3335 ctx->base.is_jmp = DISAS_NORETURN; 3336 } else { 3337 TCGv t = tcg_temp_new(); 3338 TCGv t2 = tcg_temp_new(); 3339 TCGv s = tcg_temp_new(); 3340 TCGv s2 = tcg_temp_new(); 3341 TCGv ea_plus_s = tcg_temp_new(); 3342 3343 tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop); 3344 tcg_gen_addi_tl(ea_plus_s, EA, MEMOP_GET_SIZE(memop)); 3345 tcg_gen_qemu_ld_tl(t2, ea_plus_s, ctx->mem_idx, memop); 3346 tcg_gen_movcond_tl(TCG_COND_EQ, s, t, t2, src, t); 3347 tcg_gen_movcond_tl(TCG_COND_EQ, s2, t, t2, src, t2); 3348 tcg_gen_qemu_st_tl(s, EA, ctx->mem_idx, memop); 3349 tcg_gen_qemu_st_tl(s2, ea_plus_s, ctx->mem_idx, memop); 3350 3351 tcg_temp_free(ea_plus_s); 3352 tcg_temp_free(s2); 3353 tcg_temp_free(s); 3354 tcg_temp_free(t2); 3355 tcg_temp_free(t); 3356 } 3357 break; 3358 default: 3359 /* invoke data storage error handler */ 3360 gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL); 3361 } 3362 tcg_temp_free(discard); 3363 tcg_temp_free(EA); 3364 } 3365 3366 static void gen_stwat(DisasContext *ctx) 3367 { 3368 gen_st_atomic(ctx, DEF_MEMOP(MO_UL)); 3369 } 3370 3371 #ifdef TARGET_PPC64 3372 static void gen_stdat(DisasContext *ctx) 3373 { 3374 gen_st_atomic(ctx, DEF_MEMOP(MO_Q)); 3375 } 3376 #endif 3377 3378 static void gen_conditional_store(DisasContext *ctx, TCGMemOp memop) 3379 { 3380 TCGLabel *l1 = gen_new_label(); 3381 TCGLabel *l2 = gen_new_label(); 3382 TCGv t0 = tcg_temp_new(); 3383 int reg = rS(ctx->opcode); 3384 3385 gen_set_access_type(ctx, ACCESS_RES); 3386 gen_addr_reg_index(ctx, t0); 3387 tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1); 3388 tcg_temp_free(t0); 3389 3390 t0 = tcg_temp_new(); 3391 tcg_gen_atomic_cmpxchg_tl(t0, cpu_reserve, cpu_reserve_val, 3392 cpu_gpr[reg], ctx->mem_idx, 3393 DEF_MEMOP(memop) | 
MO_ALIGN); 3394 tcg_gen_setcond_tl(TCG_COND_EQ, t0, t0, cpu_reserve_val); 3395 tcg_gen_shli_tl(t0, t0, CRF_EQ_BIT); 3396 tcg_gen_or_tl(t0, t0, cpu_so); 3397 tcg_gen_trunc_tl_i32(cpu_crf[0], t0); 3398 tcg_temp_free(t0); 3399 tcg_gen_br(l2); 3400 3401 gen_set_label(l1); 3402 3403 /* Address mismatch implies failure. But we still need to provide the 3404 memory barrier semantics of the instruction. */ 3405 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_STRL); 3406 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 3407 3408 gen_set_label(l2); 3409 tcg_gen_movi_tl(cpu_reserve, -1); 3410 } 3411 3412 #define STCX(name, memop) \ 3413 static void gen_##name(DisasContext *ctx) \ 3414 { \ 3415 gen_conditional_store(ctx, memop); \ 3416 } 3417 3418 STCX(stbcx_, DEF_MEMOP(MO_UB)) 3419 STCX(sthcx_, DEF_MEMOP(MO_UW)) 3420 STCX(stwcx_, DEF_MEMOP(MO_UL)) 3421 3422 #if defined(TARGET_PPC64) 3423 /* ldarx */ 3424 LARX(ldarx, DEF_MEMOP(MO_Q)) 3425 /* stdcx. */ 3426 STCX(stdcx_, DEF_MEMOP(MO_Q)) 3427 3428 /* lqarx */ 3429 static void gen_lqarx(DisasContext *ctx) 3430 { 3431 int rd = rD(ctx->opcode); 3432 TCGv EA, hi, lo; 3433 3434 if (unlikely((rd & 1) || (rd == rA(ctx->opcode)) || 3435 (rd == rB(ctx->opcode)))) { 3436 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3437 return; 3438 } 3439 3440 gen_set_access_type(ctx, ACCESS_RES); 3441 EA = tcg_temp_new(); 3442 gen_addr_reg_index(ctx, EA); 3443 3444 /* Note that the low part is always in RD+1, even in LE mode. */ 3445 lo = cpu_gpr[rd + 1]; 3446 hi = cpu_gpr[rd]; 3447 3448 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3449 if (HAVE_ATOMIC128) { 3450 TCGv_i32 oi = tcg_temp_new_i32(); 3451 if (ctx->le_mode) { 3452 tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ | MO_ALIGN_16, 3453 ctx->mem_idx)); 3454 gen_helper_lq_le_parallel(lo, cpu_env, EA, oi); 3455 } else { 3456 tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ | MO_ALIGN_16, 3457 ctx->mem_idx)); 3458 gen_helper_lq_be_parallel(lo, cpu_env, EA, oi); 3459 } 3460 tcg_temp_free_i32(oi); 3461 tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh)); 3462 } else { 3463 /* Restart with exclusive lock. */ 3464 gen_helper_exit_atomic(cpu_env); 3465 ctx->base.is_jmp = DISAS_NORETURN; 3466 tcg_temp_free(EA); 3467 return; 3468 } 3469 } else if (ctx->le_mode) { 3470 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEQ | MO_ALIGN_16); 3471 tcg_gen_mov_tl(cpu_reserve, EA); 3472 gen_addr_add(ctx, EA, EA, 8); 3473 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEQ); 3474 } else { 3475 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEQ | MO_ALIGN_16); 3476 tcg_gen_mov_tl(cpu_reserve, EA); 3477 gen_addr_add(ctx, EA, EA, 8); 3478 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEQ); 3479 } 3480 tcg_temp_free(EA); 3481 3482 tcg_gen_st_tl(hi, cpu_env, offsetof(CPUPPCState, reserve_val)); 3483 tcg_gen_st_tl(lo, cpu_env, offsetof(CPUPPCState, reserve_val2)); 3484 } 3485 3486 /* stqcx. */ 3487 static void gen_stqcx_(DisasContext *ctx) 3488 { 3489 int rs = rS(ctx->opcode); 3490 TCGv EA, hi, lo; 3491 3492 if (unlikely(rs & 1)) { 3493 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3494 return; 3495 } 3496 3497 gen_set_access_type(ctx, ACCESS_RES); 3498 EA = tcg_temp_new(); 3499 gen_addr_reg_index(ctx, EA); 3500 3501 /* Note that the low part is always in RS+1, even in LE mode. 
     */
    lo = cpu_gpr[rs + 1];
    hi = cpu_gpr[rs];

    if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
        if (HAVE_CMPXCHG128) {
            TCGv_i32 oi = tcg_const_i32(DEF_MEMOP(MO_Q) | MO_ALIGN_16);
            if (ctx->le_mode) {
                gen_helper_stqcx_le_parallel(cpu_crf[0], cpu_env,
                                             EA, lo, hi, oi);
            } else {
                gen_helper_stqcx_be_parallel(cpu_crf[0], cpu_env,
                                             EA, lo, hi, oi);
            }
            tcg_temp_free_i32(oi);
        } else {
            /* Restart with exclusive lock. */
            gen_helper_exit_atomic(cpu_env);
            ctx->base.is_jmp = DISAS_NORETURN;
        }
        tcg_temp_free(EA);
    } else {
        TCGLabel *lab_fail = gen_new_label();
        TCGLabel *lab_over = gen_new_label();
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, lab_fail);
        tcg_temp_free(EA);

        gen_qemu_ld64_i64(ctx, t0, cpu_reserve);
        tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode
                                     ? offsetof(CPUPPCState, reserve_val2)
                                     : offsetof(CPUPPCState, reserve_val)));
        tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail);

        tcg_gen_addi_i64(t0, cpu_reserve, 8);
        gen_qemu_ld64_i64(ctx, t0, t0);
        tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode
                                     ? offsetof(CPUPPCState, reserve_val)
                                     : offsetof(CPUPPCState, reserve_val2)));
        tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail);

        /* Success */
        gen_qemu_st64_i64(ctx, ctx->le_mode ? lo : hi, cpu_reserve);
        tcg_gen_addi_i64(t0, cpu_reserve, 8);
        gen_qemu_st64_i64(ctx, ctx->le_mode ? hi : lo, t0);

        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
        tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ);
        tcg_gen_br(lab_over);

        gen_set_label(lab_fail);
        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);

        gen_set_label(lab_over);
        tcg_gen_movi_tl(cpu_reserve, -1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
#endif /* defined(TARGET_PPC64) */

/* sync */
static void gen_sync(DisasContext *ctx)
{
    uint32_t l = (ctx->opcode >> 21) & 3;

    /*
     * We may need to check for a pending TLB flush.
     *
     * We do this on ptesync (l == 2) on ppc64 and on any sync on ppc32.
     *
     * Additionally, this can only happen in kernel mode, so check
     * MSR_PR as well.
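     *
     * (The L field decoded above selects the variant: L=0 is a full
     * hwsync, L=1 is lwsync and L=2 is ptesync; only ptesync needs the
     * TLB flush check on 64-bit CPUs, while on 32-bit CPUs any sync
     * does.)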
3576 */ 3577 if (((l == 2) || !(ctx->insns_flags & PPC_64B)) && !ctx->pr) { 3578 gen_check_tlb_flush(ctx, true); 3579 } 3580 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC); 3581 } 3582 3583 /* wait */ 3584 static void gen_wait(DisasContext *ctx) 3585 { 3586 TCGv_i32 t0 = tcg_const_i32(1); 3587 tcg_gen_st_i32(t0, cpu_env, 3588 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted)); 3589 tcg_temp_free_i32(t0); 3590 /* Stop translation, as the CPU is supposed to sleep from now */ 3591 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 3592 } 3593 3594 #if defined(TARGET_PPC64) 3595 static void gen_doze(DisasContext *ctx) 3596 { 3597 #if defined(CONFIG_USER_ONLY) 3598 GEN_PRIV; 3599 #else 3600 TCGv_i32 t; 3601 3602 CHK_HV; 3603 t = tcg_const_i32(PPC_PM_DOZE); 3604 gen_helper_pminsn(cpu_env, t); 3605 tcg_temp_free_i32(t); 3606 gen_stop_exception(ctx); 3607 #endif /* defined(CONFIG_USER_ONLY) */ 3608 } 3609 3610 static void gen_nap(DisasContext *ctx) 3611 { 3612 #if defined(CONFIG_USER_ONLY) 3613 GEN_PRIV; 3614 #else 3615 TCGv_i32 t; 3616 3617 CHK_HV; 3618 t = tcg_const_i32(PPC_PM_NAP); 3619 gen_helper_pminsn(cpu_env, t); 3620 tcg_temp_free_i32(t); 3621 gen_stop_exception(ctx); 3622 #endif /* defined(CONFIG_USER_ONLY) */ 3623 } 3624 3625 static void gen_stop(DisasContext *ctx) 3626 { 3627 gen_nap(ctx); 3628 } 3629 3630 static void gen_sleep(DisasContext *ctx) 3631 { 3632 #if defined(CONFIG_USER_ONLY) 3633 GEN_PRIV; 3634 #else 3635 TCGv_i32 t; 3636 3637 CHK_HV; 3638 t = tcg_const_i32(PPC_PM_SLEEP); 3639 gen_helper_pminsn(cpu_env, t); 3640 tcg_temp_free_i32(t); 3641 gen_stop_exception(ctx); 3642 #endif /* defined(CONFIG_USER_ONLY) */ 3643 } 3644 3645 static void gen_rvwinkle(DisasContext *ctx) 3646 { 3647 #if defined(CONFIG_USER_ONLY) 3648 GEN_PRIV; 3649 #else 3650 TCGv_i32 t; 3651 3652 CHK_HV; 3653 t = tcg_const_i32(PPC_PM_RVWINKLE); 3654 gen_helper_pminsn(cpu_env, t); 3655 tcg_temp_free_i32(t); 3656 gen_stop_exception(ctx); 3657 #endif /* defined(CONFIG_USER_ONLY) */ 3658 } 3659 #endif /* #if defined(TARGET_PPC64) */ 3660 3661 static inline void gen_update_cfar(DisasContext *ctx, target_ulong nip) 3662 { 3663 #if defined(TARGET_PPC64) 3664 if (ctx->has_cfar) 3665 tcg_gen_movi_tl(cpu_cfar, nip); 3666 #endif 3667 } 3668 3669 static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest) 3670 { 3671 if (unlikely(ctx->singlestep_enabled)) { 3672 return false; 3673 } 3674 3675 #ifndef CONFIG_USER_ONLY 3676 return (ctx->base.tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK); 3677 #else 3678 return true; 3679 #endif 3680 } 3681 3682 static void gen_lookup_and_goto_ptr(DisasContext *ctx) 3683 { 3684 int sse = ctx->singlestep_enabled; 3685 if (unlikely(sse)) { 3686 if (sse & GDBSTUB_SINGLE_STEP) { 3687 gen_debug_exception(ctx); 3688 } else if (sse & (CPU_SINGLE_STEP | CPU_BRANCH_STEP)) { 3689 uint32_t excp = gen_prep_dbgex(ctx, POWERPC_EXCP_BRANCH); 3690 if (excp != POWERPC_EXCP_NONE) { 3691 gen_exception(ctx, excp); 3692 } 3693 } 3694 tcg_gen_exit_tb(NULL, 0); 3695 } else { 3696 tcg_gen_lookup_and_goto_ptr(); 3697 } 3698 } 3699 3700 /*** Branch ***/ 3701 static void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest) 3702 { 3703 if (NARROW_MODE(ctx)) { 3704 dest = (uint32_t) dest; 3705 } 3706 if (use_goto_tb(ctx, dest)) { 3707 tcg_gen_goto_tb(n); 3708 tcg_gen_movi_tl(cpu_nip, dest & ~3); 3709 tcg_gen_exit_tb(ctx->base.tb, n); 3710 } else { 3711 tcg_gen_movi_tl(cpu_nip, dest & ~3); 3712 gen_lookup_and_goto_ptr(ctx); 3713 } 3714 } 3715 3716 static inline void gen_setlr(DisasContext *ctx, 
target_ulong nip) 3717 { 3718 if (NARROW_MODE(ctx)) { 3719 nip = (uint32_t)nip; 3720 } 3721 tcg_gen_movi_tl(cpu_lr, nip); 3722 } 3723 3724 /* b ba bl bla */ 3725 static void gen_b(DisasContext *ctx) 3726 { 3727 target_ulong li, target; 3728 3729 ctx->exception = POWERPC_EXCP_BRANCH; 3730 /* sign extend LI */ 3731 li = LI(ctx->opcode); 3732 li = (li ^ 0x02000000) - 0x02000000; 3733 if (likely(AA(ctx->opcode) == 0)) { 3734 target = ctx->base.pc_next + li - 4; 3735 } else { 3736 target = li; 3737 } 3738 if (LK(ctx->opcode)) { 3739 gen_setlr(ctx, ctx->base.pc_next); 3740 } 3741 gen_update_cfar(ctx, ctx->base.pc_next - 4); 3742 gen_goto_tb(ctx, 0, target); 3743 } 3744 3745 #define BCOND_IM 0 3746 #define BCOND_LR 1 3747 #define BCOND_CTR 2 3748 #define BCOND_TAR 3 3749 3750 static void gen_bcond(DisasContext *ctx, int type) 3751 { 3752 uint32_t bo = BO(ctx->opcode); 3753 TCGLabel *l1; 3754 TCGv target; 3755 ctx->exception = POWERPC_EXCP_BRANCH; 3756 3757 if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) { 3758 target = tcg_temp_local_new(); 3759 if (type == BCOND_CTR) 3760 tcg_gen_mov_tl(target, cpu_ctr); 3761 else if (type == BCOND_TAR) 3762 gen_load_spr(target, SPR_TAR); 3763 else 3764 tcg_gen_mov_tl(target, cpu_lr); 3765 } else { 3766 target = NULL; 3767 } 3768 if (LK(ctx->opcode)) 3769 gen_setlr(ctx, ctx->base.pc_next); 3770 l1 = gen_new_label(); 3771 if ((bo & 0x4) == 0) { 3772 /* Decrement and test CTR */ 3773 TCGv temp = tcg_temp_new(); 3774 if (unlikely(type == BCOND_CTR)) { 3775 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3776 return; 3777 } 3778 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1); 3779 if (NARROW_MODE(ctx)) { 3780 tcg_gen_ext32u_tl(temp, cpu_ctr); 3781 } else { 3782 tcg_gen_mov_tl(temp, cpu_ctr); 3783 } 3784 if (bo & 0x2) { 3785 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1); 3786 } else { 3787 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1); 3788 } 3789 tcg_temp_free(temp); 3790 } 3791 if ((bo & 0x10) == 0) { 3792 /* Test CR */ 3793 uint32_t bi = BI(ctx->opcode); 3794 uint32_t mask = 0x08 >> (bi & 0x03); 3795 TCGv_i32 temp = tcg_temp_new_i32(); 3796 3797 if (bo & 0x8) { 3798 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask); 3799 tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1); 3800 } else { 3801 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask); 3802 tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1); 3803 } 3804 tcg_temp_free_i32(temp); 3805 } 3806 gen_update_cfar(ctx, ctx->base.pc_next - 4); 3807 if (type == BCOND_IM) { 3808 target_ulong li = (target_long)((int16_t)(BD(ctx->opcode))); 3809 if (likely(AA(ctx->opcode) == 0)) { 3810 gen_goto_tb(ctx, 0, ctx->base.pc_next + li - 4); 3811 } else { 3812 gen_goto_tb(ctx, 0, li); 3813 } 3814 } else { 3815 if (NARROW_MODE(ctx)) { 3816 tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3); 3817 } else { 3818 tcg_gen_andi_tl(cpu_nip, target, ~3); 3819 } 3820 gen_lookup_and_goto_ptr(ctx); 3821 tcg_temp_free(target); 3822 } 3823 if ((bo & 0x14) != 0x14) { 3824 /* fallthrough case */ 3825 gen_set_label(l1); 3826 gen_goto_tb(ctx, 1, ctx->base.pc_next); 3827 } 3828 } 3829 3830 static void gen_bc(DisasContext *ctx) 3831 { 3832 gen_bcond(ctx, BCOND_IM); 3833 } 3834 3835 static void gen_bcctr(DisasContext *ctx) 3836 { 3837 gen_bcond(ctx, BCOND_CTR); 3838 } 3839 3840 static void gen_bclr(DisasContext *ctx) 3841 { 3842 gen_bcond(ctx, BCOND_LR); 3843 } 3844 3845 static void gen_bctar(DisasContext *ctx) 3846 { 3847 gen_bcond(ctx, BCOND_TAR); 3848 } 3849 3850 /*** Condition register logical ***/ 3851 #define GEN_CRLOGIC(name, tcg_op, opc) \ 3852 static 
void glue(gen_, name)(DisasContext *ctx) \ 3853 { \ 3854 uint8_t bitmask; \ 3855 int sh; \ 3856 TCGv_i32 t0, t1; \ 3857 sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03); \ 3858 t0 = tcg_temp_new_i32(); \ 3859 if (sh > 0) \ 3860 tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh); \ 3861 else if (sh < 0) \ 3862 tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh); \ 3863 else \ 3864 tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]); \ 3865 t1 = tcg_temp_new_i32(); \ 3866 sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03); \ 3867 if (sh > 0) \ 3868 tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh); \ 3869 else if (sh < 0) \ 3870 tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh); \ 3871 else \ 3872 tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]); \ 3873 tcg_op(t0, t0, t1); \ 3874 bitmask = 0x08 >> (crbD(ctx->opcode) & 0x03); \ 3875 tcg_gen_andi_i32(t0, t0, bitmask); \ 3876 tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask); \ 3877 tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1); \ 3878 tcg_temp_free_i32(t0); \ 3879 tcg_temp_free_i32(t1); \ 3880 } 3881 3882 /* crand */ 3883 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08); 3884 /* crandc */ 3885 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04); 3886 /* creqv */ 3887 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09); 3888 /* crnand */ 3889 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07); 3890 /* crnor */ 3891 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01); 3892 /* cror */ 3893 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E); 3894 /* crorc */ 3895 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D); 3896 /* crxor */ 3897 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06); 3898 3899 /* mcrf */ 3900 static void gen_mcrf(DisasContext *ctx) 3901 { 3902 tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]); 3903 } 3904 3905 /*** System linkage ***/ 3906 3907 /* rfi (supervisor only) */ 3908 static void gen_rfi(DisasContext *ctx) 3909 { 3910 #if defined(CONFIG_USER_ONLY) 3911 GEN_PRIV; 3912 #else 3913 /* This instruction doesn't exist anymore on 64-bit server 3914 * processors compliant with arch 2.x 3915 */ 3916 if (ctx->insns_flags & PPC_SEGMENT_64B) { 3917 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3918 return; 3919 } 3920 /* Restore CPU state */ 3921 CHK_SV; 3922 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) { 3923 gen_io_start(); 3924 } 3925 gen_update_cfar(ctx, ctx->base.pc_next - 4); 3926 gen_helper_rfi(cpu_env); 3927 gen_sync_exception(ctx); 3928 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) { 3929 gen_io_end(); 3930 } 3931 #endif 3932 } 3933 3934 #if defined(TARGET_PPC64) 3935 static void gen_rfid(DisasContext *ctx) 3936 { 3937 #if defined(CONFIG_USER_ONLY) 3938 GEN_PRIV; 3939 #else 3940 /* Restore CPU state */ 3941 CHK_SV; 3942 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) { 3943 gen_io_start(); 3944 } 3945 gen_update_cfar(ctx, ctx->base.pc_next - 4); 3946 gen_helper_rfid(cpu_env); 3947 gen_sync_exception(ctx); 3948 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) { 3949 gen_io_end(); 3950 } 3951 #endif 3952 } 3953 3954 static void gen_hrfid(DisasContext *ctx) 3955 { 3956 #if defined(CONFIG_USER_ONLY) 3957 GEN_PRIV; 3958 #else 3959 /* Restore CPU state */ 3960 CHK_HV; 3961 gen_helper_hrfid(cpu_env); 3962 gen_sync_exception(ctx); 3963 #endif 3964 } 3965 #endif 3966 3967 /* sc */ 3968 #if defined(CONFIG_USER_ONLY) 3969 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER 3970 #else 3971 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL 3972 #endif 3973 static void gen_sc(DisasContext *ctx) 3974 { 3975 
uint32_t lev; 3976 3977 lev = (ctx->opcode >> 5) & 0x7F; 3978 gen_exception_err(ctx, POWERPC_SYSCALL, lev); 3979 } 3980 3981 /*** Trap ***/ 3982 3983 /* Check for unconditional traps (always or never) */ 3984 static bool check_unconditional_trap(DisasContext *ctx) 3985 { 3986 /* Trap never */ 3987 if (TO(ctx->opcode) == 0) { 3988 return true; 3989 } 3990 /* Trap always */ 3991 if (TO(ctx->opcode) == 31) { 3992 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_TRAP); 3993 return true; 3994 } 3995 return false; 3996 } 3997 3998 /* tw */ 3999 static void gen_tw(DisasContext *ctx) 4000 { 4001 TCGv_i32 t0; 4002 4003 if (check_unconditional_trap(ctx)) { 4004 return; 4005 } 4006 t0 = tcg_const_i32(TO(ctx->opcode)); 4007 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 4008 t0); 4009 tcg_temp_free_i32(t0); 4010 } 4011 4012 /* twi */ 4013 static void gen_twi(DisasContext *ctx) 4014 { 4015 TCGv t0; 4016 TCGv_i32 t1; 4017 4018 if (check_unconditional_trap(ctx)) { 4019 return; 4020 } 4021 t0 = tcg_const_tl(SIMM(ctx->opcode)); 4022 t1 = tcg_const_i32(TO(ctx->opcode)); 4023 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1); 4024 tcg_temp_free(t0); 4025 tcg_temp_free_i32(t1); 4026 } 4027 4028 #if defined(TARGET_PPC64) 4029 /* td */ 4030 static void gen_td(DisasContext *ctx) 4031 { 4032 TCGv_i32 t0; 4033 4034 if (check_unconditional_trap(ctx)) { 4035 return; 4036 } 4037 t0 = tcg_const_i32(TO(ctx->opcode)); 4038 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 4039 t0); 4040 tcg_temp_free_i32(t0); 4041 } 4042 4043 /* tdi */ 4044 static void gen_tdi(DisasContext *ctx) 4045 { 4046 TCGv t0; 4047 TCGv_i32 t1; 4048 4049 if (check_unconditional_trap(ctx)) { 4050 return; 4051 } 4052 t0 = tcg_const_tl(SIMM(ctx->opcode)); 4053 t1 = tcg_const_i32(TO(ctx->opcode)); 4054 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1); 4055 tcg_temp_free(t0); 4056 tcg_temp_free_i32(t1); 4057 } 4058 #endif 4059 4060 /*** Processor control ***/ 4061 4062 static void gen_read_xer(DisasContext *ctx, TCGv dst) 4063 { 4064 TCGv t0 = tcg_temp_new(); 4065 TCGv t1 = tcg_temp_new(); 4066 TCGv t2 = tcg_temp_new(); 4067 tcg_gen_mov_tl(dst, cpu_xer); 4068 tcg_gen_shli_tl(t0, cpu_so, XER_SO); 4069 tcg_gen_shli_tl(t1, cpu_ov, XER_OV); 4070 tcg_gen_shli_tl(t2, cpu_ca, XER_CA); 4071 tcg_gen_or_tl(t0, t0, t1); 4072 tcg_gen_or_tl(dst, dst, t2); 4073 tcg_gen_or_tl(dst, dst, t0); 4074 if (is_isa300(ctx)) { 4075 tcg_gen_shli_tl(t0, cpu_ov32, XER_OV32); 4076 tcg_gen_or_tl(dst, dst, t0); 4077 tcg_gen_shli_tl(t0, cpu_ca32, XER_CA32); 4078 tcg_gen_or_tl(dst, dst, t0); 4079 } 4080 tcg_temp_free(t0); 4081 tcg_temp_free(t1); 4082 tcg_temp_free(t2); 4083 } 4084 4085 static void gen_write_xer(TCGv src) 4086 { 4087 /* Write all flags, while reading back check for isa300 */ 4088 tcg_gen_andi_tl(cpu_xer, src, 4089 ~((1u << XER_SO) | 4090 (1u << XER_OV) | (1u << XER_OV32) | 4091 (1u << XER_CA) | (1u << XER_CA32))); 4092 tcg_gen_extract_tl(cpu_ov32, src, XER_OV32, 1); 4093 tcg_gen_extract_tl(cpu_ca32, src, XER_CA32, 1); 4094 tcg_gen_extract_tl(cpu_so, src, XER_SO, 1); 4095 tcg_gen_extract_tl(cpu_ov, src, XER_OV, 1); 4096 tcg_gen_extract_tl(cpu_ca, src, XER_CA, 1); 4097 } 4098 4099 /* mcrxr */ 4100 static void gen_mcrxr(DisasContext *ctx) 4101 { 4102 TCGv_i32 t0 = tcg_temp_new_i32(); 4103 TCGv_i32 t1 = tcg_temp_new_i32(); 4104 TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)]; 4105 4106 tcg_gen_trunc_tl_i32(t0, cpu_so); 4107 tcg_gen_trunc_tl_i32(t1, cpu_ov); 4108 tcg_gen_trunc_tl_i32(dst, cpu_ca); 4109 
tcg_gen_shli_i32(t0, t0, 3); 4110 tcg_gen_shli_i32(t1, t1, 2); 4111 tcg_gen_shli_i32(dst, dst, 1); 4112 tcg_gen_or_i32(dst, dst, t0); 4113 tcg_gen_or_i32(dst, dst, t1); 4114 tcg_temp_free_i32(t0); 4115 tcg_temp_free_i32(t1); 4116 4117 tcg_gen_movi_tl(cpu_so, 0); 4118 tcg_gen_movi_tl(cpu_ov, 0); 4119 tcg_gen_movi_tl(cpu_ca, 0); 4120 } 4121 4122 #ifdef TARGET_PPC64 4123 /* mcrxrx */ 4124 static void gen_mcrxrx(DisasContext *ctx) 4125 { 4126 TCGv t0 = tcg_temp_new(); 4127 TCGv t1 = tcg_temp_new(); 4128 TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)]; 4129 4130 /* copy OV and OV32 */ 4131 tcg_gen_shli_tl(t0, cpu_ov, 1); 4132 tcg_gen_or_tl(t0, t0, cpu_ov32); 4133 tcg_gen_shli_tl(t0, t0, 2); 4134 /* copy CA and CA32 */ 4135 tcg_gen_shli_tl(t1, cpu_ca, 1); 4136 tcg_gen_or_tl(t1, t1, cpu_ca32); 4137 tcg_gen_or_tl(t0, t0, t1); 4138 tcg_gen_trunc_tl_i32(dst, t0); 4139 tcg_temp_free(t0); 4140 tcg_temp_free(t1); 4141 } 4142 #endif 4143 4144 /* mfcr mfocrf */ 4145 static void gen_mfcr(DisasContext *ctx) 4146 { 4147 uint32_t crm, crn; 4148 4149 if (likely(ctx->opcode & 0x00100000)) { 4150 crm = CRM(ctx->opcode); 4151 if (likely(crm && ((crm & (crm - 1)) == 0))) { 4152 crn = ctz32 (crm); 4153 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]); 4154 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], 4155 cpu_gpr[rD(ctx->opcode)], crn * 4); 4156 } 4157 } else { 4158 TCGv_i32 t0 = tcg_temp_new_i32(); 4159 tcg_gen_mov_i32(t0, cpu_crf[0]); 4160 tcg_gen_shli_i32(t0, t0, 4); 4161 tcg_gen_or_i32(t0, t0, cpu_crf[1]); 4162 tcg_gen_shli_i32(t0, t0, 4); 4163 tcg_gen_or_i32(t0, t0, cpu_crf[2]); 4164 tcg_gen_shli_i32(t0, t0, 4); 4165 tcg_gen_or_i32(t0, t0, cpu_crf[3]); 4166 tcg_gen_shli_i32(t0, t0, 4); 4167 tcg_gen_or_i32(t0, t0, cpu_crf[4]); 4168 tcg_gen_shli_i32(t0, t0, 4); 4169 tcg_gen_or_i32(t0, t0, cpu_crf[5]); 4170 tcg_gen_shli_i32(t0, t0, 4); 4171 tcg_gen_or_i32(t0, t0, cpu_crf[6]); 4172 tcg_gen_shli_i32(t0, t0, 4); 4173 tcg_gen_or_i32(t0, t0, cpu_crf[7]); 4174 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0); 4175 tcg_temp_free_i32(t0); 4176 } 4177 } 4178 4179 /* mfmsr */ 4180 static void gen_mfmsr(DisasContext *ctx) 4181 { 4182 CHK_SV; 4183 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr); 4184 } 4185 4186 static void spr_noaccess(DisasContext *ctx, int gprn, int sprn) 4187 { 4188 #if 0 4189 sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5); 4190 printf("ERROR: try to access SPR %d !\n", sprn); 4191 #endif 4192 } 4193 #define SPR_NOACCESS (&spr_noaccess) 4194 4195 /* mfspr */ 4196 static inline void gen_op_mfspr(DisasContext *ctx) 4197 { 4198 void (*read_cb)(DisasContext *ctx, int gprn, int sprn); 4199 uint32_t sprn = SPR(ctx->opcode); 4200 4201 #if defined(CONFIG_USER_ONLY) 4202 read_cb = ctx->spr_cb[sprn].uea_read; 4203 #else 4204 if (ctx->pr) { 4205 read_cb = ctx->spr_cb[sprn].uea_read; 4206 } else if (ctx->hv) { 4207 read_cb = ctx->spr_cb[sprn].hea_read; 4208 } else { 4209 read_cb = ctx->spr_cb[sprn].oea_read; 4210 } 4211 #endif 4212 if (likely(read_cb != NULL)) { 4213 if (likely(read_cb != SPR_NOACCESS)) { 4214 (*read_cb)(ctx, rD(ctx->opcode), sprn); 4215 } else { 4216 /* Privilege exception */ 4217 /* This is a hack to avoid warnings when running Linux: 4218 * this OS breaks the PowerPC virtualisation model, 4219 * allowing userland application to read the PVR 4220 */ 4221 if (sprn != SPR_PVR) { 4222 qemu_log_mask(LOG_GUEST_ERROR, "Trying to read privileged spr " 4223 "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn, 4224 ctx->base.pc_next - 4); 4225 } 4226 gen_priv_exception(ctx, 
POWERPC_EXCP_PRIV_REG); 4227 } 4228 } else { 4229 /* ISA 2.07 defines these as no-ops */ 4230 if ((ctx->insns_flags2 & PPC2_ISA207S) && 4231 (sprn >= 808 && sprn <= 811)) { 4232 /* This is a nop */ 4233 return; 4234 } 4235 /* Not defined */ 4236 qemu_log_mask(LOG_GUEST_ERROR, 4237 "Trying to read invalid spr %d (0x%03x) at " 4238 TARGET_FMT_lx "\n", sprn, sprn, ctx->base.pc_next - 4); 4239 4240 /* The behaviour depends on MSR:PR and SPR# bit 0x10, 4241 * it can generate a priv, a hv emu or a no-op 4242 */ 4243 if (sprn & 0x10) { 4244 if (ctx->pr) { 4245 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 4246 } 4247 } else { 4248 if (ctx->pr || sprn == 0 || sprn == 4 || sprn == 5 || sprn == 6) { 4249 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 4250 } 4251 } 4252 } 4253 } 4254 4255 static void gen_mfspr(DisasContext *ctx) 4256 { 4257 gen_op_mfspr(ctx); 4258 } 4259 4260 /* mftb */ 4261 static void gen_mftb(DisasContext *ctx) 4262 { 4263 gen_op_mfspr(ctx); 4264 } 4265 4266 /* mtcrf mtocrf*/ 4267 static void gen_mtcrf(DisasContext *ctx) 4268 { 4269 uint32_t crm, crn; 4270 4271 crm = CRM(ctx->opcode); 4272 if (likely((ctx->opcode & 0x00100000))) { 4273 if (crm && ((crm & (crm - 1)) == 0)) { 4274 TCGv_i32 temp = tcg_temp_new_i32(); 4275 crn = ctz32 (crm); 4276 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]); 4277 tcg_gen_shri_i32(temp, temp, crn * 4); 4278 tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf); 4279 tcg_temp_free_i32(temp); 4280 } 4281 } else { 4282 TCGv_i32 temp = tcg_temp_new_i32(); 4283 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]); 4284 for (crn = 0 ; crn < 8 ; crn++) { 4285 if (crm & (1 << crn)) { 4286 tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4); 4287 tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf); 4288 } 4289 } 4290 tcg_temp_free_i32(temp); 4291 } 4292 } 4293 4294 /* mtmsr */ 4295 #if defined(TARGET_PPC64) 4296 static void gen_mtmsrd(DisasContext *ctx) 4297 { 4298 CHK_SV; 4299 4300 #if !defined(CONFIG_USER_ONLY) 4301 if (ctx->opcode & 0x00010000) { 4302 /* Special form that does not need any synchronisation */ 4303 TCGv t0 = tcg_temp_new(); 4304 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE)); 4305 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE))); 4306 tcg_gen_or_tl(cpu_msr, cpu_msr, t0); 4307 tcg_temp_free(t0); 4308 } else { 4309 /* XXX: we need to update nip before the store 4310 * if we enter power saving mode, we will exit the loop 4311 * directly from ppc_store_msr 4312 */ 4313 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) { 4314 gen_io_start(); 4315 } 4316 gen_update_nip(ctx, ctx->base.pc_next); 4317 gen_helper_store_msr(cpu_env, cpu_gpr[rS(ctx->opcode)]); 4318 /* Must stop the translation as machine state (may have) changed */ 4319 /* Note that mtmsr is not always defined as context-synchronizing */ 4320 gen_stop_exception(ctx); 4321 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) { 4322 gen_io_end(); 4323 } 4324 } 4325 #endif /* !defined(CONFIG_USER_ONLY) */ 4326 } 4327 #endif /* defined(TARGET_PPC64) */ 4328 4329 static void gen_mtmsr(DisasContext *ctx) 4330 { 4331 CHK_SV; 4332 4333 #if !defined(CONFIG_USER_ONLY) 4334 if (ctx->opcode & 0x00010000) { 4335 /* Special form that does not need any synchronisation */ 4336 TCGv t0 = tcg_temp_new(); 4337 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE)); 4338 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE))); 4339 tcg_gen_or_tl(cpu_msr, cpu_msr, t0); 4340 tcg_temp_free(t0); 4341 } 
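    /*
     * The branch above handled the mtmsr L=1 form, which only updates
     * MSR[EE] and MSR[RI] and needs no synchronisation.  The path below
     * is the full form: it goes through the store_msr helper, which may
     * change translation or interrupt state, so the translation block
     * has to be stopped afterwards.
     */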
else { 4342 TCGv msr = tcg_temp_new(); 4343 4344 /* XXX: we need to update nip before the store 4345 * if we enter power saving mode, we will exit the loop 4346 * directly from ppc_store_msr 4347 */ 4348 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) { 4349 gen_io_start(); 4350 } 4351 gen_update_nip(ctx, ctx->base.pc_next); 4352 #if defined(TARGET_PPC64) 4353 tcg_gen_deposit_tl(msr, cpu_msr, cpu_gpr[rS(ctx->opcode)], 0, 32); 4354 #else 4355 tcg_gen_mov_tl(msr, cpu_gpr[rS(ctx->opcode)]); 4356 #endif 4357 gen_helper_store_msr(cpu_env, msr); 4358 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) { 4359 gen_io_end(); 4360 } 4361 tcg_temp_free(msr); 4362 /* Must stop the translation as machine state (may have) changed */ 4363 /* Note that mtmsr is not always defined as context-synchronizing */ 4364 gen_stop_exception(ctx); 4365 } 4366 #endif 4367 } 4368 4369 /* mtspr */ 4370 static void gen_mtspr(DisasContext *ctx) 4371 { 4372 void (*write_cb)(DisasContext *ctx, int sprn, int gprn); 4373 uint32_t sprn = SPR(ctx->opcode); 4374 4375 #if defined(CONFIG_USER_ONLY) 4376 write_cb = ctx->spr_cb[sprn].uea_write; 4377 #else 4378 if (ctx->pr) { 4379 write_cb = ctx->spr_cb[sprn].uea_write; 4380 } else if (ctx->hv) { 4381 write_cb = ctx->spr_cb[sprn].hea_write; 4382 } else { 4383 write_cb = ctx->spr_cb[sprn].oea_write; 4384 } 4385 #endif 4386 if (likely(write_cb != NULL)) { 4387 if (likely(write_cb != SPR_NOACCESS)) { 4388 (*write_cb)(ctx, sprn, rS(ctx->opcode)); 4389 } else { 4390 /* Privilege exception */ 4391 qemu_log_mask(LOG_GUEST_ERROR, "Trying to write privileged spr " 4392 "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn, 4393 ctx->base.pc_next - 4); 4394 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG); 4395 } 4396 } else { 4397 /* ISA 2.07 defines these as no-ops */ 4398 if ((ctx->insns_flags2 & PPC2_ISA207S) && 4399 (sprn >= 808 && sprn <= 811)) { 4400 /* This is a nop */ 4401 return; 4402 } 4403 4404 /* Not defined */ 4405 qemu_log_mask(LOG_GUEST_ERROR, 4406 "Trying to write invalid spr %d (0x%03x) at " 4407 TARGET_FMT_lx "\n", sprn, sprn, ctx->base.pc_next - 4); 4408 4409 4410 /* The behaviour depends on MSR:PR and SPR# bit 0x10, 4411 * it can generate a priv, a hv emu or a no-op 4412 */ 4413 if (sprn & 0x10) { 4414 if (ctx->pr) { 4415 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 4416 } 4417 } else { 4418 if (ctx->pr || sprn == 0) { 4419 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 4420 } 4421 } 4422 } 4423 } 4424 4425 #if defined(TARGET_PPC64) 4426 /* setb */ 4427 static void gen_setb(DisasContext *ctx) 4428 { 4429 TCGv_i32 t0 = tcg_temp_new_i32(); 4430 TCGv_i32 t8 = tcg_temp_new_i32(); 4431 TCGv_i32 tm1 = tcg_temp_new_i32(); 4432 int crf = crfS(ctx->opcode); 4433 4434 tcg_gen_setcondi_i32(TCG_COND_GEU, t0, cpu_crf[crf], 4); 4435 tcg_gen_movi_i32(t8, 8); 4436 tcg_gen_movi_i32(tm1, -1); 4437 tcg_gen_movcond_i32(TCG_COND_GEU, t0, cpu_crf[crf], t8, tm1, t0); 4438 tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0); 4439 4440 tcg_temp_free_i32(t0); 4441 tcg_temp_free_i32(t8); 4442 tcg_temp_free_i32(tm1); 4443 } 4444 #endif 4445 4446 /*** Cache management ***/ 4447 4448 /* dcbf */ 4449 static void gen_dcbf(DisasContext *ctx) 4450 { 4451 /* XXX: specification says this is treated as a load by the MMU */ 4452 TCGv t0; 4453 gen_set_access_type(ctx, ACCESS_CACHE); 4454 t0 = tcg_temp_new(); 4455 gen_addr_reg_index(ctx, t0); 4456 gen_qemu_ld8u(ctx, t0, t0); 4457 tcg_temp_free(t0); 4458 } 4459 4460 /* dcbfep (external PID dcbf) */ 4461 static void gen_dcbfep(DisasContext *ctx) 4462 { 4463 /* XXX: 
specification says this is treated as a load by the MMU */
    TCGv t0;
    CHK_SV;
    gen_set_access_type(ctx, ACCESS_CACHE);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB));
    tcg_temp_free(t0);
}

/* dcbi (Supervisor only) */
static void gen_dcbi(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv EA, val;

    CHK_SV;
    EA = tcg_temp_new();
    gen_set_access_type(ctx, ACCESS_CACHE);
    gen_addr_reg_index(ctx, EA);
    val = tcg_temp_new();
    /* XXX: specification says this should be treated as a store by the MMU */
    gen_qemu_ld8u(ctx, val, EA);
    gen_qemu_st8(ctx, val, EA);
    tcg_temp_free(val);
    tcg_temp_free(EA);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* dcbst */
static void gen_dcbst(DisasContext *ctx)
{
    /* XXX: specification says this is treated as a load by the MMU */
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_CACHE);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_qemu_ld8u(ctx, t0, t0);
    tcg_temp_free(t0);
}

/* dcbstep (dcbst External PID version) */
static void gen_dcbstep(DisasContext *ctx)
{
    /* XXX: specification says this is treated as a load by the MMU */
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_CACHE);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB));
    tcg_temp_free(t0);
}

/* dcbt */
static void gen_dcbt(DisasContext *ctx)
{
    /* interpreted as no-op */
    /* XXX: specification says this is treated as a load by the MMU
     * but does not generate any exception
     */
}

/* dcbtep */
static void gen_dcbtep(DisasContext *ctx)
{
    /* interpreted as no-op */
    /* XXX: specification says this is treated as a load by the MMU
     * but does not generate any exception
     */
}

/* dcbtst */
static void gen_dcbtst(DisasContext *ctx)
{
    /* interpreted as no-op */
    /* XXX: specification says this is treated as a load by the MMU
     * but does not generate any exception
     */
}

/* dcbtstep */
static void gen_dcbtstep(DisasContext *ctx)
{
    /* interpreted as no-op */
    /* XXX: specification says this is treated as a load by the MMU
     * but does not generate any exception
     */
}

/* dcbtls */
static void gen_dcbtls(DisasContext *ctx)
{
    /* Always fails locking the cache */
    TCGv t0 = tcg_temp_new();
    gen_load_spr(t0, SPR_Exxx_L1CSR0);
    tcg_gen_ori_tl(t0, t0, L1CSR0_CUL);
    gen_store_spr(SPR_Exxx_L1CSR0, t0);
    tcg_temp_free(t0);
}

/* dcbz */
static void gen_dcbz(DisasContext *ctx)
{
    TCGv tcgv_addr;
    TCGv_i32 tcgv_op;

    gen_set_access_type(ctx, ACCESS_CACHE);
    tcgv_addr = tcg_temp_new();
    tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000);
    gen_addr_reg_index(ctx, tcgv_addr);
    gen_helper_dcbz(cpu_env, tcgv_addr, tcgv_op);
    tcg_temp_free(tcgv_addr);
    tcg_temp_free_i32(tcgv_op);
}

/* dcbzep */
static void gen_dcbzep(DisasContext *ctx)
{
    TCGv tcgv_addr;
    TCGv_i32 tcgv_op;

    gen_set_access_type(ctx, ACCESS_CACHE);
    tcgv_addr = tcg_temp_new();
    tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000);
    gen_addr_reg_index(ctx, tcgv_addr);
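    /*
     * As in gen_dcbz() above, the helper is handed the opcode bits
     * masked with 0x03FF000 so that it can distinguish dcbz variants
     * (e.g. dcbzl on 970-class CPUs) and pick the cache block size at
     * run time; tcgv_addr holds the effective address computed above.
     */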
4590 gen_helper_dcbzep(cpu_env, tcgv_addr, tcgv_op); 4591 tcg_temp_free(tcgv_addr); 4592 tcg_temp_free_i32(tcgv_op); 4593 } 4594 4595 /* dst / dstt */ 4596 static void gen_dst(DisasContext *ctx) 4597 { 4598 if (rA(ctx->opcode) == 0) { 4599 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 4600 } else { 4601 /* interpreted as no-op */ 4602 } 4603 } 4604 4605 /* dstst /dststt */ 4606 static void gen_dstst(DisasContext *ctx) 4607 { 4608 if (rA(ctx->opcode) == 0) { 4609 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 4610 } else { 4611 /* interpreted as no-op */ 4612 } 4613 4614 } 4615 4616 /* dss / dssall */ 4617 static void gen_dss(DisasContext *ctx) 4618 { 4619 /* interpreted as no-op */ 4620 } 4621 4622 /* icbi */ 4623 static void gen_icbi(DisasContext *ctx) 4624 { 4625 TCGv t0; 4626 gen_set_access_type(ctx, ACCESS_CACHE); 4627 t0 = tcg_temp_new(); 4628 gen_addr_reg_index(ctx, t0); 4629 gen_helper_icbi(cpu_env, t0); 4630 tcg_temp_free(t0); 4631 } 4632 4633 /* icbiep */ 4634 static void gen_icbiep(DisasContext *ctx) 4635 { 4636 TCGv t0; 4637 gen_set_access_type(ctx, ACCESS_CACHE); 4638 t0 = tcg_temp_new(); 4639 gen_addr_reg_index(ctx, t0); 4640 gen_helper_icbiep(cpu_env, t0); 4641 tcg_temp_free(t0); 4642 } 4643 4644 /* Optional: */ 4645 /* dcba */ 4646 static void gen_dcba(DisasContext *ctx) 4647 { 4648 /* interpreted as no-op */ 4649 /* XXX: specification say this is treated as a store by the MMU 4650 * but does not generate any exception 4651 */ 4652 } 4653 4654 /*** Segment register manipulation ***/ 4655 /* Supervisor only: */ 4656 4657 /* mfsr */ 4658 static void gen_mfsr(DisasContext *ctx) 4659 { 4660 #if defined(CONFIG_USER_ONLY) 4661 GEN_PRIV; 4662 #else 4663 TCGv t0; 4664 4665 CHK_SV; 4666 t0 = tcg_const_tl(SR(ctx->opcode)); 4667 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 4668 tcg_temp_free(t0); 4669 #endif /* defined(CONFIG_USER_ONLY) */ 4670 } 4671 4672 /* mfsrin */ 4673 static void gen_mfsrin(DisasContext *ctx) 4674 { 4675 #if defined(CONFIG_USER_ONLY) 4676 GEN_PRIV; 4677 #else 4678 TCGv t0; 4679 4680 CHK_SV; 4681 t0 = tcg_temp_new(); 4682 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 4683 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 4684 tcg_temp_free(t0); 4685 #endif /* defined(CONFIG_USER_ONLY) */ 4686 } 4687 4688 /* mtsr */ 4689 static void gen_mtsr(DisasContext *ctx) 4690 { 4691 #if defined(CONFIG_USER_ONLY) 4692 GEN_PRIV; 4693 #else 4694 TCGv t0; 4695 4696 CHK_SV; 4697 t0 = tcg_const_tl(SR(ctx->opcode)); 4698 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 4699 tcg_temp_free(t0); 4700 #endif /* defined(CONFIG_USER_ONLY) */ 4701 } 4702 4703 /* mtsrin */ 4704 static void gen_mtsrin(DisasContext *ctx) 4705 { 4706 #if defined(CONFIG_USER_ONLY) 4707 GEN_PRIV; 4708 #else 4709 TCGv t0; 4710 CHK_SV; 4711 4712 t0 = tcg_temp_new(); 4713 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 4714 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rD(ctx->opcode)]); 4715 tcg_temp_free(t0); 4716 #endif /* defined(CONFIG_USER_ONLY) */ 4717 } 4718 4719 #if defined(TARGET_PPC64) 4720 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */ 4721 4722 /* mfsr */ 4723 static void gen_mfsr_64b(DisasContext *ctx) 4724 { 4725 #if defined(CONFIG_USER_ONLY) 4726 GEN_PRIV; 4727 #else 4728 TCGv t0; 4729 4730 CHK_SV; 4731 t0 = tcg_const_tl(SR(ctx->opcode)); 4732 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 4733 tcg_temp_free(t0); 4734 #endif /* defined(CONFIG_USER_ONLY) */ 4735 } 4736 4737 /* mfsrin */ 4738 static void 
gen_mfsrin_64b(DisasContext *ctx) 4739 { 4740 #if defined(CONFIG_USER_ONLY) 4741 GEN_PRIV; 4742 #else 4743 TCGv t0; 4744 4745 CHK_SV; 4746 t0 = tcg_temp_new(); 4747 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 4748 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 4749 tcg_temp_free(t0); 4750 #endif /* defined(CONFIG_USER_ONLY) */ 4751 } 4752 4753 /* mtsr */ 4754 static void gen_mtsr_64b(DisasContext *ctx) 4755 { 4756 #if defined(CONFIG_USER_ONLY) 4757 GEN_PRIV; 4758 #else 4759 TCGv t0; 4760 4761 CHK_SV; 4762 t0 = tcg_const_tl(SR(ctx->opcode)); 4763 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 4764 tcg_temp_free(t0); 4765 #endif /* defined(CONFIG_USER_ONLY) */ 4766 } 4767 4768 /* mtsrin */ 4769 static void gen_mtsrin_64b(DisasContext *ctx) 4770 { 4771 #if defined(CONFIG_USER_ONLY) 4772 GEN_PRIV; 4773 #else 4774 TCGv t0; 4775 4776 CHK_SV; 4777 t0 = tcg_temp_new(); 4778 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 4779 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 4780 tcg_temp_free(t0); 4781 #endif /* defined(CONFIG_USER_ONLY) */ 4782 } 4783 4784 /* slbmte */ 4785 static void gen_slbmte(DisasContext *ctx) 4786 { 4787 #if defined(CONFIG_USER_ONLY) 4788 GEN_PRIV; 4789 #else 4790 CHK_SV; 4791 4792 gen_helper_store_slb(cpu_env, cpu_gpr[rB(ctx->opcode)], 4793 cpu_gpr[rS(ctx->opcode)]); 4794 #endif /* defined(CONFIG_USER_ONLY) */ 4795 } 4796 4797 static void gen_slbmfee(DisasContext *ctx) 4798 { 4799 #if defined(CONFIG_USER_ONLY) 4800 GEN_PRIV; 4801 #else 4802 CHK_SV; 4803 4804 gen_helper_load_slb_esid(cpu_gpr[rS(ctx->opcode)], cpu_env, 4805 cpu_gpr[rB(ctx->opcode)]); 4806 #endif /* defined(CONFIG_USER_ONLY) */ 4807 } 4808 4809 static void gen_slbmfev(DisasContext *ctx) 4810 { 4811 #if defined(CONFIG_USER_ONLY) 4812 GEN_PRIV; 4813 #else 4814 CHK_SV; 4815 4816 gen_helper_load_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env, 4817 cpu_gpr[rB(ctx->opcode)]); 4818 #endif /* defined(CONFIG_USER_ONLY) */ 4819 } 4820 4821 static void gen_slbfee_(DisasContext *ctx) 4822 { 4823 #if defined(CONFIG_USER_ONLY) 4824 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); 4825 #else 4826 TCGLabel *l1, *l2; 4827 4828 if (unlikely(ctx->pr)) { 4829 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); 4830 return; 4831 } 4832 gen_helper_find_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env, 4833 cpu_gpr[rB(ctx->opcode)]); 4834 l1 = gen_new_label(); 4835 l2 = gen_new_label(); 4836 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 4837 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rS(ctx->opcode)], -1, l1); 4838 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ); 4839 tcg_gen_br(l2); 4840 gen_set_label(l1); 4841 tcg_gen_movi_tl(cpu_gpr[rS(ctx->opcode)], 0); 4842 gen_set_label(l2); 4843 #endif 4844 } 4845 #endif /* defined(TARGET_PPC64) */ 4846 4847 /*** Lookaside buffer management ***/ 4848 /* Optional & supervisor only: */ 4849 4850 /* tlbia */ 4851 static void gen_tlbia(DisasContext *ctx) 4852 { 4853 #if defined(CONFIG_USER_ONLY) 4854 GEN_PRIV; 4855 #else 4856 CHK_HV; 4857 4858 gen_helper_tlbia(cpu_env); 4859 #endif /* defined(CONFIG_USER_ONLY) */ 4860 } 4861 4862 /* tlbiel */ 4863 static void gen_tlbiel(DisasContext *ctx) 4864 { 4865 #if defined(CONFIG_USER_ONLY) 4866 GEN_PRIV; 4867 #else 4868 CHK_SV; 4869 4870 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]); 4871 #endif /* defined(CONFIG_USER_ONLY) */ 4872 } 4873 4874 /* tlbie */ 4875 static void gen_tlbie(DisasContext *ctx) 4876 { 4877 #if defined(CONFIG_USER_ONLY) 4878 GEN_PRIV; 4879 #else 4880 TCGv_i32 t1; 4881 4882 if (ctx->gtse) { 
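        /*
         * ctx->gtse mirrors LPCR[GTSE] (Guest Translation Shootdown
         * Enable): with it set the guest OS is allowed to issue tlbie
         * itself, so only supervisor privilege is required; otherwise
         * the instruction is hypervisor-privileged, as checked below.
         */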
4883 CHK_SV; /* If gtse is set then tlbie is supervisor privileged */ 4884 } else { 4885 CHK_HV; /* Else hypervisor privileged */ 4886 } 4887 4888 if (NARROW_MODE(ctx)) { 4889 TCGv t0 = tcg_temp_new(); 4890 tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]); 4891 gen_helper_tlbie(cpu_env, t0); 4892 tcg_temp_free(t0); 4893 } else { 4894 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]); 4895 } 4896 t1 = tcg_temp_new_i32(); 4897 tcg_gen_ld_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush)); 4898 tcg_gen_ori_i32(t1, t1, TLB_NEED_GLOBAL_FLUSH); 4899 tcg_gen_st_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush)); 4900 tcg_temp_free_i32(t1); 4901 #endif /* defined(CONFIG_USER_ONLY) */ 4902 } 4903 4904 /* tlbsync */ 4905 static void gen_tlbsync(DisasContext *ctx) 4906 { 4907 #if defined(CONFIG_USER_ONLY) 4908 GEN_PRIV; 4909 #else 4910 4911 if (ctx->gtse) { 4912 CHK_SV; /* If gtse is set then tlbsync is supervisor privileged */ 4913 } else { 4914 CHK_HV; /* Else hypervisor privileged */ 4915 } 4916 4917 /* BookS does both ptesync and tlbsync make tlbsync a nop for server */ 4918 if (ctx->insns_flags & PPC_BOOKE) { 4919 gen_check_tlb_flush(ctx, true); 4920 } 4921 #endif /* defined(CONFIG_USER_ONLY) */ 4922 } 4923 4924 #if defined(TARGET_PPC64) 4925 /* slbia */ 4926 static void gen_slbia(DisasContext *ctx) 4927 { 4928 #if defined(CONFIG_USER_ONLY) 4929 GEN_PRIV; 4930 #else 4931 CHK_SV; 4932 4933 gen_helper_slbia(cpu_env); 4934 #endif /* defined(CONFIG_USER_ONLY) */ 4935 } 4936 4937 /* slbie */ 4938 static void gen_slbie(DisasContext *ctx) 4939 { 4940 #if defined(CONFIG_USER_ONLY) 4941 GEN_PRIV; 4942 #else 4943 CHK_SV; 4944 4945 gen_helper_slbie(cpu_env, cpu_gpr[rB(ctx->opcode)]); 4946 #endif /* defined(CONFIG_USER_ONLY) */ 4947 } 4948 4949 /* slbieg */ 4950 static void gen_slbieg(DisasContext *ctx) 4951 { 4952 #if defined(CONFIG_USER_ONLY) 4953 GEN_PRIV; 4954 #else 4955 CHK_SV; 4956 4957 gen_helper_slbieg(cpu_env, cpu_gpr[rB(ctx->opcode)]); 4958 #endif /* defined(CONFIG_USER_ONLY) */ 4959 } 4960 4961 /* slbsync */ 4962 static void gen_slbsync(DisasContext *ctx) 4963 { 4964 #if defined(CONFIG_USER_ONLY) 4965 GEN_PRIV; 4966 #else 4967 CHK_SV; 4968 gen_check_tlb_flush(ctx, true); 4969 #endif /* defined(CONFIG_USER_ONLY) */ 4970 } 4971 4972 #endif /* defined(TARGET_PPC64) */ 4973 4974 /*** External control ***/ 4975 /* Optional: */ 4976 4977 /* eciwx */ 4978 static void gen_eciwx(DisasContext *ctx) 4979 { 4980 TCGv t0; 4981 /* Should check EAR[E] ! */ 4982 gen_set_access_type(ctx, ACCESS_EXT); 4983 t0 = tcg_temp_new(); 4984 gen_addr_reg_index(ctx, t0); 4985 tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx, 4986 DEF_MEMOP(MO_UL | MO_ALIGN)); 4987 tcg_temp_free(t0); 4988 } 4989 4990 /* ecowx */ 4991 static void gen_ecowx(DisasContext *ctx) 4992 { 4993 TCGv t0; 4994 /* Should check EAR[E] ! */ 4995 gen_set_access_type(ctx, ACCESS_EXT); 4996 t0 = tcg_temp_new(); 4997 gen_addr_reg_index(ctx, t0); 4998 tcg_gen_qemu_st_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx, 4999 DEF_MEMOP(MO_UL | MO_ALIGN)); 5000 tcg_temp_free(t0); 5001 } 5002 5003 /* PowerPC 601 specific instructions */ 5004 5005 /* abs - abs. 
*/ 5006 static void gen_abs(DisasContext *ctx) 5007 { 5008 TCGLabel *l1 = gen_new_label(); 5009 TCGLabel *l2 = gen_new_label(); 5010 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l1); 5011 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5012 tcg_gen_br(l2); 5013 gen_set_label(l1); 5014 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5015 gen_set_label(l2); 5016 if (unlikely(Rc(ctx->opcode) != 0)) 5017 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5018 } 5019 5020 /* abso - abso. */ 5021 static void gen_abso(DisasContext *ctx) 5022 { 5023 TCGLabel *l1 = gen_new_label(); 5024 TCGLabel *l2 = gen_new_label(); 5025 TCGLabel *l3 = gen_new_label(); 5026 /* Start with XER OV disabled, the most likely case */ 5027 tcg_gen_movi_tl(cpu_ov, 0); 5028 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l2); 5029 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[rA(ctx->opcode)], 0x80000000, l1); 5030 tcg_gen_movi_tl(cpu_ov, 1); 5031 tcg_gen_movi_tl(cpu_so, 1); 5032 tcg_gen_br(l2); 5033 gen_set_label(l1); 5034 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5035 tcg_gen_br(l3); 5036 gen_set_label(l2); 5037 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5038 gen_set_label(l3); 5039 if (unlikely(Rc(ctx->opcode) != 0)) 5040 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5041 } 5042 5043 /* clcs */ 5044 static void gen_clcs(DisasContext *ctx) 5045 { 5046 TCGv_i32 t0 = tcg_const_i32(rA(ctx->opcode)); 5047 gen_helper_clcs(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5048 tcg_temp_free_i32(t0); 5049 /* Rc=1 sets CR0 to an undefined state */ 5050 } 5051 5052 /* div - div. */ 5053 static void gen_div(DisasContext *ctx) 5054 { 5055 gen_helper_div(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 5056 cpu_gpr[rB(ctx->opcode)]); 5057 if (unlikely(Rc(ctx->opcode) != 0)) 5058 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5059 } 5060 5061 /* divo - divo. */ 5062 static void gen_divo(DisasContext *ctx) 5063 { 5064 gen_helper_divo(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 5065 cpu_gpr[rB(ctx->opcode)]); 5066 if (unlikely(Rc(ctx->opcode) != 0)) 5067 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5068 } 5069 5070 /* divs - divs. */ 5071 static void gen_divs(DisasContext *ctx) 5072 { 5073 gen_helper_divs(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 5074 cpu_gpr[rB(ctx->opcode)]); 5075 if (unlikely(Rc(ctx->opcode) != 0)) 5076 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5077 } 5078 5079 /* divso - divso. */ 5080 static void gen_divso(DisasContext *ctx) 5081 { 5082 gen_helper_divso(cpu_gpr[rD(ctx->opcode)], cpu_env, 5083 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 5084 if (unlikely(Rc(ctx->opcode) != 0)) 5085 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5086 } 5087 5088 /* doz - doz. */ 5089 static void gen_doz(DisasContext *ctx) 5090 { 5091 TCGLabel *l1 = gen_new_label(); 5092 TCGLabel *l2 = gen_new_label(); 5093 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1); 5094 tcg_gen_sub_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5095 tcg_gen_br(l2); 5096 gen_set_label(l1); 5097 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 5098 gen_set_label(l2); 5099 if (unlikely(Rc(ctx->opcode) != 0)) 5100 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5101 } 5102 5103 /* dozo - dozo. 
*/ 5104 static void gen_dozo(DisasContext *ctx) 5105 { 5106 TCGLabel *l1 = gen_new_label(); 5107 TCGLabel *l2 = gen_new_label(); 5108 TCGv t0 = tcg_temp_new(); 5109 TCGv t1 = tcg_temp_new(); 5110 TCGv t2 = tcg_temp_new(); 5111 /* Start with XER OV disabled, the most likely case */ 5112 tcg_gen_movi_tl(cpu_ov, 0); 5113 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1); 5114 tcg_gen_sub_tl(t0, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5115 tcg_gen_xor_tl(t1, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5116 tcg_gen_xor_tl(t2, cpu_gpr[rA(ctx->opcode)], t0); 5117 tcg_gen_andc_tl(t1, t1, t2); 5118 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0); 5119 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2); 5120 tcg_gen_movi_tl(cpu_ov, 1); 5121 tcg_gen_movi_tl(cpu_so, 1); 5122 tcg_gen_br(l2); 5123 gen_set_label(l1); 5124 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 5125 gen_set_label(l2); 5126 tcg_temp_free(t0); 5127 tcg_temp_free(t1); 5128 tcg_temp_free(t2); 5129 if (unlikely(Rc(ctx->opcode) != 0)) 5130 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5131 } 5132 5133 /* dozi */ 5134 static void gen_dozi(DisasContext *ctx) 5135 { 5136 target_long simm = SIMM(ctx->opcode); 5137 TCGLabel *l1 = gen_new_label(); 5138 TCGLabel *l2 = gen_new_label(); 5139 tcg_gen_brcondi_tl(TCG_COND_LT, cpu_gpr[rA(ctx->opcode)], simm, l1); 5140 tcg_gen_subfi_tl(cpu_gpr[rD(ctx->opcode)], simm, cpu_gpr[rA(ctx->opcode)]); 5141 tcg_gen_br(l2); 5142 gen_set_label(l1); 5143 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 5144 gen_set_label(l2); 5145 if (unlikely(Rc(ctx->opcode) != 0)) 5146 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5147 } 5148 5149 /* lscbx - lscbx. */ 5150 static void gen_lscbx(DisasContext *ctx) 5151 { 5152 TCGv t0 = tcg_temp_new(); 5153 TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode)); 5154 TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode)); 5155 TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode)); 5156 5157 gen_addr_reg_index(ctx, t0); 5158 gen_helper_lscbx(t0, cpu_env, t0, t1, t2, t3); 5159 tcg_temp_free_i32(t1); 5160 tcg_temp_free_i32(t2); 5161 tcg_temp_free_i32(t3); 5162 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F); 5163 tcg_gen_or_tl(cpu_xer, cpu_xer, t0); 5164 if (unlikely(Rc(ctx->opcode) != 0)) 5165 gen_set_Rc0(ctx, t0); 5166 tcg_temp_free(t0); 5167 } 5168 5169 /* maskg - maskg. */ 5170 static void gen_maskg(DisasContext *ctx) 5171 { 5172 TCGLabel *l1 = gen_new_label(); 5173 TCGv t0 = tcg_temp_new(); 5174 TCGv t1 = tcg_temp_new(); 5175 TCGv t2 = tcg_temp_new(); 5176 TCGv t3 = tcg_temp_new(); 5177 tcg_gen_movi_tl(t3, 0xFFFFFFFF); 5178 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5179 tcg_gen_andi_tl(t1, cpu_gpr[rS(ctx->opcode)], 0x1F); 5180 tcg_gen_addi_tl(t2, t0, 1); 5181 tcg_gen_shr_tl(t2, t3, t2); 5182 tcg_gen_shr_tl(t3, t3, t1); 5183 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], t2, t3); 5184 tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1); 5185 tcg_gen_neg_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5186 gen_set_label(l1); 5187 tcg_temp_free(t0); 5188 tcg_temp_free(t1); 5189 tcg_temp_free(t2); 5190 tcg_temp_free(t3); 5191 if (unlikely(Rc(ctx->opcode) != 0)) 5192 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5193 } 5194 5195 /* maskir - maskir. 
*/ 5196 static void gen_maskir(DisasContext *ctx) 5197 { 5198 TCGv t0 = tcg_temp_new(); 5199 TCGv t1 = tcg_temp_new(); 5200 tcg_gen_and_tl(t0, cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 5201 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 5202 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5203 tcg_temp_free(t0); 5204 tcg_temp_free(t1); 5205 if (unlikely(Rc(ctx->opcode) != 0)) 5206 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5207 } 5208 5209 /* mul - mul. */ 5210 static void gen_mul(DisasContext *ctx) 5211 { 5212 TCGv_i64 t0 = tcg_temp_new_i64(); 5213 TCGv_i64 t1 = tcg_temp_new_i64(); 5214 TCGv t2 = tcg_temp_new(); 5215 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]); 5216 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]); 5217 tcg_gen_mul_i64(t0, t0, t1); 5218 tcg_gen_trunc_i64_tl(t2, t0); 5219 gen_store_spr(SPR_MQ, t2); 5220 tcg_gen_shri_i64(t1, t0, 32); 5221 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1); 5222 tcg_temp_free_i64(t0); 5223 tcg_temp_free_i64(t1); 5224 tcg_temp_free(t2); 5225 if (unlikely(Rc(ctx->opcode) != 0)) 5226 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5227 } 5228 5229 /* mulo - mulo. */ 5230 static void gen_mulo(DisasContext *ctx) 5231 { 5232 TCGLabel *l1 = gen_new_label(); 5233 TCGv_i64 t0 = tcg_temp_new_i64(); 5234 TCGv_i64 t1 = tcg_temp_new_i64(); 5235 TCGv t2 = tcg_temp_new(); 5236 /* Start with XER OV disabled, the most likely case */ 5237 tcg_gen_movi_tl(cpu_ov, 0); 5238 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]); 5239 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]); 5240 tcg_gen_mul_i64(t0, t0, t1); 5241 tcg_gen_trunc_i64_tl(t2, t0); 5242 gen_store_spr(SPR_MQ, t2); 5243 tcg_gen_shri_i64(t1, t0, 32); 5244 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1); 5245 tcg_gen_ext32s_i64(t1, t0); 5246 tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1); 5247 tcg_gen_movi_tl(cpu_ov, 1); 5248 tcg_gen_movi_tl(cpu_so, 1); 5249 gen_set_label(l1); 5250 tcg_temp_free_i64(t0); 5251 tcg_temp_free_i64(t1); 5252 tcg_temp_free(t2); 5253 if (unlikely(Rc(ctx->opcode) != 0)) 5254 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5255 } 5256 5257 /* nabs - nabs. */ 5258 static void gen_nabs(DisasContext *ctx) 5259 { 5260 TCGLabel *l1 = gen_new_label(); 5261 TCGLabel *l2 = gen_new_label(); 5262 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1); 5263 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5264 tcg_gen_br(l2); 5265 gen_set_label(l1); 5266 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5267 gen_set_label(l2); 5268 if (unlikely(Rc(ctx->opcode) != 0)) 5269 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5270 } 5271 5272 /* nabso - nabso. */ 5273 static void gen_nabso(DisasContext *ctx) 5274 { 5275 TCGLabel *l1 = gen_new_label(); 5276 TCGLabel *l2 = gen_new_label(); 5277 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1); 5278 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5279 tcg_gen_br(l2); 5280 gen_set_label(l1); 5281 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5282 gen_set_label(l2); 5283 /* nabs never overflows */ 5284 tcg_gen_movi_tl(cpu_ov, 0); 5285 if (unlikely(Rc(ctx->opcode) != 0)) 5286 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5287 } 5288 5289 /* rlmi - rlmi. 
*/ 5290 static void gen_rlmi(DisasContext *ctx) 5291 { 5292 uint32_t mb = MB(ctx->opcode); 5293 uint32_t me = ME(ctx->opcode); 5294 TCGv t0 = tcg_temp_new(); 5295 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5296 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 5297 tcg_gen_andi_tl(t0, t0, MASK(mb, me)); 5298 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~MASK(mb, me)); 5299 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], t0); 5300 tcg_temp_free(t0); 5301 if (unlikely(Rc(ctx->opcode) != 0)) 5302 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5303 } 5304 5305 /* rrib - rrib. */ 5306 static void gen_rrib(DisasContext *ctx) 5307 { 5308 TCGv t0 = tcg_temp_new(); 5309 TCGv t1 = tcg_temp_new(); 5310 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5311 tcg_gen_movi_tl(t1, 0x80000000); 5312 tcg_gen_shr_tl(t1, t1, t0); 5313 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 5314 tcg_gen_and_tl(t0, t0, t1); 5315 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], t1); 5316 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5317 tcg_temp_free(t0); 5318 tcg_temp_free(t1); 5319 if (unlikely(Rc(ctx->opcode) != 0)) 5320 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5321 } 5322 5323 /* sle - sle. */ 5324 static void gen_sle(DisasContext *ctx) 5325 { 5326 TCGv t0 = tcg_temp_new(); 5327 TCGv t1 = tcg_temp_new(); 5328 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5329 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5330 tcg_gen_subfi_tl(t1, 32, t1); 5331 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 5332 tcg_gen_or_tl(t1, t0, t1); 5333 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5334 gen_store_spr(SPR_MQ, t1); 5335 tcg_temp_free(t0); 5336 tcg_temp_free(t1); 5337 if (unlikely(Rc(ctx->opcode) != 0)) 5338 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5339 } 5340 5341 /* sleq - sleq. */ 5342 static void gen_sleq(DisasContext *ctx) 5343 { 5344 TCGv t0 = tcg_temp_new(); 5345 TCGv t1 = tcg_temp_new(); 5346 TCGv t2 = tcg_temp_new(); 5347 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5348 tcg_gen_movi_tl(t2, 0xFFFFFFFF); 5349 tcg_gen_shl_tl(t2, t2, t0); 5350 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 5351 gen_load_spr(t1, SPR_MQ); 5352 gen_store_spr(SPR_MQ, t0); 5353 tcg_gen_and_tl(t0, t0, t2); 5354 tcg_gen_andc_tl(t1, t1, t2); 5355 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5356 tcg_temp_free(t0); 5357 tcg_temp_free(t1); 5358 tcg_temp_free(t2); 5359 if (unlikely(Rc(ctx->opcode) != 0)) 5360 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5361 } 5362 5363 /* sliq - sliq. */ 5364 static void gen_sliq(DisasContext *ctx) 5365 { 5366 int sh = SH(ctx->opcode); 5367 TCGv t0 = tcg_temp_new(); 5368 TCGv t1 = tcg_temp_new(); 5369 tcg_gen_shli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5370 tcg_gen_shri_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 5371 tcg_gen_or_tl(t1, t0, t1); 5372 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5373 gen_store_spr(SPR_MQ, t1); 5374 tcg_temp_free(t0); 5375 tcg_temp_free(t1); 5376 if (unlikely(Rc(ctx->opcode) != 0)) 5377 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5378 } 5379 5380 /* slliq - slliq. 
*/ 5381 static void gen_slliq(DisasContext *ctx) 5382 { 5383 int sh = SH(ctx->opcode); 5384 TCGv t0 = tcg_temp_new(); 5385 TCGv t1 = tcg_temp_new(); 5386 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5387 gen_load_spr(t1, SPR_MQ); 5388 gen_store_spr(SPR_MQ, t0); 5389 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU << sh)); 5390 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU << sh)); 5391 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5392 tcg_temp_free(t0); 5393 tcg_temp_free(t1); 5394 if (unlikely(Rc(ctx->opcode) != 0)) 5395 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5396 } 5397 5398 /* sllq - sllq. */ 5399 static void gen_sllq(DisasContext *ctx) 5400 { 5401 TCGLabel *l1 = gen_new_label(); 5402 TCGLabel *l2 = gen_new_label(); 5403 TCGv t0 = tcg_temp_local_new(); 5404 TCGv t1 = tcg_temp_local_new(); 5405 TCGv t2 = tcg_temp_local_new(); 5406 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 5407 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 5408 tcg_gen_shl_tl(t1, t1, t2); 5409 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 5410 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 5411 gen_load_spr(t0, SPR_MQ); 5412 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5413 tcg_gen_br(l2); 5414 gen_set_label(l1); 5415 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 5416 gen_load_spr(t2, SPR_MQ); 5417 tcg_gen_andc_tl(t1, t2, t1); 5418 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5419 gen_set_label(l2); 5420 tcg_temp_free(t0); 5421 tcg_temp_free(t1); 5422 tcg_temp_free(t2); 5423 if (unlikely(Rc(ctx->opcode) != 0)) 5424 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5425 } 5426 5427 /* slq - slq. */ 5428 static void gen_slq(DisasContext *ctx) 5429 { 5430 TCGLabel *l1 = gen_new_label(); 5431 TCGv t0 = tcg_temp_new(); 5432 TCGv t1 = tcg_temp_new(); 5433 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5434 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5435 tcg_gen_subfi_tl(t1, 32, t1); 5436 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 5437 tcg_gen_or_tl(t1, t0, t1); 5438 gen_store_spr(SPR_MQ, t1); 5439 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20); 5440 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5441 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1); 5442 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 5443 gen_set_label(l1); 5444 tcg_temp_free(t0); 5445 tcg_temp_free(t1); 5446 if (unlikely(Rc(ctx->opcode) != 0)) 5447 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5448 } 5449 5450 /* sraiq - sraiq. */ 5451 static void gen_sraiq(DisasContext *ctx) 5452 { 5453 int sh = SH(ctx->opcode); 5454 TCGLabel *l1 = gen_new_label(); 5455 TCGv t0 = tcg_temp_new(); 5456 TCGv t1 = tcg_temp_new(); 5457 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5458 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 5459 tcg_gen_or_tl(t0, t0, t1); 5460 gen_store_spr(SPR_MQ, t0); 5461 tcg_gen_movi_tl(cpu_ca, 0); 5462 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1); 5463 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1); 5464 tcg_gen_movi_tl(cpu_ca, 1); 5465 gen_set_label(l1); 5466 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh); 5467 tcg_temp_free(t0); 5468 tcg_temp_free(t1); 5469 if (unlikely(Rc(ctx->opcode) != 0)) 5470 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5471 } 5472 5473 /* sraq - sraq. 
*/ 5474 static void gen_sraq(DisasContext *ctx) 5475 { 5476 TCGLabel *l1 = gen_new_label(); 5477 TCGLabel *l2 = gen_new_label(); 5478 TCGv t0 = tcg_temp_new(); 5479 TCGv t1 = tcg_temp_local_new(); 5480 TCGv t2 = tcg_temp_local_new(); 5481 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 5482 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 5483 tcg_gen_sar_tl(t1, cpu_gpr[rS(ctx->opcode)], t2); 5484 tcg_gen_subfi_tl(t2, 32, t2); 5485 tcg_gen_shl_tl(t2, cpu_gpr[rS(ctx->opcode)], t2); 5486 tcg_gen_or_tl(t0, t0, t2); 5487 gen_store_spr(SPR_MQ, t0); 5488 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 5489 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l1); 5490 tcg_gen_mov_tl(t2, cpu_gpr[rS(ctx->opcode)]); 5491 tcg_gen_sari_tl(t1, cpu_gpr[rS(ctx->opcode)], 31); 5492 gen_set_label(l1); 5493 tcg_temp_free(t0); 5494 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t1); 5495 tcg_gen_movi_tl(cpu_ca, 0); 5496 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2); 5497 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l2); 5498 tcg_gen_movi_tl(cpu_ca, 1); 5499 gen_set_label(l2); 5500 tcg_temp_free(t1); 5501 tcg_temp_free(t2); 5502 if (unlikely(Rc(ctx->opcode) != 0)) 5503 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5504 } 5505 5506 /* sre - sre. */ 5507 static void gen_sre(DisasContext *ctx) 5508 { 5509 TCGv t0 = tcg_temp_new(); 5510 TCGv t1 = tcg_temp_new(); 5511 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5512 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5513 tcg_gen_subfi_tl(t1, 32, t1); 5514 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 5515 tcg_gen_or_tl(t1, t0, t1); 5516 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5517 gen_store_spr(SPR_MQ, t1); 5518 tcg_temp_free(t0); 5519 tcg_temp_free(t1); 5520 if (unlikely(Rc(ctx->opcode) != 0)) 5521 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5522 } 5523 5524 /* srea - srea. 
*/ 5525 static void gen_srea(DisasContext *ctx) 5526 { 5527 TCGv t0 = tcg_temp_new(); 5528 TCGv t1 = tcg_temp_new(); 5529 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5530 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5531 gen_store_spr(SPR_MQ, t0); 5532 tcg_gen_sar_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t1); 5533 tcg_temp_free(t0); 5534 tcg_temp_free(t1); 5535 if (unlikely(Rc(ctx->opcode) != 0)) 5536 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5537 } 5538 5539 /* sreq */ 5540 static void gen_sreq(DisasContext *ctx) 5541 { 5542 TCGv t0 = tcg_temp_new(); 5543 TCGv t1 = tcg_temp_new(); 5544 TCGv t2 = tcg_temp_new(); 5545 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5546 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 5547 tcg_gen_shr_tl(t1, t1, t0); 5548 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 5549 gen_load_spr(t2, SPR_MQ); 5550 gen_store_spr(SPR_MQ, t0); 5551 tcg_gen_and_tl(t0, t0, t1); 5552 tcg_gen_andc_tl(t2, t2, t1); 5553 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t2); 5554 tcg_temp_free(t0); 5555 tcg_temp_free(t1); 5556 tcg_temp_free(t2); 5557 if (unlikely(Rc(ctx->opcode) != 0)) 5558 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5559 } 5560 5561 /* sriq */ 5562 static void gen_sriq(DisasContext *ctx) 5563 { 5564 int sh = SH(ctx->opcode); 5565 TCGv t0 = tcg_temp_new(); 5566 TCGv t1 = tcg_temp_new(); 5567 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5568 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 5569 tcg_gen_or_tl(t1, t0, t1); 5570 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5571 gen_store_spr(SPR_MQ, t1); 5572 tcg_temp_free(t0); 5573 tcg_temp_free(t1); 5574 if (unlikely(Rc(ctx->opcode) != 0)) 5575 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5576 } 5577 5578 /* srliq */ 5579 static void gen_srliq(DisasContext *ctx) 5580 { 5581 int sh = SH(ctx->opcode); 5582 TCGv t0 = tcg_temp_new(); 5583 TCGv t1 = tcg_temp_new(); 5584 tcg_gen_rotri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5585 gen_load_spr(t1, SPR_MQ); 5586 gen_store_spr(SPR_MQ, t0); 5587 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU >> sh)); 5588 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU >> sh)); 5589 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5590 tcg_temp_free(t0); 5591 tcg_temp_free(t1); 5592 if (unlikely(Rc(ctx->opcode) != 0)) 5593 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5594 } 5595 5596 /* srlq */ 5597 static void gen_srlq(DisasContext *ctx) 5598 { 5599 TCGLabel *l1 = gen_new_label(); 5600 TCGLabel *l2 = gen_new_label(); 5601 TCGv t0 = tcg_temp_local_new(); 5602 TCGv t1 = tcg_temp_local_new(); 5603 TCGv t2 = tcg_temp_local_new(); 5604 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 5605 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 5606 tcg_gen_shr_tl(t2, t1, t2); 5607 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 5608 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 5609 gen_load_spr(t0, SPR_MQ); 5610 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t2); 5611 tcg_gen_br(l2); 5612 gen_set_label(l1); 5613 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 5614 tcg_gen_and_tl(t0, t0, t2); 5615 gen_load_spr(t1, SPR_MQ); 5616 tcg_gen_andc_tl(t1, t1, t2); 5617 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5618 gen_set_label(l2); 5619 tcg_temp_free(t0); 5620 tcg_temp_free(t1); 5621 tcg_temp_free(t2); 5622 if (unlikely(Rc(ctx->opcode) != 0)) 5623 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5624 } 5625 5626 /* srq */ 5627 static void gen_srq(DisasContext *ctx) 5628 { 5629 TCGLabel *l1 = gen_new_label(); 5630 TCGv t0 = tcg_temp_new(); 5631 TCGv t1 = tcg_temp_new(); 5632 tcg_gen_andi_tl(t1, 
cpu_gpr[rB(ctx->opcode)], 0x1F); 5633 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5634 tcg_gen_subfi_tl(t1, 32, t1); 5635 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 5636 tcg_gen_or_tl(t1, t0, t1); 5637 gen_store_spr(SPR_MQ, t1); 5638 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20); 5639 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5640 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 5641 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 5642 gen_set_label(l1); 5643 tcg_temp_free(t0); 5644 tcg_temp_free(t1); 5645 if (unlikely(Rc(ctx->opcode) != 0)) 5646 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5647 } 5648 5649 /* PowerPC 602 specific instructions */ 5650 5651 /* dsa */ 5652 static void gen_dsa(DisasContext *ctx) 5653 { 5654 /* XXX: TODO */ 5655 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5656 } 5657 5658 /* esa */ 5659 static void gen_esa(DisasContext *ctx) 5660 { 5661 /* XXX: TODO */ 5662 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5663 } 5664 5665 /* mfrom */ 5666 static void gen_mfrom(DisasContext *ctx) 5667 { 5668 #if defined(CONFIG_USER_ONLY) 5669 GEN_PRIV; 5670 #else 5671 CHK_SV; 5672 gen_helper_602_mfrom(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5673 #endif /* defined(CONFIG_USER_ONLY) */ 5674 } 5675 5676 /* 602 - 603 - G2 TLB management */ 5677 5678 /* tlbld */ 5679 static void gen_tlbld_6xx(DisasContext *ctx) 5680 { 5681 #if defined(CONFIG_USER_ONLY) 5682 GEN_PRIV; 5683 #else 5684 CHK_SV; 5685 gen_helper_6xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5686 #endif /* defined(CONFIG_USER_ONLY) */ 5687 } 5688 5689 /* tlbli */ 5690 static void gen_tlbli_6xx(DisasContext *ctx) 5691 { 5692 #if defined(CONFIG_USER_ONLY) 5693 GEN_PRIV; 5694 #else 5695 CHK_SV; 5696 gen_helper_6xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5697 #endif /* defined(CONFIG_USER_ONLY) */ 5698 } 5699 5700 /* 74xx TLB management */ 5701 5702 /* tlbld */ 5703 static void gen_tlbld_74xx(DisasContext *ctx) 5704 { 5705 #if defined(CONFIG_USER_ONLY) 5706 GEN_PRIV; 5707 #else 5708 CHK_SV; 5709 gen_helper_74xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5710 #endif /* defined(CONFIG_USER_ONLY) */ 5711 } 5712 5713 /* tlbli */ 5714 static void gen_tlbli_74xx(DisasContext *ctx) 5715 { 5716 #if defined(CONFIG_USER_ONLY) 5717 GEN_PRIV; 5718 #else 5719 CHK_SV; 5720 gen_helper_74xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5721 #endif /* defined(CONFIG_USER_ONLY) */ 5722 } 5723 5724 /* POWER instructions not in PowerPC 601 */ 5725 5726 /* clf */ 5727 static void gen_clf(DisasContext *ctx) 5728 { 5729 /* Cache line flush: implemented as no-op */ 5730 } 5731 5732 /* cli */ 5733 static void gen_cli(DisasContext *ctx) 5734 { 5735 #if defined(CONFIG_USER_ONLY) 5736 GEN_PRIV; 5737 #else 5738 /* Cache line invalidate: privileged and treated as no-op */ 5739 CHK_SV; 5740 #endif /* defined(CONFIG_USER_ONLY) */ 5741 } 5742 5743 /* dclst */ 5744 static void gen_dclst(DisasContext *ctx) 5745 { 5746 /* Data cache line store: treated as no-op */ 5747 } 5748 5749 static void gen_mfsri(DisasContext *ctx) 5750 { 5751 #if defined(CONFIG_USER_ONLY) 5752 GEN_PRIV; 5753 #else 5754 int ra = rA(ctx->opcode); 5755 int rd = rD(ctx->opcode); 5756 TCGv t0; 5757 5758 CHK_SV; 5759 t0 = tcg_temp_new(); 5760 gen_addr_reg_index(ctx, t0); 5761 tcg_gen_extract_tl(t0, t0, 28, 4); 5762 gen_helper_load_sr(cpu_gpr[rd], cpu_env, t0); 5763 tcg_temp_free(t0); 5764 if (ra != 0 && ra != rd) 5765 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rd]); 5766 #endif /* defined(CONFIG_USER_ONLY) */ 5767 } 5768 5769 static void gen_rac(DisasContext *ctx) 5770 
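/*
 * rac ("real address compute") is an old POWER instruction: the helper
 * call below runs the effective address formed from rA/rB through the
 * MMU and deposits the resulting real address in rD.  Like the other
 * POWER leftovers in this block it is supervisor-only.
 */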
{ 5771 #if defined(CONFIG_USER_ONLY) 5772 GEN_PRIV; 5773 #else 5774 TCGv t0; 5775 5776 CHK_SV; 5777 t0 = tcg_temp_new(); 5778 gen_addr_reg_index(ctx, t0); 5779 gen_helper_rac(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5780 tcg_temp_free(t0); 5781 #endif /* defined(CONFIG_USER_ONLY) */ 5782 } 5783 5784 static void gen_rfsvc(DisasContext *ctx) 5785 { 5786 #if defined(CONFIG_USER_ONLY) 5787 GEN_PRIV; 5788 #else 5789 CHK_SV; 5790 5791 gen_helper_rfsvc(cpu_env); 5792 gen_sync_exception(ctx); 5793 #endif /* defined(CONFIG_USER_ONLY) */ 5794 } 5795 5796 /* svc is not implemented for now */ 5797 5798 /* BookE specific instructions */ 5799 5800 /* XXX: not implemented on 440 ? */ 5801 static void gen_mfapidi(DisasContext *ctx) 5802 { 5803 /* XXX: TODO */ 5804 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5805 } 5806 5807 /* XXX: not implemented on 440 ? */ 5808 static void gen_tlbiva(DisasContext *ctx) 5809 { 5810 #if defined(CONFIG_USER_ONLY) 5811 GEN_PRIV; 5812 #else 5813 TCGv t0; 5814 5815 CHK_SV; 5816 t0 = tcg_temp_new(); 5817 gen_addr_reg_index(ctx, t0); 5818 gen_helper_tlbiva(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5819 tcg_temp_free(t0); 5820 #endif /* defined(CONFIG_USER_ONLY) */ 5821 } 5822 5823 /* All 405 MAC instructions are translated here */ 5824 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3, 5825 int ra, int rb, int rt, int Rc) 5826 { 5827 TCGv t0, t1; 5828 5829 t0 = tcg_temp_local_new(); 5830 t1 = tcg_temp_local_new(); 5831 5832 switch (opc3 & 0x0D) { 5833 case 0x05: 5834 /* macchw - macchw. - macchwo - macchwo. */ 5835 /* macchws - macchws. - macchwso - macchwso. */ 5836 /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */ 5837 /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */ 5838 /* mulchw - mulchw. */ 5839 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]); 5840 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16); 5841 tcg_gen_ext16s_tl(t1, t1); 5842 break; 5843 case 0x04: 5844 /* macchwu - macchwu. - macchwuo - macchwuo. */ 5845 /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */ 5846 /* mulchwu - mulchwu. */ 5847 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]); 5848 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16); 5849 tcg_gen_ext16u_tl(t1, t1); 5850 break; 5851 case 0x01: 5852 /* machhw - machhw. - machhwo - machhwo. */ 5853 /* machhws - machhws. - machhwso - machhwso. */ 5854 /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */ 5855 /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */ 5856 /* mulhhw - mulhhw. */ 5857 tcg_gen_sari_tl(t0, cpu_gpr[ra], 16); 5858 tcg_gen_ext16s_tl(t0, t0); 5859 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16); 5860 tcg_gen_ext16s_tl(t1, t1); 5861 break; 5862 case 0x00: 5863 /* machhwu - machhwu. - machhwuo - machhwuo. */ 5864 /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */ 5865 /* mulhhwu - mulhhwu. */ 5866 tcg_gen_shri_tl(t0, cpu_gpr[ra], 16); 5867 tcg_gen_ext16u_tl(t0, t0); 5868 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16); 5869 tcg_gen_ext16u_tl(t1, t1); 5870 break; 5871 case 0x0D: 5872 /* maclhw - maclhw. - maclhwo - maclhwo. */ 5873 /* maclhws - maclhws. - maclhwso - maclhwso. */ 5874 /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */ 5875 /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */ 5876 /* mullhw - mullhw. */ 5877 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]); 5878 tcg_gen_ext16s_tl(t1, cpu_gpr[rb]); 5879 break; 5880 case 0x0C: 5881 /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */ 5882 /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */ 5883 /* mullhwu - mullhwu. 
*/ 5884 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]); 5885 tcg_gen_ext16u_tl(t1, cpu_gpr[rb]); 5886 break; 5887 } 5888 if (opc2 & 0x04) { 5889 /* (n)multiply-and-accumulate (0x0C / 0x0E) */ 5890 tcg_gen_mul_tl(t1, t0, t1); 5891 if (opc2 & 0x02) { 5892 /* nmultiply-and-accumulate (0x0E) */ 5893 tcg_gen_sub_tl(t0, cpu_gpr[rt], t1); 5894 } else { 5895 /* multiply-and-accumulate (0x0C) */ 5896 tcg_gen_add_tl(t0, cpu_gpr[rt], t1); 5897 } 5898 5899 if (opc3 & 0x12) { 5900 /* Check overflow and/or saturate */ 5901 TCGLabel *l1 = gen_new_label(); 5902 5903 if (opc3 & 0x10) { 5904 /* Start with XER OV disabled, the most likely case */ 5905 tcg_gen_movi_tl(cpu_ov, 0); 5906 } 5907 if (opc3 & 0x01) { 5908 /* Signed */ 5909 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1); 5910 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1); 5911 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0); 5912 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1); 5913 if (opc3 & 0x02) { 5914 /* Saturate */ 5915 tcg_gen_sari_tl(t0, cpu_gpr[rt], 31); 5916 tcg_gen_xori_tl(t0, t0, 0x7fffffff); 5917 } 5918 } else { 5919 /* Unsigned */ 5920 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1); 5921 if (opc3 & 0x02) { 5922 /* Saturate */ 5923 tcg_gen_movi_tl(t0, UINT32_MAX); 5924 } 5925 } 5926 if (opc3 & 0x10) { 5927 /* Check overflow */ 5928 tcg_gen_movi_tl(cpu_ov, 1); 5929 tcg_gen_movi_tl(cpu_so, 1); 5930 } 5931 gen_set_label(l1); 5932 tcg_gen_mov_tl(cpu_gpr[rt], t0); 5933 } 5934 } else { 5935 tcg_gen_mul_tl(cpu_gpr[rt], t0, t1); 5936 } 5937 tcg_temp_free(t0); 5938 tcg_temp_free(t1); 5939 if (unlikely(Rc) != 0) { 5940 /* Update Rc0 */ 5941 gen_set_Rc0(ctx, cpu_gpr[rt]); 5942 } 5943 } 5944 5945 #define GEN_MAC_HANDLER(name, opc2, opc3) \ 5946 static void glue(gen_, name)(DisasContext *ctx) \ 5947 { \ 5948 gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode), \ 5949 rD(ctx->opcode), Rc(ctx->opcode)); \ 5950 } 5951 5952 /* macchw - macchw. */ 5953 GEN_MAC_HANDLER(macchw, 0x0C, 0x05); 5954 /* macchwo - macchwo. */ 5955 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15); 5956 /* macchws - macchws. */ 5957 GEN_MAC_HANDLER(macchws, 0x0C, 0x07); 5958 /* macchwso - macchwso. */ 5959 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17); 5960 /* macchwsu - macchwsu. */ 5961 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06); 5962 /* macchwsuo - macchwsuo. */ 5963 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16); 5964 /* macchwu - macchwu. */ 5965 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04); 5966 /* macchwuo - macchwuo. */ 5967 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14); 5968 /* machhw - machhw. */ 5969 GEN_MAC_HANDLER(machhw, 0x0C, 0x01); 5970 /* machhwo - machhwo. */ 5971 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11); 5972 /* machhws - machhws. */ 5973 GEN_MAC_HANDLER(machhws, 0x0C, 0x03); 5974 /* machhwso - machhwso. */ 5975 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13); 5976 /* machhwsu - machhwsu. */ 5977 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02); 5978 /* machhwsuo - machhwsuo. */ 5979 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12); 5980 /* machhwu - machhwu. */ 5981 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00); 5982 /* machhwuo - machhwuo. */ 5983 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10); 5984 /* maclhw - maclhw. */ 5985 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D); 5986 /* maclhwo - maclhwo. */ 5987 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D); 5988 /* maclhws - maclhws. */ 5989 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F); 5990 /* maclhwso - maclhwso. */ 5991 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F); 5992 /* maclhwu - maclhwu. */ 5993 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C); 5994 /* maclhwuo - maclhwuo. */ 5995 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C); 5996 /* maclhwsu - maclhwsu. 
*/ 5997 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E); 5998 /* maclhwsuo - maclhwsuo. */ 5999 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E); 6000 /* nmacchw - nmacchw. */ 6001 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05); 6002 /* nmacchwo - nmacchwo. */ 6003 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15); 6004 /* nmacchws - nmacchws. */ 6005 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07); 6006 /* nmacchwso - nmacchwso. */ 6007 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17); 6008 /* nmachhw - nmachhw. */ 6009 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01); 6010 /* nmachhwo - nmachhwo. */ 6011 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11); 6012 /* nmachhws - nmachhws. */ 6013 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03); 6014 /* nmachhwso - nmachhwso. */ 6015 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13); 6016 /* nmaclhw - nmaclhw. */ 6017 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D); 6018 /* nmaclhwo - nmaclhwo. */ 6019 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D); 6020 /* nmaclhws - nmaclhws. */ 6021 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F); 6022 /* nmaclhwso - nmaclhwso. */ 6023 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F); 6024 6025 /* mulchw - mulchw. */ 6026 GEN_MAC_HANDLER(mulchw, 0x08, 0x05); 6027 /* mulchwu - mulchwu. */ 6028 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04); 6029 /* mulhhw - mulhhw. */ 6030 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01); 6031 /* mulhhwu - mulhhwu. */ 6032 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00); 6033 /* mullhw - mullhw. */ 6034 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D); 6035 /* mullhwu - mullhwu. */ 6036 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C); 6037 6038 /* mfdcr */ 6039 static void gen_mfdcr(DisasContext *ctx) 6040 { 6041 #if defined(CONFIG_USER_ONLY) 6042 GEN_PRIV; 6043 #else 6044 TCGv dcrn; 6045 6046 CHK_SV; 6047 dcrn = tcg_const_tl(SPR(ctx->opcode)); 6048 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, dcrn); 6049 tcg_temp_free(dcrn); 6050 #endif /* defined(CONFIG_USER_ONLY) */ 6051 } 6052 6053 /* mtdcr */ 6054 static void gen_mtdcr(DisasContext *ctx) 6055 { 6056 #if defined(CONFIG_USER_ONLY) 6057 GEN_PRIV; 6058 #else 6059 TCGv dcrn; 6060 6061 CHK_SV; 6062 dcrn = tcg_const_tl(SPR(ctx->opcode)); 6063 gen_helper_store_dcr(cpu_env, dcrn, cpu_gpr[rS(ctx->opcode)]); 6064 tcg_temp_free(dcrn); 6065 #endif /* defined(CONFIG_USER_ONLY) */ 6066 } 6067 6068 /* mfdcrx */ 6069 /* XXX: not implemented on 440 ? */ 6070 static void gen_mfdcrx(DisasContext *ctx) 6071 { 6072 #if defined(CONFIG_USER_ONLY) 6073 GEN_PRIV; 6074 #else 6075 CHK_SV; 6076 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, 6077 cpu_gpr[rA(ctx->opcode)]); 6078 /* Note: Rc update flag set leads to undefined state of Rc0 */ 6079 #endif /* defined(CONFIG_USER_ONLY) */ 6080 } 6081 6082 /* mtdcrx */ 6083 /* XXX: not implemented on 440 ? 
*/ 6084 static void gen_mtdcrx(DisasContext *ctx) 6085 { 6086 #if defined(CONFIG_USER_ONLY) 6087 GEN_PRIV; 6088 #else 6089 CHK_SV; 6090 gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)], 6091 cpu_gpr[rS(ctx->opcode)]); 6092 /* Note: Rc update flag set leads to undefined state of Rc0 */ 6093 #endif /* defined(CONFIG_USER_ONLY) */ 6094 } 6095 6096 /* mfdcrux (PPC 460) : user-mode access to DCR */ 6097 static void gen_mfdcrux(DisasContext *ctx) 6098 { 6099 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, 6100 cpu_gpr[rA(ctx->opcode)]); 6101 /* Note: Rc update flag set leads to undefined state of Rc0 */ 6102 } 6103 6104 /* mtdcrux (PPC 460) : user-mode access to DCR */ 6105 static void gen_mtdcrux(DisasContext *ctx) 6106 { 6107 gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)], 6108 cpu_gpr[rS(ctx->opcode)]); 6109 /* Note: Rc update flag set leads to undefined state of Rc0 */ 6110 } 6111 6112 /* dccci */ 6113 static void gen_dccci(DisasContext *ctx) 6114 { 6115 CHK_SV; 6116 /* interpreted as no-op */ 6117 } 6118 6119 /* dcread */ 6120 static void gen_dcread(DisasContext *ctx) 6121 { 6122 #if defined(CONFIG_USER_ONLY) 6123 GEN_PRIV; 6124 #else 6125 TCGv EA, val; 6126 6127 CHK_SV; 6128 gen_set_access_type(ctx, ACCESS_CACHE); 6129 EA = tcg_temp_new(); 6130 gen_addr_reg_index(ctx, EA); 6131 val = tcg_temp_new(); 6132 gen_qemu_ld32u(ctx, val, EA); 6133 tcg_temp_free(val); 6134 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA); 6135 tcg_temp_free(EA); 6136 #endif /* defined(CONFIG_USER_ONLY) */ 6137 } 6138 6139 /* icbt */ 6140 static void gen_icbt_40x(DisasContext *ctx) 6141 { 6142 /* interpreted as no-op */ 6143 /* XXX: specification say this is treated as a load by the MMU 6144 * but does not generate any exception 6145 */ 6146 } 6147 6148 /* iccci */ 6149 static void gen_iccci(DisasContext *ctx) 6150 { 6151 CHK_SV; 6152 /* interpreted as no-op */ 6153 } 6154 6155 /* icread */ 6156 static void gen_icread(DisasContext *ctx) 6157 { 6158 CHK_SV; 6159 /* interpreted as no-op */ 6160 } 6161 6162 /* rfci (supervisor only) */ 6163 static void gen_rfci_40x(DisasContext *ctx) 6164 { 6165 #if defined(CONFIG_USER_ONLY) 6166 GEN_PRIV; 6167 #else 6168 CHK_SV; 6169 /* Restore CPU state */ 6170 gen_helper_40x_rfci(cpu_env); 6171 gen_sync_exception(ctx); 6172 #endif /* defined(CONFIG_USER_ONLY) */ 6173 } 6174 6175 static void gen_rfci(DisasContext *ctx) 6176 { 6177 #if defined(CONFIG_USER_ONLY) 6178 GEN_PRIV; 6179 #else 6180 CHK_SV; 6181 /* Restore CPU state */ 6182 gen_helper_rfci(cpu_env); 6183 gen_sync_exception(ctx); 6184 #endif /* defined(CONFIG_USER_ONLY) */ 6185 } 6186 6187 /* BookE specific */ 6188 6189 /* XXX: not implemented on 440 ? */ 6190 static void gen_rfdi(DisasContext *ctx) 6191 { 6192 #if defined(CONFIG_USER_ONLY) 6193 GEN_PRIV; 6194 #else 6195 CHK_SV; 6196 /* Restore CPU state */ 6197 gen_helper_rfdi(cpu_env); 6198 gen_sync_exception(ctx); 6199 #endif /* defined(CONFIG_USER_ONLY) */ 6200 } 6201 6202 /* XXX: not implemented on 440 ? 
*/ 6203 static void gen_rfmci(DisasContext *ctx) 6204 { 6205 #if defined(CONFIG_USER_ONLY) 6206 GEN_PRIV; 6207 #else 6208 CHK_SV; 6209 /* Restore CPU state */ 6210 gen_helper_rfmci(cpu_env); 6211 gen_sync_exception(ctx); 6212 #endif /* defined(CONFIG_USER_ONLY) */ 6213 } 6214 6215 /* TLB management - PowerPC 405 implementation */ 6216 6217 /* tlbre */ 6218 static void gen_tlbre_40x(DisasContext *ctx) 6219 { 6220 #if defined(CONFIG_USER_ONLY) 6221 GEN_PRIV; 6222 #else 6223 CHK_SV; 6224 switch (rB(ctx->opcode)) { 6225 case 0: 6226 gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_env, 6227 cpu_gpr[rA(ctx->opcode)]); 6228 break; 6229 case 1: 6230 gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_env, 6231 cpu_gpr[rA(ctx->opcode)]); 6232 break; 6233 default: 6234 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6235 break; 6236 } 6237 #endif /* defined(CONFIG_USER_ONLY) */ 6238 } 6239 6240 /* tlbsx - tlbsx. */ 6241 static void gen_tlbsx_40x(DisasContext *ctx) 6242 { 6243 #if defined(CONFIG_USER_ONLY) 6244 GEN_PRIV; 6245 #else 6246 TCGv t0; 6247 6248 CHK_SV; 6249 t0 = tcg_temp_new(); 6250 gen_addr_reg_index(ctx, t0); 6251 gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 6252 tcg_temp_free(t0); 6253 if (Rc(ctx->opcode)) { 6254 TCGLabel *l1 = gen_new_label(); 6255 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 6256 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1); 6257 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02); 6258 gen_set_label(l1); 6259 } 6260 #endif /* defined(CONFIG_USER_ONLY) */ 6261 } 6262 6263 /* tlbwe */ 6264 static void gen_tlbwe_40x(DisasContext *ctx) 6265 { 6266 #if defined(CONFIG_USER_ONLY) 6267 GEN_PRIV; 6268 #else 6269 CHK_SV; 6270 6271 switch (rB(ctx->opcode)) { 6272 case 0: 6273 gen_helper_4xx_tlbwe_hi(cpu_env, cpu_gpr[rA(ctx->opcode)], 6274 cpu_gpr[rS(ctx->opcode)]); 6275 break; 6276 case 1: 6277 gen_helper_4xx_tlbwe_lo(cpu_env, cpu_gpr[rA(ctx->opcode)], 6278 cpu_gpr[rS(ctx->opcode)]); 6279 break; 6280 default: 6281 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6282 break; 6283 } 6284 #endif /* defined(CONFIG_USER_ONLY) */ 6285 } 6286 6287 /* TLB management - PowerPC 440 implementation */ 6288 6289 /* tlbre */ 6290 static void gen_tlbre_440(DisasContext *ctx) 6291 { 6292 #if defined(CONFIG_USER_ONLY) 6293 GEN_PRIV; 6294 #else 6295 CHK_SV; 6296 6297 switch (rB(ctx->opcode)) { 6298 case 0: 6299 case 1: 6300 case 2: 6301 { 6302 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode)); 6303 gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], cpu_env, 6304 t0, cpu_gpr[rA(ctx->opcode)]); 6305 tcg_temp_free_i32(t0); 6306 } 6307 break; 6308 default: 6309 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6310 break; 6311 } 6312 #endif /* defined(CONFIG_USER_ONLY) */ 6313 } 6314 6315 /* tlbsx - tlbsx. 
*/ 6316 static void gen_tlbsx_440(DisasContext *ctx) 6317 { 6318 #if defined(CONFIG_USER_ONLY) 6319 GEN_PRIV; 6320 #else 6321 TCGv t0; 6322 6323 CHK_SV; 6324 t0 = tcg_temp_new(); 6325 gen_addr_reg_index(ctx, t0); 6326 gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 6327 tcg_temp_free(t0); 6328 if (Rc(ctx->opcode)) { 6329 TCGLabel *l1 = gen_new_label(); 6330 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 6331 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1); 6332 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02); 6333 gen_set_label(l1); 6334 } 6335 #endif /* defined(CONFIG_USER_ONLY) */ 6336 } 6337 6338 /* tlbwe */ 6339 static void gen_tlbwe_440(DisasContext *ctx) 6340 { 6341 #if defined(CONFIG_USER_ONLY) 6342 GEN_PRIV; 6343 #else 6344 CHK_SV; 6345 switch (rB(ctx->opcode)) { 6346 case 0: 6347 case 1: 6348 case 2: 6349 { 6350 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode)); 6351 gen_helper_440_tlbwe(cpu_env, t0, cpu_gpr[rA(ctx->opcode)], 6352 cpu_gpr[rS(ctx->opcode)]); 6353 tcg_temp_free_i32(t0); 6354 } 6355 break; 6356 default: 6357 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6358 break; 6359 } 6360 #endif /* defined(CONFIG_USER_ONLY) */ 6361 } 6362 6363 /* TLB management - PowerPC BookE 2.06 implementation */ 6364 6365 /* tlbre */ 6366 static void gen_tlbre_booke206(DisasContext *ctx) 6367 { 6368 #if defined(CONFIG_USER_ONLY) 6369 GEN_PRIV; 6370 #else 6371 CHK_SV; 6372 gen_helper_booke206_tlbre(cpu_env); 6373 #endif /* defined(CONFIG_USER_ONLY) */ 6374 } 6375 6376 /* tlbsx - tlbsx. */ 6377 static void gen_tlbsx_booke206(DisasContext *ctx) 6378 { 6379 #if defined(CONFIG_USER_ONLY) 6380 GEN_PRIV; 6381 #else 6382 TCGv t0; 6383 6384 CHK_SV; 6385 if (rA(ctx->opcode)) { 6386 t0 = tcg_temp_new(); 6387 tcg_gen_mov_tl(t0, cpu_gpr[rD(ctx->opcode)]); 6388 } else { 6389 t0 = tcg_const_tl(0); 6390 } 6391 6392 tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]); 6393 gen_helper_booke206_tlbsx(cpu_env, t0); 6394 tcg_temp_free(t0); 6395 #endif /* defined(CONFIG_USER_ONLY) */ 6396 } 6397 6398 /* tlbwe */ 6399 static void gen_tlbwe_booke206(DisasContext *ctx) 6400 { 6401 #if defined(CONFIG_USER_ONLY) 6402 GEN_PRIV; 6403 #else 6404 CHK_SV; 6405 gen_helper_booke206_tlbwe(cpu_env); 6406 #endif /* defined(CONFIG_USER_ONLY) */ 6407 } 6408 6409 static void gen_tlbivax_booke206(DisasContext *ctx) 6410 { 6411 #if defined(CONFIG_USER_ONLY) 6412 GEN_PRIV; 6413 #else 6414 TCGv t0; 6415 6416 CHK_SV; 6417 t0 = tcg_temp_new(); 6418 gen_addr_reg_index(ctx, t0); 6419 gen_helper_booke206_tlbivax(cpu_env, t0); 6420 tcg_temp_free(t0); 6421 #endif /* defined(CONFIG_USER_ONLY) */ 6422 } 6423 6424 static void gen_tlbilx_booke206(DisasContext *ctx) 6425 { 6426 #if defined(CONFIG_USER_ONLY) 6427 GEN_PRIV; 6428 #else 6429 TCGv t0; 6430 6431 CHK_SV; 6432 t0 = tcg_temp_new(); 6433 gen_addr_reg_index(ctx, t0); 6434 6435 switch((ctx->opcode >> 21) & 0x3) { 6436 case 0: 6437 gen_helper_booke206_tlbilx0(cpu_env, t0); 6438 break; 6439 case 1: 6440 gen_helper_booke206_tlbilx1(cpu_env, t0); 6441 break; 6442 case 3: 6443 gen_helper_booke206_tlbilx3(cpu_env, t0); 6444 break; 6445 default: 6446 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6447 break; 6448 } 6449 6450 tcg_temp_free(t0); 6451 #endif /* defined(CONFIG_USER_ONLY) */ 6452 } 6453 6454 6455 /* wrtee */ 6456 static void gen_wrtee(DisasContext *ctx) 6457 { 6458 #if defined(CONFIG_USER_ONLY) 6459 GEN_PRIV; 6460 #else 6461 TCGv t0; 6462 6463 CHK_SV; 6464 t0 = tcg_temp_new(); 6465 tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE)); 6466 
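    /*
     * Only MSR[EE] is taken from the source register: the code below
     * clears EE in the current MSR and then ORs in the bit isolated
     * above, leaving every other MSR field untouched.
     */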
tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE)); 6467 tcg_gen_or_tl(cpu_msr, cpu_msr, t0); 6468 tcg_temp_free(t0); 6469 /* Stop translation to have a chance to raise an exception 6470 * if we just set msr_ee to 1 6471 */ 6472 gen_stop_exception(ctx); 6473 #endif /* defined(CONFIG_USER_ONLY) */ 6474 } 6475 6476 /* wrteei */ 6477 static void gen_wrteei(DisasContext *ctx) 6478 { 6479 #if defined(CONFIG_USER_ONLY) 6480 GEN_PRIV; 6481 #else 6482 CHK_SV; 6483 if (ctx->opcode & 0x00008000) { 6484 tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE)); 6485 /* Stop translation to have a chance to raise an exception */ 6486 gen_stop_exception(ctx); 6487 } else { 6488 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE)); 6489 } 6490 #endif /* defined(CONFIG_USER_ONLY) */ 6491 } 6492 6493 /* PowerPC 440 specific instructions */ 6494 6495 /* dlmzb */ 6496 static void gen_dlmzb(DisasContext *ctx) 6497 { 6498 TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode)); 6499 gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_env, 6500 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); 6501 tcg_temp_free_i32(t0); 6502 } 6503 6504 /* mbar replaces eieio on 440 */ 6505 static void gen_mbar(DisasContext *ctx) 6506 { 6507 /* interpreted as no-op */ 6508 } 6509 6510 /* msync replaces sync on 440 */ 6511 static void gen_msync_4xx(DisasContext *ctx) 6512 { 6513 /* interpreted as no-op */ 6514 } 6515 6516 /* icbt */ 6517 static void gen_icbt_440(DisasContext *ctx) 6518 { 6519 /* interpreted as no-op */ 6520 /* XXX: specification say this is treated as a load by the MMU 6521 * but does not generate any exception 6522 */ 6523 } 6524 6525 /* Embedded.Processor Control */ 6526 6527 static void gen_msgclr(DisasContext *ctx) 6528 { 6529 #if defined(CONFIG_USER_ONLY) 6530 GEN_PRIV; 6531 #else 6532 CHK_HV; 6533 /* 64-bit server processors compliant with arch 2.x */ 6534 if (ctx->insns_flags & PPC_SEGMENT_64B) { 6535 gen_helper_book3s_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]); 6536 } else { 6537 gen_helper_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]); 6538 } 6539 #endif /* defined(CONFIG_USER_ONLY) */ 6540 } 6541 6542 static void gen_msgsnd(DisasContext *ctx) 6543 { 6544 #if defined(CONFIG_USER_ONLY) 6545 GEN_PRIV; 6546 #else 6547 CHK_HV; 6548 /* 64-bit server processors compliant with arch 2.x */ 6549 if (ctx->insns_flags & PPC_SEGMENT_64B) { 6550 gen_helper_book3s_msgsnd(cpu_gpr[rB(ctx->opcode)]); 6551 } else { 6552 gen_helper_msgsnd(cpu_gpr[rB(ctx->opcode)]); 6553 } 6554 #endif /* defined(CONFIG_USER_ONLY) */ 6555 } 6556 6557 static void gen_msgsync(DisasContext *ctx) 6558 { 6559 #if defined(CONFIG_USER_ONLY) 6560 GEN_PRIV; 6561 #else 6562 CHK_HV; 6563 #endif /* defined(CONFIG_USER_ONLY) */ 6564 /* interpreted as no-op */ 6565 } 6566 6567 #if defined(TARGET_PPC64) 6568 static void gen_maddld(DisasContext *ctx) 6569 { 6570 TCGv_i64 t1 = tcg_temp_new_i64(); 6571 6572 tcg_gen_mul_i64(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 6573 tcg_gen_add_i64(cpu_gpr[rD(ctx->opcode)], t1, cpu_gpr[rC(ctx->opcode)]); 6574 tcg_temp_free_i64(t1); 6575 } 6576 6577 /* maddhd maddhdu */ 6578 static void gen_maddhd_maddhdu(DisasContext *ctx) 6579 { 6580 TCGv_i64 lo = tcg_temp_new_i64(); 6581 TCGv_i64 hi = tcg_temp_new_i64(); 6582 TCGv_i64 t1 = tcg_temp_new_i64(); 6583 6584 if (Rc(ctx->opcode)) { 6585 tcg_gen_mulu2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)], 6586 cpu_gpr[rB(ctx->opcode)]); 6587 tcg_gen_movi_i64(t1, 0); 6588 } else { 6589 tcg_gen_muls2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)], 6590 cpu_gpr[rB(ctx->opcode)]); 6591 tcg_gen_sari_i64(t1, 
cpu_gpr[rC(ctx->opcode)], 63); 6592 } 6593 tcg_gen_add2_i64(t1, cpu_gpr[rD(ctx->opcode)], lo, hi, 6594 cpu_gpr[rC(ctx->opcode)], t1); 6595 tcg_temp_free_i64(lo); 6596 tcg_temp_free_i64(hi); 6597 tcg_temp_free_i64(t1); 6598 } 6599 #endif /* defined(TARGET_PPC64) */ 6600 6601 static void gen_tbegin(DisasContext *ctx) 6602 { 6603 if (unlikely(!ctx->tm_enabled)) { 6604 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); 6605 return; 6606 } 6607 gen_helper_tbegin(cpu_env); 6608 } 6609 6610 #define GEN_TM_NOOP(name) \ 6611 static inline void gen_##name(DisasContext *ctx) \ 6612 { \ 6613 if (unlikely(!ctx->tm_enabled)) { \ 6614 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \ 6615 return; \ 6616 } \ 6617 /* Because tbegin always fails in QEMU, these user \ 6618 * space instructions all have a simple implementation: \ 6619 * \ 6620 * CR[0] = 0b0 || MSR[TS] || 0b0 \ 6621 * = 0b0 || 0b00 || 0b0 \ 6622 */ \ 6623 tcg_gen_movi_i32(cpu_crf[0], 0); \ 6624 } 6625 6626 GEN_TM_NOOP(tend); 6627 GEN_TM_NOOP(tabort); 6628 GEN_TM_NOOP(tabortwc); 6629 GEN_TM_NOOP(tabortwci); 6630 GEN_TM_NOOP(tabortdc); 6631 GEN_TM_NOOP(tabortdci); 6632 GEN_TM_NOOP(tsr); 6633 static inline void gen_cp_abort(DisasContext *ctx) 6634 { 6635 // Do Nothing 6636 } 6637 6638 #define GEN_CP_PASTE_NOOP(name) \ 6639 static inline void gen_##name(DisasContext *ctx) \ 6640 { \ 6641 /* Generate invalid exception until \ 6642 * we have an implementation of the copy \ 6643 * paste facility \ 6644 */ \ 6645 gen_invalid(ctx); \ 6646 } 6647 6648 GEN_CP_PASTE_NOOP(copy) 6649 GEN_CP_PASTE_NOOP(paste) 6650 6651 static void gen_tcheck(DisasContext *ctx) 6652 { 6653 if (unlikely(!ctx->tm_enabled)) { 6654 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); 6655 return; 6656 } 6657 /* Because tbegin always fails, the tcheck implementation 6658 * is simple: 6659 * 6660 * CR[CRF] = TDOOMED || MSR[TS] || 0b0 6661 * = 0b1 || 0b00 || 0b0 6662 */ 6663 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0x8); 6664 } 6665 6666 #if defined(CONFIG_USER_ONLY) 6667 #define GEN_TM_PRIV_NOOP(name) \ 6668 static inline void gen_##name(DisasContext *ctx) \ 6669 { \ 6670 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); \ 6671 } 6672 6673 #else 6674 6675 #define GEN_TM_PRIV_NOOP(name) \ 6676 static inline void gen_##name(DisasContext *ctx) \ 6677 { \ 6678 CHK_SV; \ 6679 if (unlikely(!ctx->tm_enabled)) { \ 6680 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \ 6681 return; \ 6682 } \ 6683 /* Because tbegin always fails, the implementation is \ 6684 * simple: \ 6685 * \ 6686 * CR[0] = 0b0 || MSR[TS] || 0b0 \ 6687 * = 0b0 || 0b00 | 0b0 \ 6688 */ \ 6689 tcg_gen_movi_i32(cpu_crf[0], 0); \ 6690 } 6691 6692 #endif 6693 6694 GEN_TM_PRIV_NOOP(treclaim); 6695 GEN_TM_PRIV_NOOP(trechkpt); 6696 6697 #include "translate/fp-impl.inc.c" 6698 6699 #include "translate/vmx-impl.inc.c" 6700 6701 #include "translate/vsx-impl.inc.c" 6702 6703 #include "translate/dfp-impl.inc.c" 6704 6705 #include "translate/spe-impl.inc.c" 6706 6707 /* Handles lfdp, lxsd, lxssp */ 6708 static void gen_dform39(DisasContext *ctx) 6709 { 6710 switch (ctx->opcode & 0x3) { 6711 case 0: /* lfdp */ 6712 if (ctx->insns_flags2 & PPC2_ISA205) { 6713 return gen_lfdp(ctx); 6714 } 6715 break; 6716 case 2: /* lxsd */ 6717 if (ctx->insns_flags2 & PPC2_ISA300) { 6718 return gen_lxsd(ctx); 6719 } 6720 break; 6721 case 3: /* lxssp */ 6722 if (ctx->insns_flags2 & PPC2_ISA300) { 6723 return gen_lxssp(ctx); 6724 } 6725 break; 6726 } 6727 return gen_invalid(ctx); 6728 } 6729 6730 /* handles stfdp, lxv, stxsd, stxssp lxvx */ 
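/*
 * Primary opcode 0x3D is overloaded: when the two low-order bits of the
 * instruction are 0b01 it is decoded as a DQ-form VSX access, with bits
 * 0..2 selecting the operation (1 -> lxv, 5 -> stxv); otherwise it is a
 * DS-form access selected by bits 0..1 (0 -> stfdp, 2 -> stxsd,
 * 3 -> stxssp).  Anything else falls through to gen_invalid().
 */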
6731 static void gen_dform3D(DisasContext *ctx) 6732 { 6733 if ((ctx->opcode & 3) == 1) { /* DQ-FORM */ 6734 switch (ctx->opcode & 0x7) { 6735 case 1: /* lxv */ 6736 if (ctx->insns_flags2 & PPC2_ISA300) { 6737 return gen_lxv(ctx); 6738 } 6739 break; 6740 case 5: /* stxv */ 6741 if (ctx->insns_flags2 & PPC2_ISA300) { 6742 return gen_stxv(ctx); 6743 } 6744 break; 6745 } 6746 } else { /* DS-FORM */ 6747 switch (ctx->opcode & 0x3) { 6748 case 0: /* stfdp */ 6749 if (ctx->insns_flags2 & PPC2_ISA205) { 6750 return gen_stfdp(ctx); 6751 } 6752 break; 6753 case 2: /* stxsd */ 6754 if (ctx->insns_flags2 & PPC2_ISA300) { 6755 return gen_stxsd(ctx); 6756 } 6757 break; 6758 case 3: /* stxssp */ 6759 if (ctx->insns_flags2 & PPC2_ISA300) { 6760 return gen_stxssp(ctx); 6761 } 6762 break; 6763 } 6764 } 6765 return gen_invalid(ctx); 6766 } 6767 6768 static opcode_t opcodes[] = { 6769 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE), 6770 GEN_HANDLER(cmp, 0x1F, 0x00, 0x00, 0x00400000, PPC_INTEGER), 6771 GEN_HANDLER(cmpi, 0x0B, 0xFF, 0xFF, 0x00400000, PPC_INTEGER), 6772 GEN_HANDLER(cmpl, 0x1F, 0x00, 0x01, 0x00400001, PPC_INTEGER), 6773 GEN_HANDLER(cmpli, 0x0A, 0xFF, 0xFF, 0x00400000, PPC_INTEGER), 6774 #if defined(TARGET_PPC64) 6775 GEN_HANDLER_E(cmpeqb, 0x1F, 0x00, 0x07, 0x00600000, PPC_NONE, PPC2_ISA300), 6776 #endif 6777 GEN_HANDLER_E(cmpb, 0x1F, 0x1C, 0x0F, 0x00000001, PPC_NONE, PPC2_ISA205), 6778 GEN_HANDLER_E(cmprb, 0x1F, 0x00, 0x06, 0x00400001, PPC_NONE, PPC2_ISA300), 6779 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL), 6780 GEN_HANDLER(addi, 0x0E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6781 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6782 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6783 GEN_HANDLER(addis, 0x0F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6784 GEN_HANDLER_E(addpcis, 0x13, 0x2, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300), 6785 GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER), 6786 GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER), 6787 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER), 6788 GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER), 6789 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6790 #if defined(TARGET_PPC64) 6791 GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B), 6792 #endif 6793 GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER), 6794 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER), 6795 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6796 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6797 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6798 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER), 6799 GEN_HANDLER_E(cnttzw, 0x1F, 0x1A, 0x10, 0x00000000, PPC_NONE, PPC2_ISA300), 6800 GEN_HANDLER_E(copy, 0x1F, 0x06, 0x18, 0x03C00001, PPC_NONE, PPC2_ISA300), 6801 GEN_HANDLER_E(cp_abort, 0x1F, 0x06, 0x1A, 0x03FFF801, PPC_NONE, PPC2_ISA300), 6802 GEN_HANDLER_E(paste, 0x1F, 0x06, 0x1C, 0x03C00000, PPC_NONE, PPC2_ISA300), 6803 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER), 6804 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER), 6805 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6806 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6807 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6808 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6809 GEN_HANDLER(popcntb, 0x1F, 
0x1A, 0x03, 0x0000F801, PPC_POPCNTB), 6810 GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD), 6811 GEN_HANDLER_E(prtyw, 0x1F, 0x1A, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA205), 6812 #if defined(TARGET_PPC64) 6813 GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD), 6814 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B), 6815 GEN_HANDLER_E(cnttzd, 0x1F, 0x1A, 0x11, 0x00000000, PPC_NONE, PPC2_ISA300), 6816 GEN_HANDLER_E(darn, 0x1F, 0x13, 0x17, 0x001CF801, PPC_NONE, PPC2_ISA300), 6817 GEN_HANDLER_E(prtyd, 0x1F, 0x1A, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA205), 6818 GEN_HANDLER_E(bpermd, 0x1F, 0x1C, 0x07, 0x00000001, PPC_NONE, PPC2_PERM_ISA206), 6819 #endif 6820 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6821 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6822 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6823 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER), 6824 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER), 6825 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER), 6826 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER), 6827 #if defined(TARGET_PPC64) 6828 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B), 6829 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B), 6830 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B), 6831 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B), 6832 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B), 6833 GEN_HANDLER2_E(extswsli0, "extswsli", 0x1F, 0x1A, 0x1B, 0x00000000, 6834 PPC_NONE, PPC2_ISA300), 6835 GEN_HANDLER2_E(extswsli1, "extswsli", 0x1F, 0x1B, 0x1B, 0x00000000, 6836 PPC_NONE, PPC2_ISA300), 6837 #endif 6838 #if defined(TARGET_PPC64) 6839 GEN_HANDLER(ld, 0x3A, 0xFF, 0xFF, 0x00000000, PPC_64B), 6840 GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX), 6841 GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B), 6842 #endif 6843 /* handles lfdp, lxsd, lxssp */ 6844 GEN_HANDLER_E(dform39, 0x39, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205), 6845 /* handles stfdp, lxv, stxsd, stxssp, stxv */ 6846 GEN_HANDLER_E(dform3D, 0x3D, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205), 6847 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6848 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6849 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING), 6850 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING), 6851 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING), 6852 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING), 6853 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x01FFF801, PPC_MEM_EIEIO), 6854 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM), 6855 GEN_HANDLER_E(lbarx, 0x1F, 0x14, 0x01, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6856 GEN_HANDLER_E(lharx, 0x1F, 0x14, 0x03, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6857 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES), 6858 GEN_HANDLER_E(lwat, 0x1F, 0x06, 0x12, 0x00000001, PPC_NONE, PPC2_ISA300), 6859 GEN_HANDLER_E(stwat, 0x1F, 0x06, 0x16, 0x00000001, PPC_NONE, PPC2_ISA300), 6860 GEN_HANDLER_E(stbcx_, 0x1F, 0x16, 0x15, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6861 GEN_HANDLER_E(sthcx_, 0x1F, 0x16, 0x16, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6862 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES), 6863 #if defined(TARGET_PPC64) 6864 GEN_HANDLER_E(ldat, 0x1F, 0x06, 0x13, 0x00000001, PPC_NONE, PPC2_ISA300), 6865 GEN_HANDLER_E(stdat, 0x1F, 0x06, 0x17, 0x00000001, PPC_NONE, 
PPC2_ISA300), 6866 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B), 6867 GEN_HANDLER_E(lqarx, 0x1F, 0x14, 0x08, 0, PPC_NONE, PPC2_LSQ_ISA207), 6868 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B), 6869 GEN_HANDLER_E(stqcx_, 0x1F, 0x16, 0x05, 0, PPC_NONE, PPC2_LSQ_ISA207), 6870 #endif 6871 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC), 6872 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT), 6873 GEN_HANDLER_E(wait, 0x1F, 0x1E, 0x00, 0x039FF801, PPC_NONE, PPC2_ISA300), 6874 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 6875 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 6876 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW), 6877 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW), 6878 GEN_HANDLER_E(bctar, 0x13, 0x10, 0x11, 0x0000E000, PPC_NONE, PPC2_BCTAR_ISA207), 6879 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER), 6880 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW), 6881 #if defined(TARGET_PPC64) 6882 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B), 6883 GEN_HANDLER_E(stop, 0x13, 0x12, 0x0b, 0x03FFF801, PPC_NONE, PPC2_ISA300), 6884 GEN_HANDLER_E(doze, 0x13, 0x12, 0x0c, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6885 GEN_HANDLER_E(nap, 0x13, 0x12, 0x0d, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6886 GEN_HANDLER_E(sleep, 0x13, 0x12, 0x0e, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6887 GEN_HANDLER_E(rvwinkle, 0x13, 0x12, 0x0f, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6888 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H), 6889 #endif 6890 GEN_HANDLER(sc, 0x11, 0xFF, 0xFF, 0x03FFF01D, PPC_FLOW), 6891 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW), 6892 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 6893 #if defined(TARGET_PPC64) 6894 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B), 6895 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B), 6896 #endif 6897 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC), 6898 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC), 6899 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC), 6900 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC), 6901 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB), 6902 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC), 6903 #if defined(TARGET_PPC64) 6904 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B), 6905 GEN_HANDLER_E(setb, 0x1F, 0x00, 0x04, 0x0003F801, PPC_NONE, PPC2_ISA300), 6906 GEN_HANDLER_E(mcrxrx, 0x1F, 0x00, 0x12, 0x007FF801, PPC_NONE, PPC2_ISA300), 6907 #endif 6908 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001EF801, PPC_MISC), 6909 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000000, PPC_MISC), 6910 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE), 6911 GEN_HANDLER_E(dcbfep, 0x1F, 0x1F, 0x03, 0x03C00001, PPC_NONE, PPC2_BOOKE206), 6912 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE), 6913 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE), 6914 GEN_HANDLER_E(dcbstep, 0x1F, 0x1F, 0x01, 0x03E00001, PPC_NONE, PPC2_BOOKE206), 6915 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x00000001, PPC_CACHE), 6916 GEN_HANDLER_E(dcbtep, 0x1F, 0x1F, 0x09, 0x00000001, PPC_NONE, PPC2_BOOKE206), 6917 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x00000001, PPC_CACHE), 6918 GEN_HANDLER_E(dcbtstep, 0x1F, 0x1F, 0x07, 0x00000001, PPC_NONE, PPC2_BOOKE206), 6919 GEN_HANDLER_E(dcbtls, 0x1F, 0x06, 0x05, 0x02000001, PPC_BOOKE, PPC2_BOOKE206), 6920 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ), 6921 
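/* dcbzep is the external-PID form of dcbz; like the other *ep cache ops
 * in this table it is only registered when PPC2_BOOKE206 is set. */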
GEN_HANDLER_E(dcbzep, 0x1F, 0x1F, 0x1F, 0x03C00001, PPC_NONE, PPC2_BOOKE206), 6922 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC), 6923 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x01800001, PPC_ALTIVEC), 6924 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC), 6925 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI), 6926 GEN_HANDLER_E(icbiep, 0x1F, 0x1F, 0x1E, 0x03E00001, PPC_NONE, PPC2_BOOKE206), 6927 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA), 6928 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT), 6929 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT), 6930 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT), 6931 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT), 6932 #if defined(TARGET_PPC64) 6933 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B), 6934 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001, 6935 PPC_SEGMENT_64B), 6936 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B), 6937 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001, 6938 PPC_SEGMENT_64B), 6939 GEN_HANDLER2(slbmte, "slbmte", 0x1F, 0x12, 0x0C, 0x001F0001, PPC_SEGMENT_64B), 6940 GEN_HANDLER2(slbmfee, "slbmfee", 0x1F, 0x13, 0x1C, 0x001F0001, PPC_SEGMENT_64B), 6941 GEN_HANDLER2(slbmfev, "slbmfev", 0x1F, 0x13, 0x1A, 0x001F0001, PPC_SEGMENT_64B), 6942 GEN_HANDLER2(slbfee_, "slbfee.", 0x1F, 0x13, 0x1E, 0x001F0000, PPC_SEGMENT_64B), 6943 #endif 6944 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA), 6945 /* XXX Those instructions will need to be handled differently for 6946 * different ISA versions */ 6947 GEN_HANDLER(tlbiel, 0x1F, 0x12, 0x08, 0x001F0001, PPC_MEM_TLBIE), 6948 GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x001F0001, PPC_MEM_TLBIE), 6949 GEN_HANDLER_E(tlbiel, 0x1F, 0x12, 0x08, 0x00100001, PPC_NONE, PPC2_ISA300), 6950 GEN_HANDLER_E(tlbie, 0x1F, 0x12, 0x09, 0x00100001, PPC_NONE, PPC2_ISA300), 6951 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC), 6952 #if defined(TARGET_PPC64) 6953 GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x031FFC01, PPC_SLBI), 6954 GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI), 6955 GEN_HANDLER_E(slbieg, 0x1F, 0x12, 0x0E, 0x001F0001, PPC_NONE, PPC2_ISA300), 6956 GEN_HANDLER_E(slbsync, 0x1F, 0x12, 0x0A, 0x03FFF801, PPC_NONE, PPC2_ISA300), 6957 #endif 6958 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN), 6959 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN), 6960 GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR), 6961 GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR), 6962 GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR), 6963 GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR), 6964 GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR), 6965 GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR), 6966 GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR), 6967 GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR), 6968 GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR), 6969 GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR), 6970 GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR), 6971 GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR), 6972 GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR), 6973 GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR), 6974 GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR), 
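/* nabs through srq complete the legacy POWER-architecture compatibility
 * set (PPC_POWER_BR); these instructions were dropped from the PowerPC
 * ISA and are only kept for old POWER/601-era code. */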
6975 GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR), 6976 GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR), 6977 GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR), 6978 GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR), 6979 GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR), 6980 GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR), 6981 GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR), 6982 GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR), 6983 GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR), 6984 GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR), 6985 GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR), 6986 GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR), 6987 GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR), 6988 GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR), 6989 GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR), 6990 GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR), 6991 GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR), 6992 GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR), 6993 GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR), 6994 GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC), 6995 GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC), 6996 GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC), 6997 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB), 6998 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB), 6999 GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB), 7000 GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB), 7001 GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER), 7002 GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER), 7003 GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER), 7004 GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER), 7005 GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER), 7006 GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER), 7007 GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 7008 GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 7009 GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2), 7010 GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2), 7011 GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 7012 GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 7013 GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2), 7014 GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2), 7015 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI), 7016 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA), 7017 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR), 7018 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR), 7019 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX), 7020 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX), 7021 GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX), 7022 GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX), 7023 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON), 7024 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON), 7025 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT), 7026 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 
0x00000001, PPC_4xx_COMMON), 7027 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON), 7028 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP), 7029 GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206), 7030 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI), 7031 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI), 7032 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB), 7033 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB), 7034 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB), 7035 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE), 7036 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE), 7037 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE), 7038 GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, 7039 PPC_NONE, PPC2_BOOKE206), 7040 GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, 7041 PPC_NONE, PPC2_BOOKE206), 7042 GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, 7043 PPC_NONE, PPC2_BOOKE206), 7044 GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001, 7045 PPC_NONE, PPC2_BOOKE206), 7046 GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001, 7047 PPC_NONE, PPC2_BOOKE206), 7048 GEN_HANDLER2_E(msgsnd, "msgsnd", 0x1F, 0x0E, 0x06, 0x03ff0001, 7049 PPC_NONE, PPC2_PRCNTL), 7050 GEN_HANDLER2_E(msgclr, "msgclr", 0x1F, 0x0E, 0x07, 0x03ff0001, 7051 PPC_NONE, PPC2_PRCNTL), 7052 GEN_HANDLER2_E(msgsync, "msgsync", 0x1F, 0x16, 0x1B, 0x00000000, 7053 PPC_NONE, PPC2_PRCNTL), 7054 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE), 7055 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE), 7056 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC), 7057 GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801, 7058 PPC_BOOKE, PPC2_BOOKE206), 7059 GEN_HANDLER(msync_4xx, 0x1F, 0x16, 0x12, 0x03FFF801, PPC_BOOKE), 7060 GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001, 7061 PPC_BOOKE, PPC2_BOOKE206), 7062 GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, 7063 PPC_440_SPEC), 7064 GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC), 7065 GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC), 7066 GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC), 7067 GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC), 7068 GEN_HANDLER(vmladduhm, 0x04, 0x11, 0xFF, 0x00000000, PPC_ALTIVEC), 7069 #if defined(TARGET_PPC64) 7070 GEN_HANDLER_E(maddhd_maddhdu, 0x04, 0x18, 0xFF, 0x00000000, PPC_NONE, 7071 PPC2_ISA300), 7072 GEN_HANDLER_E(maddld, 0x04, 0x19, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300), 7073 #endif 7074 7075 #undef GEN_INT_ARITH_ADD 7076 #undef GEN_INT_ARITH_ADD_CONST 7077 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \ 7078 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER), 7079 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \ 7080 add_ca, compute_ca, compute_ov) \ 7081 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER), 7082 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0) 7083 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1) 7084 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0) 7085 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1) 7086 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0) 7087 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1) 7088 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0) 7089 
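/* addme/addmeo ("add to minus one extended") and addze/addzeo ("add to
 * zero extended") add XER[CA] plus a fixed constant (-1 or 0) to rA; the
 * 'o' forms additionally update XER[OV]/XER[SO]. */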
GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1) 7090 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0) 7091 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1) 7092 7093 #undef GEN_INT_ARITH_DIVW 7094 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \ 7095 GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER) 7096 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0), 7097 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1), 7098 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0), 7099 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1), 7100 GEN_HANDLER_E(divwe, 0x1F, 0x0B, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206), 7101 GEN_HANDLER_E(divweo, 0x1F, 0x0B, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206), 7102 GEN_HANDLER_E(divweu, 0x1F, 0x0B, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206), 7103 GEN_HANDLER_E(divweuo, 0x1F, 0x0B, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206), 7104 GEN_HANDLER_E(modsw, 0x1F, 0x0B, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300), 7105 GEN_HANDLER_E(moduw, 0x1F, 0x0B, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300), 7106 7107 #if defined(TARGET_PPC64) 7108 #undef GEN_INT_ARITH_DIVD 7109 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \ 7110 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B) 7111 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0), 7112 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1), 7113 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0), 7114 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1), 7115 7116 GEN_HANDLER_E(divdeu, 0x1F, 0x09, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206), 7117 GEN_HANDLER_E(divdeuo, 0x1F, 0x09, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206), 7118 GEN_HANDLER_E(divde, 0x1F, 0x09, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206), 7119 GEN_HANDLER_E(divdeo, 0x1F, 0x09, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206), 7120 GEN_HANDLER_E(modsd, 0x1F, 0x09, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300), 7121 GEN_HANDLER_E(modud, 0x1F, 0x09, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300), 7122 7123 #undef GEN_INT_ARITH_MUL_HELPER 7124 #define GEN_INT_ARITH_MUL_HELPER(name, opc3) \ 7125 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B) 7126 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00), 7127 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02), 7128 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17), 7129 #endif 7130 7131 #undef GEN_INT_ARITH_SUBF 7132 #undef GEN_INT_ARITH_SUBF_CONST 7133 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \ 7134 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER), 7135 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \ 7136 add_ca, compute_ca, compute_ov) \ 7137 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER), 7138 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0) 7139 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1) 7140 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0) 7141 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1) 7142 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0) 7143 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1) 7144 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0) 7145 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1) 7146 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0) 7147 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1) 7148 7149 #undef GEN_LOGICAL1 7150 #undef GEN_LOGICAL2 7151 #define GEN_LOGICAL2(name, tcg_op, opc, type) \ 7152 GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type) 7153 #define GEN_LOGICAL1(name, tcg_op, opc, type) \ 7154 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type) 7155 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER), 7156 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER), 7157 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER), 7158 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER), 7159 
GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER), 7160 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER), 7161 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER), 7162 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER), 7163 #if defined(TARGET_PPC64) 7164 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B), 7165 #endif 7166 7167 #if defined(TARGET_PPC64) 7168 #undef GEN_PPC64_R2 7169 #undef GEN_PPC64_R4 7170 #define GEN_PPC64_R2(name, opc1, opc2) \ 7171 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\ 7172 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \ 7173 PPC_64B) 7174 #define GEN_PPC64_R4(name, opc1, opc2) \ 7175 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\ 7176 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \ 7177 PPC_64B), \ 7178 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \ 7179 PPC_64B), \ 7180 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \ 7181 PPC_64B) 7182 GEN_PPC64_R4(rldicl, 0x1E, 0x00), 7183 GEN_PPC64_R4(rldicr, 0x1E, 0x02), 7184 GEN_PPC64_R4(rldic, 0x1E, 0x04), 7185 GEN_PPC64_R2(rldcl, 0x1E, 0x08), 7186 GEN_PPC64_R2(rldcr, 0x1E, 0x09), 7187 GEN_PPC64_R4(rldimi, 0x1E, 0x06), 7188 #endif 7189 7190 #undef GEN_LD 7191 #undef GEN_LDU 7192 #undef GEN_LDUX 7193 #undef GEN_LDX_E 7194 #undef GEN_LDS 7195 #define GEN_LD(name, ldop, opc, type) \ 7196 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type), 7197 #define GEN_LDU(name, ldop, opc, type) \ 7198 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type), 7199 #define GEN_LDUX(name, ldop, opc2, opc3, type) \ 7200 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type), 7201 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \ 7202 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2), 7203 #define GEN_LDS(name, ldop, op, type) \ 7204 GEN_LD(name, ldop, op | 0x20, type) \ 7205 GEN_LDU(name, ldop, op | 0x21, type) \ 7206 GEN_LDUX(name, ldop, 0x17, op | 0x01, type) \ 7207 GEN_LDX(name, ldop, 0x17, op | 0x00, type) 7208 7209 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER) 7210 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER) 7211 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER) 7212 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER) 7213 #if defined(TARGET_PPC64) 7214 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B) 7215 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B) 7216 GEN_LDUX(ld, ld64_i64, 0x15, 0x01, PPC_64B) 7217 GEN_LDX(ld, ld64_i64, 0x15, 0x00, PPC_64B) 7218 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE) 7219 7220 /* HV/P7 and later only */ 7221 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST) 7222 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x18, PPC_CILDST) 7223 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST) 7224 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST) 7225 #endif 7226 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER) 7227 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER) 7228 7229 /* External PID based load */ 7230 #undef GEN_LDEPX 7231 #define GEN_LDEPX(name, ldop, opc2, opc3) \ 7232 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3, \ 7233 0x00000001, PPC_NONE, PPC2_BOOKE206), 7234 7235 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02) 7236 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08) 7237 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00) 7238 #if defined(TARGET_PPC64) 7239 GEN_LDEPX(ld, DEF_MEMOP(MO_Q), 0x1D, 0x00) 7240 #endif 7241 7242 #undef GEN_ST 7243 #undef GEN_STU 7244 #undef GEN_STUX 7245 #undef GEN_STX_E 7246 #undef GEN_STS 7247 
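/* The store side mirrors the load macros above: GEN_STS registers the
 * D-form, update, indexed and indexed-with-update variants of a basic
 * store in one go, and GEN_STX_E takes an extra insns_flags2 argument so
 * ISA-gated forms go through GEN_HANDLER_E. */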
#define GEN_ST(name, stop, opc, type) \ 7248 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type), 7249 #define GEN_STU(name, stop, opc, type) \ 7250 GEN_HANDLER(stop##u, opc, 0xFF, 0xFF, 0x00000000, type), 7251 #define GEN_STUX(name, stop, opc2, opc3, type) \ 7252 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type), 7253 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \ 7254 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000000, type, type2), 7255 #define GEN_STS(name, stop, op, type) \ 7256 GEN_ST(name, stop, op | 0x20, type) \ 7257 GEN_STU(name, stop, op | 0x21, type) \ 7258 GEN_STUX(name, stop, 0x17, op | 0x01, type) \ 7259 GEN_STX(name, stop, 0x17, op | 0x00, type) 7260 7261 GEN_STS(stb, st8, 0x06, PPC_INTEGER) 7262 GEN_STS(sth, st16, 0x0C, PPC_INTEGER) 7263 GEN_STS(stw, st32, 0x04, PPC_INTEGER) 7264 #if defined(TARGET_PPC64) 7265 GEN_STUX(std, st64_i64, 0x15, 0x05, PPC_64B) 7266 GEN_STX(std, st64_i64, 0x15, 0x04, PPC_64B) 7267 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE) 7268 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST) 7269 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST) 7270 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST) 7271 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST) 7272 #endif 7273 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER) 7274 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER) 7275 7276 #undef GEN_STEPX 7277 #define GEN_STEPX(name, ldop, opc2, opc3) \ 7278 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3, \ 7279 0x00000001, PPC_NONE, PPC2_BOOKE206), 7280 7281 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06) 7282 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C) 7283 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04) 7284 #if defined(TARGET_PPC64) 7285 GEN_STEPX(std, DEF_MEMOP(MO_Q), 0x1D, 0x04) 7286 #endif 7287 7288 #undef GEN_CRLOGIC 7289 #define GEN_CRLOGIC(name, tcg_op, opc) \ 7290 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER) 7291 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08), 7292 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04), 7293 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09), 7294 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07), 7295 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01), 7296 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E), 7297 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D), 7298 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06), 7299 7300 #undef GEN_MAC_HANDLER 7301 #define GEN_MAC_HANDLER(name, opc2, opc3) \ 7302 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC) 7303 GEN_MAC_HANDLER(macchw, 0x0C, 0x05), 7304 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15), 7305 GEN_MAC_HANDLER(macchws, 0x0C, 0x07), 7306 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17), 7307 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06), 7308 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16), 7309 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04), 7310 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14), 7311 GEN_MAC_HANDLER(machhw, 0x0C, 0x01), 7312 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11), 7313 GEN_MAC_HANDLER(machhws, 0x0C, 0x03), 7314 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13), 7315 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02), 7316 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12), 7317 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00), 7318 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10), 7319 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D), 7320 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D), 7321 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F), 7322 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F), 7323 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C), 7324 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C), 7325 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E), 7326 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E), 7327 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05), 
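/* Each 405 MAC entry expands to a single table entry under primary
 * opcode 0x04, e.g. GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15) becomes
 *   GEN_HANDLER(nmacchwo, 0x04, 0x0E, 0x15, 0x00000000, PPC_405_MAC),
 * opc2 distinguishes multiply-accumulate (0x0C), negative
 * multiply-accumulate (0x0E) and multiply-only (0x08) forms, while opc3
 * carries the halfword-selection, sign, saturate and overflow-enable
 * bits decoded by gen_405_mulladd_insn(). */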
7328 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15), 7329 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07), 7330 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17), 7331 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01), 7332 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11), 7333 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03), 7334 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13), 7335 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D), 7336 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D), 7337 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F), 7338 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F), 7339 GEN_MAC_HANDLER(mulchw, 0x08, 0x05), 7340 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04), 7341 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01), 7342 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00), 7343 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D), 7344 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C), 7345 7346 GEN_HANDLER2_E(tbegin, "tbegin", 0x1F, 0x0E, 0x14, 0x01DFF800, \ 7347 PPC_NONE, PPC2_TM), 7348 GEN_HANDLER2_E(tend, "tend", 0x1F, 0x0E, 0x15, 0x01FFF800, \ 7349 PPC_NONE, PPC2_TM), 7350 GEN_HANDLER2_E(tabort, "tabort", 0x1F, 0x0E, 0x1C, 0x03E0F800, \ 7351 PPC_NONE, PPC2_TM), 7352 GEN_HANDLER2_E(tabortwc, "tabortwc", 0x1F, 0x0E, 0x18, 0x00000000, \ 7353 PPC_NONE, PPC2_TM), 7354 GEN_HANDLER2_E(tabortwci, "tabortwci", 0x1F, 0x0E, 0x1A, 0x00000000, \ 7355 PPC_NONE, PPC2_TM), 7356 GEN_HANDLER2_E(tabortdc, "tabortdc", 0x1F, 0x0E, 0x19, 0x00000000, \ 7357 PPC_NONE, PPC2_TM), 7358 GEN_HANDLER2_E(tabortdci, "tabortdci", 0x1F, 0x0E, 0x1B, 0x00000000, \ 7359 PPC_NONE, PPC2_TM), 7360 GEN_HANDLER2_E(tsr, "tsr", 0x1F, 0x0E, 0x17, 0x03DFF800, \ 7361 PPC_NONE, PPC2_TM), 7362 GEN_HANDLER2_E(tcheck, "tcheck", 0x1F, 0x0E, 0x16, 0x007FF800, \ 7363 PPC_NONE, PPC2_TM), 7364 GEN_HANDLER2_E(treclaim, "treclaim", 0x1F, 0x0E, 0x1D, 0x03E0F800, \ 7365 PPC_NONE, PPC2_TM), 7366 GEN_HANDLER2_E(trechkpt, "trechkpt", 0x1F, 0x0E, 0x1F, 0x03FFF800, \ 7367 PPC_NONE, PPC2_TM), 7368 7369 #include "translate/fp-ops.inc.c" 7370 7371 #include "translate/vmx-ops.inc.c" 7372 7373 #include "translate/vsx-ops.inc.c" 7374 7375 #include "translate/dfp-ops.inc.c" 7376 7377 #include "translate/spe-ops.inc.c" 7378 }; 7379 7380 #include "helper_regs.h" 7381 #include "translate_init.inc.c" 7382 7383 /*****************************************************************************/ 7384 /* Misc PowerPC helpers */ 7385 void ppc_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf, 7386 int flags) 7387 { 7388 #define RGPL 4 7389 #define RFPL 4 7390 7391 PowerPCCPU *cpu = POWERPC_CPU(cs); 7392 CPUPPCState *env = &cpu->env; 7393 int i; 7394 7395 cpu_fprintf(f, "NIP " TARGET_FMT_lx " LR " TARGET_FMT_lx " CTR " 7396 TARGET_FMT_lx " XER " TARGET_FMT_lx " CPU#%d\n", 7397 env->nip, env->lr, env->ctr, cpu_read_xer(env), 7398 cs->cpu_index); 7399 cpu_fprintf(f, "MSR " TARGET_FMT_lx " HID0 " TARGET_FMT_lx " HF " 7400 TARGET_FMT_lx " iidx %d didx %d\n", 7401 env->msr, env->spr[SPR_HID0], 7402 env->hflags, env->immu_idx, env->dmmu_idx); 7403 #if !defined(NO_TIMER_DUMP) 7404 cpu_fprintf(f, "TB %08" PRIu32 " %08" PRIu64 7405 #if !defined(CONFIG_USER_ONLY) 7406 " DECR %08" PRIu32 7407 #endif 7408 "\n", 7409 cpu_ppc_load_tbu(env), cpu_ppc_load_tbl(env) 7410 #if !defined(CONFIG_USER_ONLY) 7411 , cpu_ppc_load_decr(env) 7412 #endif 7413 ); 7414 #endif 7415 for (i = 0; i < 32; i++) { 7416 if ((i & (RGPL - 1)) == 0) 7417 cpu_fprintf(f, "GPR%02d", i); 7418 cpu_fprintf(f, " %016" PRIx64, ppc_dump_gpr(env, i)); 7419 if ((i & (RGPL - 1)) == (RGPL - 1)) 7420 cpu_fprintf(f, "\n"); 7421 } 7422 cpu_fprintf(f, "CR "); 7423 for (i = 0; i < 8; i++) 7424 cpu_fprintf(f, "%01x", env->crf[i]); 7425 cpu_fprintf(f, " ["); 7426 for (i 
= 0; i < 8; i++) { 7427 char a = '-'; 7428 if (env->crf[i] & 0x08) 7429 a = 'L'; 7430 else if (env->crf[i] & 0x04) 7431 a = 'G'; 7432 else if (env->crf[i] & 0x02) 7433 a = 'E'; 7434 cpu_fprintf(f, " %c%c", a, env->crf[i] & 0x01 ? 'O' : ' '); 7435 } 7436 cpu_fprintf(f, " ] RES " TARGET_FMT_lx "\n", 7437 env->reserve_addr); 7438 7439 if (flags & CPU_DUMP_FPU) { 7440 for (i = 0; i < 32; i++) { 7441 if ((i & (RFPL - 1)) == 0) { 7442 cpu_fprintf(f, "FPR%02d", i); 7443 } 7444 cpu_fprintf(f, " %016" PRIx64, *((uint64_t *)&env->fpr[i])); 7445 if ((i & (RFPL - 1)) == (RFPL - 1)) { 7446 cpu_fprintf(f, "\n"); 7447 } 7448 } 7449 cpu_fprintf(f, "FPSCR " TARGET_FMT_lx "\n", env->fpscr); 7450 } 7451 7452 #if !defined(CONFIG_USER_ONLY) 7453 cpu_fprintf(f, " SRR0 " TARGET_FMT_lx " SRR1 " TARGET_FMT_lx 7454 " PVR " TARGET_FMT_lx " VRSAVE " TARGET_FMT_lx "\n", 7455 env->spr[SPR_SRR0], env->spr[SPR_SRR1], 7456 env->spr[SPR_PVR], env->spr[SPR_VRSAVE]); 7457 7458 cpu_fprintf(f, "SPRG0 " TARGET_FMT_lx " SPRG1 " TARGET_FMT_lx 7459 " SPRG2 " TARGET_FMT_lx " SPRG3 " TARGET_FMT_lx "\n", 7460 env->spr[SPR_SPRG0], env->spr[SPR_SPRG1], 7461 env->spr[SPR_SPRG2], env->spr[SPR_SPRG3]); 7462 7463 cpu_fprintf(f, "SPRG4 " TARGET_FMT_lx " SPRG5 " TARGET_FMT_lx 7464 " SPRG6 " TARGET_FMT_lx " SPRG7 " TARGET_FMT_lx "\n", 7465 env->spr[SPR_SPRG4], env->spr[SPR_SPRG5], 7466 env->spr[SPR_SPRG6], env->spr[SPR_SPRG7]); 7467 7468 #if defined(TARGET_PPC64) 7469 if (env->excp_model == POWERPC_EXCP_POWER7 || 7470 env->excp_model == POWERPC_EXCP_POWER8) { 7471 cpu_fprintf(f, "HSRR0 " TARGET_FMT_lx " HSRR1 " TARGET_FMT_lx "\n", 7472 env->spr[SPR_HSRR0], env->spr[SPR_HSRR1]); 7473 } 7474 #endif 7475 if (env->excp_model == POWERPC_EXCP_BOOKE) { 7476 cpu_fprintf(f, "CSRR0 " TARGET_FMT_lx " CSRR1 " TARGET_FMT_lx 7477 " MCSRR0 " TARGET_FMT_lx " MCSRR1 " TARGET_FMT_lx "\n", 7478 env->spr[SPR_BOOKE_CSRR0], env->spr[SPR_BOOKE_CSRR1], 7479 env->spr[SPR_BOOKE_MCSRR0], env->spr[SPR_BOOKE_MCSRR1]); 7480 7481 cpu_fprintf(f, " TCR " TARGET_FMT_lx " TSR " TARGET_FMT_lx 7482 " ESR " TARGET_FMT_lx " DEAR " TARGET_FMT_lx "\n", 7483 env->spr[SPR_BOOKE_TCR], env->spr[SPR_BOOKE_TSR], 7484 env->spr[SPR_BOOKE_ESR], env->spr[SPR_BOOKE_DEAR]); 7485 7486 cpu_fprintf(f, " PIR " TARGET_FMT_lx " DECAR " TARGET_FMT_lx 7487 " IVPR " TARGET_FMT_lx " EPCR " TARGET_FMT_lx "\n", 7488 env->spr[SPR_BOOKE_PIR], env->spr[SPR_BOOKE_DECAR], 7489 env->spr[SPR_BOOKE_IVPR], env->spr[SPR_BOOKE_EPCR]); 7490 7491 cpu_fprintf(f, " MCSR " TARGET_FMT_lx " SPRG8 " TARGET_FMT_lx 7492 " EPR " TARGET_FMT_lx "\n", 7493 env->spr[SPR_BOOKE_MCSR], env->spr[SPR_BOOKE_SPRG8], 7494 env->spr[SPR_BOOKE_EPR]); 7495 7496 /* FSL-specific */ 7497 cpu_fprintf(f, " MCAR " TARGET_FMT_lx " PID1 " TARGET_FMT_lx 7498 " PID2 " TARGET_FMT_lx " SVR " TARGET_FMT_lx "\n", 7499 env->spr[SPR_Exxx_MCAR], env->spr[SPR_BOOKE_PID1], 7500 env->spr[SPR_BOOKE_PID2], env->spr[SPR_E500_SVR]); 7501 7502 /* 7503 * IVORs are left out as they are large and do not change often -- 7504 * they can be read with "p $ivor0", "p $ivor1", etc. 
7505 */ 7506 } 7507 7508 #if defined(TARGET_PPC64) 7509 if (env->flags & POWERPC_FLAG_CFAR) { 7510 cpu_fprintf(f, " CFAR " TARGET_FMT_lx"\n", env->cfar); 7511 } 7512 #endif 7513 7514 if (env->spr_cb[SPR_LPCR].name) 7515 cpu_fprintf(f, " LPCR " TARGET_FMT_lx "\n", env->spr[SPR_LPCR]); 7516 7517 switch (env->mmu_model) { 7518 case POWERPC_MMU_32B: 7519 case POWERPC_MMU_601: 7520 case POWERPC_MMU_SOFT_6xx: 7521 case POWERPC_MMU_SOFT_74xx: 7522 #if defined(TARGET_PPC64) 7523 case POWERPC_MMU_64B: 7524 case POWERPC_MMU_2_03: 7525 case POWERPC_MMU_2_06: 7526 case POWERPC_MMU_2_07: 7527 case POWERPC_MMU_3_00: 7528 #endif 7529 if (env->spr_cb[SPR_SDR1].name) { /* SDR1 Exists */ 7530 cpu_fprintf(f, " SDR1 " TARGET_FMT_lx " ", env->spr[SPR_SDR1]); 7531 } 7532 if (env->spr_cb[SPR_PTCR].name) { /* PTCR Exists */ 7533 cpu_fprintf(f, " PTCR " TARGET_FMT_lx " ", env->spr[SPR_PTCR]); 7534 } 7535 cpu_fprintf(f, " DAR " TARGET_FMT_lx " DSISR " TARGET_FMT_lx "\n", 7536 env->spr[SPR_DAR], env->spr[SPR_DSISR]); 7537 break; 7538 case POWERPC_MMU_BOOKE206: 7539 cpu_fprintf(f, " MAS0 " TARGET_FMT_lx " MAS1 " TARGET_FMT_lx 7540 " MAS2 " TARGET_FMT_lx " MAS3 " TARGET_FMT_lx "\n", 7541 env->spr[SPR_BOOKE_MAS0], env->spr[SPR_BOOKE_MAS1], 7542 env->spr[SPR_BOOKE_MAS2], env->spr[SPR_BOOKE_MAS3]); 7543 7544 cpu_fprintf(f, " MAS4 " TARGET_FMT_lx " MAS6 " TARGET_FMT_lx 7545 " MAS7 " TARGET_FMT_lx " PID " TARGET_FMT_lx "\n", 7546 env->spr[SPR_BOOKE_MAS4], env->spr[SPR_BOOKE_MAS6], 7547 env->spr[SPR_BOOKE_MAS7], env->spr[SPR_BOOKE_PID]); 7548 7549 cpu_fprintf(f, "MMUCFG " TARGET_FMT_lx " TLB0CFG " TARGET_FMT_lx 7550 " TLB1CFG " TARGET_FMT_lx "\n", 7551 env->spr[SPR_MMUCFG], env->spr[SPR_BOOKE_TLB0CFG], 7552 env->spr[SPR_BOOKE_TLB1CFG]); 7553 break; 7554 default: 7555 break; 7556 } 7557 #endif 7558 7559 #undef RGPL 7560 #undef RFPL 7561 } 7562 7563 void ppc_cpu_dump_statistics(CPUState *cs, FILE*f, 7564 fprintf_function cpu_fprintf, int flags) 7565 { 7566 #if defined(DO_PPC_STATISTICS) 7567 PowerPCCPU *cpu = POWERPC_CPU(cs); 7568 opc_handler_t **t1, **t2, **t3, *handler; 7569 int op1, op2, op3; 7570 7571 t1 = cpu->env.opcodes; 7572 for (op1 = 0; op1 < 64; op1++) { 7573 handler = t1[op1]; 7574 if (is_indirect_opcode(handler)) { 7575 t2 = ind_table(handler); 7576 for (op2 = 0; op2 < 32; op2++) { 7577 handler = t2[op2]; 7578 if (is_indirect_opcode(handler)) { 7579 t3 = ind_table(handler); 7580 for (op3 = 0; op3 < 32; op3++) { 7581 handler = t3[op3]; 7582 if (handler->count == 0) 7583 continue; 7584 cpu_fprintf(f, "%02x %02x %02x (%02x %04d) %16s: " 7585 "%016" PRIx64 " %" PRId64 "\n", 7586 op1, op2, op3, op1, (op3 << 5) | op2, 7587 handler->oname, 7588 handler->count, handler->count); 7589 } 7590 } else { 7591 if (handler->count == 0) 7592 continue; 7593 cpu_fprintf(f, "%02x %02x (%02x %04d) %16s: " 7594 "%016" PRIx64 " %" PRId64 "\n", 7595 op1, op2, op1, op2, handler->oname, 7596 handler->count, handler->count); 7597 } 7598 } 7599 } else { 7600 if (handler->count == 0) 7601 continue; 7602 cpu_fprintf(f, "%02x (%02x ) %16s: %016" PRIx64 7603 " %" PRId64 "\n", 7604 op1, op1, handler->oname, 7605 handler->count, handler->count); 7606 } 7607 } 7608 #endif 7609 } 7610 7611 static void ppc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs) 7612 { 7613 DisasContext *ctx = container_of(dcbase, DisasContext, base); 7614 CPUPPCState *env = cs->env_ptr; 7615 int bound; 7616 7617 ctx->exception = POWERPC_EXCP_NONE; 7618 ctx->spr_cb = env->spr_cb; 7619 ctx->pr = msr_pr; 7620 ctx->mem_idx = env->dmmu_idx; 7621 ctx->dr = msr_dr; 
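    /*
     * ctx->hv is only meaningful in the system-emulation build: it is set
     * when MSR[HV] is active, or unconditionally for CPUs that do not
     * implement a hypervisor mode.
     */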

static void ppc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    CPUPPCState *env = cs->env_ptr;
    int bound;

    ctx->exception = POWERPC_EXCP_NONE;
    ctx->spr_cb = env->spr_cb;
    ctx->pr = msr_pr;
    ctx->mem_idx = env->dmmu_idx;
    ctx->dr = msr_dr;
#if !defined(CONFIG_USER_ONLY)
    ctx->hv = msr_hv || !env->has_hv_mode;
#endif
    ctx->insns_flags = env->insns_flags;
    ctx->insns_flags2 = env->insns_flags2;
    ctx->access_type = -1;
    ctx->need_access_type = !(env->mmu_model & POWERPC_MMU_64B);
    ctx->le_mode = !!(env->hflags & (1 << MSR_LE));
    ctx->default_tcg_memop_mask = ctx->le_mode ? MO_LE : MO_BE;
    ctx->flags = env->flags;
#if defined(TARGET_PPC64)
    ctx->sf_mode = msr_is_64bit(env, env->msr);
    ctx->has_cfar = !!(env->flags & POWERPC_FLAG_CFAR);
#endif
    ctx->lazy_tlb_flush = env->mmu_model == POWERPC_MMU_32B
        || env->mmu_model == POWERPC_MMU_601
        || (env->mmu_model & POWERPC_MMU_64B);

    ctx->fpu_enabled = !!msr_fp;
    if ((env->flags & POWERPC_FLAG_SPE) && msr_spe) {
        ctx->spe_enabled = !!msr_spe;
    } else {
        ctx->spe_enabled = false;
    }
    if ((env->flags & POWERPC_FLAG_VRE) && msr_vr) {
        ctx->altivec_enabled = !!msr_vr;
    } else {
        ctx->altivec_enabled = false;
    }
    if ((env->flags & POWERPC_FLAG_VSX) && msr_vsx) {
        ctx->vsx_enabled = !!msr_vsx;
    } else {
        ctx->vsx_enabled = false;
    }
#if defined(TARGET_PPC64)
    if ((env->flags & POWERPC_FLAG_TM) && msr_tm) {
        ctx->tm_enabled = !!msr_tm;
    } else {
        ctx->tm_enabled = false;
    }
#endif
    ctx->gtse = !!(env->spr[SPR_LPCR] & LPCR_GTSE);
    if ((env->flags & POWERPC_FLAG_SE) && msr_se) {
        ctx->singlestep_enabled = CPU_SINGLE_STEP;
    } else {
        ctx->singlestep_enabled = 0;
    }
    if ((env->flags & POWERPC_FLAG_BE) && msr_be) {
        ctx->singlestep_enabled |= CPU_BRANCH_STEP;
    }
    if ((env->flags & POWERPC_FLAG_DE) && msr_de) {
        ctx->singlestep_enabled = 0;
        target_ulong dbcr0 = env->spr[SPR_BOOKE_DBCR0];
        if (dbcr0 & DBCR0_ICMP) {
            ctx->singlestep_enabled |= CPU_SINGLE_STEP;
        }
        if (dbcr0 & DBCR0_BRT) {
            ctx->singlestep_enabled |= CPU_BRANCH_STEP;
        }
    }
    if (unlikely(ctx->base.singlestep_enabled)) {
        ctx->singlestep_enabled |= GDBSTUB_SINGLE_STEP;
    }
#if defined(DO_SINGLE_STEP) && 0
    /* Single step trace mode */
    msr_se = 1;
#endif

    bound = -(ctx->base.pc_first | TARGET_PAGE_MASK) / 4;
    ctx->base.max_insns = MIN(ctx->base.max_insns, bound);
}

static void ppc_tr_tb_start(DisasContextBase *db, CPUState *cs)
{
}

static void ppc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
{
    tcg_gen_insn_start(dcbase->pc_next);
}
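
/*
 * Note: the single target_ulong recorded by tcg_gen_insn_start() above is
 * what restore_state_to_opc() at the end of this file reads back as
 * data[0] to rebuild env->nip when an exception is raised from the middle
 * of a translated block.
 */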

static bool ppc_tr_breakpoint_check(DisasContextBase *dcbase, CPUState *cs,
                                    const CPUBreakpoint *bp)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    gen_debug_exception(ctx);
    dcbase->is_jmp = DISAS_NORETURN;
    /*
     * The address covered by the breakpoint must be included in
     * [tb->pc, tb->pc + tb->size) in order for it to be properly
     * cleared -- thus we increment the PC here so that the logic
     * setting tb->size below does the right thing.
     */
    ctx->base.pc_next += 4;
    return true;
}

static void ppc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    CPUPPCState *env = cs->env_ptr;
    opc_handler_t **table, *handler;

    LOG_DISAS("----------------\n");
    LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
              ctx->base.pc_next, ctx->mem_idx, (int)msr_ir);

    if (unlikely(need_byteswap(ctx))) {
        ctx->opcode = bswap32(cpu_ldl_code(env, ctx->base.pc_next));
    } else {
        ctx->opcode = cpu_ldl_code(env, ctx->base.pc_next);
    }
    LOG_DISAS("translate opcode %08x (%02x %02x %02x %02x) (%s)\n",
              ctx->opcode, opc1(ctx->opcode), opc2(ctx->opcode),
              opc3(ctx->opcode), opc4(ctx->opcode),
              ctx->le_mode ? "little" : "big");
    ctx->base.pc_next += 4;
    table = env->opcodes;
    handler = table[opc1(ctx->opcode)];
    if (is_indirect_opcode(handler)) {
        table = ind_table(handler);
        handler = table[opc2(ctx->opcode)];
        if (is_indirect_opcode(handler)) {
            table = ind_table(handler);
            handler = table[opc3(ctx->opcode)];
            if (is_indirect_opcode(handler)) {
                table = ind_table(handler);
                handler = table[opc4(ctx->opcode)];
            }
        }
    }
    /* Is opcode *REALLY* valid? */
    if (unlikely(handler->handler == &gen_invalid)) {
        qemu_log_mask(LOG_GUEST_ERROR, "invalid/unsupported opcode: "
                      "%02x - %02x - %02x - %02x (%08x) "
                      TARGET_FMT_lx " %d\n",
                      opc1(ctx->opcode), opc2(ctx->opcode),
                      opc3(ctx->opcode), opc4(ctx->opcode),
                      ctx->opcode, ctx->base.pc_next - 4, (int)msr_ir);
    } else {
        uint32_t inval;

        if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE)
                     && Rc(ctx->opcode))) {
            inval = handler->inval2;
        } else {
            inval = handler->inval1;
        }

        if (unlikely((ctx->opcode & inval) != 0)) {
            qemu_log_mask(LOG_GUEST_ERROR, "invalid bits: %08x for opcode: "
                          "%02x - %02x - %02x - %02x (%08x) "
                          TARGET_FMT_lx "\n", ctx->opcode & inval,
                          opc1(ctx->opcode), opc2(ctx->opcode),
                          opc3(ctx->opcode), opc4(ctx->opcode),
                          ctx->opcode, ctx->base.pc_next - 4);
            gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
            ctx->base.is_jmp = DISAS_NORETURN;
            return;
        }
    }
    (*(handler->handler))(ctx);
#if defined(DO_PPC_STATISTICS)
    handler->count++;
#endif
    /* Check trace mode exceptions */
    if (unlikely(ctx->singlestep_enabled & CPU_SINGLE_STEP &&
                 (ctx->base.pc_next <= 0x100 || ctx->base.pc_next > 0xF00) &&
                 ctx->exception != POWERPC_SYSCALL &&
                 ctx->exception != POWERPC_EXCP_TRAP &&
                 ctx->exception != POWERPC_EXCP_BRANCH)) {
        uint32_t excp = gen_prep_dbgex(ctx, POWERPC_EXCP_TRACE);
        if (excp != POWERPC_EXCP_NONE) {
            gen_exception_nip(ctx, excp, ctx->base.pc_next);
        }
    }

    if (tcg_check_temp_count()) {
        qemu_log("Opcode %02x %02x %02x %02x (%08x) leaked "
                 "temporaries\n", opc1(ctx->opcode), opc2(ctx->opcode),
                 opc3(ctx->opcode), opc4(ctx->opcode), ctx->opcode);
    }

    ctx->base.is_jmp = ctx->exception == POWERPC_EXCP_NONE ?
        DISAS_NEXT : DISAS_NORETURN;
}
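
/*
 * A note on dispatch in ppc_tr_translate_insn() above (descriptive only):
 * the top-level env->opcodes table is indexed by opc1(), the 6-bit PowerPC
 * major opcode, and indirect entries lead to sub-tables indexed by
 * opc2()/opc3()/opc4() until a leaf handler is reached.  For example, "add"
 * carries major opcode 31 and resolves through the opcode-31 sub-tables.
 * A leaf of gen_invalid, or any opcode bit set inside the handler's inval
 * mask, causes the guest instruction to be treated as invalid/unsupported.
 */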

static void ppc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    if (ctx->exception == POWERPC_EXCP_NONE) {
        gen_goto_tb(ctx, 0, ctx->base.pc_next);
    } else if (ctx->exception != POWERPC_EXCP_BRANCH) {
        if (unlikely(ctx->base.singlestep_enabled)) {
            gen_debug_exception(ctx);
        }
        /* Generate the return instruction */
        tcg_gen_exit_tb(NULL, 0);
    }
}

static void ppc_tr_disas_log(const DisasContextBase *dcbase, CPUState *cs)
{
    qemu_log("IN: %s\n", lookup_symbol(dcbase->pc_first));
    log_target_disas(cs, dcbase->pc_first, dcbase->tb->size);
}

static const TranslatorOps ppc_tr_ops = {
    .init_disas_context = ppc_tr_init_disas_context,
    .tb_start           = ppc_tr_tb_start,
    .insn_start         = ppc_tr_insn_start,
    .breakpoint_check   = ppc_tr_breakpoint_check,
    .translate_insn     = ppc_tr_translate_insn,
    .tb_stop            = ppc_tr_tb_stop,
    .disas_log          = ppc_tr_disas_log,
};

void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb)
{
    DisasContext ctx;

    translator_loop(&ppc_tr_ops, &ctx.base, cs, tb);
}

void restore_state_to_opc(CPUPPCState *env, TranslationBlock *tb,
                          target_ulong *data)
{
    env->nip = data[0];
}