1 /* 2 * PowerPC emulation for qemu: main translation routines. 3 * 4 * Copyright (c) 2003-2007 Jocelyn Mayer 5 * Copyright (C) 2011 Freescale Semiconductor, Inc. 6 * 7 * This library is free software; you can redistribute it and/or 8 * modify it under the terms of the GNU Lesser General Public 9 * License as published by the Free Software Foundation; either 10 * version 2 of the License, or (at your option) any later version. 11 * 12 * This library is distributed in the hope that it will be useful, 13 * but WITHOUT ANY WARRANTY; without even the implied warranty of 14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 15 * Lesser General Public License for more details. 16 * 17 * You should have received a copy of the GNU Lesser General Public 18 * License along with this library; if not, see <http://www.gnu.org/licenses/>. 19 */ 20 21 #include "qemu/osdep.h" 22 #include "cpu.h" 23 #include "internal.h" 24 #include "disas/disas.h" 25 #include "exec/exec-all.h" 26 #include "tcg-op.h" 27 #include "qemu/host-utils.h" 28 #include "exec/cpu_ldst.h" 29 30 #include "exec/helper-proto.h" 31 #include "exec/helper-gen.h" 32 33 #include "trace-tcg.h" 34 #include "exec/translator.h" 35 #include "exec/log.h" 36 37 38 #define CPU_SINGLE_STEP 0x1 39 #define CPU_BRANCH_STEP 0x2 40 #define GDBSTUB_SINGLE_STEP 0x4 41 42 /* Include definitions for instructions classes and implementations flags */ 43 //#define PPC_DEBUG_DISAS 44 //#define DO_PPC_STATISTICS 45 46 #ifdef PPC_DEBUG_DISAS 47 # define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__) 48 #else 49 # define LOG_DISAS(...) 
do { } while (0) 50 #endif 51 /*****************************************************************************/ 52 /* Code translation helpers */ 53 54 /* global register indexes */ 55 static char cpu_reg_names[10*3 + 22*4 /* GPR */ 56 + 10*4 + 22*5 /* SPE GPRh */ 57 + 10*4 + 22*5 /* FPR */ 58 + 2*(10*6 + 22*7) /* AVRh, AVRl */ 59 + 10*5 + 22*6 /* VSR */ 60 + 8*5 /* CRF */]; 61 static TCGv cpu_gpr[32]; 62 static TCGv cpu_gprh[32]; 63 static TCGv_i64 cpu_fpr[32]; 64 static TCGv_i64 cpu_avrh[32], cpu_avrl[32]; 65 static TCGv_i64 cpu_vsr[32]; 66 static TCGv_i32 cpu_crf[8]; 67 static TCGv cpu_nip; 68 static TCGv cpu_msr; 69 static TCGv cpu_ctr; 70 static TCGv cpu_lr; 71 #if defined(TARGET_PPC64) 72 static TCGv cpu_cfar; 73 #endif 74 static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca, cpu_ov32, cpu_ca32; 75 static TCGv cpu_reserve; 76 static TCGv cpu_reserve_val; 77 static TCGv cpu_fpscr; 78 static TCGv_i32 cpu_access_type; 79 80 #include "exec/gen-icount.h" 81 82 void ppc_translate_init(void) 83 { 84 int i; 85 char* p; 86 size_t cpu_reg_names_size; 87 88 p = cpu_reg_names; 89 cpu_reg_names_size = sizeof(cpu_reg_names); 90 91 for (i = 0; i < 8; i++) { 92 snprintf(p, cpu_reg_names_size, "crf%d", i); 93 cpu_crf[i] = tcg_global_mem_new_i32(cpu_env, 94 offsetof(CPUPPCState, crf[i]), p); 95 p += 5; 96 cpu_reg_names_size -= 5; 97 } 98 99 for (i = 0; i < 32; i++) { 100 snprintf(p, cpu_reg_names_size, "r%d", i); 101 cpu_gpr[i] = tcg_global_mem_new(cpu_env, 102 offsetof(CPUPPCState, gpr[i]), p); 103 p += (i < 10) ? 3 : 4; 104 cpu_reg_names_size -= (i < 10) ? 3 : 4; 105 snprintf(p, cpu_reg_names_size, "r%dH", i); 106 cpu_gprh[i] = tcg_global_mem_new(cpu_env, 107 offsetof(CPUPPCState, gprh[i]), p); 108 p += (i < 10) ? 4 : 5; 109 cpu_reg_names_size -= (i < 10) ? 4 : 5; 110 111 snprintf(p, cpu_reg_names_size, "fp%d", i); 112 cpu_fpr[i] = tcg_global_mem_new_i64(cpu_env, 113 offsetof(CPUPPCState, fpr[i]), p); 114 p += (i < 10) ? 4 : 5; 115 cpu_reg_names_size -= (i < 10) ? 
4 : 5; 116 117 snprintf(p, cpu_reg_names_size, "avr%dH", i); 118 #ifdef HOST_WORDS_BIGENDIAN 119 cpu_avrh[i] = tcg_global_mem_new_i64(cpu_env, 120 offsetof(CPUPPCState, avr[i].u64[0]), p); 121 #else 122 cpu_avrh[i] = tcg_global_mem_new_i64(cpu_env, 123 offsetof(CPUPPCState, avr[i].u64[1]), p); 124 #endif 125 p += (i < 10) ? 6 : 7; 126 cpu_reg_names_size -= (i < 10) ? 6 : 7; 127 128 snprintf(p, cpu_reg_names_size, "avr%dL", i); 129 #ifdef HOST_WORDS_BIGENDIAN 130 cpu_avrl[i] = tcg_global_mem_new_i64(cpu_env, 131 offsetof(CPUPPCState, avr[i].u64[1]), p); 132 #else 133 cpu_avrl[i] = tcg_global_mem_new_i64(cpu_env, 134 offsetof(CPUPPCState, avr[i].u64[0]), p); 135 #endif 136 p += (i < 10) ? 6 : 7; 137 cpu_reg_names_size -= (i < 10) ? 6 : 7; 138 snprintf(p, cpu_reg_names_size, "vsr%d", i); 139 cpu_vsr[i] = tcg_global_mem_new_i64(cpu_env, 140 offsetof(CPUPPCState, vsr[i]), p); 141 p += (i < 10) ? 5 : 6; 142 cpu_reg_names_size -= (i < 10) ? 5 : 6; 143 } 144 145 cpu_nip = tcg_global_mem_new(cpu_env, 146 offsetof(CPUPPCState, nip), "nip"); 147 148 cpu_msr = tcg_global_mem_new(cpu_env, 149 offsetof(CPUPPCState, msr), "msr"); 150 151 cpu_ctr = tcg_global_mem_new(cpu_env, 152 offsetof(CPUPPCState, ctr), "ctr"); 153 154 cpu_lr = tcg_global_mem_new(cpu_env, 155 offsetof(CPUPPCState, lr), "lr"); 156 157 #if defined(TARGET_PPC64) 158 cpu_cfar = tcg_global_mem_new(cpu_env, 159 offsetof(CPUPPCState, cfar), "cfar"); 160 #endif 161 162 cpu_xer = tcg_global_mem_new(cpu_env, 163 offsetof(CPUPPCState, xer), "xer"); 164 cpu_so = tcg_global_mem_new(cpu_env, 165 offsetof(CPUPPCState, so), "SO"); 166 cpu_ov = tcg_global_mem_new(cpu_env, 167 offsetof(CPUPPCState, ov), "OV"); 168 cpu_ca = tcg_global_mem_new(cpu_env, 169 offsetof(CPUPPCState, ca), "CA"); 170 cpu_ov32 = tcg_global_mem_new(cpu_env, 171 offsetof(CPUPPCState, ov32), "OV32"); 172 cpu_ca32 = tcg_global_mem_new(cpu_env, 173 offsetof(CPUPPCState, ca32), "CA32"); 174 175 cpu_reserve = tcg_global_mem_new(cpu_env, 176 
offsetof(CPUPPCState, reserve_addr), 177 "reserve_addr"); 178 cpu_reserve_val = tcg_global_mem_new(cpu_env, 179 offsetof(CPUPPCState, reserve_val), 180 "reserve_val"); 181 182 cpu_fpscr = tcg_global_mem_new(cpu_env, 183 offsetof(CPUPPCState, fpscr), "fpscr"); 184 185 cpu_access_type = tcg_global_mem_new_i32(cpu_env, 186 offsetof(CPUPPCState, access_type), "access_type"); 187 } 188 189 /* internal defines */ 190 struct DisasContext { 191 DisasContextBase base; 192 uint32_t opcode; 193 uint32_t exception; 194 /* Routine used to access memory */ 195 bool pr, hv, dr, le_mode; 196 bool lazy_tlb_flush; 197 bool need_access_type; 198 int mem_idx; 199 int access_type; 200 /* Translation flags */ 201 TCGMemOp default_tcg_memop_mask; 202 #if defined(TARGET_PPC64) 203 bool sf_mode; 204 bool has_cfar; 205 #endif 206 bool fpu_enabled; 207 bool altivec_enabled; 208 bool vsx_enabled; 209 bool spe_enabled; 210 bool tm_enabled; 211 bool gtse; 212 ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */ 213 int singlestep_enabled; 214 uint32_t flags; 215 uint64_t insns_flags; 216 uint64_t insns_flags2; 217 }; 218 219 /* Return true iff byteswap is needed in a scalar memop */ 220 static inline bool need_byteswap(const DisasContext *ctx) 221 { 222 #if defined(TARGET_WORDS_BIGENDIAN) 223 return ctx->le_mode; 224 #else 225 return !ctx->le_mode; 226 #endif 227 } 228 229 /* True when active word size < size of target_long. 
*/ 230 #ifdef TARGET_PPC64 231 # define NARROW_MODE(C) (!(C)->sf_mode) 232 #else 233 # define NARROW_MODE(C) 0 234 #endif 235 236 struct opc_handler_t { 237 /* invalid bits for instruction 1 (Rc(opcode) == 0) */ 238 uint32_t inval1; 239 /* invalid bits for instruction 2 (Rc(opcode) == 1) */ 240 uint32_t inval2; 241 /* instruction type */ 242 uint64_t type; 243 /* extended instruction type */ 244 uint64_t type2; 245 /* handler */ 246 void (*handler)(DisasContext *ctx); 247 #if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU) 248 const char *oname; 249 #endif 250 #if defined(DO_PPC_STATISTICS) 251 uint64_t count; 252 #endif 253 }; 254 255 /* SPR load/store helpers */ 256 static inline void gen_load_spr(TCGv t, int reg) 257 { 258 tcg_gen_ld_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg])); 259 } 260 261 static inline void gen_store_spr(int reg, TCGv t) 262 { 263 tcg_gen_st_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg])); 264 } 265 266 static inline void gen_set_access_type(DisasContext *ctx, int access_type) 267 { 268 if (ctx->need_access_type && ctx->access_type != access_type) { 269 tcg_gen_movi_i32(cpu_access_type, access_type); 270 ctx->access_type = access_type; 271 } 272 } 273 274 static inline void gen_update_nip(DisasContext *ctx, target_ulong nip) 275 { 276 if (NARROW_MODE(ctx)) { 277 nip = (uint32_t)nip; 278 } 279 tcg_gen_movi_tl(cpu_nip, nip); 280 } 281 282 static void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error) 283 { 284 TCGv_i32 t0, t1; 285 286 /* These are all synchronous exceptions, we set the PC back to 287 * the faulting instruction 288 */ 289 if (ctx->exception == POWERPC_EXCP_NONE) { 290 gen_update_nip(ctx, ctx->base.pc_next - 4); 291 } 292 t0 = tcg_const_i32(excp); 293 t1 = tcg_const_i32(error); 294 gen_helper_raise_exception_err(cpu_env, t0, t1); 295 tcg_temp_free_i32(t0); 296 tcg_temp_free_i32(t1); 297 ctx->exception = (excp); 298 } 299 300 static void gen_exception(DisasContext *ctx, uint32_t excp) 301 { 302 TCGv_i32 
t0; 303 304 /* These are all synchronous exceptions, we set the PC back to 305 * the faulting instruction 306 */ 307 if (ctx->exception == POWERPC_EXCP_NONE) { 308 gen_update_nip(ctx, ctx->base.pc_next - 4); 309 } 310 t0 = tcg_const_i32(excp); 311 gen_helper_raise_exception(cpu_env, t0); 312 tcg_temp_free_i32(t0); 313 ctx->exception = (excp); 314 } 315 316 static void gen_exception_nip(DisasContext *ctx, uint32_t excp, 317 target_ulong nip) 318 { 319 TCGv_i32 t0; 320 321 gen_update_nip(ctx, nip); 322 t0 = tcg_const_i32(excp); 323 gen_helper_raise_exception(cpu_env, t0); 324 tcg_temp_free_i32(t0); 325 ctx->exception = (excp); 326 } 327 328 /* Translates the EXCP_TRACE/BRANCH exceptions used on most PowerPCs to 329 * EXCP_DEBUG, if we are running on cores using the debug enable bit (e.g. 330 * BookE). 331 */ 332 static uint32_t gen_prep_dbgex(DisasContext *ctx, uint32_t excp) 333 { 334 if ((ctx->singlestep_enabled & CPU_SINGLE_STEP) 335 && (excp == POWERPC_EXCP_BRANCH)) { 336 /* Trace excpt. 
has priority */ 337 excp = POWERPC_EXCP_TRACE; 338 } 339 if (ctx->flags & POWERPC_FLAG_DE) { 340 target_ulong dbsr = 0; 341 switch (excp) { 342 case POWERPC_EXCP_TRACE: 343 dbsr = DBCR0_ICMP; 344 break; 345 case POWERPC_EXCP_BRANCH: 346 dbsr = DBCR0_BRT; 347 break; 348 } 349 TCGv t0 = tcg_temp_new(); 350 gen_load_spr(t0, SPR_BOOKE_DBSR); 351 tcg_gen_ori_tl(t0, t0, dbsr); 352 gen_store_spr(SPR_BOOKE_DBSR, t0); 353 tcg_temp_free(t0); 354 return POWERPC_EXCP_DEBUG; 355 } else { 356 return excp; 357 } 358 } 359 360 static void gen_debug_exception(DisasContext *ctx) 361 { 362 TCGv_i32 t0; 363 364 /* These are all synchronous exceptions, we set the PC back to 365 * the faulting instruction 366 */ 367 if ((ctx->exception != POWERPC_EXCP_BRANCH) && 368 (ctx->exception != POWERPC_EXCP_SYNC)) { 369 gen_update_nip(ctx, ctx->base.pc_next); 370 } 371 t0 = tcg_const_i32(EXCP_DEBUG); 372 gen_helper_raise_exception(cpu_env, t0); 373 tcg_temp_free_i32(t0); 374 } 375 376 static inline void gen_inval_exception(DisasContext *ctx, uint32_t error) 377 { 378 /* Will be converted to program check if needed */ 379 gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_INVAL | error); 380 } 381 382 static inline void gen_priv_exception(DisasContext *ctx, uint32_t error) 383 { 384 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_PRIV | error); 385 } 386 387 static inline void gen_hvpriv_exception(DisasContext *ctx, uint32_t error) 388 { 389 /* Will be converted to program check if needed */ 390 gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_PRIV | error); 391 } 392 393 /* Stop translation */ 394 static inline void gen_stop_exception(DisasContext *ctx) 395 { 396 gen_update_nip(ctx, ctx->base.pc_next); 397 ctx->exception = POWERPC_EXCP_STOP; 398 } 399 400 #ifndef CONFIG_USER_ONLY 401 /* No need to update nip here, as execution flow will change */ 402 static inline void gen_sync_exception(DisasContext *ctx) 403 { 404 ctx->exception = POWERPC_EXCP_SYNC; 405 } 406 #endif 407 
408 #define GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \ 409 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE) 410 411 #define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2) \ 412 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2) 413 414 #define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type) \ 415 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE) 416 417 #define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2) \ 418 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2) 419 420 #define GEN_HANDLER_E_2(name, opc1, opc2, opc3, opc4, inval, type, type2) \ 421 GEN_OPCODE3(name, opc1, opc2, opc3, opc4, inval, type, type2) 422 423 #define GEN_HANDLER2_E_2(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) \ 424 GEN_OPCODE4(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) 425 426 typedef struct opcode_t { 427 unsigned char opc1, opc2, opc3, opc4; 428 #if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */ 429 unsigned char pad[4]; 430 #endif 431 opc_handler_t handler; 432 const char *oname; 433 } opcode_t; 434 435 /* Helpers for priv. 
check */ 436 #define GEN_PRIV \ 437 do { \ 438 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); return; \ 439 } while (0) 440 441 #if defined(CONFIG_USER_ONLY) 442 #define CHK_HV GEN_PRIV 443 #define CHK_SV GEN_PRIV 444 #define CHK_HVRM GEN_PRIV 445 #else 446 #define CHK_HV \ 447 do { \ 448 if (unlikely(ctx->pr || !ctx->hv)) { \ 449 GEN_PRIV; \ 450 } \ 451 } while (0) 452 #define CHK_SV \ 453 do { \ 454 if (unlikely(ctx->pr)) { \ 455 GEN_PRIV; \ 456 } \ 457 } while (0) 458 #define CHK_HVRM \ 459 do { \ 460 if (unlikely(ctx->pr || !ctx->hv || ctx->dr)) { \ 461 GEN_PRIV; \ 462 } \ 463 } while (0) 464 #endif 465 466 #define CHK_NONE 467 468 /*****************************************************************************/ 469 /* PowerPC instructions table */ 470 471 #if defined(DO_PPC_STATISTICS) 472 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \ 473 { \ 474 .opc1 = op1, \ 475 .opc2 = op2, \ 476 .opc3 = op3, \ 477 .opc4 = 0xff, \ 478 .handler = { \ 479 .inval1 = invl, \ 480 .type = _typ, \ 481 .type2 = _typ2, \ 482 .handler = &gen_##name, \ 483 .oname = stringify(name), \ 484 }, \ 485 .oname = stringify(name), \ 486 } 487 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \ 488 { \ 489 .opc1 = op1, \ 490 .opc2 = op2, \ 491 .opc3 = op3, \ 492 .opc4 = 0xff, \ 493 .handler = { \ 494 .inval1 = invl1, \ 495 .inval2 = invl2, \ 496 .type = _typ, \ 497 .type2 = _typ2, \ 498 .handler = &gen_##name, \ 499 .oname = stringify(name), \ 500 }, \ 501 .oname = stringify(name), \ 502 } 503 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \ 504 { \ 505 .opc1 = op1, \ 506 .opc2 = op2, \ 507 .opc3 = op3, \ 508 .opc4 = 0xff, \ 509 .handler = { \ 510 .inval1 = invl, \ 511 .type = _typ, \ 512 .type2 = _typ2, \ 513 .handler = &gen_##name, \ 514 .oname = onam, \ 515 }, \ 516 .oname = onam, \ 517 } 518 #define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2) \ 519 { \ 520 .opc1 = op1, \ 521 .opc2 = op2, \ 522 .opc3 = op3, \ 523 .opc4 = 
op4, \ 524 .handler = { \ 525 .inval1 = invl, \ 526 .type = _typ, \ 527 .type2 = _typ2, \ 528 .handler = &gen_##name, \ 529 .oname = stringify(name), \ 530 }, \ 531 .oname = stringify(name), \ 532 } 533 #define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2) \ 534 { \ 535 .opc1 = op1, \ 536 .opc2 = op2, \ 537 .opc3 = op3, \ 538 .opc4 = op4, \ 539 .handler = { \ 540 .inval1 = invl, \ 541 .type = _typ, \ 542 .type2 = _typ2, \ 543 .handler = &gen_##name, \ 544 .oname = onam, \ 545 }, \ 546 .oname = onam, \ 547 } 548 #else 549 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \ 550 { \ 551 .opc1 = op1, \ 552 .opc2 = op2, \ 553 .opc3 = op3, \ 554 .opc4 = 0xff, \ 555 .handler = { \ 556 .inval1 = invl, \ 557 .type = _typ, \ 558 .type2 = _typ2, \ 559 .handler = &gen_##name, \ 560 }, \ 561 .oname = stringify(name), \ 562 } 563 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \ 564 { \ 565 .opc1 = op1, \ 566 .opc2 = op2, \ 567 .opc3 = op3, \ 568 .opc4 = 0xff, \ 569 .handler = { \ 570 .inval1 = invl1, \ 571 .inval2 = invl2, \ 572 .type = _typ, \ 573 .type2 = _typ2, \ 574 .handler = &gen_##name, \ 575 }, \ 576 .oname = stringify(name), \ 577 } 578 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \ 579 { \ 580 .opc1 = op1, \ 581 .opc2 = op2, \ 582 .opc3 = op3, \ 583 .opc4 = 0xff, \ 584 .handler = { \ 585 .inval1 = invl, \ 586 .type = _typ, \ 587 .type2 = _typ2, \ 588 .handler = &gen_##name, \ 589 }, \ 590 .oname = onam, \ 591 } 592 #define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2) \ 593 { \ 594 .opc1 = op1, \ 595 .opc2 = op2, \ 596 .opc3 = op3, \ 597 .opc4 = op4, \ 598 .handler = { \ 599 .inval1 = invl, \ 600 .type = _typ, \ 601 .type2 = _typ2, \ 602 .handler = &gen_##name, \ 603 }, \ 604 .oname = stringify(name), \ 605 } 606 #define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2) \ 607 { \ 608 .opc1 = op1, \ 609 .opc2 = op2, \ 610 .opc3 = op3, \ 611 .opc4 = op4, \ 612 .handler = { \ 
613 .inval1 = invl, \ 614 .type = _typ, \ 615 .type2 = _typ2, \ 616 .handler = &gen_##name, \ 617 }, \ 618 .oname = onam, \ 619 } 620 #endif 621 622 /* Invalid instruction */ 623 static void gen_invalid(DisasContext *ctx) 624 { 625 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 626 } 627 628 static opc_handler_t invalid_handler = { 629 .inval1 = 0xFFFFFFFF, 630 .inval2 = 0xFFFFFFFF, 631 .type = PPC_NONE, 632 .type2 = PPC_NONE, 633 .handler = gen_invalid, 634 }; 635 636 /*** Integer comparison ***/ 637 638 static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf) 639 { 640 TCGv t0 = tcg_temp_new(); 641 TCGv t1 = tcg_temp_new(); 642 TCGv_i32 t = tcg_temp_new_i32(); 643 644 tcg_gen_movi_tl(t0, CRF_EQ); 645 tcg_gen_movi_tl(t1, CRF_LT); 646 tcg_gen_movcond_tl((s ? TCG_COND_LT : TCG_COND_LTU), t0, arg0, arg1, t1, t0); 647 tcg_gen_movi_tl(t1, CRF_GT); 648 tcg_gen_movcond_tl((s ? TCG_COND_GT : TCG_COND_GTU), t0, arg0, arg1, t1, t0); 649 650 tcg_gen_trunc_tl_i32(t, t0); 651 tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so); 652 tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t); 653 654 tcg_temp_free(t0); 655 tcg_temp_free(t1); 656 tcg_temp_free_i32(t); 657 } 658 659 static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf) 660 { 661 TCGv t0 = tcg_const_tl(arg1); 662 gen_op_cmp(arg0, t0, s, crf); 663 tcg_temp_free(t0); 664 } 665 666 static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf) 667 { 668 TCGv t0, t1; 669 t0 = tcg_temp_new(); 670 t1 = tcg_temp_new(); 671 if (s) { 672 tcg_gen_ext32s_tl(t0, arg0); 673 tcg_gen_ext32s_tl(t1, arg1); 674 } else { 675 tcg_gen_ext32u_tl(t0, arg0); 676 tcg_gen_ext32u_tl(t1, arg1); 677 } 678 gen_op_cmp(t0, t1, s, crf); 679 tcg_temp_free(t1); 680 tcg_temp_free(t0); 681 } 682 683 static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf) 684 { 685 TCGv t0 = tcg_const_tl(arg1); 686 gen_op_cmp32(arg0, t0, s, crf); 687 tcg_temp_free(t0); 688 } 689 690 static inline void 
gen_set_Rc0(DisasContext *ctx, TCGv reg) 691 { 692 if (NARROW_MODE(ctx)) { 693 gen_op_cmpi32(reg, 0, 1, 0); 694 } else { 695 gen_op_cmpi(reg, 0, 1, 0); 696 } 697 } 698 699 /* cmp */ 700 static void gen_cmp(DisasContext *ctx) 701 { 702 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) { 703 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 704 1, crfD(ctx->opcode)); 705 } else { 706 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 707 1, crfD(ctx->opcode)); 708 } 709 } 710 711 /* cmpi */ 712 static void gen_cmpi(DisasContext *ctx) 713 { 714 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) { 715 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode), 716 1, crfD(ctx->opcode)); 717 } else { 718 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode), 719 1, crfD(ctx->opcode)); 720 } 721 } 722 723 /* cmpl */ 724 static void gen_cmpl(DisasContext *ctx) 725 { 726 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) { 727 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 728 0, crfD(ctx->opcode)); 729 } else { 730 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 731 0, crfD(ctx->opcode)); 732 } 733 } 734 735 /* cmpli */ 736 static void gen_cmpli(DisasContext *ctx) 737 { 738 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) { 739 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode), 740 0, crfD(ctx->opcode)); 741 } else { 742 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode), 743 0, crfD(ctx->opcode)); 744 } 745 } 746 747 /* cmprb - range comparison: isupper, isaplha, islower*/ 748 static void gen_cmprb(DisasContext *ctx) 749 { 750 TCGv_i32 src1 = tcg_temp_new_i32(); 751 TCGv_i32 src2 = tcg_temp_new_i32(); 752 TCGv_i32 src2lo = tcg_temp_new_i32(); 753 TCGv_i32 src2hi = tcg_temp_new_i32(); 754 TCGv_i32 crf = cpu_crf[crfD(ctx->opcode)]; 755 756 tcg_gen_trunc_tl_i32(src1, cpu_gpr[rA(ctx->opcode)]); 757 tcg_gen_trunc_tl_i32(src2, 
cpu_gpr[rB(ctx->opcode)]); 758 759 tcg_gen_andi_i32(src1, src1, 0xFF); 760 tcg_gen_ext8u_i32(src2lo, src2); 761 tcg_gen_shri_i32(src2, src2, 8); 762 tcg_gen_ext8u_i32(src2hi, src2); 763 764 tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1); 765 tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi); 766 tcg_gen_and_i32(crf, src2lo, src2hi); 767 768 if (ctx->opcode & 0x00200000) { 769 tcg_gen_shri_i32(src2, src2, 8); 770 tcg_gen_ext8u_i32(src2lo, src2); 771 tcg_gen_shri_i32(src2, src2, 8); 772 tcg_gen_ext8u_i32(src2hi, src2); 773 tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1); 774 tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi); 775 tcg_gen_and_i32(src2lo, src2lo, src2hi); 776 tcg_gen_or_i32(crf, crf, src2lo); 777 } 778 tcg_gen_shli_i32(crf, crf, CRF_GT_BIT); 779 tcg_temp_free_i32(src1); 780 tcg_temp_free_i32(src2); 781 tcg_temp_free_i32(src2lo); 782 tcg_temp_free_i32(src2hi); 783 } 784 785 #if defined(TARGET_PPC64) 786 /* cmpeqb */ 787 static void gen_cmpeqb(DisasContext *ctx) 788 { 789 gen_helper_cmpeqb(cpu_crf[crfD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 790 cpu_gpr[rB(ctx->opcode)]); 791 } 792 #endif 793 794 /* isel (PowerPC 2.03 specification) */ 795 static void gen_isel(DisasContext *ctx) 796 { 797 uint32_t bi = rC(ctx->opcode); 798 uint32_t mask = 0x08 >> (bi & 0x03); 799 TCGv t0 = tcg_temp_new(); 800 TCGv zr; 801 802 tcg_gen_extu_i32_tl(t0, cpu_crf[bi >> 2]); 803 tcg_gen_andi_tl(t0, t0, mask); 804 805 zr = tcg_const_tl(0); 806 tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rD(ctx->opcode)], t0, zr, 807 rA(ctx->opcode) ? 
cpu_gpr[rA(ctx->opcode)] : zr, 808 cpu_gpr[rB(ctx->opcode)]); 809 tcg_temp_free(zr); 810 tcg_temp_free(t0); 811 } 812 813 /* cmpb: PowerPC 2.05 specification */ 814 static void gen_cmpb(DisasContext *ctx) 815 { 816 gen_helper_cmpb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 817 cpu_gpr[rB(ctx->opcode)]); 818 } 819 820 /*** Integer arithmetic ***/ 821 822 static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0, 823 TCGv arg1, TCGv arg2, int sub) 824 { 825 TCGv t0 = tcg_temp_new(); 826 827 tcg_gen_xor_tl(cpu_ov, arg0, arg2); 828 tcg_gen_xor_tl(t0, arg1, arg2); 829 if (sub) { 830 tcg_gen_and_tl(cpu_ov, cpu_ov, t0); 831 } else { 832 tcg_gen_andc_tl(cpu_ov, cpu_ov, t0); 833 } 834 tcg_temp_free(t0); 835 if (NARROW_MODE(ctx)) { 836 tcg_gen_extract_tl(cpu_ov, cpu_ov, 31, 1); 837 if (is_isa300(ctx)) { 838 tcg_gen_mov_tl(cpu_ov32, cpu_ov); 839 } 840 } else { 841 if (is_isa300(ctx)) { 842 tcg_gen_extract_tl(cpu_ov32, cpu_ov, 31, 1); 843 } 844 tcg_gen_extract_tl(cpu_ov, cpu_ov, TARGET_LONG_BITS - 1, 1); 845 } 846 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 847 } 848 849 static inline void gen_op_arith_compute_ca32(DisasContext *ctx, 850 TCGv res, TCGv arg0, TCGv arg1, 851 int sub) 852 { 853 TCGv t0; 854 855 if (!is_isa300(ctx)) { 856 return; 857 } 858 859 t0 = tcg_temp_new(); 860 if (sub) { 861 tcg_gen_eqv_tl(t0, arg0, arg1); 862 } else { 863 tcg_gen_xor_tl(t0, arg0, arg1); 864 } 865 tcg_gen_xor_tl(t0, t0, res); 866 tcg_gen_extract_tl(cpu_ca32, t0, 32, 1); 867 tcg_temp_free(t0); 868 } 869 870 /* Common add function */ 871 static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1, 872 TCGv arg2, bool add_ca, bool compute_ca, 873 bool compute_ov, bool compute_rc0) 874 { 875 TCGv t0 = ret; 876 877 if (compute_ca || compute_ov) { 878 t0 = tcg_temp_new(); 879 } 880 881 if (compute_ca) { 882 if (NARROW_MODE(ctx)) { 883 /* Caution: a non-obvious corner case of the spec is that we 884 must produce the *entire* 64-bit addition, but produce the 885 
carry into bit 32. */ 886 TCGv t1 = tcg_temp_new(); 887 tcg_gen_xor_tl(t1, arg1, arg2); /* add without carry */ 888 tcg_gen_add_tl(t0, arg1, arg2); 889 if (add_ca) { 890 tcg_gen_add_tl(t0, t0, cpu_ca); 891 } 892 tcg_gen_xor_tl(cpu_ca, t0, t1); /* bits changed w/ carry */ 893 tcg_temp_free(t1); 894 tcg_gen_extract_tl(cpu_ca, cpu_ca, 32, 1); 895 if (is_isa300(ctx)) { 896 tcg_gen_mov_tl(cpu_ca32, cpu_ca); 897 } 898 } else { 899 TCGv zero = tcg_const_tl(0); 900 if (add_ca) { 901 tcg_gen_add2_tl(t0, cpu_ca, arg1, zero, cpu_ca, zero); 902 tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, arg2, zero); 903 } else { 904 tcg_gen_add2_tl(t0, cpu_ca, arg1, zero, arg2, zero); 905 } 906 gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, 0); 907 tcg_temp_free(zero); 908 } 909 } else { 910 tcg_gen_add_tl(t0, arg1, arg2); 911 if (add_ca) { 912 tcg_gen_add_tl(t0, t0, cpu_ca); 913 } 914 } 915 916 if (compute_ov) { 917 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0); 918 } 919 if (unlikely(compute_rc0)) { 920 gen_set_Rc0(ctx, t0); 921 } 922 923 if (t0 != ret) { 924 tcg_gen_mov_tl(ret, t0); 925 tcg_temp_free(t0); 926 } 927 } 928 /* Add functions with two operands */ 929 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \ 930 static void glue(gen_, name)(DisasContext *ctx) \ 931 { \ 932 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \ 933 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 934 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 935 } 936 /* Add functions with one operand and one immediate */ 937 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \ 938 add_ca, compute_ca, compute_ov) \ 939 static void glue(gen_, name)(DisasContext *ctx) \ 940 { \ 941 TCGv t0 = tcg_const_tl(const_val); \ 942 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \ 943 cpu_gpr[rA(ctx->opcode)], t0, \ 944 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 945 tcg_temp_free(t0); \ 946 } 947 948 /* add add. addo addo. 
*/ 949 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0) 950 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1) 951 /* addc addc. addco addco. */ 952 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0) 953 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1) 954 /* adde adde. addeo addeo. */ 955 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0) 956 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1) 957 /* addme addme. addmeo addmeo. */ 958 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0) 959 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1) 960 /* addze addze. addzeo addzeo.*/ 961 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0) 962 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1) 963 /* addi */ 964 static void gen_addi(DisasContext *ctx) 965 { 966 target_long simm = SIMM(ctx->opcode); 967 968 if (rA(ctx->opcode) == 0) { 969 /* li case */ 970 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm); 971 } else { 972 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], 973 cpu_gpr[rA(ctx->opcode)], simm); 974 } 975 } 976 /* addic addic.*/ 977 static inline void gen_op_addic(DisasContext *ctx, bool compute_rc0) 978 { 979 TCGv c = tcg_const_tl(SIMM(ctx->opcode)); 980 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 981 c, 0, 1, 0, compute_rc0); 982 tcg_temp_free(c); 983 } 984 985 static void gen_addic(DisasContext *ctx) 986 { 987 gen_op_addic(ctx, 0); 988 } 989 990 static void gen_addic_(DisasContext *ctx) 991 { 992 gen_op_addic(ctx, 1); 993 } 994 995 /* addis */ 996 static void gen_addis(DisasContext *ctx) 997 { 998 target_long simm = SIMM(ctx->opcode); 999 1000 if (rA(ctx->opcode) == 0) { 1001 /* lis case */ 1002 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm << 16); 1003 } else { 1004 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], 1005 cpu_gpr[rA(ctx->opcode)], simm << 16); 1006 } 1007 } 1008 1009 /* addpcis */ 1010 static void gen_addpcis(DisasContext *ctx) 1011 { 1012 target_long d = DX(ctx->opcode); 1013 1014 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], ctx->base.pc_next + (d << 16)); 1015 } 1016 1017 static inline void 
gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1, 1018 TCGv arg2, int sign, int compute_ov) 1019 { 1020 TCGv_i32 t0 = tcg_temp_new_i32(); 1021 TCGv_i32 t1 = tcg_temp_new_i32(); 1022 TCGv_i32 t2 = tcg_temp_new_i32(); 1023 TCGv_i32 t3 = tcg_temp_new_i32(); 1024 1025 tcg_gen_trunc_tl_i32(t0, arg1); 1026 tcg_gen_trunc_tl_i32(t1, arg2); 1027 if (sign) { 1028 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN); 1029 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1); 1030 tcg_gen_and_i32(t2, t2, t3); 1031 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0); 1032 tcg_gen_or_i32(t2, t2, t3); 1033 tcg_gen_movi_i32(t3, 0); 1034 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1); 1035 tcg_gen_div_i32(t3, t0, t1); 1036 tcg_gen_extu_i32_tl(ret, t3); 1037 } else { 1038 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t1, 0); 1039 tcg_gen_movi_i32(t3, 0); 1040 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1); 1041 tcg_gen_divu_i32(t3, t0, t1); 1042 tcg_gen_extu_i32_tl(ret, t3); 1043 } 1044 if (compute_ov) { 1045 tcg_gen_extu_i32_tl(cpu_ov, t2); 1046 if (is_isa300(ctx)) { 1047 tcg_gen_extu_i32_tl(cpu_ov32, t2); 1048 } 1049 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 1050 } 1051 tcg_temp_free_i32(t0); 1052 tcg_temp_free_i32(t1); 1053 tcg_temp_free_i32(t2); 1054 tcg_temp_free_i32(t3); 1055 1056 if (unlikely(Rc(ctx->opcode) != 0)) 1057 gen_set_Rc0(ctx, ret); 1058 } 1059 /* Div functions */ 1060 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \ 1061 static void glue(gen_, name)(DisasContext *ctx) \ 1062 { \ 1063 gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)], \ 1064 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 1065 sign, compute_ov); \ 1066 } 1067 /* divwu divwu. divwuo divwuo. */ 1068 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0); 1069 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1); 1070 /* divw divw. divwo divwo. */ 1071 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0); 1072 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1); 1073 1074 /* div[wd]eu[o][.] 
 */
/*
 * Extended-precision divide (div[wd]e[u][o][.]): these need a 2N-by-N-bit
 * division, so they are implemented with out-of-line helpers rather than
 * inline TCG ops.  "hlpr" names the helper; compute_ov selects the OE=1
 * (overflow-recording) form, Rc(opcode) the record (".") form.
 */
#define GEN_DIVE(name, hlpr, compute_ov)                                      \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 t0 = tcg_const_i32(compute_ov);                                  \
    gen_helper_##hlpr(cpu_gpr[rD(ctx->opcode)], cpu_env,                      \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); \
    tcg_temp_free_i32(t0);                                                    \
    if (unlikely(Rc(ctx->opcode) != 0)) {                                     \
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);                           \
    }                                                                         \
}

GEN_DIVE(divweu, divweu, 0);
GEN_DIVE(divweuo, divweu, 1);
GEN_DIVE(divwe, divwe, 0);
GEN_DIVE(divweo, divwe, 1);

#if defined(TARGET_PPC64)
/*
 * 64-bit divide, emitted inline.  A host division by zero (and, for the
 * signed form, INT64_MIN / -1) would be undefined behaviour, so t2 is
 * computed as a 0/1 "invalid operation" flag and an invalid divisor is
 * replaced (via movcond) with t2 itself, i.e. with 1, before dividing.
 * t2 doubles as the value written to OV/OV32 when compute_ov is set.
 */
static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign, int compute_ov)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv_i64 t2 = tcg_temp_new_i64();
    TCGv_i64 t3 = tcg_temp_new_i64();

    /* Work on copies so the guest GPRs (possibly aliased by ret) survive.  */
    tcg_gen_mov_i64(t0, arg1);
    tcg_gen_mov_i64(t1, arg2);
    if (sign) {
        /* t2 = (t0 == INT64_MIN && t1 == -1) || (t1 == 0)  */
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i64(t2, t2, t3);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i64(t2, t2, t3);
        tcg_gen_movi_i64(t3, 0);
        /* If invalid (t2 != 0), divide by t2 (== 1) instead of t1.  */
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_div_i64(ret, t0, t1);
    } else {
        /* Only division by zero is invalid for the unsigned form.  */
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t1, 0);
        tcg_gen_movi_i64(t3, 0);
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_divu_i64(ret, t0, t1);
    }
    if (compute_ov) {
        tcg_gen_mov_tl(cpu_ov, t2);
        if (is_isa300(ctx)) {
            tcg_gen_mov_tl(cpu_ov32, t2);
        }
        tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
    tcg_temp_free_i64(t3);

    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, ret);
}

#define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov)                  \
static void glue(gen_, name)(DisasContext *ctx)                           \
{                                                                         \
    gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)],                      \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
                      sign, compute_ov);                                  \
}
/* divdu divdu. divduo divduo. */
GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
/* divd divd. divdo divdo. */
GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0);
GEN_INT_ARITH_DIVD(divd o, 0x1F, 1, 1);

GEN_DIVE(divdeu, divdeu, 0);
GEN_DIVE(divdeuo, divdeu, 1);
GEN_DIVE(divde, divde, 0);
GEN_DIVE(divdeo, divde, 1);
#endif

/*
 * 32-bit modulo (moduw/modsw, ISA 3.00).  Same invalid-divisor steering
 * as the divide helpers above: the architected result is undefined for
 * a zero divisor (and INT_MIN % -1 signed), so only host UB avoidance
 * matters; no overflow flags are involved for modulo.
 */
static inline void gen_op_arith_modw(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg1);
    tcg_gen_trunc_tl_i32(t1, arg2);
    if (sign) {
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_i32 t3 = tcg_temp_new_i32();
        tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i32(t2, t2, t3);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i32(t2, t2, t3);
        tcg_gen_movi_i32(t3, 0);
        /* Invalid divisor is replaced by t2 (== 1); result sign-extended.  */
        tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_rem_i32(t3, t0, t1);
        tcg_gen_ext_i32_tl(ret, t3);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        TCGv_i32 t2 = tcg_const_i32(1);
        TCGv_i32 t3 = tcg_const_i32(0);
        /* A zero divisor becomes 1; x % 1 == 0.  Result zero-extended.  */
        tcg_gen_movcond_i32(TCG_COND_EQ, t1, t1, t3, t2, t1);
        tcg_gen_remu_i32(t3, t0, t1);
        tcg_gen_extu_i32_tl(ret, t3);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    }
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
}

#define GEN_INT_ARITH_MODW(name, opc3, sign)                              \
static void glue(gen_, name)(DisasContext *ctx)                           \
{                                                                         \
    gen_op_arith_modw(ctx, cpu_gpr[rD(ctx->opcode)],                      \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
                      sign);                                              \
}
GEN_INT_ARITH_MODW(moduw, 0x08, 0);
GEN_INT_ARITH_MODW(modsw, 0x18, 1);

#if defined(TARGET_PPC64)
/*
 * 64-bit modulo (modud/modsd, ISA 3.00).  Mirrors gen_op_arith_modw:
 * divisors that would trigger host UB (zero; -1 with INT64_MIN dividend
 * for the signed form) are steered to a safe value first.
 */
static inline void gen_op_arith_modd(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    /* Copy operands; ret may alias arg1/arg2.  */
    tcg_gen_mov_i64(t0, arg1);
    tcg_gen_mov_i64(t1, arg2);
    if (sign) {
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i64 t3 = tcg_temp_new_i64();
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i64(t2, t2, t3);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i64(t2, t2, t3);
        tcg_gen_movi_i64(t3, 0);
        /* Invalid divisor is replaced by t2 (== 1) to avoid host UB.  */
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_rem_i64(ret, t0, t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        TCGv_i64 t2 = tcg_const_i64(1);
        TCGv_i64 t3 = tcg_const_i64(0);
        /* A zero divisor becomes 1; x % 1 == 0.  */
        tcg_gen_movcond_i64(TCG_COND_EQ, t1, t1, t3, t2, t1);
        tcg_gen_remu_i64(ret, t0, t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

#define GEN_INT_ARITH_MODD(name, opc3, sign)                              \
static void glue(gen_, name)(DisasContext *ctx)                           \
{                                                                         \
    gen_op_arith_modd(ctx, cpu_gpr[rD(ctx->opcode)],                      \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
                      sign);                                              \
}

GEN_INT_ARITH_MODD(modud, 0x08, 0);
GEN_INT_ARITH_MODD(modsd, 0x18, 1);
#endif

/* mulhw mulhw. : high 32 bits of the signed 32x32->64 product.  */
static void gen_mulhw(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
    /* muls2 produces lo in t0, hi in t1; only the high half is kept.  */
    tcg_gen_muls2_i32(t0, t1, t0, t1);
    tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* mulhwu mulhwu. : high 32 bits of the unsigned 32x32->64 product.  */
static void gen_mulhwu(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mulu2_i32(t0, t1, t0, t1);
    tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/*
 * mullw mullw. : low-order 32x32 multiply.  On a 64-bit target the
 * full 64-bit product of the sign-extended halves is written to rD
 * (the upper 32 bits of rD are defined this way on PPC64).
 */
static void gen_mullw(DisasContext *ctx)
{
#if defined(TARGET_PPC64)
    TCGv_i64 t0, t1;
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mul_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
#else
    tcg_gen_mul_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                    cpu_gpr[rB(ctx->opcode)]);
#endif
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/*
 * mullwo mullwo. : as mullw but records OV when the signed product
 * does not fit in 32 bits (i.e. the high half is not the sign
 * extension of the low half).
 */
static void gen_mullwo(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_muls2_i32(t0, t1, t0, t1);
#if defined(TARGET_PPC64)
    tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
#else
    tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], t0);
#endif

    /* OV iff hi half != sign extension of lo half.  */
    tcg_gen_sari_i32(t0, t0, 31);
    tcg_gen_setcond_i32(TCG_COND_NE, t0, t0, t1);
    tcg_gen_extu_i32_tl(cpu_ov, t0);
    if (is_isa300(ctx)) {
        tcg_gen_mov_tl(cpu_ov32, cpu_ov);
    }
    tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);

    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* mulli : multiply by sign-extended 16-bit immediate.  */
static void gen_mulli(DisasContext *ctx)
{
    tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                    SIMM(ctx->opcode));
}

#if defined(TARGET_PPC64)
/* mulhd mulhd. : high 64 bits of the signed 64x64->128 product.  */
static void gen_mulhd(DisasContext *ctx)
{
    TCGv lo = tcg_temp_new();
    tcg_gen_muls2_tl(lo, cpu_gpr[rD(ctx->opcode)],
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    tcg_temp_free(lo);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}

/* mulhdu mulhdu. : high 64 bits of the unsigned 64x64->128 product.  */
static void gen_mulhdu(DisasContext *ctx)
{
    TCGv lo = tcg_temp_new();
    tcg_gen_mulu2_tl(lo, cpu_gpr[rD(ctx->opcode)],
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    tcg_temp_free(lo);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}

/* mulld mulld.
 */
static void gen_mulld(DisasContext *ctx)
{
    tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                   cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/*
 * mulldo mulldo. : low 64 bits of the product, with OV set when the
 * signed 128-bit product does not fit in 64 bits.
 */
static void gen_mulldo(DisasContext *ctx)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_muls2_i64(t0, t1, cpu_gpr[rA(ctx->opcode)],
                      cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], t0);

    /* OV iff hi 64 bits != sign extension of lo 64 bits.  */
    tcg_gen_sari_i64(t0, t0, 63);
    tcg_gen_setcond_i64(TCG_COND_NE, cpu_ov, t0, t1);
    if (is_isa300(ctx)) {
        tcg_gen_mov_tl(cpu_ov32, cpu_ov);
    }
    tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}
#endif

/*
 * Common subf function: computes ret = ~arg1 + arg2 [+ CA], i.e.
 * arg2 - arg1 [+ CA - 1], optionally updating CA/CA32, OV/OV32 and CR0.
 * A scratch is substituted for ret when flags are computed, because
 * ret may alias arg1/arg2 and the flag computations re-read them.
 */
static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, bool add_ca, bool compute_ca,
                                     bool compute_ov, bool compute_rc0)
{
    TCGv t0 = ret;

    if (compute_ca || compute_ov) {
        t0 = tcg_temp_new();
    }

    if (compute_ca) {
        /* dest = ~arg1 + arg2 [+ ca].  */
        if (NARROW_MODE(ctx)) {
            /* Caution: a non-obvious corner case of the spec is that we
               must produce the *entire* 64-bit addition, but produce the
               carry into bit 32.  */
            TCGv inv1 = tcg_temp_new();
            TCGv t1 = tcg_temp_new();
            tcg_gen_not_tl(inv1, arg1);
            if (add_ca) {
                tcg_gen_add_tl(t0, arg2, cpu_ca);
            } else {
                tcg_gen_addi_tl(t0, arg2, 1);
            }
            tcg_gen_xor_tl(t1, arg2, inv1);         /* add without carry */
            tcg_gen_add_tl(t0, t0, inv1);
            tcg_temp_free(inv1);
            tcg_gen_xor_tl(cpu_ca, t0, t1);         /* bits changes w/ carry */
            tcg_temp_free(t1);
            /* Extract the carry that propagated into bit 32.  */
            tcg_gen_extract_tl(cpu_ca, cpu_ca, 32, 1);
            if (is_isa300(ctx)) {
                tcg_gen_mov_tl(cpu_ca32, cpu_ca);
            }
        } else if (add_ca) {
            /* Two double-width adds keep the carry-out in cpu_ca.  */
            TCGv zero, inv1 = tcg_temp_new();
            tcg_gen_not_tl(inv1, arg1);
            zero = tcg_const_tl(0);
            tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero);
            tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero);
            gen_op_arith_compute_ca32(ctx, t0, inv1, arg2, 0);
            tcg_temp_free(zero);
            tcg_temp_free(inv1);
        } else {
            /* Without CA in, carry-out is simply arg2 >= arg1 (unsigned).  */
            tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1);
            tcg_gen_sub_tl(t0, arg2, arg1);
            gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, 1);
        }
    } else if (add_ca) {
        /* Since we're ignoring carry-out, we can simplify the
           standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1.  */
        tcg_gen_sub_tl(t0, arg2, arg1);
        tcg_gen_add_tl(t0, t0, cpu_ca);
        tcg_gen_subi_tl(t0, t0, 1);
    } else {
        tcg_gen_sub_tl(t0, arg2, arg1);
    }

    if (compute_ov) {
        gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
    }
    if (unlikely(compute_rc0)) {
        gen_set_Rc0(ctx, t0);
    }

    if (t0 != ret) {
        tcg_gen_mov_tl(ret, t0);
        tcg_temp_free(t0);
    }
}
/* Sub functions with Two operands functions */
#define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov)    \
static void glue(gen_, name)(DisasContext *ctx)                           \
{                                                                         \
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)],                      \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
                      add_ca, compute_ca, compute_ov, Rc(ctx->opcode));   \
}
/* Sub functions with one operand and one immediate */
#define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val,                   \
                                 add_ca, compute_ca, compute_ov)          \
static void glue(gen_, name)(DisasContext *ctx)                           \
{                                                                         \
    TCGv t0 = tcg_const_tl(const_val);                                    \
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)],                      \
                      cpu_gpr[rA(ctx->opcode)], t0,                       \
                      add_ca, compute_ca, compute_ov, Rc(ctx->opcode));   \
    tcg_temp_free(t0);                                                    \
}
/* subf subf. subfo subfo. */
GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
/* subfc subfc. subfco subfco. */
GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
/* subfe subfe. subfeo subfo. */
GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
/* subfme subfme. subfmeo subfmeo. */
GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
/* subfze subfze.
   subfzeo subfzeo. */
GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)

/* subfic : rD = SIMM - rA, always recording CA, never OV or CR0.  */
static void gen_subfic(DisasContext *ctx)
{
    TCGv c = tcg_const_tl(SIMM(ctx->opcode));
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                      c, 0, 1, 0, 0);
    tcg_temp_free(c);
}

/* neg neg. nego nego. : negate as 0 - rA via the common subf path.  */
static inline void gen_op_arith_neg(DisasContext *ctx, bool compute_ov)
{
    TCGv zero = tcg_const_tl(0);
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                      zero, 0, 0, compute_ov, Rc(ctx->opcode));
    tcg_temp_free(zero);
}

/* neg without OE: emitted directly, no flag bookkeeping needed.  */
static void gen_neg(DisasContext *ctx)
{
    tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}

static void gen_nego(DisasContext *ctx)
{
    gen_op_arith_neg(ctx, 1);
}

/*** Integer logical ***/
/* Two-source logical op: rA = rS <op> rB, optional CR0 record.  */
#define GEN_LOGICAL2(name, tcg_op, opc, type)                             \
static void glue(gen_, name)(DisasContext *ctx)                           \
{                                                                         \
    tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],            \
           cpu_gpr[rB(ctx->opcode)]);                                     \
    if (unlikely(Rc(ctx->opcode) != 0))                                   \
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);                       \
}

/* One-source logical op: rA = <op>(rS), optional CR0 record.  */
#define GEN_LOGICAL1(name, tcg_op, opc, type)                             \
static void glue(gen_, name)(DisasContext *ctx)                           \
{                                                                         \
    tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);           \
    if (unlikely(Rc(ctx->opcode) != 0))                                   \
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);                       \
}

/* and & and. */
GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER);
/* andc & andc. */
GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER);

/* andi. : always records CR0 (the mnemonic has no non-record form).  */
static void gen_andi_(DisasContext *ctx)
{
    tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode));
    gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* andis. : as andi. with the immediate shifted to the upper halfword.  */
static void gen_andis_(DisasContext *ctx)
{
    tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode) << 16);
    gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* cntlzw : count leading zeros of the low 32 bits; clzi's 32 is the
 * result for a zero input.  */
static void gen_cntlzw(DisasContext *ctx)
{
    TCGv_i32 t = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_clzi_i32(t, t, 32);
    tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t);
    tcg_temp_free_i32(t);

    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* cnttzw : count trailing zeros of the low 32 bits (ISA 3.00).  */
static void gen_cnttzw(DisasContext *ctx)
{
    TCGv_i32 t = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_ctzi_i32(t, t, 32);
    tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t);
    tcg_temp_free_i32(t);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* eqv & eqv. */
GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER);
/* extsb & extsb. */
GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER);
/* extsh & extsh. */
GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER);
/* nand & nand. */
GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER);
/* nor & nor.
 */
GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER);

#if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
/* Halt this vCPU and leave the TB so other vCPUs get to run.  */
static void gen_pause(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_const_i32(0);
    /* Clear CPUState.halted; reached via the env offset inside the CPU.  */
    tcg_gen_st_i32(t0, cpu_env,
                   -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
    tcg_temp_free_i32(t0);

    /* Stop translation, this gives other CPUs a chance to run */
    gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
}
#endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */

/*
 * or & or. — also decodes the "or rx,rx,rx" nop-space hints: on PPC64
 * these encode thread-priority changes written to SPR_PPR.
 */
static void gen_or(DisasContext *ctx)
{
    int rs, ra, rb;

    rs = rS(ctx->opcode);
    ra = rA(ctx->opcode);
    rb = rB(ctx->opcode);
    /* Optimisation for mr. ri case */
    if (rs != ra || rs != rb) {
        if (rs != rb)
            tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
        else
            tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);
        if (unlikely(Rc(ctx->opcode) != 0))
            gen_set_Rc0(ctx, cpu_gpr[ra]);
    } else if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rs]);
#if defined(TARGET_PPC64)
    } else if (rs != 0) { /* 0 is nop */
        int prio = 0;

        switch (rs) {
        case 1:
            /* Set process priority to low */
            prio = 2;
            break;
        case 6:
            /* Set process priority to medium-low */
            prio = 3;
            break;
        case 2:
            /* Set process priority to normal */
            prio = 4;
            break;
#if !defined(CONFIG_USER_ONLY)
        case 31:
            if (!ctx->pr) {
                /* Set process priority to very low */
                prio = 1;
            }
            break;
        case 5:
            if (!ctx->pr) {
                /* Set process priority to medium-high */
                prio = 5;
            }
            break;
        case 3:
            if (!ctx->pr) {
                /* Set process priority to high */
                prio = 6;
            }
            break;
        case 7:
            if (ctx->hv && !ctx->pr) {
                /* Set process priority to very high */
                prio = 7;
            }
            break;
#endif
        default:
            break;
        }
        if (prio) {
            TCGv t0 = tcg_temp_new();
            gen_load_spr(t0, SPR_PPR);
            /* PPR[11:13] is the priority field.  */
            tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
            tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
            gen_store_spr(SPR_PPR, t0);
            tcg_temp_free(t0);
        }
#if !defined(CONFIG_USER_ONLY)
        /* Pause out of TCG otherwise spin loops with smt_low eat too much
         * CPU and the kernel hangs.  This applies to all encodings other
         * than no-op, e.g., miso(rs=26), yield(27), mdoio(29), mdoom(30),
         * and all currently undefined.
         */
        gen_pause(ctx);
#endif
#endif
    }
}
/* orc & orc. */
GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);

/* xor & xor. */
static void gen_xor(DisasContext *ctx)
{
    /* Optimisation for "set to zero" case */
    if (rS(ctx->opcode) != rB(ctx->opcode))
        tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    else
        tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* ori : "ori r0,r0,0" (and any same-reg zero-immediate form) is a nop.  */
static void gen_ori(DisasContext *ctx)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        /* NOP */
        return;
    }
    tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
}

/* oris */
static void gen_oris(DisasContext *ctx)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        /* NOP */
        return;
    }
    tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16);
}

/* xori */
static void gen_xori(DisasContext *ctx)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        /* NOP */
        return;
    }
    tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
}

/*
   xoris */
static void gen_xoris(DisasContext *ctx)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        /* NOP */
        return;
    }
    tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16);
}

/* popcntb : PowerPC 2.03 specification — per-byte population count.  */
static void gen_popcntb(DisasContext *ctx)
{
    gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
}

/* popcntw : per-word population count; inline ctpop on 32-bit targets.  */
static void gen_popcntw(DisasContext *ctx)
{
#if defined(TARGET_PPC64)
    gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
#else
    tcg_gen_ctpop_i32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
#endif
}

#if defined(TARGET_PPC64)
/* popcntd: PowerPC 2.06 specification */
static void gen_popcntd(DisasContext *ctx)
{
    tcg_gen_ctpop_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
}
#endif

/*
 * prtyw: PowerPC 2.05 specification.  XOR-folds each 32-bit word down
 * to its least significant bit, giving the parity of the low byte of
 * each word; the mask keeps bit 0 of both words.
 */
static void gen_prtyw(DisasContext *ctx)
{
    TCGv ra = cpu_gpr[rA(ctx->opcode)];
    TCGv rs = cpu_gpr[rS(ctx->opcode)];
    TCGv t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, rs, 16);
    tcg_gen_xor_tl(ra, rs, t0);
    tcg_gen_shri_tl(t0, ra, 8);
    tcg_gen_xor_tl(ra, ra, t0);
    tcg_gen_andi_tl(ra, ra, (target_ulong)0x100000001ULL);
    tcg_temp_free(t0);
}

#if defined(TARGET_PPC64)
/* prtyd: PowerPC 2.05 specification — as prtyw but folds all 64 bits.  */
static void gen_prtyd(DisasContext *ctx)
{
    TCGv ra = cpu_gpr[rA(ctx->opcode)];
    TCGv rs = cpu_gpr[rS(ctx->opcode)];
    TCGv t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, rs, 32);
    tcg_gen_xor_tl(ra, rs, t0);
    tcg_gen_shri_tl(t0, ra, 16);
    tcg_gen_xor_tl(ra, ra, t0);
    tcg_gen_shri_tl(t0, ra, 8);
    tcg_gen_xor_tl(ra, ra, t0);
    tcg_gen_andi_tl(ra, ra, 1);
    tcg_temp_free(t0);
}
#endif

#if defined(TARGET_PPC64)
/* bpermd : bit permutation, done out of line.  */
static void gen_bpermd(DisasContext *ctx)
{
    gen_helper_bpermd(cpu_gpr[rA(ctx->opcode)],
                      cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
}
#endif

#if defined(TARGET_PPC64)
/* extsw & extsw. */
GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B);

/* cntlzd : 64-bit count leading zeros; clzi's 64 covers the zero input.  */
static void gen_cntlzd(DisasContext *ctx)
{
    tcg_gen_clzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* cnttzd : 64-bit count trailing zeros (ISA 3.00).  */
static void gen_cnttzd(DisasContext *ctx)
{
    tcg_gen_ctzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}

/*
 * darn : deliver a random number.  L=0 requests the 32-bit form,
 * L=1/2 the 64-bit forms; any other L value returns -1 (the
 * architected "no random number available" error value).
 */
static void gen_darn(DisasContext *ctx)
{
    int l = L(ctx->opcode);

    if (l == 0) {
        gen_helper_darn32(cpu_gpr[rD(ctx->opcode)]);
    } else if (l <= 2) {
        /* Return 64-bit random for both CRN and RRN */
        gen_helper_darn64(cpu_gpr[rD(ctx->opcode)]);
    } else {
        tcg_gen_movi_i64(cpu_gpr[rD(ctx->opcode)], -1);
    }
}
#endif

/*** Integer rotate ***/

/* rlwimi & rlwimi.
 */
/*
 * Rotate-left-word then insert under mask: bits mb..me of rA are
 * replaced with the corresponding bits of rS rotated left by sh.
 * The common contiguous case maps directly onto a TCG deposit.
 */
static void gen_rlwimi(DisasContext *ctx)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    uint32_t sh = SH(ctx->opcode);
    uint32_t mb = MB(ctx->opcode);
    uint32_t me = ME(ctx->opcode);

    if (sh == (31-me) && mb <= me) {
        /* The rotate lines the field up exactly with mb..me: deposit.  */
        tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
    } else {
        target_ulong mask;
        TCGv t1;

#if defined(TARGET_PPC64)
        /* MASK() works on 64-bit bit numbers; word masks start at 32.  */
        mb += 32;
        me += 32;
#endif
        mask = MASK(mb, me);

        t1 = tcg_temp_new();
        if (mask <= 0xffffffffu) {
            /* Mask confined to the low word: a 32-bit rotate suffices.  */
            TCGv_i32 t0 = tcg_temp_new_i32();
            tcg_gen_trunc_tl_i32(t0, t_rs);
            tcg_gen_rotli_i32(t0, t0, sh);
            tcg_gen_extu_i32_tl(t1, t0);
            tcg_temp_free_i32(t0);
        } else {
#if defined(TARGET_PPC64)
            /* Wrapping mask: duplicate the word, then rotate 64-bit.  */
            tcg_gen_deposit_i64(t1, t_rs, t_rs, 32, 32);
            tcg_gen_rotli_i64(t1, t1, sh);
#else
            g_assert_not_reached();
#endif
        }

        tcg_gen_andi_tl(t1, t1, mask);
        tcg_gen_andi_tl(t_ra, t_ra, ~mask);
        tcg_gen_or_tl(t_ra, t_ra, t1);
        tcg_temp_free(t1);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}

/*
 * Rotate-left-word immediate then AND with mask.  Shift/extract special
 * cases are recognised so the common slwi/srwi idioms become a single op.
 */
static void gen_rlwinm(DisasContext *ctx)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    int sh = SH(ctx->opcode);
    int mb = MB(ctx->opcode);
    int me = ME(ctx->opcode);
    int len = me - mb + 1;
    int rsh = (32 - sh) & 31;

    if (sh != 0 && len > 0 && me == (31 - sh)) {
        /* slwi: left shift with zero fill.  */
        tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
    } else if (me == 31 && rsh + len <= 32) {
        /* srwi / rlwinm-extract: right shift + mask == extract.  */
        tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
    } else {
        target_ulong mask;
#if defined(TARGET_PPC64)
        mb += 32;
        me += 32;
#endif
        mask = MASK(mb, me);
        if (sh == 0) {
            tcg_gen_andi_tl(t_ra, t_rs, mask);
        } else if (mask <= 0xffffffffu) {
            /* Mask confined to the low word: 32-bit rotate.  */
            TCGv_i32 t0 = tcg_temp_new_i32();
            tcg_gen_trunc_tl_i32(t0, t_rs);
            tcg_gen_rotli_i32(t0, t0, sh);
            tcg_gen_andi_i32(t0, t0, mask);
            tcg_gen_extu_i32_tl(t_ra, t0);
            tcg_temp_free_i32(t0);
        } else {
#if defined(TARGET_PPC64)
            /* Wrapping mask: duplicate the word and rotate 64-bit.  */
            tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
            tcg_gen_rotli_i64(t_ra, t_ra, sh);
            tcg_gen_andi_i64(t_ra, t_ra, mask);
#else
            g_assert_not_reached();
#endif
        }
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}

/* rlwnm & rlwnm.
 */
/* Rotate-left-word by register amount (rB & 0x1f) then AND with mask.  */
static void gen_rlwnm(DisasContext *ctx)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
    uint32_t mb = MB(ctx->opcode);
    uint32_t me = ME(ctx->opcode);
    target_ulong mask;

#if defined(TARGET_PPC64)
    mb += 32;
    me += 32;
#endif
    mask = MASK(mb, me);

    if (mask <= 0xffffffffu) {
        /* Mask confined to the low word: 32-bit rotate.  */
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(t0, t_rb);
        tcg_gen_trunc_tl_i32(t1, t_rs);
        tcg_gen_andi_i32(t0, t0, 0x1f);
        tcg_gen_rotl_i32(t1, t1, t0);
        tcg_gen_extu_i32_tl(t_ra, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else {
#if defined(TARGET_PPC64)
        /* Wrapping mask: duplicate the word and rotate 64-bit.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_andi_i64(t0, t_rb, 0x1f);
        tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
        tcg_gen_rotl_i64(t_ra, t_ra, t0);
        tcg_temp_free_i64(t0);
#else
        g_assert_not_reached();
#endif
    }

    tcg_gen_andi_tl(t_ra, t_ra, mask);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}

#if defined(TARGET_PPC64)
/* Expand the two split-field bits (mb/sh high bit) into variants.  */
#define GEN_PPC64_R2(name, opc1, opc2)                                    \
static void glue(gen_, name##0)(DisasContext *ctx)                        \
{                                                                         \
    gen_##name(ctx, 0);                                                   \
}                                                                         \
                                                                          \
static void glue(gen_, name##1)(DisasContext *ctx)                        \
{                                                                         \
    gen_##name(ctx, 1);                                                   \
}
#define GEN_PPC64_R4(name, opc1, opc2)                                    \
static void glue(gen_, name##0)(DisasContext *ctx)                        \
{                                                                         \
    gen_##name(ctx, 0, 0);                                                \
}                                                                         \
                                                                          \
static void glue(gen_, name##1)(DisasContext *ctx)                        \
{                                                                         \
    gen_##name(ctx, 0, 1);                                                \
}                                                                         \
                                                                          \
static void glue(gen_, name##2)(DisasContext *ctx)                        \
{                                                                         \
    gen_##name(ctx, 1, 0);                                                \
}                                                                         \
                                                                          \
static void glue(gen_, name##3)(DisasContext *ctx)                        \
{                                                                         \
    gen_##name(ctx, 1, 1);                                                \
}

/* Common rldic* path: 64-bit rotate left by sh, then AND MASK(mb, me);
 * shift/extract special cases avoid the rotate+mask pair.  */
static void gen_rldinm(DisasContext *ctx, int mb, int me, int sh)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    int len = me - mb + 1;
    int rsh = (64 - sh) & 63;

    if (sh != 0 && len > 0 && me == (63 - sh)) {
        /* sldi: left shift with zero fill.  */
        tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
    } else if (me == 63 && rsh + len <= 64) {
        /* srdi / extract.  */
        tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
    } else {
        tcg_gen_rotli_tl(t_ra, t_rs, sh);
        tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}

/* rldicl - rldicl. : clear left — mask runs mb..63.  */
static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn)
{
    uint32_t sh, mb;

    /* mbn/shn are the high (split) bits of the 6-bit MD-form fields.  */
    sh = SH(ctx->opcode) | (shn << 5);
    mb = MB(ctx->opcode) | (mbn << 5);
    gen_rldinm(ctx, mb, 63, sh);
}
GEN_PPC64_R4(rldicl, 0x1E, 0x00);

/* rldicr - rldicr. : clear right — mask runs 0..me.  */
static inline void gen_rldicr(DisasContext *ctx, int men, int shn)
{
    uint32_t sh, me;

    sh = SH(ctx->opcode) | (shn << 5);
    /* MD-form puts me in the same bit positions as mb, hence MB() here.  */
    me = MB(ctx->opcode) | (men << 5);
    gen_rldinm(ctx, 0, me, sh);
}
GEN_PPC64_R4(rldicr, 0x1E, 0x02);

/* rldic - rldic.
 */
/* rldic : mask runs mb..63-sh.  */
static inline void gen_rldic(DisasContext *ctx, int mbn, int shn)
{
    uint32_t sh, mb;

    sh = SH(ctx->opcode) | (shn << 5);
    mb = MB(ctx->opcode) | (mbn << 5);
    gen_rldinm(ctx, mb, 63 - sh, sh);
}
GEN_PPC64_R4(rldic, 0x1E, 0x04);

/* Common rldc* path: rotate left by (rB & 0x3f), then AND MASK(mb, me).  */
static void gen_rldnm(DisasContext *ctx, int mb, int me)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
    TCGv t0;

    t0 = tcg_temp_new();
    tcg_gen_andi_tl(t0, t_rb, 0x3f);
    tcg_gen_rotl_tl(t_ra, t_rs, t0);
    tcg_temp_free(t0);

    tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}

/* rldcl - rldcl. : clear left — mask mb..63.  */
static inline void gen_rldcl(DisasContext *ctx, int mbn)
{
    uint32_t mb;

    mb = MB(ctx->opcode) | (mbn << 5);
    gen_rldnm(ctx, mb, 63);
}
GEN_PPC64_R2(rldcl, 0x1E, 0x08);

/* rldcr - rldcr. : clear right — mask 0..me (me shares MB's field).  */
static inline void gen_rldcr(DisasContext *ctx, int men)
{
    uint32_t me;

    me = MB(ctx->opcode) | (men << 5);
    gen_rldnm(ctx, 0, me);
}
GEN_PPC64_R2(rldcr, 0x1E, 0x09);

/* rldimi - rldimi. : rotate rS left by sh and insert into rA under
 * MASK(mb, 63-sh); the contiguous case is a plain deposit.  */
static void gen_rldimi(DisasContext *ctx, int mbn, int shn)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    uint32_t sh = SH(ctx->opcode) | (shn << 5);
    uint32_t mb = MB(ctx->opcode) | (mbn << 5);
    uint32_t me = 63 - sh;

    if (mb <= me) {
        tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
    } else {
        /* Wrapping mask: rotate, mask, and merge by hand.  */
        target_ulong mask = MASK(mb, me);
        TCGv t1 = tcg_temp_new();

        tcg_gen_rotli_tl(t1, t_rs, sh);
        tcg_gen_andi_tl(t1, t1, mask);
        tcg_gen_andi_tl(t_ra, t_ra, ~mask);
        tcg_gen_or_tl(t_ra, t_ra, t1);
        tcg_temp_free(t1);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}
GEN_PPC64_R4(rldimi, 0x1E, 0x06);
#endif

/*** Integer shift ***/

/*
 * slw & slw. : shift amounts 32..63 must yield zero, which the plain
 * TCG shift (masked to 5 bits) would not do — so rS is first ANDed
 * with a mask that is all-zeroes when bit 5 of rB is set.
 */
static void gen_slw(DisasContext *ctx)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    /* AND rS with a mask that is 0 when rB >= 0x20 */
#if defined(TARGET_PPC64)
    /* Propagate rB bit 5 across the register via shift-left/arith-right.  */
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
    tcg_gen_sari_tl(t0, t0, 0x3f);
#else
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
    tcg_gen_sari_tl(t0, t0, 0x1f);
#endif
    tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
    tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* sraw & sraw. : out of line — the helper also computes CA.  */
static void gen_sraw(DisasContext *ctx)
{
    gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], cpu_env,
                    cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* srawi & srawi.
*/ 2194 static void gen_srawi(DisasContext *ctx) 2195 { 2196 int sh = SH(ctx->opcode); 2197 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 2198 TCGv src = cpu_gpr[rS(ctx->opcode)]; 2199 if (sh == 0) { 2200 tcg_gen_ext32s_tl(dst, src); 2201 tcg_gen_movi_tl(cpu_ca, 0); 2202 if (is_isa300(ctx)) { 2203 tcg_gen_movi_tl(cpu_ca32, 0); 2204 } 2205 } else { 2206 TCGv t0; 2207 tcg_gen_ext32s_tl(dst, src); 2208 tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1); 2209 t0 = tcg_temp_new(); 2210 tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1); 2211 tcg_gen_and_tl(cpu_ca, cpu_ca, t0); 2212 tcg_temp_free(t0); 2213 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0); 2214 if (is_isa300(ctx)) { 2215 tcg_gen_mov_tl(cpu_ca32, cpu_ca); 2216 } 2217 tcg_gen_sari_tl(dst, dst, sh); 2218 } 2219 if (unlikely(Rc(ctx->opcode) != 0)) { 2220 gen_set_Rc0(ctx, dst); 2221 } 2222 } 2223 2224 /* srw & srw. */ 2225 static void gen_srw(DisasContext *ctx) 2226 { 2227 TCGv t0, t1; 2228 2229 t0 = tcg_temp_new(); 2230 /* AND rS with a mask that is 0 when rB >= 0x20 */ 2231 #if defined(TARGET_PPC64) 2232 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a); 2233 tcg_gen_sari_tl(t0, t0, 0x3f); 2234 #else 2235 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a); 2236 tcg_gen_sari_tl(t0, t0, 0x1f); 2237 #endif 2238 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 2239 tcg_gen_ext32u_tl(t0, t0); 2240 t1 = tcg_temp_new(); 2241 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f); 2242 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 2243 tcg_temp_free(t1); 2244 tcg_temp_free(t0); 2245 if (unlikely(Rc(ctx->opcode) != 0)) 2246 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2247 } 2248 2249 #if defined(TARGET_PPC64) 2250 /* sld & sld. 
 */
static void gen_sld(DisasContext *ctx)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    /* AND rS with a mask that is 0 when rB >= 0x40 */
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
    tcg_gen_sari_tl(t0, t0, 0x3f);
    tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    t1 = tcg_temp_new();
    /* Doubleword shift amount is the low 6 bits of rB. */
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
    tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* srad & srad. -- carry handling lives in the helper. */
static void gen_srad(DisasContext *ctx)
{
    gen_helper_srad(cpu_gpr[rA(ctx->opcode)], cpu_env,
                    cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/*
 * sradi & sradi. -- n supplies bit 5 of the 6-bit shift amount, which
 * the encoding splits across two fields.  CA/CA32 semantics mirror
 * gen_srawi above.
 */
static inline void gen_sradi(DisasContext *ctx, int n)
{
    int sh = SH(ctx->opcode) + (n << 5);
    TCGv dst = cpu_gpr[rA(ctx->opcode)];
    TCGv src = cpu_gpr[rS(ctx->opcode)];
    if (sh == 0) {
        tcg_gen_mov_tl(dst, src);
        tcg_gen_movi_tl(cpu_ca, 0);
        if (is_isa300(ctx)) {
            tcg_gen_movi_tl(cpu_ca32, 0);
        }
    } else {
        TCGv t0;
        /* CA = source negative AND at least one 1-bit shifted out. */
        tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1);
        t0 = tcg_temp_new();
        tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1);
        tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
        tcg_temp_free(t0);
        tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
        if (is_isa300(ctx)) {
            tcg_gen_mov_tl(cpu_ca32, cpu_ca);
        }
        tcg_gen_sari_tl(dst, src, sh);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, dst);
    }
}

/* Opcode-table trampolines for the two sradi encodings (sh bit 5). */
static void gen_sradi0(DisasContext *ctx)
{
    gen_sradi(ctx, 0);
}

static void gen_sradi1(DisasContext *ctx)
{
    gen_sradi(ctx, 1);
}

/* extswsli & extswsli.
 */
/* extswsli: sign-extend the low word, then shift left by sh (n = bit 5). */
static inline void gen_extswsli(DisasContext *ctx, int n)
{
    int sh = SH(ctx->opcode) + (n << 5);
    TCGv dst = cpu_gpr[rA(ctx->opcode)];
    TCGv src = cpu_gpr[rS(ctx->opcode)];

    tcg_gen_ext32s_tl(dst, src);
    tcg_gen_shli_tl(dst, dst, sh);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, dst);
    }
}

static void gen_extswsli0(DisasContext *ctx)
{
    gen_extswsli(ctx, 0);
}

static void gen_extswsli1(DisasContext *ctx)
{
    gen_extswsli(ctx, 1);
}

/* srd & srd. */
static void gen_srd(DisasContext *ctx)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    /* AND rS with a mask that is 0 when rB >= 0x40 */
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
    tcg_gen_sari_tl(t0, t0, 0x3f);
    tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
    tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
#endif

/*** Addressing modes ***/
/* Register indirect with immediate index : EA = (rA|0) + SIMM */
/*
 * maskl clears low bits of the displacement (DS/DQ-form alignment);
 * in 32-bit ("narrow") mode the EA is truncated to 32 bits.
 */
static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA,
                                      target_long maskl)
{
    target_long simm = SIMM(ctx->opcode);

    simm &= ~maskl;
    if (rA(ctx->opcode) == 0) {
        /* rA == 0 means a literal zero base, not GPR0. */
        if (NARROW_MODE(ctx)) {
            simm = (uint32_t)simm;
        }
        tcg_gen_movi_tl(EA, simm);
    } else if (likely(simm != 0)) {
        tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
        if (NARROW_MODE(ctx)) {
            tcg_gen_ext32u_tl(EA, EA);
        }
    } else {
        if (NARROW_MODE(ctx)) {
            tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
        } else {
            tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
        }
    }
}

/* Register indirect with index : EA = (rA|0) + rB */
static inline void gen_addr_reg_index(DisasContext
 *ctx, TCGv EA)
{
    if (rA(ctx->opcode) == 0) {
        /* rA == 0: EA is just rB (truncated in 32-bit mode). */
        if (NARROW_MODE(ctx)) {
            tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]);
        } else {
            tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]);
        }
    } else {
        tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
        if (NARROW_MODE(ctx)) {
            tcg_gen_ext32u_tl(EA, EA);
        }
    }
}

/* EA = (rA|0) -- base register only, no displacement. */
static inline void gen_addr_register(DisasContext *ctx, TCGv EA)
{
    if (rA(ctx->opcode) == 0) {
        tcg_gen_movi_tl(EA, 0);
    } else if (NARROW_MODE(ctx)) {
        tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
    } else {
        tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* ret = arg1 + val, truncated to 32 bits in narrow mode. */
static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1,
                                target_long val)
{
    tcg_gen_addi_tl(ret, arg1, val);
    if (NARROW_MODE(ctx)) {
        tcg_gen_ext32u_tl(ret, ret);
    }
}

/* Raise the alignment interrupt used for unsupported LE accesses. */
static inline void gen_align_no_le(DisasContext *ctx)
{
    gen_exception_err(ctx, POWERPC_EXCP_ALIGN,
                      (ctx->opcode & 0x03FF0000) | POWERPC_EXCP_ALIGN_LE);
}

/*** Integer load ***/
/* Apply the guest's default byte order; BSWAP_MEMOP forces the opposite. */
#define DEF_MEMOP(op) ((op) | ctx->default_tcg_memop_mask)
#define BSWAP_MEMOP(op) ((op) | (ctx->default_tcg_memop_mask ^ MO_BSWAP))

/* Emit a target_ulong-sized guest load named gen_qemu_<ldop>. */
#define GEN_QEMU_LOAD_TL(ldop, op)                                      \
static void glue(gen_qemu_, ldop)(DisasContext *ctx,                    \
                                  TCGv val,                             \
                                  TCGv addr)                            \
{                                                                       \
    tcg_gen_qemu_ld_tl(val, addr, ctx->mem_idx, op);                    \
}

GEN_QEMU_LOAD_TL(ld8u, DEF_MEMOP(MO_UB))
GEN_QEMU_LOAD_TL(ld16u, DEF_MEMOP(MO_UW))
GEN_QEMU_LOAD_TL(ld16s, DEF_MEMOP(MO_SW))
GEN_QEMU_LOAD_TL(ld32u, DEF_MEMOP(MO_UL))
GEN_QEMU_LOAD_TL(ld32s, DEF_MEMOP(MO_SL))

GEN_QEMU_LOAD_TL(ld16ur, BSWAP_MEMOP(MO_UW))
GEN_QEMU_LOAD_TL(ld32ur, BSWAP_MEMOP(MO_UL))

/* Same, but producing a 64-bit value: gen_qemu_<ldop>_i64. */
#define GEN_QEMU_LOAD_64(ldop, op)                                      \
static void glue(gen_qemu_, glue(ldop, _i64))(DisasContext *ctx,        \
                                              TCGv_i64 val,             \
                                              TCGv addr)                \
{                                                                       \
    tcg_gen_qemu_ld_i64(val, addr, ctx->mem_idx, op);                   \
}

GEN_QEMU_LOAD_64(ld8u, DEF_MEMOP(MO_UB))
GEN_QEMU_LOAD_64(ld16u, DEF_MEMOP(MO_UW))
GEN_QEMU_LOAD_64(ld32u, DEF_MEMOP(MO_UL))
GEN_QEMU_LOAD_64(ld32s, DEF_MEMOP(MO_SL))
GEN_QEMU_LOAD_64(ld64, DEF_MEMOP(MO_Q))

#if defined(TARGET_PPC64)
GEN_QEMU_LOAD_64(ld64ur, BSWAP_MEMOP(MO_Q))
#endif

/* Emit a target_ulong-sized guest store named gen_qemu_<stop>. */
#define GEN_QEMU_STORE_TL(stop, op)                                     \
static void glue(gen_qemu_, stop)(DisasContext *ctx,                    \
                                  TCGv val,                             \
                                  TCGv addr)                            \
{                                                                       \
    tcg_gen_qemu_st_tl(val, addr, ctx->mem_idx, op);                    \
}

GEN_QEMU_STORE_TL(st8, DEF_MEMOP(MO_UB))
GEN_QEMU_STORE_TL(st16, DEF_MEMOP(MO_UW))
GEN_QEMU_STORE_TL(st32, DEF_MEMOP(MO_UL))

GEN_QEMU_STORE_TL(st16r, BSWAP_MEMOP(MO_UW))
GEN_QEMU_STORE_TL(st32r, BSWAP_MEMOP(MO_UL))

/* 64-bit store variant: gen_qemu_<stop>_i64. */
#define GEN_QEMU_STORE_64(stop, op)                                     \
static void glue(gen_qemu_, glue(stop, _i64))(DisasContext *ctx,        \
                                              TCGv_i64 val,             \
                                              TCGv addr)                \
{                                                                       \
    tcg_gen_qemu_st_i64(val, addr, ctx->mem_idx, op);                   \
}

GEN_QEMU_STORE_64(st8, DEF_MEMOP(MO_UB))
GEN_QEMU_STORE_64(st16, DEF_MEMOP(MO_UW))
GEN_QEMU_STORE_64(st32, DEF_MEMOP(MO_UL))
GEN_QEMU_STORE_64(st64, DEF_MEMOP(MO_Q))

#if defined(TARGET_PPC64)
GEN_QEMU_STORE_64(st64r, BSWAP_MEMOP(MO_Q))
#endif

/* D-form load: rD = mem[(rA|0) + SIMM]. */
#define GEN_LD(name, ldop, opc, type)                                   \
static void glue(gen_, name)(DisasContext *ctx)                         \
{                                                                       \
    TCGv EA;                                                            \
    gen_set_access_type(ctx, ACCESS_INT);                               \
    EA = tcg_temp_new();                                                \
    gen_addr_imm_index(ctx, EA, 0);                                     \
    gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA);                 \
    tcg_temp_free(EA);                                                  \
}

/*
 * D-form load with update: also writes EA back to rA.  rA == 0 or
 * rA == rD is an invalid form.  DS-form (PPC_64B) masks the low two
 * displacement bits.
 */
#define GEN_LDU(name, ldop, opc, type)                                  \
static void glue(gen_, name##u)(DisasContext *ctx)                      \
{                                                                       \
    TCGv EA;                                                            \
    if (unlikely(rA(ctx->opcode) == 0 ||                                \
                 rA(ctx->opcode) == rD(ctx->opcode))) {                 \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);             \
        return;                                                         \
    }                                                                   \
    gen_set_access_type(ctx, ACCESS_INT);                               \
    EA = tcg_temp_new();                                                \
    if (type == PPC_64B)                                                \
        gen_addr_imm_index(ctx, EA, 0x03);                              \
    else                                                                \
        gen_addr_imm_index(ctx, EA, 0);                                 \
    gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA);                 \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);                       \
    tcg_temp_free(EA);                                                  \
}

/* X-form indexed load with update: EA = rA + rB, rA updated. */
#define GEN_LDUX(name, ldop, opc2, opc3, type)                          \
static void glue(gen_, name##ux)(DisasContext *ctx)                     \
{                                                                       \
    TCGv EA;                                                            \
    if (unlikely(rA(ctx->opcode) == 0 ||                                \
                 rA(ctx->opcode) == rD(ctx->opcode))) {                 \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);             \
        return;                                                         \
    }                                                                   \
    gen_set_access_type(ctx, ACCESS_INT);                               \
    EA = tcg_temp_new();                                                \
    gen_addr_reg_index(ctx, EA);                                        \
    gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA);                 \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);                       \
    tcg_temp_free(EA);                                                  \
}

/* X-form indexed load; chk is a permission-check statement (e.g. CHK_HVRM). */
#define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk)             \
static void glue(gen_, name##x)(DisasContext *ctx)                      \
{                                                                       \
    TCGv EA;                                                            \
    chk;                                                                \
    gen_set_access_type(ctx, ACCESS_INT);                               \
    EA = tcg_temp_new();                                                \
    gen_addr_reg_index(ctx, EA);                                        \
    gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA);                 \
    tcg_temp_free(EA);                                                  \
}

#define GEN_LDX(name, ldop, opc2, opc3, type)                           \
    GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_NONE)

#define GEN_LDX_HVRM(name, ldop, opc2, opc3, type)                      \
    GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_HVRM)

/* Instantiate the whole D/DU/DUX/DX family for one load flavour. */
#define GEN_LDS(name, ldop, op, type)                                   \
GEN_LD(name, ldop, op | 0x20, type);                                    \
GEN_LDU(name, ldop, op | 0x21, type);                                   \
GEN_LDUX(name, ldop, 0x17, op | 0x01, type);                            \
GEN_LDX(name, ldop, 0x17, op | 0x00, type)

/* lbz lbzu lbzux lbzx */
GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER);
/* lha lhau lhaux lhax */
GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER);
/* lhz lhzu lhzux
   lhzx */
GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER);
/* lwz lwzu lwzux lwzx */
GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER);
#if defined(TARGET_PPC64)
/* lwaux */
GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B);
/* lwax */
GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B);
/* ldux */
GEN_LDUX(ld, ld64_i64, 0x15, 0x01, PPC_64B);
/* ldx */
GEN_LDX(ld, ld64_i64, 0x15, 0x00, PPC_64B);

/* CI load/store variants */
GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x15, PPC_CILDST)
GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)

/* DS-form opcode 58: ld / ldu / lwa, selected by the low opcode bits. */
static void gen_ld(DisasContext *ctx)
{
    TCGv EA;
    if (Rc(ctx->opcode)) {
        /* Update form: rA must be a valid, distinct base register. */
        if (unlikely(rA(ctx->opcode) == 0 ||
                     rA(ctx->opcode) == rD(ctx->opcode))) {
            gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
            return;
        }
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_imm_index(ctx, EA, 0x03);
    if (ctx->opcode & 0x02) {
        /* lwa (lwau is undefined) */
        gen_qemu_ld32s(ctx, cpu_gpr[rD(ctx->opcode)], EA);
    } else {
        /* ld - ldu */
        gen_qemu_ld64_i64(ctx, cpu_gpr[rD(ctx->opcode)], EA);
    }
    if (Rc(ctx->opcode))
        tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
    tcg_temp_free(EA);
}

/* lq */
static void gen_lq(DisasContext *ctx)
{
    int ra, rd;
    TCGv EA, hi, lo;

    /* lq is a legal user mode instruction starting in ISA 2.07 */
    bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0;
    bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0;

    if (!legal_in_user_mode && ctx->pr) {
        gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }

    if (!le_is_supported && ctx->le_mode) {
        gen_align_no_le(ctx);
        return;
    }
    ra = rA(ctx->opcode);
    rd = rD(ctx->opcode);
    /* rd must be even and must not equal the base register. */
    if (unlikely((rd & 1) || rd == ra)) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_imm_index(ctx, EA, 0x0F);

    /* Note that the low part is always in RD+1, even in LE mode. */
    lo = cpu_gpr[rd + 1];
    hi = cpu_gpr[rd];

    if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
#ifdef CONFIG_ATOMIC128
        /* Parallel context: use the atomic 16-byte load helper. */
        TCGv_i32 oi = tcg_temp_new_i32();
        if (ctx->le_mode) {
            tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ, ctx->mem_idx));
            gen_helper_lq_le_parallel(lo, cpu_env, EA, oi);
        } else {
            tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ, ctx->mem_idx));
            gen_helper_lq_be_parallel(lo, cpu_env, EA, oi);
        }
        tcg_temp_free_i32(oi);
        /* The helper returns the high half via env->retxh. */
        tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh));
#else
        /* Restart with exclusive lock. */
        gen_helper_exit_atomic(cpu_env);
        ctx->base.is_jmp = DISAS_NORETURN;
#endif
    } else if (ctx->le_mode) {
        tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEQ);
        gen_addr_add(ctx, EA, EA, 8);
        tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEQ);
    } else {
        tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEQ);
        gen_addr_add(ctx, EA, EA, 8);
        tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
}
#endif

/*** Integer store ***/
/* D-form store: mem[(rA|0) + SIMM] = rS. */
#define GEN_ST(name, stop, opc, type)                                   \
static void glue(gen_, name)(DisasContext *ctx)                         \
{                                                                       \
    TCGv EA;                                                            \
    gen_set_access_type(ctx, ACCESS_INT);                               \
    EA = tcg_temp_new();                                                \
    gen_addr_imm_index(ctx, EA, 0);                                     \
    gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA);                 \
    tcg_temp_free(EA);                                                  \
}

/* D-form store with update (rA written back); rA == 0 is invalid. */
#define GEN_STU(name, stop, opc, type)                                  \
static void glue(gen_, stop##u)(DisasContext *ctx)                      \
{                                                                       \
    TCGv EA;                                                            \
    if (unlikely(rA(ctx->opcode) == 0)) {                               \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);             \
        return;                                                         \
    }                                                                   \
    gen_set_access_type(ctx, ACCESS_INT);                               \
    EA = tcg_temp_new();                                                \
    if (type == PPC_64B)                                                \
        gen_addr_imm_index(ctx, EA, 0x03);                              \
    else                                                                \
        gen_addr_imm_index(ctx, EA, 0);                                 \
    gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA);                 \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);                       \
    tcg_temp_free(EA);                                                  \
}

/* X-form indexed store with update: EA = rA + rB, rA updated. */
#define GEN_STUX(name, stop, opc2, opc3, type)                          \
static void glue(gen_, name##ux)(DisasContext *ctx)                     \
{                                                                       \
    TCGv EA;                                                            \
    if (unlikely(rA(ctx->opcode) == 0)) {                               \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);             \
        return;                                                         \
    }                                                                   \
    gen_set_access_type(ctx, ACCESS_INT);                               \
    EA = tcg_temp_new();                                                \
    gen_addr_reg_index(ctx, EA);                                        \
    gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA);                 \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);                       \
    tcg_temp_free(EA);                                                  \
}

/* X-form indexed store; chk is a permission-check statement. */
#define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk)             \
static void glue(gen_, name##x)(DisasContext *ctx)                      \
{                                                                       \
    TCGv EA;                                                            \
    chk;                                                                \
    gen_set_access_type(ctx, ACCESS_INT);                               \
    EA = tcg_temp_new();                                                \
    gen_addr_reg_index(ctx, EA);                                        \
    gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA);                 \
    tcg_temp_free(EA);                                                  \
}
#define GEN_STX(name, stop, opc2, opc3, type)                           \
    GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_NONE)

#define GEN_STX_HVRM(name, stop, opc2, opc3, type)                      \
    GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_HVRM)

/* Instantiate the whole D/DU/DUX/DX family for one store flavour. */
#define GEN_STS(name, stop, op, type)                                   \
GEN_ST(name, stop, op | 0x20, type);                                    \
GEN_STU(name, stop, op | 0x21, type);                                   \
GEN_STUX(name, stop, 0x17, op | 0x01, type);                            \
GEN_STX(name, stop, 0x17, op | 0x00, type)

/* stb stbu stbux stbx */
GEN_STS(stb, st8, 0x06, PPC_INTEGER);
/* sth sthu sthux sthx */
GEN_STS(sth, st16, 0x0C, PPC_INTEGER);
/* stw stwu stwux stwx */
GEN_STS(stw, st32, 0x04, PPC_INTEGER);
#if defined(TARGET_PPC64)
GEN_STUX(std,
st64_i64, 0x15, 0x05, PPC_64B); 2764 GEN_STX(std, st64_i64, 0x15, 0x04, PPC_64B); 2765 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST) 2766 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST) 2767 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST) 2768 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST) 2769 2770 static void gen_std(DisasContext *ctx) 2771 { 2772 int rs; 2773 TCGv EA; 2774 2775 rs = rS(ctx->opcode); 2776 if ((ctx->opcode & 0x3) == 0x2) { /* stq */ 2777 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2778 bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2779 TCGv hi, lo; 2780 2781 if (!(ctx->insns_flags & PPC_64BX)) { 2782 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2783 } 2784 2785 if (!legal_in_user_mode && ctx->pr) { 2786 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); 2787 return; 2788 } 2789 2790 if (!le_is_supported && ctx->le_mode) { 2791 gen_align_no_le(ctx); 2792 return; 2793 } 2794 2795 if (unlikely(rs & 1)) { 2796 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2797 return; 2798 } 2799 gen_set_access_type(ctx, ACCESS_INT); 2800 EA = tcg_temp_new(); 2801 gen_addr_imm_index(ctx, EA, 0x03); 2802 2803 /* Note that the low part is always in RS+1, even in LE mode. */ 2804 lo = cpu_gpr[rs + 1]; 2805 hi = cpu_gpr[rs]; 2806 2807 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 2808 #ifdef CONFIG_ATOMIC128 2809 TCGv_i32 oi = tcg_temp_new_i32(); 2810 if (ctx->le_mode) { 2811 tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ, ctx->mem_idx)); 2812 gen_helper_stq_le_parallel(cpu_env, EA, lo, hi, oi); 2813 } else { 2814 tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ, ctx->mem_idx)); 2815 gen_helper_stq_be_parallel(cpu_env, EA, lo, hi, oi); 2816 } 2817 tcg_temp_free_i32(oi); 2818 #else 2819 /* Restart with exclusive lock. 
*/ 2820 gen_helper_exit_atomic(cpu_env); 2821 ctx->base.is_jmp = DISAS_NORETURN; 2822 #endif 2823 } else if (ctx->le_mode) { 2824 tcg_gen_qemu_st_i64(lo, EA, ctx->mem_idx, MO_LEQ); 2825 gen_addr_add(ctx, EA, EA, 8); 2826 tcg_gen_qemu_st_i64(hi, EA, ctx->mem_idx, MO_LEQ); 2827 } else { 2828 tcg_gen_qemu_st_i64(hi, EA, ctx->mem_idx, MO_BEQ); 2829 gen_addr_add(ctx, EA, EA, 8); 2830 tcg_gen_qemu_st_i64(lo, EA, ctx->mem_idx, MO_BEQ); 2831 } 2832 tcg_temp_free(EA); 2833 } else { 2834 /* std / stdu */ 2835 if (Rc(ctx->opcode)) { 2836 if (unlikely(rA(ctx->opcode) == 0)) { 2837 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2838 return; 2839 } 2840 } 2841 gen_set_access_type(ctx, ACCESS_INT); 2842 EA = tcg_temp_new(); 2843 gen_addr_imm_index(ctx, EA, 0x03); 2844 gen_qemu_st64_i64(ctx, cpu_gpr[rs], EA); 2845 if (Rc(ctx->opcode)) 2846 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); 2847 tcg_temp_free(EA); 2848 } 2849 } 2850 #endif 2851 /*** Integer load and store with byte reverse ***/ 2852 2853 /* lhbrx */ 2854 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER); 2855 2856 /* lwbrx */ 2857 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER); 2858 2859 #if defined(TARGET_PPC64) 2860 /* ldbrx */ 2861 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE); 2862 /* stdbrx */ 2863 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE); 2864 #endif /* TARGET_PPC64 */ 2865 2866 /* sthbrx */ 2867 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER); 2868 /* stwbrx */ 2869 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER); 2870 2871 /*** Integer load and store multiple ***/ 2872 2873 /* lmw */ 2874 static void gen_lmw(DisasContext *ctx) 2875 { 2876 TCGv t0; 2877 TCGv_i32 t1; 2878 2879 if (ctx->le_mode) { 2880 gen_align_no_le(ctx); 2881 return; 2882 } 2883 gen_set_access_type(ctx, ACCESS_INT); 2884 t0 = tcg_temp_new(); 2885 t1 = tcg_const_i32(rD(ctx->opcode)); 2886 gen_addr_imm_index(ctx, t0, 0); 2887 gen_helper_lmw(cpu_env, t0, t1); 2888 tcg_temp_free(t0); 2889 
    tcg_temp_free_i32(t1);
}

/* stmw -- store multiple word; helper-based, big-endian only. */
static void gen_stmw(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1;

    if (ctx->le_mode) {
        gen_align_no_le(ctx);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    t0 = tcg_temp_new();
    t1 = tcg_const_i32(rS(ctx->opcode));
    gen_addr_imm_index(ctx, t0, 0);
    gen_helper_stmw(cpu_env, t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}

/*** Integer load and store strings ***/

/* lswi */
/* PowerPC32 specification says we must generate an exception if
 * rA is in the range of registers to be loaded.
 * In an other hand, IBM says this is valid, but rA won't be loaded.
 * For now, I'll follow the spec...
 */
static void gen_lswi(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1, t2;
    int nb = NB(ctx->opcode);
    int start = rD(ctx->opcode);
    int ra = rA(ctx->opcode);
    int nr;

    if (ctx->le_mode) {
        gen_align_no_le(ctx);
        return;
    }
    /* NB == 0 encodes a 32-byte transfer. */
    if (nb == 0)
        nb = 32;
    nr = DIV_ROUND_UP(nb, 4);
    if (unlikely(lsw_reg_in_range(start, nr, ra))) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    t0 = tcg_temp_new();
    gen_addr_register(ctx, t0);
    t1 = tcg_const_i32(nb);
    t2 = tcg_const_i32(start);
    gen_helper_lsw(cpu_env, t0, t1, t2);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
}

/* lswx -- indexed string load; the helper validates the register range. */
static void gen_lswx(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1, t2, t3;

    if (ctx->le_mode) {
        gen_align_no_le(ctx);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    t1 = tcg_const_i32(rD(ctx->opcode));
    t2 = tcg_const_i32(rA(ctx->opcode));
    t3 = tcg_const_i32(rB(ctx->opcode));
    gen_helper_lswx(cpu_env, t0, t1, t2, t3);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
    tcg_temp_free_i32(t3);
}

/* stswi -- string store, immediate length (NB == 0 means 32 bytes). */
static void gen_stswi(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1, t2;
    int nb = NB(ctx->opcode);

    if (ctx->le_mode) {
        gen_align_no_le(ctx);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    t0 = tcg_temp_new();
    gen_addr_register(ctx, t0);
    if (nb == 0)
        nb = 32;
    t1 = tcg_const_i32(nb);
    t2 = tcg_const_i32(rS(ctx->opcode));
    gen_helper_stsw(cpu_env, t0, t1, t2);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
}

/* stswx -- string store, length taken from XER bits 25-31. */
static void gen_stswx(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1, t2;

    if (ctx->le_mode) {
        gen_align_no_le(ctx);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    t1 = tcg_temp_new_i32();
    tcg_gen_trunc_tl_i32(t1, cpu_xer);
    tcg_gen_andi_i32(t1, t1, 0x7F);
    t2 = tcg_const_i32(rS(ctx->opcode));
    gen_helper_stsw(cpu_env, t0, t1, t2);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
}

/*** Memory synchronisation ***/
/* eieio */
static void gen_eieio(DisasContext *ctx)
{
    TCGBar bar = TCG_MO_LD_ST;

    /*
     * POWER9 has a eieio instruction variant using bit 6 as a hint to
     * tell the CPU it is a store-forwarding barrier.
     */
    if (ctx->opcode & 0x2000000) {
        /*
         * ISA says that "Reserved fields in instructions are ignored
         * by the processor". So ignore the bit 6 on non-POWER9 CPU but
         * as this is not an instruction software should be using,
         * complain to the user.
         */
        if (!(ctx->insns_flags2 & PPC2_ISA300)) {
            qemu_log_mask(LOG_GUEST_ERROR, "invalid eieio using bit 6 at @"
                          TARGET_FMT_lx "\n", ctx->base.pc_next - 4);
        } else {
            bar = TCG_MO_ST_LD;
        }
    }

    tcg_gen_mb(bar | TCG_BAR_SC);
}

#if !defined(CONFIG_USER_ONLY)
/*
 * Emit a runtime check of env->tlb_need_flush and call the matching
 * flush helper if it is set (lazy TLB flushing).
 */
static inline void gen_check_tlb_flush(DisasContext *ctx, bool global)
{
    TCGv_i32 t;
    TCGLabel *l;

    if (!ctx->lazy_tlb_flush) {
        return;
    }
    l = gen_new_label();
    t = tcg_temp_new_i32();
    tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
    tcg_gen_brcondi_i32(TCG_COND_EQ, t, 0, l);
    if (global) {
        gen_helper_check_tlb_flush_global(cpu_env);
    } else {
        gen_helper_check_tlb_flush_local(cpu_env);
    }
    gen_set_label(l);
    tcg_temp_free_i32(t);
}
#else
static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) { }
#endif

/* isync */
static void gen_isync(DisasContext *ctx)
{
    /*
     * We need to check for a pending TLB flush. This can only happen in
     * kernel mode however so check MSR_PR
     */
    if (!ctx->pr) {
        gen_check_tlb_flush(ctx, false);
    }
    tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
    /* isync is a context-synchronizing instruction: end the TB. */
    gen_stop_exception(ctx);
}

/* Access size in bytes of a TCGMemOp. */
#define MEMOP_GET_SIZE(x) (1 << ((x) & MO_SIZE))

/*
 * Common body of the load-and-reserve (larx) family: load rD and
 * record the reservation address/value for a later stcx.
 */
static void gen_load_locked(DisasContext *ctx, TCGMemOp memop)
{
    TCGv gpr = cpu_gpr[rD(ctx->opcode)];
    TCGv t0 = tcg_temp_new();

    gen_set_access_type(ctx, ACCESS_RES);
    gen_addr_reg_index(ctx, t0);
    tcg_gen_qemu_ld_tl(gpr, t0, ctx->mem_idx, memop | MO_ALIGN);
    tcg_gen_mov_tl(cpu_reserve, t0);
    tcg_gen_mov_tl(cpu_reserve_val, gpr);
    tcg_gen_mb(TCG_MO_ALL | TCG_BAR_LDAQ);
    tcg_temp_free(t0);
}

#define LARX(name, memop)                          \
static void gen_##name(DisasContext *ctx)          \
{                                                  \
    gen_load_locked(ctx, memop);                   \
}

/* lwarx */
LARX(lbarx, DEF_MEMOP(MO_UB))
LARX(lharx, DEF_MEMOP(MO_UW))
LARX(lwarx, DEF_MEMOP(MO_UL))

/*
 * Shared body for the bounded/equal fetch-and-increment/decrement
 * forms of l[wd]at (serial context only).
 */
static void gen_fetch_inc_conditional(DisasContext *ctx, TCGMemOp memop,
                                      TCGv EA, TCGCond cond, int addend)
{
    TCGv t = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    TCGv u = tcg_temp_new();

    tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop);
    tcg_gen_addi_tl(t2, EA, MEMOP_GET_SIZE(memop));
    tcg_gen_qemu_ld_tl(t2, t2, ctx->mem_idx, memop);
    tcg_gen_addi_tl(u, t, addend);

    /* E.g. for fetch and increment bounded... */
    /* mem(EA,s) = (t != t2 ? u = t + 1 : t) */
    tcg_gen_movcond_tl(cond, u, t, t2, u, t);
    tcg_gen_qemu_st_tl(u, EA, ctx->mem_idx, memop);

    /* RT = (t != t2 ?
           t : u = 1<<(s*8-1)) */
    tcg_gen_movi_tl(u, 1 << (MEMOP_GET_SIZE(memop) * 8 - 1));
    tcg_gen_movcond_tl(cond, cpu_gpr[rD(ctx->opcode)], t, t2, t, u);

    tcg_temp_free(t);
    tcg_temp_free(t2);
    tcg_temp_free(u);
}

/*
 * lwat/ldat: atomic load-and-op, function selected by the FC field.
 * Uses TCG atomic ops where available; the forms that TCG cannot
 * express atomically force a serial-context restart.
 */
static void gen_ld_atomic(DisasContext *ctx, TCGMemOp memop)
{
    uint32_t gpr_FC = FC(ctx->opcode);
    TCGv EA = tcg_temp_new();
    int rt = rD(ctx->opcode);
    bool need_serial;
    TCGv src, dst;

    gen_addr_register(ctx, EA);
    dst = cpu_gpr[rt];
    src = cpu_gpr[(rt + 1) & 31];

    need_serial = false;
    memop |= MO_ALIGN;
    switch (gpr_FC) {
    case 0: /* Fetch and add */
        tcg_gen_atomic_fetch_add_tl(dst, EA, src, ctx->mem_idx, memop);
        break;
    case 1: /* Fetch and xor */
        tcg_gen_atomic_fetch_xor_tl(dst, EA, src, ctx->mem_idx, memop);
        break;
    case 2: /* Fetch and or */
        tcg_gen_atomic_fetch_or_tl(dst, EA, src, ctx->mem_idx, memop);
        break;
    case 3: /* Fetch and 'and' */
        tcg_gen_atomic_fetch_and_tl(dst, EA, src, ctx->mem_idx, memop);
        break;
    case 4: /* Fetch and max unsigned */
        tcg_gen_atomic_fetch_umax_tl(dst, EA, src, ctx->mem_idx, memop);
        break;
    case 5: /* Fetch and max signed */
        tcg_gen_atomic_fetch_smax_tl(dst, EA, src, ctx->mem_idx, memop);
        break;
    case 6: /* Fetch and min unsigned */
        tcg_gen_atomic_fetch_umin_tl(dst, EA, src, ctx->mem_idx, memop);
        break;
    case 7: /* Fetch and min signed */
        tcg_gen_atomic_fetch_smin_tl(dst, EA, src, ctx->mem_idx, memop);
        break;
    case 8: /* Swap */
        tcg_gen_atomic_xchg_tl(dst, EA, src, ctx->mem_idx, memop);
        break;

    case 16: /* Compare and swap not equal */
        if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
            need_serial = true;
        } else {
            TCGv t0 = tcg_temp_new();
            TCGv t1 = tcg_temp_new();

            tcg_gen_qemu_ld_tl(t0, EA, ctx->mem_idx, memop);
            /* For a 32-bit op on a 64-bit host, compare only the low word. */
            if ((memop & MO_SIZE) == MO_64 || TARGET_LONG_BITS == 32) {
                tcg_gen_mov_tl(t1, src);
            } else {
                tcg_gen_ext32u_tl(t1, src);
            }
            tcg_gen_movcond_tl(TCG_COND_NE, t1, t0, t1,
                               cpu_gpr[(rt + 2) & 31], t0);
            tcg_gen_qemu_st_tl(t1, EA, ctx->mem_idx, memop);
            tcg_gen_mov_tl(dst, t0);

            tcg_temp_free(t0);
            tcg_temp_free(t1);
        }
        break;

    case 24: /* Fetch and increment bounded */
        if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
            need_serial = true;
        } else {
            gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, 1);
        }
        break;
    case 25: /* Fetch and increment equal */
        if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
            need_serial = true;
        } else {
            gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_EQ, 1);
        }
        break;
    case 28: /* Fetch and decrement bounded */
        if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
            need_serial = true;
        } else {
            gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, -1);
        }
        break;

    default:
        /* invoke data storage error handler */
        gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL);
    }
    tcg_temp_free(EA);

    if (need_serial) {
        /* Restart with exclusive lock. */
        gen_helper_exit_atomic(cpu_env);
        ctx->base.is_jmp = DISAS_NORETURN;
    }
}

static void gen_lwat(DisasContext *ctx)
{
    gen_ld_atomic(ctx, DEF_MEMOP(MO_UL));
}

#ifdef TARGET_PPC64
static void gen_ldat(DisasContext *ctx)
{
    gen_ld_atomic(ctx, DEF_MEMOP(MO_Q));
}
#endif

/* stwat/stdat: atomic op-and-store, function selected by the FC field. */
static void gen_st_atomic(DisasContext *ctx, TCGMemOp memop)
{
    uint32_t gpr_FC = FC(ctx->opcode);
    TCGv EA = tcg_temp_new();
    TCGv src, discard;

    gen_addr_register(ctx, EA);
    src = cpu_gpr[rD(ctx->opcode)];
    /* The *_fetch ops need a result operand; the value is unused. */
    discard = tcg_temp_new();

    memop |= MO_ALIGN;
    switch (gpr_FC) {
    case 0: /* add and Store */
        tcg_gen_atomic_add_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 1: /* xor and Store */
        tcg_gen_atomic_xor_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 2: /* Or and Store */
        tcg_gen_atomic_or_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 3: /* 'and' and Store */
        tcg_gen_atomic_and_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 4: /* Store max unsigned */
        tcg_gen_atomic_umax_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 5: /* Store max signed */
        tcg_gen_atomic_smax_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 6: /* Store min unsigned */
        tcg_gen_atomic_umin_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 7: /* Store min signed */
        tcg_gen_atomic_smin_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 24: /* Store twin */
        if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
            /* Restart with exclusive lock.
             */
            gen_helper_exit_atomic(cpu_env);
            ctx->base.is_jmp = DISAS_NORETURN;
        } else {
            /* Store src to both words iff the two words are equal. */
            TCGv t = tcg_temp_new();
            TCGv t2 = tcg_temp_new();
            TCGv s = tcg_temp_new();
            TCGv s2 = tcg_temp_new();
            TCGv ea_plus_s = tcg_temp_new();

            tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop);
            tcg_gen_addi_tl(ea_plus_s, EA, MEMOP_GET_SIZE(memop));
            tcg_gen_qemu_ld_tl(t2, ea_plus_s, ctx->mem_idx, memop);
            tcg_gen_movcond_tl(TCG_COND_EQ, s, t, t2, src, t);
            tcg_gen_movcond_tl(TCG_COND_EQ, s2, t, t2, src, t2);
            tcg_gen_qemu_st_tl(s, EA, ctx->mem_idx, memop);
            tcg_gen_qemu_st_tl(s2, ea_plus_s, ctx->mem_idx, memop);

            tcg_temp_free(ea_plus_s);
            tcg_temp_free(s2);
            tcg_temp_free(s);
            tcg_temp_free(t2);
            tcg_temp_free(t);
        }
        break;
    default:
        /* invoke data storage error handler */
        gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL);
    }
    tcg_temp_free(discard);
    tcg_temp_free(EA);
}

static void gen_stwat(DisasContext *ctx)
{
    gen_st_atomic(ctx, DEF_MEMOP(MO_UL));
}

#ifdef TARGET_PPC64
static void gen_stdat(DisasContext *ctx)
{
    gen_st_atomic(ctx, DEF_MEMOP(MO_Q));
}
#endif

/*
 * Common body of the store-conditional (stcx) family: succeed (set
 * CR0.EQ) only if the reservation address matches and the memory
 * still holds the reserved value; clear the reservation either way.
 */
static void gen_conditional_store(DisasContext *ctx, TCGMemOp memop)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    int reg = rS(ctx->opcode);

    gen_set_access_type(ctx, ACCESS_RES);
    gen_addr_reg_index(ctx, t0);
    tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
    tcg_temp_free(t0);

    t0 = tcg_temp_new();
    tcg_gen_atomic_cmpxchg_tl(t0, cpu_reserve, cpu_reserve_val,
                              cpu_gpr[reg], ctx->mem_idx,
                              DEF_MEMOP(memop) | MO_ALIGN);
    /* CR0 = (cmpxchg succeeded ? EQ : 0) | SO. */
    tcg_gen_setcond_tl(TCG_COND_EQ, t0, t0, cpu_reserve_val);
    tcg_gen_shli_tl(t0, t0, CRF_EQ_BIT);
    tcg_gen_or_tl(t0, t0, cpu_so);
    tcg_gen_trunc_tl_i32(cpu_crf[0], t0);
    tcg_temp_free(t0);
    tcg_gen_br(l2);

    gen_set_label(l1);

    /* Address mismatch implies failure. But we still need to provide the
       memory barrier semantics of the instruction. */
    tcg_gen_mb(TCG_MO_ALL | TCG_BAR_STRL);
    tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);

    gen_set_label(l2);
    /* Invalidate the reservation. */
    tcg_gen_movi_tl(cpu_reserve, -1);
}

#define STCX(name, memop)                          \
static void gen_##name(DisasContext *ctx)          \
{                                                  \
    gen_conditional_store(ctx, memop);             \
}

STCX(stbcx_, DEF_MEMOP(MO_UB))
STCX(sthcx_, DEF_MEMOP(MO_UW))
STCX(stwcx_, DEF_MEMOP(MO_UL))

#if defined(TARGET_PPC64)
/* ldarx */
LARX(ldarx, DEF_MEMOP(MO_Q))
/* stdcx. */
STCX(stdcx_, DEF_MEMOP(MO_Q))

/* lqarx -- quadword load-and-reserve. */
static void gen_lqarx(DisasContext *ctx)
{
    int rd = rD(ctx->opcode);
    TCGv EA, hi, lo;

    /* rd must be even and distinct from both address registers. */
    if (unlikely((rd & 1) || (rd == rA(ctx->opcode)) ||
                 (rd == rB(ctx->opcode)))) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }

    gen_set_access_type(ctx, ACCESS_RES);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);

    /* Note that the low part is always in RD+1, even in LE mode. */
    lo = cpu_gpr[rd + 1];
    hi = cpu_gpr[rd];

    if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
#ifdef CONFIG_ATOMIC128
        TCGv_i32 oi = tcg_temp_new_i32();
        if (ctx->le_mode) {
            tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ | MO_ALIGN_16,
                                                ctx->mem_idx));
            gen_helper_lq_le_parallel(lo, cpu_env, EA, oi);
        } else {
            tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ | MO_ALIGN_16,
                                                ctx->mem_idx));
            gen_helper_lq_be_parallel(lo, cpu_env, EA, oi);
        }
        tcg_temp_free_i32(oi);
        tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh));
#else
        /* Restart with exclusive lock.
*/ 3422 gen_helper_exit_atomic(cpu_env); 3423 ctx->base.is_jmp = DISAS_NORETURN; 3424 tcg_temp_free(EA); 3425 return; 3426 #endif 3427 } else if (ctx->le_mode) { 3428 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEQ | MO_ALIGN_16); 3429 tcg_gen_mov_tl(cpu_reserve, EA); 3430 gen_addr_add(ctx, EA, EA, 8); 3431 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEQ); 3432 } else { 3433 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEQ | MO_ALIGN_16); 3434 tcg_gen_mov_tl(cpu_reserve, EA); 3435 gen_addr_add(ctx, EA, EA, 8); 3436 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEQ); 3437 } 3438 tcg_temp_free(EA); 3439 3440 tcg_gen_st_tl(hi, cpu_env, offsetof(CPUPPCState, reserve_val)); 3441 tcg_gen_st_tl(lo, cpu_env, offsetof(CPUPPCState, reserve_val2)); 3442 } 3443 3444 /* stqcx. */ 3445 static void gen_stqcx_(DisasContext *ctx) 3446 { 3447 int rs = rS(ctx->opcode); 3448 TCGv EA, hi, lo; 3449 3450 if (unlikely(rs & 1)) { 3451 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3452 return; 3453 } 3454 3455 gen_set_access_type(ctx, ACCESS_RES); 3456 EA = tcg_temp_new(); 3457 gen_addr_reg_index(ctx, EA); 3458 3459 /* Note that the low part is always in RS+1, even in LE mode. */ 3460 lo = cpu_gpr[rs + 1]; 3461 hi = cpu_gpr[rs]; 3462 3463 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3464 TCGv_i32 oi = tcg_const_i32(DEF_MEMOP(MO_Q) | MO_ALIGN_16); 3465 #ifdef CONFIG_ATOMIC128 3466 if (ctx->le_mode) { 3467 gen_helper_stqcx_le_parallel(cpu_crf[0], cpu_env, EA, lo, hi, oi); 3468 } else { 3469 gen_helper_stqcx_le_parallel(cpu_crf[0], cpu_env, EA, lo, hi, oi); 3470 } 3471 #else 3472 /* Restart with exclusive lock. 
*/ 3473 gen_helper_exit_atomic(cpu_env); 3474 ctx->base.is_jmp = DISAS_NORETURN; 3475 #endif 3476 tcg_temp_free(EA); 3477 tcg_temp_free_i32(oi); 3478 } else { 3479 TCGLabel *lab_fail = gen_new_label(); 3480 TCGLabel *lab_over = gen_new_label(); 3481 TCGv_i64 t0 = tcg_temp_new_i64(); 3482 TCGv_i64 t1 = tcg_temp_new_i64(); 3483 3484 tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, lab_fail); 3485 tcg_temp_free(EA); 3486 3487 gen_qemu_ld64_i64(ctx, t0, cpu_reserve); 3488 tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode 3489 ? offsetof(CPUPPCState, reserve_val2) 3490 : offsetof(CPUPPCState, reserve_val))); 3491 tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail); 3492 3493 tcg_gen_addi_i64(t0, cpu_reserve, 8); 3494 gen_qemu_ld64_i64(ctx, t0, t0); 3495 tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode 3496 ? offsetof(CPUPPCState, reserve_val) 3497 : offsetof(CPUPPCState, reserve_val2))); 3498 tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail); 3499 3500 /* Success */ 3501 gen_qemu_st64_i64(ctx, ctx->le_mode ? lo : hi, cpu_reserve); 3502 tcg_gen_addi_i64(t0, cpu_reserve, 8); 3503 gen_qemu_st64_i64(ctx, ctx->le_mode ? hi : lo, t0); 3504 3505 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 3506 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ); 3507 tcg_gen_br(lab_over); 3508 3509 gen_set_label(lab_fail); 3510 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 3511 3512 gen_set_label(lab_over); 3513 tcg_gen_movi_tl(cpu_reserve, -1); 3514 tcg_temp_free_i64(t0); 3515 tcg_temp_free_i64(t1); 3516 } 3517 } 3518 #endif /* defined(TARGET_PPC64) */ 3519 3520 /* sync */ 3521 static void gen_sync(DisasContext *ctx) 3522 { 3523 uint32_t l = (ctx->opcode >> 21) & 3; 3524 3525 /* 3526 * We may need to check for a pending TLB flush. 3527 * 3528 * We do this on ptesync (l == 2) on ppc64 and any sync pn ppc32. 3529 * 3530 * Additionally, this can only happen in kernel mode however so 3531 * check MSR_PR as well. 
 */
    if (((l == 2) || !(ctx->insns_flags & PPC_64B)) && !ctx->pr) {
        gen_check_tlb_flush(ctx, true);
    }
    tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
}

/* wait: halt the CPU until the next interrupt. */
static void gen_wait(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_const_i32(1);
    /* Set cs->halted; the negative offsetof reaches from env back to
     * the containing CPUState.
     */
    tcg_gen_st_i32(t0, cpu_env,
                   -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
    tcg_temp_free_i32(t0);
    /* Stop translation, as the CPU is supposed to sleep from now */
    gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
}

#if defined(TARGET_PPC64)
/* doze: enter the DOZE power-managed state (hypervisor only). */
static void gen_doze(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv_i32 t;

    CHK_HV;
    t = tcg_const_i32(PPC_PM_DOZE);
    gen_helper_pminsn(cpu_env, t);
    tcg_temp_free_i32(t);
    gen_stop_exception(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* nap: enter the NAP power-managed state (hypervisor only). */
static void gen_nap(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv_i32 t;

    CHK_HV;
    t = tcg_const_i32(PPC_PM_NAP);
    gen_helper_pminsn(cpu_env, t);
    tcg_temp_free_i32(t);
    gen_stop_exception(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* stop: implemented as nap. */
static void gen_stop(DisasContext *ctx)
{
    gen_nap(ctx);
}

/* sleep: enter the SLEEP power-managed state (hypervisor only). */
static void gen_sleep(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv_i32 t;

    CHK_HV;
    t = tcg_const_i32(PPC_PM_SLEEP);
    gen_helper_pminsn(cpu_env, t);
    tcg_temp_free_i32(t);
    gen_stop_exception(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* rvwinkle: enter the RVWINKLE power-managed state (hypervisor only). */
static void gen_rvwinkle(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv_i32 t;

    CHK_HV;
    t = tcg_const_i32(PPC_PM_RVWINKLE);
    gen_helper_pminsn(cpu_env, t);
    tcg_temp_free_i32(t);
    gen_stop_exception(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}
#endif /* #if defined(TARGET_PPC64) */

/* Record nip as the Come-From Address if this CPU has a CFAR. */
static inline void gen_update_cfar(DisasContext *ctx, target_ulong nip)
{
#if defined(TARGET_PPC64)
    if (ctx->has_cfar)
        tcg_gen_movi_tl(cpu_cfar, nip);
#endif
}

/* Direct TB chaining is allowed only when not single-stepping and
 * (in system mode) the destination stays on the same guest page.
 */
static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest)
{
    if (unlikely(ctx->singlestep_enabled)) {
        return false;
    }

#ifndef CONFIG_USER_ONLY
    return (ctx->base.tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK);
#else
    return true;
#endif
}

/* End the TB with an indirect jump: honour single-step/branch-step
 * debug modes, otherwise look up the next TB by its pointer.
 */
static void gen_lookup_and_goto_ptr(DisasContext *ctx)
{
    int sse = ctx->singlestep_enabled;
    if (unlikely(sse)) {
        if (sse & GDBSTUB_SINGLE_STEP) {
            gen_debug_exception(ctx);
        } else if (sse & (CPU_SINGLE_STEP | CPU_BRANCH_STEP)) {
            uint32_t excp = gen_prep_dbgex(ctx, POWERPC_EXCP_BRANCH);
            if (excp != POWERPC_EXCP_NONE) {
                gen_exception(ctx, excp);
            }
        }
        tcg_gen_exit_tb(NULL, 0);
    } else {
        tcg_gen_lookup_and_goto_ptr();
    }
}

/*** Branch ***/
/* Jump to dest, chaining TBs directly when use_goto_tb() allows it. */
static void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
{
    if (NARROW_MODE(ctx)) {
        dest = (uint32_t) dest;
    }
    if (use_goto_tb(ctx, dest)) {
        tcg_gen_goto_tb(n);
        tcg_gen_movi_tl(cpu_nip, dest & ~3);
        tcg_gen_exit_tb(ctx->base.tb, n);
    } else {
        tcg_gen_movi_tl(cpu_nip, dest & ~3);
        gen_lookup_and_goto_ptr(ctx);
    }
}

/* Set the link register, truncated to 32 bits in narrow mode. */
static inline void gen_setlr(DisasContext *ctx, target_ulong nip)
{
    if (NARROW_MODE(ctx)) {
        nip = (uint32_t)nip;
    }
    tcg_gen_movi_tl(cpu_lr, nip);
}

/* b ba bl bla */
static void gen_b(DisasContext *ctx)
{
    target_ulong li, target;

    ctx->exception = POWERPC_EXCP_BRANCH;
    /* sign extend LI */
    li = LI(ctx->opcode);
    li = (li ^ 0x02000000) - 0x02000000;
    if (likely(AA(ctx->opcode) == 0)) {
        target =
ctx->base.pc_next + li - 4; 3691 } else { 3692 target = li; 3693 } 3694 if (LK(ctx->opcode)) { 3695 gen_setlr(ctx, ctx->base.pc_next); 3696 } 3697 gen_update_cfar(ctx, ctx->base.pc_next - 4); 3698 gen_goto_tb(ctx, 0, target); 3699 } 3700 3701 #define BCOND_IM 0 3702 #define BCOND_LR 1 3703 #define BCOND_CTR 2 3704 #define BCOND_TAR 3 3705 3706 static void gen_bcond(DisasContext *ctx, int type) 3707 { 3708 uint32_t bo = BO(ctx->opcode); 3709 TCGLabel *l1; 3710 TCGv target; 3711 ctx->exception = POWERPC_EXCP_BRANCH; 3712 3713 if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) { 3714 target = tcg_temp_local_new(); 3715 if (type == BCOND_CTR) 3716 tcg_gen_mov_tl(target, cpu_ctr); 3717 else if (type == BCOND_TAR) 3718 gen_load_spr(target, SPR_TAR); 3719 else 3720 tcg_gen_mov_tl(target, cpu_lr); 3721 } else { 3722 target = NULL; 3723 } 3724 if (LK(ctx->opcode)) 3725 gen_setlr(ctx, ctx->base.pc_next); 3726 l1 = gen_new_label(); 3727 if ((bo & 0x4) == 0) { 3728 /* Decrement and test CTR */ 3729 TCGv temp = tcg_temp_new(); 3730 if (unlikely(type == BCOND_CTR)) { 3731 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3732 return; 3733 } 3734 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1); 3735 if (NARROW_MODE(ctx)) { 3736 tcg_gen_ext32u_tl(temp, cpu_ctr); 3737 } else { 3738 tcg_gen_mov_tl(temp, cpu_ctr); 3739 } 3740 if (bo & 0x2) { 3741 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1); 3742 } else { 3743 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1); 3744 } 3745 tcg_temp_free(temp); 3746 } 3747 if ((bo & 0x10) == 0) { 3748 /* Test CR */ 3749 uint32_t bi = BI(ctx->opcode); 3750 uint32_t mask = 0x08 >> (bi & 0x03); 3751 TCGv_i32 temp = tcg_temp_new_i32(); 3752 3753 if (bo & 0x8) { 3754 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask); 3755 tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1); 3756 } else { 3757 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask); 3758 tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1); 3759 } 3760 tcg_temp_free_i32(temp); 3761 } 3762 gen_update_cfar(ctx, 
ctx->base.pc_next - 4); 3763 if (type == BCOND_IM) { 3764 target_ulong li = (target_long)((int16_t)(BD(ctx->opcode))); 3765 if (likely(AA(ctx->opcode) == 0)) { 3766 gen_goto_tb(ctx, 0, ctx->base.pc_next + li - 4); 3767 } else { 3768 gen_goto_tb(ctx, 0, li); 3769 } 3770 } else { 3771 if (NARROW_MODE(ctx)) { 3772 tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3); 3773 } else { 3774 tcg_gen_andi_tl(cpu_nip, target, ~3); 3775 } 3776 gen_lookup_and_goto_ptr(ctx); 3777 tcg_temp_free(target); 3778 } 3779 if ((bo & 0x14) != 0x14) { 3780 /* fallthrough case */ 3781 gen_set_label(l1); 3782 gen_goto_tb(ctx, 1, ctx->base.pc_next); 3783 } 3784 } 3785 3786 static void gen_bc(DisasContext *ctx) 3787 { 3788 gen_bcond(ctx, BCOND_IM); 3789 } 3790 3791 static void gen_bcctr(DisasContext *ctx) 3792 { 3793 gen_bcond(ctx, BCOND_CTR); 3794 } 3795 3796 static void gen_bclr(DisasContext *ctx) 3797 { 3798 gen_bcond(ctx, BCOND_LR); 3799 } 3800 3801 static void gen_bctar(DisasContext *ctx) 3802 { 3803 gen_bcond(ctx, BCOND_TAR); 3804 } 3805 3806 /*** Condition register logical ***/ 3807 #define GEN_CRLOGIC(name, tcg_op, opc) \ 3808 static void glue(gen_, name)(DisasContext *ctx) \ 3809 { \ 3810 uint8_t bitmask; \ 3811 int sh; \ 3812 TCGv_i32 t0, t1; \ 3813 sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03); \ 3814 t0 = tcg_temp_new_i32(); \ 3815 if (sh > 0) \ 3816 tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh); \ 3817 else if (sh < 0) \ 3818 tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh); \ 3819 else \ 3820 tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]); \ 3821 t1 = tcg_temp_new_i32(); \ 3822 sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03); \ 3823 if (sh > 0) \ 3824 tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh); \ 3825 else if (sh < 0) \ 3826 tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh); \ 3827 else \ 3828 tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]); \ 3829 tcg_op(t0, t0, t1); \ 3830 bitmask = 0x08 >> 
(crbD(ctx->opcode) & 0x03); \ 3831 tcg_gen_andi_i32(t0, t0, bitmask); \ 3832 tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask); \ 3833 tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1); \ 3834 tcg_temp_free_i32(t0); \ 3835 tcg_temp_free_i32(t1); \ 3836 } 3837 3838 /* crand */ 3839 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08); 3840 /* crandc */ 3841 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04); 3842 /* creqv */ 3843 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09); 3844 /* crnand */ 3845 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07); 3846 /* crnor */ 3847 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01); 3848 /* cror */ 3849 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E); 3850 /* crorc */ 3851 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D); 3852 /* crxor */ 3853 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06); 3854 3855 /* mcrf */ 3856 static void gen_mcrf(DisasContext *ctx) 3857 { 3858 tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]); 3859 } 3860 3861 /*** System linkage ***/ 3862 3863 /* rfi (supervisor only) */ 3864 static void gen_rfi(DisasContext *ctx) 3865 { 3866 #if defined(CONFIG_USER_ONLY) 3867 GEN_PRIV; 3868 #else 3869 /* This instruction doesn't exist anymore on 64-bit server 3870 * processors compliant with arch 2.x 3871 */ 3872 if (ctx->insns_flags & PPC_SEGMENT_64B) { 3873 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3874 return; 3875 } 3876 /* Restore CPU state */ 3877 CHK_SV; 3878 gen_update_cfar(ctx, ctx->base.pc_next - 4); 3879 gen_helper_rfi(cpu_env); 3880 gen_sync_exception(ctx); 3881 #endif 3882 } 3883 3884 #if defined(TARGET_PPC64) 3885 static void gen_rfid(DisasContext *ctx) 3886 { 3887 #if defined(CONFIG_USER_ONLY) 3888 GEN_PRIV; 3889 #else 3890 /* Restore CPU state */ 3891 CHK_SV; 3892 gen_update_cfar(ctx, ctx->base.pc_next - 4); 3893 gen_helper_rfid(cpu_env); 3894 gen_sync_exception(ctx); 3895 #endif 3896 } 3897 3898 static void gen_hrfid(DisasContext *ctx) 3899 { 3900 #if defined(CONFIG_USER_ONLY) 3901 GEN_PRIV; 3902 #else 3903 
    /* Restore CPU state */
    CHK_HV;
    gen_helper_hrfid(cpu_env);
    gen_sync_exception(ctx);
#endif
}
#endif

/* sc */
#if defined(CONFIG_USER_ONLY)
#define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
#else
#define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
#endif
/* sc: raise a system-call exception with the LEV field as error code. */
static void gen_sc(DisasContext *ctx)
{
    uint32_t lev;

    lev = (ctx->opcode >> 5) & 0x7F;
    gen_exception_err(ctx, POWERPC_SYSCALL, lev);
}

/*** Trap ***/

/* Check for unconditional traps (always or never) */
static bool check_unconditional_trap(DisasContext *ctx)
{
    /* Trap never */
    if (TO(ctx->opcode) == 0) {
        return true;
    }
    /* Trap always */
    if (TO(ctx->opcode) == 31) {
        gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_TRAP);
        return true;
    }
    return false;
}

/* tw */
static void gen_tw(DisasContext *ctx)
{
    TCGv_i32 t0;

    if (check_unconditional_trap(ctx)) {
        return;
    }
    t0 = tcg_const_i32(TO(ctx->opcode));
    gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                  t0);
    tcg_temp_free_i32(t0);
}

/* twi */
static void gen_twi(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1;

    if (check_unconditional_trap(ctx)) {
        return;
    }
    t0 = tcg_const_tl(SIMM(ctx->opcode));
    t1 = tcg_const_i32(TO(ctx->opcode));
    gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}

#if defined(TARGET_PPC64)
/* td */
static void gen_td(DisasContext *ctx)
{
    TCGv_i32 t0;

    if (check_unconditional_trap(ctx)) {
        return;
    }
    t0 = tcg_const_i32(TO(ctx->opcode));
    gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                  t0);
    tcg_temp_free_i32(t0);
}

/* tdi */
static void gen_tdi(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1;

    if (check_unconditional_trap(ctx)) {
        return;
    }
    t0 = tcg_const_tl(SIMM(ctx->opcode));
    t1 = tcg_const_i32(TO(ctx->opcode));
    gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}
#endif

/*** Processor control ***/

/* Compose the architected XER value from the split-out SO/OV/CA
 * (and, on ISA 3.00, OV32/CA32) globals plus the remaining bits
 * kept in cpu_xer.
 */
static void gen_read_xer(DisasContext *ctx, TCGv dst)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    tcg_gen_mov_tl(dst, cpu_xer);
    tcg_gen_shli_tl(t0, cpu_so, XER_SO);
    tcg_gen_shli_tl(t1, cpu_ov, XER_OV);
    tcg_gen_shli_tl(t2, cpu_ca, XER_CA);
    tcg_gen_or_tl(t0, t0, t1);
    tcg_gen_or_tl(dst, dst, t2);
    tcg_gen_or_tl(dst, dst, t0);
    if (is_isa300(ctx)) {
        tcg_gen_shli_tl(t0, cpu_ov32, XER_OV32);
        tcg_gen_or_tl(dst, dst, t0);
        tcg_gen_shli_tl(t0, cpu_ca32, XER_CA32);
        tcg_gen_or_tl(dst, dst, t0);
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}

/* Scatter an XER value back into cpu_xer and the split-out flag
 * globals (inverse of gen_read_xer).
 */
static void gen_write_xer(TCGv src)
{
    /* Write all flags, while reading back check for isa300 */
    tcg_gen_andi_tl(cpu_xer, src,
                    ~((1u << XER_SO) |
                      (1u << XER_OV) | (1u << XER_OV32) |
                      (1u << XER_CA) | (1u << XER_CA32)));
    tcg_gen_extract_tl(cpu_ov32, src, XER_OV32, 1);
    tcg_gen_extract_tl(cpu_ca32, src, XER_CA32, 1);
    tcg_gen_extract_tl(cpu_so, src, XER_SO, 1);
    tcg_gen_extract_tl(cpu_ov, src, XER_OV, 1);
    tcg_gen_extract_tl(cpu_ca, src, XER_CA, 1);
}

/* mcrxr: copy SO/OV/CA into a CR field (bits 3/2/1) and clear them. */
static void gen_mcrxr(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();
    TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];

    tcg_gen_trunc_tl_i32(t0, cpu_so);
    tcg_gen_trunc_tl_i32(t1, cpu_ov);
    tcg_gen_trunc_tl_i32(dst, cpu_ca);
    tcg_gen_shli_i32(t0, t0, 3);
    tcg_gen_shli_i32(t1, t1, 2);
    tcg_gen_shli_i32(dst, dst, 1);
    tcg_gen_or_i32(dst, dst, t0);
    tcg_gen_or_i32(dst, dst, t1);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);

    tcg_gen_movi_tl(cpu_so, 0);
    tcg_gen_movi_tl(cpu_ov, 0);
    tcg_gen_movi_tl(cpu_ca, 0);
}

#ifdef TARGET_PPC64
/* mcrxrx: copy OV/OV32/CA/CA32 into a CR field without clearing them. */
static void gen_mcrxrx(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];

    /* copy OV and OV32 */
    tcg_gen_shli_tl(t0, cpu_ov, 1);
    tcg_gen_or_tl(t0, t0, cpu_ov32);
    tcg_gen_shli_tl(t0, t0, 2);
    /* copy CA and CA32 */
    tcg_gen_shli_tl(t1, cpu_ca, 1);
    tcg_gen_or_tl(t1, t1, cpu_ca32);
    tcg_gen_or_tl(t0, t0, t1);
    tcg_gen_trunc_tl_i32(dst, t0);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
#endif

/* mfcr mfocrf: bit 20 set selects the one-field (mfocrf) form,
 * otherwise all eight CR fields are concatenated into rD.
 */
static void gen_mfcr(DisasContext *ctx)
{
    uint32_t crm, crn;

    if (likely(ctx->opcode & 0x00100000)) {
        crm = CRM(ctx->opcode);
        /* Only a single-bit field mask is architecturally valid. */
        if (likely(crm && ((crm & (crm - 1)) == 0))) {
            crn = ctz32 (crm);
            tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]);
            tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)],
                            cpu_gpr[rD(ctx->opcode)], crn * 4);
        }
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_mov_i32(t0, cpu_crf[0]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[1]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[2]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[3]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[4]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[5]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[6]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[7]);
        tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
        tcg_temp_free_i32(t0);
    }
}

/* mfmsr */
static void gen_mfmsr(DisasContext *ctx)
{
    CHK_SV;
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr);
}

/* Placeholder SPR callback for inaccessible SPRs (emits nothing). */
static void spr_noaccess(DisasContext *ctx, int gprn, int sprn)
{
#if 0
    sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
    printf("ERROR: try to access SPR %d !\n", sprn);
#endif
}
#define SPR_NOACCESS (&spr_noaccess)

/* mfspr: dispatch through the per-SPR read callback table, picking
 * the user/hypervisor/supervisor variant from the current privilege.
 */
static inline void gen_op_mfspr(DisasContext *ctx)
{
    void (*read_cb)(DisasContext *ctx, int gprn, int sprn);
    uint32_t sprn = SPR(ctx->opcode);

#if defined(CONFIG_USER_ONLY)
    read_cb = ctx->spr_cb[sprn].uea_read;
#else
    if (ctx->pr) {
        read_cb = ctx->spr_cb[sprn].uea_read;
    } else if (ctx->hv) {
        read_cb = ctx->spr_cb[sprn].hea_read;
    } else {
        read_cb = ctx->spr_cb[sprn].oea_read;
    }
#endif
    if (likely(read_cb != NULL)) {
        if (likely(read_cb != SPR_NOACCESS)) {
            (*read_cb)(ctx, rD(ctx->opcode), sprn);
        } else {
            /* Privilege exception */
            /* This is a hack to avoid warnings when running Linux:
             * this OS breaks the PowerPC virtualisation model,
             * allowing userland application to read the PVR
             */
            if (sprn != SPR_PVR) {
                qemu_log_mask(LOG_GUEST_ERROR, "Trying to read privileged spr "
                              "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
                              ctx->base.pc_next - 4);
            }
            gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
        }
    } else {
        /* ISA 2.07 defines these as no-ops */
        if ((ctx->insns_flags2 & PPC2_ISA207S) &&
            (sprn >= 808 && sprn <= 811)) {
            /* This is a nop */
            return;
        }
        /* Not defined */
        qemu_log_mask(LOG_GUEST_ERROR,
                      "Trying to read invalid spr %d (0x%03x) at "
                      TARGET_FMT_lx "\n", sprn, sprn, ctx->base.pc_next - 4);

        /* The behaviour depends on MSR:PR and SPR# bit 0x10,
         * it can generate a priv, a hv emu or a no-op
         */
        if (sprn &
 0x10) {
            if (ctx->pr) {
                gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
            }
        } else {
            if (ctx->pr || sprn == 0 || sprn == 4 || sprn == 5 || sprn == 6) {
                gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
            }
        }
    }
}

static void gen_mfspr(DisasContext *ctx)
{
    gen_op_mfspr(ctx);
}

/* mftb: the time base is exposed as SPRs, so reuse the mfspr path. */
static void gen_mftb(DisasContext *ctx)
{
    gen_op_mfspr(ctx);
}

/* mtcrf mtocrf: bit 20 set selects the one-field (mtocrf) form,
 * otherwise every CR field whose CRM bit is set is written from rS.
 */
static void gen_mtcrf(DisasContext *ctx)
{
    uint32_t crm, crn;

    crm = CRM(ctx->opcode);
    if (likely((ctx->opcode & 0x00100000))) {
        if (crm && ((crm & (crm - 1)) == 0)) {
            TCGv_i32 temp = tcg_temp_new_i32();
            crn = ctz32 (crm);
            tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
            tcg_gen_shri_i32(temp, temp, crn * 4);
            tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf);
            tcg_temp_free_i32(temp);
        }
    } else {
        TCGv_i32 temp = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
        for (crn = 0 ; crn < 8 ; crn++) {
            if (crm & (1 << crn)) {
                tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4);
                tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf);
            }
        }
        tcg_temp_free_i32(temp);
    }
}

/* mtmsr */
#if defined(TARGET_PPC64)
static void gen_mtmsrd(DisasContext *ctx)
{
    CHK_SV;

#if !defined(CONFIG_USER_ONLY)
    if (ctx->opcode & 0x00010000) {
        /* Special form that does not need any synchronisation */
        /* L=1: only RI and EE are transferred, inline. */
        TCGv t0 = tcg_temp_new();
        tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE));
        tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE)));
        tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
        tcg_temp_free(t0);
    } else {
        /* XXX: we need to update nip before the store
         * if we enter power saving mode, we will exit the loop
         * directly from ppc_store_msr
         */
        gen_update_nip(ctx, ctx->base.pc_next);
        gen_helper_store_msr(cpu_env, cpu_gpr[rS(ctx->opcode)]);
        /* Must stop the translation as machine state (may have) changed */
        /* Note that mtmsr is not always defined as context-synchronizing */
        gen_stop_exception(ctx);
    }
#endif /* !defined(CONFIG_USER_ONLY) */
}
#endif /* defined(TARGET_PPC64) */

static void gen_mtmsr(DisasContext *ctx)
{
    CHK_SV;

#if !defined(CONFIG_USER_ONLY)
    if (ctx->opcode & 0x00010000) {
        /* Special form that does not need any synchronisation */
        /* L=1: only RI and EE are transferred, inline. */
        TCGv t0 = tcg_temp_new();
        tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE));
        tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE)));
        tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
        tcg_temp_free(t0);
    } else {
        TCGv msr = tcg_temp_new();

        /* XXX: we need to update nip before the store
         * if we enter power saving mode, we will exit the loop
         * directly from ppc_store_msr
         */
        gen_update_nip(ctx, ctx->base.pc_next);
#if defined(TARGET_PPC64)
        /* 32-bit mtmsr only replaces the low 32 bits of the MSR. */
        tcg_gen_deposit_tl(msr, cpu_msr, cpu_gpr[rS(ctx->opcode)], 0, 32);
#else
        tcg_gen_mov_tl(msr, cpu_gpr[rS(ctx->opcode)]);
#endif
        gen_helper_store_msr(cpu_env, msr);
        tcg_temp_free(msr);
        /* Must stop the translation as machine state (may have) changed */
        /* Note that mtmsr is not always defined as context-synchronizing */
        gen_stop_exception(ctx);
    }
#endif
}

/* mtspr: dispatch through the per-SPR write callback table, picking
 * the user/hypervisor/supervisor variant from the current privilege.
 */
static void gen_mtspr(DisasContext *ctx)
{
    void (*write_cb)(DisasContext *ctx, int sprn, int gprn);
    uint32_t sprn = SPR(ctx->opcode);

#if defined(CONFIG_USER_ONLY)
    write_cb = ctx->spr_cb[sprn].uea_write;
#else
    if (ctx->pr) {
        write_cb = ctx->spr_cb[sprn].uea_write;
    } else if (ctx->hv) {
        write_cb = ctx->spr_cb[sprn].hea_write;
    } else {
        write_cb = ctx->spr_cb[sprn].oea_write;
    }
#endif
    if (likely(write_cb != NULL)) {
        if (likely(write_cb != SPR_NOACCESS)) {
            (*write_cb)(ctx, sprn, rS(ctx->opcode));
        } else {
            /* Privilege exception */
            qemu_log_mask(LOG_GUEST_ERROR, "Trying to write privileged spr "
                          "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
                          ctx->base.pc_next - 4);
            gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
        }
    } else {
        /* ISA 2.07 defines these as no-ops */
        if ((ctx->insns_flags2 & PPC2_ISA207S) &&
            (sprn >= 808 && sprn <= 811)) {
            /* This is a nop */
            return;
        }

        /* Not defined */
        qemu_log_mask(LOG_GUEST_ERROR,
                      "Trying to write invalid spr %d (0x%03x) at "
                      TARGET_FMT_lx "\n", sprn, sprn, ctx->base.pc_next - 4);


        /* The behaviour depends on MSR:PR and SPR# bit 0x10,
         * it can generate a priv, a hv emu or a no-op
         */
        if (sprn & 0x10) {
            if (ctx->pr) {
                gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
            }
        } else {
            if (ctx->pr || sprn == 0) {
                gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
            }
        }
    }
}

#if defined(TARGET_PPC64)
/* setb: set rD to -1, 1 or 0 according to the LT/GT bits of crfS. */
static void gen_setb(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t8 = tcg_temp_new_i32();
    TCGv_i32 tm1 = tcg_temp_new_i32();
    int crf = crfS(ctx->opcode);

    tcg_gen_setcondi_i32(TCG_COND_GEU, t0, cpu_crf[crf], 4);
    tcg_gen_movi_i32(t8, 8);
    tcg_gen_movi_i32(tm1, -1);
    tcg_gen_movcond_i32(TCG_COND_GEU, t0, cpu_crf[crf], t8, tm1, t0);
    tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);

    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t8);
    tcg_temp_free_i32(tm1);
}
#endif

/*** Cache management ***/

/* dcbf */
static void gen_dcbf(DisasContext *ctx)
{
    /* XXX: specification says this is treated as a load by the MMU */
    TCGv t0;
gen_set_access_type(ctx, ACCESS_CACHE); 4386 t0 = tcg_temp_new(); 4387 gen_addr_reg_index(ctx, t0); 4388 gen_qemu_ld8u(ctx, t0, t0); 4389 tcg_temp_free(t0); 4390 } 4391 4392 /* dcbi (Supervisor only) */ 4393 static void gen_dcbi(DisasContext *ctx) 4394 { 4395 #if defined(CONFIG_USER_ONLY) 4396 GEN_PRIV; 4397 #else 4398 TCGv EA, val; 4399 4400 CHK_SV; 4401 EA = tcg_temp_new(); 4402 gen_set_access_type(ctx, ACCESS_CACHE); 4403 gen_addr_reg_index(ctx, EA); 4404 val = tcg_temp_new(); 4405 /* XXX: specification says this should be treated as a store by the MMU */ 4406 gen_qemu_ld8u(ctx, val, EA); 4407 gen_qemu_st8(ctx, val, EA); 4408 tcg_temp_free(val); 4409 tcg_temp_free(EA); 4410 #endif /* defined(CONFIG_USER_ONLY) */ 4411 } 4412 4413 /* dcdst */ 4414 static void gen_dcbst(DisasContext *ctx) 4415 { 4416 /* XXX: specification say this is treated as a load by the MMU */ 4417 TCGv t0; 4418 gen_set_access_type(ctx, ACCESS_CACHE); 4419 t0 = tcg_temp_new(); 4420 gen_addr_reg_index(ctx, t0); 4421 gen_qemu_ld8u(ctx, t0, t0); 4422 tcg_temp_free(t0); 4423 } 4424 4425 /* dcbt */ 4426 static void gen_dcbt(DisasContext *ctx) 4427 { 4428 /* interpreted as no-op */ 4429 /* XXX: specification say this is treated as a load by the MMU 4430 * but does not generate any exception 4431 */ 4432 } 4433 4434 /* dcbtst */ 4435 static void gen_dcbtst(DisasContext *ctx) 4436 { 4437 /* interpreted as no-op */ 4438 /* XXX: specification say this is treated as a load by the MMU 4439 * but does not generate any exception 4440 */ 4441 } 4442 4443 /* dcbtls */ 4444 static void gen_dcbtls(DisasContext *ctx) 4445 { 4446 /* Always fails locking the cache */ 4447 TCGv t0 = tcg_temp_new(); 4448 gen_load_spr(t0, SPR_Exxx_L1CSR0); 4449 tcg_gen_ori_tl(t0, t0, L1CSR0_CUL); 4450 gen_store_spr(SPR_Exxx_L1CSR0, t0); 4451 tcg_temp_free(t0); 4452 } 4453 4454 /* dcbz */ 4455 static void gen_dcbz(DisasContext *ctx) 4456 { 4457 TCGv tcgv_addr; 4458 TCGv_i32 tcgv_op; 4459 4460 gen_set_access_type(ctx, ACCESS_CACHE); 
4461 tcgv_addr = tcg_temp_new(); 4462 tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000); 4463 gen_addr_reg_index(ctx, tcgv_addr); 4464 gen_helper_dcbz(cpu_env, tcgv_addr, tcgv_op); 4465 tcg_temp_free(tcgv_addr); 4466 tcg_temp_free_i32(tcgv_op); 4467 } 4468 4469 /* dst / dstt */ 4470 static void gen_dst(DisasContext *ctx) 4471 { 4472 if (rA(ctx->opcode) == 0) { 4473 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 4474 } else { 4475 /* interpreted as no-op */ 4476 } 4477 } 4478 4479 /* dstst /dststt */ 4480 static void gen_dstst(DisasContext *ctx) 4481 { 4482 if (rA(ctx->opcode) == 0) { 4483 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 4484 } else { 4485 /* interpreted as no-op */ 4486 } 4487 4488 } 4489 4490 /* dss / dssall */ 4491 static void gen_dss(DisasContext *ctx) 4492 { 4493 /* interpreted as no-op */ 4494 } 4495 4496 /* icbi */ 4497 static void gen_icbi(DisasContext *ctx) 4498 { 4499 TCGv t0; 4500 gen_set_access_type(ctx, ACCESS_CACHE); 4501 t0 = tcg_temp_new(); 4502 gen_addr_reg_index(ctx, t0); 4503 gen_helper_icbi(cpu_env, t0); 4504 tcg_temp_free(t0); 4505 } 4506 4507 /* Optional: */ 4508 /* dcba */ 4509 static void gen_dcba(DisasContext *ctx) 4510 { 4511 /* interpreted as no-op */ 4512 /* XXX: specification say this is treated as a store by the MMU 4513 * but does not generate any exception 4514 */ 4515 } 4516 4517 /*** Segment register manipulation ***/ 4518 /* Supervisor only: */ 4519 4520 /* mfsr */ 4521 static void gen_mfsr(DisasContext *ctx) 4522 { 4523 #if defined(CONFIG_USER_ONLY) 4524 GEN_PRIV; 4525 #else 4526 TCGv t0; 4527 4528 CHK_SV; 4529 t0 = tcg_const_tl(SR(ctx->opcode)); 4530 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 4531 tcg_temp_free(t0); 4532 #endif /* defined(CONFIG_USER_ONLY) */ 4533 } 4534 4535 /* mfsrin */ 4536 static void gen_mfsrin(DisasContext *ctx) 4537 { 4538 #if defined(CONFIG_USER_ONLY) 4539 GEN_PRIV; 4540 #else 4541 TCGv t0; 4542 4543 CHK_SV; 4544 t0 = tcg_temp_new(); 4545 tcg_gen_extract_tl(t0, 
cpu_gpr[rB(ctx->opcode)], 28, 4);
    gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mtsr */
static void gen_mtsr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_const_tl(SR(ctx->opcode));
    gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mtsrin */
static void gen_mtsrin(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;
    CHK_SV;

    t0 = tcg_temp_new();
    /* SR number is taken from bits 28..31 of rB. */
    tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
    /* NOTE(review): value register decoded via the rD field here (the rS
     * and rD fields cover the same opcode bits) — confirm intentional. */
    gen_helper_store_sr(cpu_env, t0, cpu_gpr[rD(ctx->opcode)]);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

#if defined(TARGET_PPC64)
/* Specific implementation for PowerPC 64 "bridge" emulation using SLB */

/* mfsr */
static void gen_mfsr_64b(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_const_tl(SR(ctx->opcode));
    gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mfsrin */
static void gen_mfsrin_64b(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
    gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mtsr */
static void gen_mtsr_64b(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_const_tl(SR(ctx->opcode));
    gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mtsrin */
static void gen_mtsrin_64b(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
    gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* slbmte */
static void gen_slbmte(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    gen_helper_store_slb(cpu_env, cpu_gpr[rB(ctx->opcode)],
                         cpu_gpr[rS(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* slbmfee: read the ESID half of an SLB entry selected by rB */
static void gen_slbmfee(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    gen_helper_load_slb_esid(cpu_gpr[rS(ctx->opcode)], cpu_env,
                             cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* slbmfev: read the VSID half of an SLB entry selected by rB */
static void gen_slbmfev(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    gen_helper_load_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env,
                             cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

static void gen_slbfee_(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    TCGLabel *l1, *l2;

    if (unlikely(ctx->pr)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    gen_helper_find_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env,
                             cpu_gpr[rB(ctx->opcode)]);
    l1 = gen_new_label();
    l2 = gen_new_label();
    /* Seed CR0 from XER.SO; EQ is or'ed in below when the lookup hit. */
    tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
    /* -1 from the helper means "no matching SLB entry". */
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rS(ctx->opcode)], -1, l1);
tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_gpr[rS(ctx->opcode)], 0);
    gen_set_label(l2);
#endif
}
#endif /* defined(TARGET_PPC64) */

/*** Lookaside buffer management ***/
/* Optional & supervisor only: */

/* tlbia */
static void gen_tlbia(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_HV;

    gen_helper_tlbia(cpu_env);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbiel */
static void gen_tlbiel(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbie */
static void gen_tlbie(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv_i32 t1;

    if (ctx->gtse) {
        CHK_SV; /* If gtse is set then tlbie is supervisor privileged */
    } else {
        CHK_HV; /* Else hypervisor privileged */
    }

    if (NARROW_MODE(ctx)) {
        /* 32-bit mode: only the low 32 bits of rB form the address. */
        TCGv t0 = tcg_temp_new();
        tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
        gen_helper_tlbie(cpu_env, t0);
        tcg_temp_free(t0);
    } else {
        gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
    }
    /* Record that a global flush is pending until the next tlbsync. */
    t1 = tcg_temp_new_i32();
    tcg_gen_ld_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
    tcg_gen_ori_i32(t1, t1, TLB_NEED_GLOBAL_FLUSH);
    tcg_gen_st_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
    tcg_temp_free_i32(t1);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbsync */
static void gen_tlbsync(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else

    if (ctx->gtse) {
        CHK_SV; /* If gtse is set then tlbsync is supervisor privileged */
    } else {
        CHK_HV; /* Else hypervisor privileged */
    }

    /* BookS does both ptesync and tlbsync make tlbsync a nop for server */
    if (ctx->insns_flags & PPC_BOOKE) {
        gen_check_tlb_flush(ctx, true);
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

#if defined(TARGET_PPC64)
/* slbia */
static void gen_slbia(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    gen_helper_slbia(cpu_env);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* slbie */
static void gen_slbie(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    gen_helper_slbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* slbieg */
static void gen_slbieg(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    gen_helper_slbieg(cpu_env, cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* slbsync */
static void gen_slbsync(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_check_tlb_flush(ctx, true);
#endif /* defined(CONFIG_USER_ONLY) */
}

#endif /* defined(TARGET_PPC64) */

/*** External control ***/
/* Optional: */

/* eciwx */
static void gen_eciwx(DisasContext *ctx)
{
    TCGv t0;
    /* Should check EAR[E] ! */
    gen_set_access_type(ctx, ACCESS_EXT);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    /* Aligned 32-bit external-control load into rD. */
    tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
                       DEF_MEMOP(MO_UL | MO_ALIGN));
    tcg_temp_free(t0);
}

/* ecowx */
static void gen_ecowx(DisasContext *ctx)
{
    TCGv t0;
    /* Should check EAR[E] ! */
    gen_set_access_type(ctx, ACCESS_EXT);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    /* Aligned 32-bit external-control store (rS/rD share opcode bits). */
    tcg_gen_qemu_st_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
                       DEF_MEMOP(MO_UL | MO_ALIGN));
    tcg_temp_free(t0);
}

/* PowerPC 601 specific instructions */

/* abs - abs. */
static void gen_abs(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    /* rA >= 0: copy; rA < 0: negate. */
    tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l1);
    tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    gen_set_label(l2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* abso - abso. */
static void gen_abso(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    TCGLabel *l3 = gen_new_label();
    /* Start with XER OV disabled, the most likely case */
    tcg_gen_movi_tl(cpu_ov, 0);
    tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l2);
    /* 0x80000000 has no positive counterpart: flag overflow, keep value. */
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[rA(ctx->opcode)], 0x80000000, l1);
    tcg_gen_movi_tl(cpu_ov, 1);
    tcg_gen_movi_tl(cpu_so, 1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_br(l3);
    gen_set_label(l2);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    gen_set_label(l3);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* clcs */
static void gen_clcs(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_const_i32(rA(ctx->opcode));
    gen_helper_clcs(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free_i32(t0);
    /* Rc=1 sets CR0 to an undefined state */
}

/* div - div.
*/ 4916 static void gen_div(DisasContext *ctx) 4917 { 4918 gen_helper_div(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 4919 cpu_gpr[rB(ctx->opcode)]); 4920 if (unlikely(Rc(ctx->opcode) != 0)) 4921 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4922 } 4923 4924 /* divo - divo. */ 4925 static void gen_divo(DisasContext *ctx) 4926 { 4927 gen_helper_divo(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 4928 cpu_gpr[rB(ctx->opcode)]); 4929 if (unlikely(Rc(ctx->opcode) != 0)) 4930 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4931 } 4932 4933 /* divs - divs. */ 4934 static void gen_divs(DisasContext *ctx) 4935 { 4936 gen_helper_divs(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 4937 cpu_gpr[rB(ctx->opcode)]); 4938 if (unlikely(Rc(ctx->opcode) != 0)) 4939 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4940 } 4941 4942 /* divso - divso. */ 4943 static void gen_divso(DisasContext *ctx) 4944 { 4945 gen_helper_divso(cpu_gpr[rD(ctx->opcode)], cpu_env, 4946 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 4947 if (unlikely(Rc(ctx->opcode) != 0)) 4948 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4949 } 4950 4951 /* doz - doz. */ 4952 static void gen_doz(DisasContext *ctx) 4953 { 4954 TCGLabel *l1 = gen_new_label(); 4955 TCGLabel *l2 = gen_new_label(); 4956 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1); 4957 tcg_gen_sub_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4958 tcg_gen_br(l2); 4959 gen_set_label(l1); 4960 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 4961 gen_set_label(l2); 4962 if (unlikely(Rc(ctx->opcode) != 0)) 4963 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4964 } 4965 4966 /* dozo - dozo. 
*/ 4967 static void gen_dozo(DisasContext *ctx) 4968 { 4969 TCGLabel *l1 = gen_new_label(); 4970 TCGLabel *l2 = gen_new_label(); 4971 TCGv t0 = tcg_temp_new(); 4972 TCGv t1 = tcg_temp_new(); 4973 TCGv t2 = tcg_temp_new(); 4974 /* Start with XER OV disabled, the most likely case */ 4975 tcg_gen_movi_tl(cpu_ov, 0); 4976 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1); 4977 tcg_gen_sub_tl(t0, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4978 tcg_gen_xor_tl(t1, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4979 tcg_gen_xor_tl(t2, cpu_gpr[rA(ctx->opcode)], t0); 4980 tcg_gen_andc_tl(t1, t1, t2); 4981 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0); 4982 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2); 4983 tcg_gen_movi_tl(cpu_ov, 1); 4984 tcg_gen_movi_tl(cpu_so, 1); 4985 tcg_gen_br(l2); 4986 gen_set_label(l1); 4987 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 4988 gen_set_label(l2); 4989 tcg_temp_free(t0); 4990 tcg_temp_free(t1); 4991 tcg_temp_free(t2); 4992 if (unlikely(Rc(ctx->opcode) != 0)) 4993 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4994 } 4995 4996 /* dozi */ 4997 static void gen_dozi(DisasContext *ctx) 4998 { 4999 target_long simm = SIMM(ctx->opcode); 5000 TCGLabel *l1 = gen_new_label(); 5001 TCGLabel *l2 = gen_new_label(); 5002 tcg_gen_brcondi_tl(TCG_COND_LT, cpu_gpr[rA(ctx->opcode)], simm, l1); 5003 tcg_gen_subfi_tl(cpu_gpr[rD(ctx->opcode)], simm, cpu_gpr[rA(ctx->opcode)]); 5004 tcg_gen_br(l2); 5005 gen_set_label(l1); 5006 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 5007 gen_set_label(l2); 5008 if (unlikely(Rc(ctx->opcode) != 0)) 5009 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5010 } 5011 5012 /* lscbx - lscbx. 
*/ 5013 static void gen_lscbx(DisasContext *ctx) 5014 { 5015 TCGv t0 = tcg_temp_new(); 5016 TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode)); 5017 TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode)); 5018 TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode)); 5019 5020 gen_addr_reg_index(ctx, t0); 5021 gen_helper_lscbx(t0, cpu_env, t0, t1, t2, t3); 5022 tcg_temp_free_i32(t1); 5023 tcg_temp_free_i32(t2); 5024 tcg_temp_free_i32(t3); 5025 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F); 5026 tcg_gen_or_tl(cpu_xer, cpu_xer, t0); 5027 if (unlikely(Rc(ctx->opcode) != 0)) 5028 gen_set_Rc0(ctx, t0); 5029 tcg_temp_free(t0); 5030 } 5031 5032 /* maskg - maskg. */ 5033 static void gen_maskg(DisasContext *ctx) 5034 { 5035 TCGLabel *l1 = gen_new_label(); 5036 TCGv t0 = tcg_temp_new(); 5037 TCGv t1 = tcg_temp_new(); 5038 TCGv t2 = tcg_temp_new(); 5039 TCGv t3 = tcg_temp_new(); 5040 tcg_gen_movi_tl(t3, 0xFFFFFFFF); 5041 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5042 tcg_gen_andi_tl(t1, cpu_gpr[rS(ctx->opcode)], 0x1F); 5043 tcg_gen_addi_tl(t2, t0, 1); 5044 tcg_gen_shr_tl(t2, t3, t2); 5045 tcg_gen_shr_tl(t3, t3, t1); 5046 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], t2, t3); 5047 tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1); 5048 tcg_gen_neg_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5049 gen_set_label(l1); 5050 tcg_temp_free(t0); 5051 tcg_temp_free(t1); 5052 tcg_temp_free(t2); 5053 tcg_temp_free(t3); 5054 if (unlikely(Rc(ctx->opcode) != 0)) 5055 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5056 } 5057 5058 /* maskir - maskir. 
*/ 5059 static void gen_maskir(DisasContext *ctx) 5060 { 5061 TCGv t0 = tcg_temp_new(); 5062 TCGv t1 = tcg_temp_new(); 5063 tcg_gen_and_tl(t0, cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 5064 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 5065 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5066 tcg_temp_free(t0); 5067 tcg_temp_free(t1); 5068 if (unlikely(Rc(ctx->opcode) != 0)) 5069 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5070 } 5071 5072 /* mul - mul. */ 5073 static void gen_mul(DisasContext *ctx) 5074 { 5075 TCGv_i64 t0 = tcg_temp_new_i64(); 5076 TCGv_i64 t1 = tcg_temp_new_i64(); 5077 TCGv t2 = tcg_temp_new(); 5078 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]); 5079 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]); 5080 tcg_gen_mul_i64(t0, t0, t1); 5081 tcg_gen_trunc_i64_tl(t2, t0); 5082 gen_store_spr(SPR_MQ, t2); 5083 tcg_gen_shri_i64(t1, t0, 32); 5084 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1); 5085 tcg_temp_free_i64(t0); 5086 tcg_temp_free_i64(t1); 5087 tcg_temp_free(t2); 5088 if (unlikely(Rc(ctx->opcode) != 0)) 5089 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5090 } 5091 5092 /* mulo - mulo. 
*/ 5093 static void gen_mulo(DisasContext *ctx) 5094 { 5095 TCGLabel *l1 = gen_new_label(); 5096 TCGv_i64 t0 = tcg_temp_new_i64(); 5097 TCGv_i64 t1 = tcg_temp_new_i64(); 5098 TCGv t2 = tcg_temp_new(); 5099 /* Start with XER OV disabled, the most likely case */ 5100 tcg_gen_movi_tl(cpu_ov, 0); 5101 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]); 5102 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]); 5103 tcg_gen_mul_i64(t0, t0, t1); 5104 tcg_gen_trunc_i64_tl(t2, t0); 5105 gen_store_spr(SPR_MQ, t2); 5106 tcg_gen_shri_i64(t1, t0, 32); 5107 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1); 5108 tcg_gen_ext32s_i64(t1, t0); 5109 tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1); 5110 tcg_gen_movi_tl(cpu_ov, 1); 5111 tcg_gen_movi_tl(cpu_so, 1); 5112 gen_set_label(l1); 5113 tcg_temp_free_i64(t0); 5114 tcg_temp_free_i64(t1); 5115 tcg_temp_free(t2); 5116 if (unlikely(Rc(ctx->opcode) != 0)) 5117 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5118 } 5119 5120 /* nabs - nabs. */ 5121 static void gen_nabs(DisasContext *ctx) 5122 { 5123 TCGLabel *l1 = gen_new_label(); 5124 TCGLabel *l2 = gen_new_label(); 5125 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1); 5126 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5127 tcg_gen_br(l2); 5128 gen_set_label(l1); 5129 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5130 gen_set_label(l2); 5131 if (unlikely(Rc(ctx->opcode) != 0)) 5132 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5133 } 5134 5135 /* nabso - nabso. 
*/ 5136 static void gen_nabso(DisasContext *ctx) 5137 { 5138 TCGLabel *l1 = gen_new_label(); 5139 TCGLabel *l2 = gen_new_label(); 5140 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1); 5141 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5142 tcg_gen_br(l2); 5143 gen_set_label(l1); 5144 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5145 gen_set_label(l2); 5146 /* nabs never overflows */ 5147 tcg_gen_movi_tl(cpu_ov, 0); 5148 if (unlikely(Rc(ctx->opcode) != 0)) 5149 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5150 } 5151 5152 /* rlmi - rlmi. */ 5153 static void gen_rlmi(DisasContext *ctx) 5154 { 5155 uint32_t mb = MB(ctx->opcode); 5156 uint32_t me = ME(ctx->opcode); 5157 TCGv t0 = tcg_temp_new(); 5158 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5159 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 5160 tcg_gen_andi_tl(t0, t0, MASK(mb, me)); 5161 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~MASK(mb, me)); 5162 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], t0); 5163 tcg_temp_free(t0); 5164 if (unlikely(Rc(ctx->opcode) != 0)) 5165 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5166 } 5167 5168 /* rrib - rrib. */ 5169 static void gen_rrib(DisasContext *ctx) 5170 { 5171 TCGv t0 = tcg_temp_new(); 5172 TCGv t1 = tcg_temp_new(); 5173 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5174 tcg_gen_movi_tl(t1, 0x80000000); 5175 tcg_gen_shr_tl(t1, t1, t0); 5176 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 5177 tcg_gen_and_tl(t0, t0, t1); 5178 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], t1); 5179 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5180 tcg_temp_free(t0); 5181 tcg_temp_free(t1); 5182 if (unlikely(Rc(ctx->opcode) != 0)) 5183 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5184 } 5185 5186 /* sle - sle. 
*/ 5187 static void gen_sle(DisasContext *ctx) 5188 { 5189 TCGv t0 = tcg_temp_new(); 5190 TCGv t1 = tcg_temp_new(); 5191 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5192 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5193 tcg_gen_subfi_tl(t1, 32, t1); 5194 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 5195 tcg_gen_or_tl(t1, t0, t1); 5196 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5197 gen_store_spr(SPR_MQ, t1); 5198 tcg_temp_free(t0); 5199 tcg_temp_free(t1); 5200 if (unlikely(Rc(ctx->opcode) != 0)) 5201 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5202 } 5203 5204 /* sleq - sleq. */ 5205 static void gen_sleq(DisasContext *ctx) 5206 { 5207 TCGv t0 = tcg_temp_new(); 5208 TCGv t1 = tcg_temp_new(); 5209 TCGv t2 = tcg_temp_new(); 5210 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5211 tcg_gen_movi_tl(t2, 0xFFFFFFFF); 5212 tcg_gen_shl_tl(t2, t2, t0); 5213 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 5214 gen_load_spr(t1, SPR_MQ); 5215 gen_store_spr(SPR_MQ, t0); 5216 tcg_gen_and_tl(t0, t0, t2); 5217 tcg_gen_andc_tl(t1, t1, t2); 5218 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5219 tcg_temp_free(t0); 5220 tcg_temp_free(t1); 5221 tcg_temp_free(t2); 5222 if (unlikely(Rc(ctx->opcode) != 0)) 5223 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5224 } 5225 5226 /* sliq - sliq. */ 5227 static void gen_sliq(DisasContext *ctx) 5228 { 5229 int sh = SH(ctx->opcode); 5230 TCGv t0 = tcg_temp_new(); 5231 TCGv t1 = tcg_temp_new(); 5232 tcg_gen_shli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5233 tcg_gen_shri_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 5234 tcg_gen_or_tl(t1, t0, t1); 5235 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5236 gen_store_spr(SPR_MQ, t1); 5237 tcg_temp_free(t0); 5238 tcg_temp_free(t1); 5239 if (unlikely(Rc(ctx->opcode) != 0)) 5240 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5241 } 5242 5243 /* slliq - slliq. 
*/ 5244 static void gen_slliq(DisasContext *ctx) 5245 { 5246 int sh = SH(ctx->opcode); 5247 TCGv t0 = tcg_temp_new(); 5248 TCGv t1 = tcg_temp_new(); 5249 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5250 gen_load_spr(t1, SPR_MQ); 5251 gen_store_spr(SPR_MQ, t0); 5252 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU << sh)); 5253 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU << sh)); 5254 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5255 tcg_temp_free(t0); 5256 tcg_temp_free(t1); 5257 if (unlikely(Rc(ctx->opcode) != 0)) 5258 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5259 } 5260 5261 /* sllq - sllq. */ 5262 static void gen_sllq(DisasContext *ctx) 5263 { 5264 TCGLabel *l1 = gen_new_label(); 5265 TCGLabel *l2 = gen_new_label(); 5266 TCGv t0 = tcg_temp_local_new(); 5267 TCGv t1 = tcg_temp_local_new(); 5268 TCGv t2 = tcg_temp_local_new(); 5269 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 5270 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 5271 tcg_gen_shl_tl(t1, t1, t2); 5272 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 5273 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 5274 gen_load_spr(t0, SPR_MQ); 5275 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5276 tcg_gen_br(l2); 5277 gen_set_label(l1); 5278 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 5279 gen_load_spr(t2, SPR_MQ); 5280 tcg_gen_andc_tl(t1, t2, t1); 5281 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5282 gen_set_label(l2); 5283 tcg_temp_free(t0); 5284 tcg_temp_free(t1); 5285 tcg_temp_free(t2); 5286 if (unlikely(Rc(ctx->opcode) != 0)) 5287 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5288 } 5289 5290 /* slq - slq. 
*/ 5291 static void gen_slq(DisasContext *ctx) 5292 { 5293 TCGLabel *l1 = gen_new_label(); 5294 TCGv t0 = tcg_temp_new(); 5295 TCGv t1 = tcg_temp_new(); 5296 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5297 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5298 tcg_gen_subfi_tl(t1, 32, t1); 5299 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 5300 tcg_gen_or_tl(t1, t0, t1); 5301 gen_store_spr(SPR_MQ, t1); 5302 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20); 5303 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5304 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1); 5305 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 5306 gen_set_label(l1); 5307 tcg_temp_free(t0); 5308 tcg_temp_free(t1); 5309 if (unlikely(Rc(ctx->opcode) != 0)) 5310 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5311 } 5312 5313 /* sraiq - sraiq. */ 5314 static void gen_sraiq(DisasContext *ctx) 5315 { 5316 int sh = SH(ctx->opcode); 5317 TCGLabel *l1 = gen_new_label(); 5318 TCGv t0 = tcg_temp_new(); 5319 TCGv t1 = tcg_temp_new(); 5320 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5321 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 5322 tcg_gen_or_tl(t0, t0, t1); 5323 gen_store_spr(SPR_MQ, t0); 5324 tcg_gen_movi_tl(cpu_ca, 0); 5325 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1); 5326 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1); 5327 tcg_gen_movi_tl(cpu_ca, 1); 5328 gen_set_label(l1); 5329 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh); 5330 tcg_temp_free(t0); 5331 tcg_temp_free(t1); 5332 if (unlikely(Rc(ctx->opcode) != 0)) 5333 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5334 } 5335 5336 /* sraq - sraq. 
*/ 5337 static void gen_sraq(DisasContext *ctx) 5338 { 5339 TCGLabel *l1 = gen_new_label(); 5340 TCGLabel *l2 = gen_new_label(); 5341 TCGv t0 = tcg_temp_new(); 5342 TCGv t1 = tcg_temp_local_new(); 5343 TCGv t2 = tcg_temp_local_new(); 5344 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 5345 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 5346 tcg_gen_sar_tl(t1, cpu_gpr[rS(ctx->opcode)], t2); 5347 tcg_gen_subfi_tl(t2, 32, t2); 5348 tcg_gen_shl_tl(t2, cpu_gpr[rS(ctx->opcode)], t2); 5349 tcg_gen_or_tl(t0, t0, t2); 5350 gen_store_spr(SPR_MQ, t0); 5351 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 5352 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l1); 5353 tcg_gen_mov_tl(t2, cpu_gpr[rS(ctx->opcode)]); 5354 tcg_gen_sari_tl(t1, cpu_gpr[rS(ctx->opcode)], 31); 5355 gen_set_label(l1); 5356 tcg_temp_free(t0); 5357 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t1); 5358 tcg_gen_movi_tl(cpu_ca, 0); 5359 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2); 5360 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l2); 5361 tcg_gen_movi_tl(cpu_ca, 1); 5362 gen_set_label(l2); 5363 tcg_temp_free(t1); 5364 tcg_temp_free(t2); 5365 if (unlikely(Rc(ctx->opcode) != 0)) 5366 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5367 } 5368 5369 /* sre - sre. */ 5370 static void gen_sre(DisasContext *ctx) 5371 { 5372 TCGv t0 = tcg_temp_new(); 5373 TCGv t1 = tcg_temp_new(); 5374 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5375 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5376 tcg_gen_subfi_tl(t1, 32, t1); 5377 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 5378 tcg_gen_or_tl(t1, t0, t1); 5379 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5380 gen_store_spr(SPR_MQ, t1); 5381 tcg_temp_free(t0); 5382 tcg_temp_free(t1); 5383 if (unlikely(Rc(ctx->opcode) != 0)) 5384 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5385 } 5386 5387 /* srea - srea. 
*/ 5388 static void gen_srea(DisasContext *ctx) 5389 { 5390 TCGv t0 = tcg_temp_new(); 5391 TCGv t1 = tcg_temp_new(); 5392 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5393 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5394 gen_store_spr(SPR_MQ, t0); 5395 tcg_gen_sar_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t1); 5396 tcg_temp_free(t0); 5397 tcg_temp_free(t1); 5398 if (unlikely(Rc(ctx->opcode) != 0)) 5399 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5400 } 5401 5402 /* sreq */ 5403 static void gen_sreq(DisasContext *ctx) 5404 { 5405 TCGv t0 = tcg_temp_new(); 5406 TCGv t1 = tcg_temp_new(); 5407 TCGv t2 = tcg_temp_new(); 5408 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5409 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 5410 tcg_gen_shr_tl(t1, t1, t0); 5411 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 5412 gen_load_spr(t2, SPR_MQ); 5413 gen_store_spr(SPR_MQ, t0); 5414 tcg_gen_and_tl(t0, t0, t1); 5415 tcg_gen_andc_tl(t2, t2, t1); 5416 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t2); 5417 tcg_temp_free(t0); 5418 tcg_temp_free(t1); 5419 tcg_temp_free(t2); 5420 if (unlikely(Rc(ctx->opcode) != 0)) 5421 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5422 } 5423 5424 /* sriq */ 5425 static void gen_sriq(DisasContext *ctx) 5426 { 5427 int sh = SH(ctx->opcode); 5428 TCGv t0 = tcg_temp_new(); 5429 TCGv t1 = tcg_temp_new(); 5430 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5431 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 5432 tcg_gen_or_tl(t1, t0, t1); 5433 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5434 gen_store_spr(SPR_MQ, t1); 5435 tcg_temp_free(t0); 5436 tcg_temp_free(t1); 5437 if (unlikely(Rc(ctx->opcode) != 0)) 5438 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5439 } 5440 5441 /* srliq */ 5442 static void gen_srliq(DisasContext *ctx) 5443 { 5444 int sh = SH(ctx->opcode); 5445 TCGv t0 = tcg_temp_new(); 5446 TCGv t1 = tcg_temp_new(); 5447 tcg_gen_rotri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5448 gen_load_spr(t1, SPR_MQ); 5449 
gen_store_spr(SPR_MQ, t0); 5450 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU >> sh)); 5451 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU >> sh)); 5452 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5453 tcg_temp_free(t0); 5454 tcg_temp_free(t1); 5455 if (unlikely(Rc(ctx->opcode) != 0)) 5456 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5457 } 5458 5459 /* srlq */ 5460 static void gen_srlq(DisasContext *ctx) 5461 { 5462 TCGLabel *l1 = gen_new_label(); 5463 TCGLabel *l2 = gen_new_label(); 5464 TCGv t0 = tcg_temp_local_new(); 5465 TCGv t1 = tcg_temp_local_new(); 5466 TCGv t2 = tcg_temp_local_new(); 5467 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 5468 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 5469 tcg_gen_shr_tl(t2, t1, t2); 5470 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 5471 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 5472 gen_load_spr(t0, SPR_MQ); 5473 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t2); 5474 tcg_gen_br(l2); 5475 gen_set_label(l1); 5476 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 5477 tcg_gen_and_tl(t0, t0, t2); 5478 gen_load_spr(t1, SPR_MQ); 5479 tcg_gen_andc_tl(t1, t1, t2); 5480 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5481 gen_set_label(l2); 5482 tcg_temp_free(t0); 5483 tcg_temp_free(t1); 5484 tcg_temp_free(t2); 5485 if (unlikely(Rc(ctx->opcode) != 0)) 5486 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5487 } 5488 5489 /* srq */ 5490 static void gen_srq(DisasContext *ctx) 5491 { 5492 TCGLabel *l1 = gen_new_label(); 5493 TCGv t0 = tcg_temp_new(); 5494 TCGv t1 = tcg_temp_new(); 5495 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5496 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5497 tcg_gen_subfi_tl(t1, 32, t1); 5498 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 5499 tcg_gen_or_tl(t1, t0, t1); 5500 gen_store_spr(SPR_MQ, t1); 5501 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20); 5502 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5503 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 5504 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 
5505 gen_set_label(l1); 5506 tcg_temp_free(t0); 5507 tcg_temp_free(t1); 5508 if (unlikely(Rc(ctx->opcode) != 0)) 5509 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5510 } 5511 5512 /* PowerPC 602 specific instructions */ 5513 5514 /* dsa */ 5515 static void gen_dsa(DisasContext *ctx) 5516 { 5517 /* XXX: TODO */ 5518 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5519 } 5520 5521 /* esa */ 5522 static void gen_esa(DisasContext *ctx) 5523 { 5524 /* XXX: TODO */ 5525 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5526 } 5527 5528 /* mfrom */ 5529 static void gen_mfrom(DisasContext *ctx) 5530 { 5531 #if defined(CONFIG_USER_ONLY) 5532 GEN_PRIV; 5533 #else 5534 CHK_SV; 5535 gen_helper_602_mfrom(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5536 #endif /* defined(CONFIG_USER_ONLY) */ 5537 } 5538 5539 /* 602 - 603 - G2 TLB management */ 5540 5541 /* tlbld */ 5542 static void gen_tlbld_6xx(DisasContext *ctx) 5543 { 5544 #if defined(CONFIG_USER_ONLY) 5545 GEN_PRIV; 5546 #else 5547 CHK_SV; 5548 gen_helper_6xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5549 #endif /* defined(CONFIG_USER_ONLY) */ 5550 } 5551 5552 /* tlbli */ 5553 static void gen_tlbli_6xx(DisasContext *ctx) 5554 { 5555 #if defined(CONFIG_USER_ONLY) 5556 GEN_PRIV; 5557 #else 5558 CHK_SV; 5559 gen_helper_6xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5560 #endif /* defined(CONFIG_USER_ONLY) */ 5561 } 5562 5563 /* 74xx TLB management */ 5564 5565 /* tlbld */ 5566 static void gen_tlbld_74xx(DisasContext *ctx) 5567 { 5568 #if defined(CONFIG_USER_ONLY) 5569 GEN_PRIV; 5570 #else 5571 CHK_SV; 5572 gen_helper_74xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5573 #endif /* defined(CONFIG_USER_ONLY) */ 5574 } 5575 5576 /* tlbli */ 5577 static void gen_tlbli_74xx(DisasContext *ctx) 5578 { 5579 #if defined(CONFIG_USER_ONLY) 5580 GEN_PRIV; 5581 #else 5582 CHK_SV; 5583 gen_helper_74xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5584 #endif /* defined(CONFIG_USER_ONLY) */ 5585 } 5586 5587 /* POWER instructions not in 
PowerPC 601 */

/* clf */
static void gen_clf(DisasContext *ctx)
{
    /* Cache line flush: implemented as no-op */
}

/* cli */
static void gen_cli(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    /* Cache line invalidate: privileged and treated as no-op */
    CHK_SV;
#endif /* defined(CONFIG_USER_ONLY) */
}

/* dclst */
static void gen_dclst(DisasContext *ctx)
{
    /* Data cache line store: treated as no-op */
}

/* mfsri: load the segment register selected by the EA into rD */
static void gen_mfsri(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    int ra = rA(ctx->opcode);
    int rd = rD(ctx->opcode);
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    /* SR number = EA bits [28:31] (4-bit field) */
    tcg_gen_extract_tl(t0, t0, 28, 4);
    gen_helper_load_sr(cpu_gpr[rd], cpu_env, t0);
    tcg_temp_free(t0);
    /* Also copy the result into rA unless rA is r0 or already rD */
    if (ra != 0 && ra != rd)
        tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rd]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* rac: compute the real address of the EA via helper (privileged) */
static void gen_rac(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_rac(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* rfsvc: return from supervisor call; the helper changes interrupt
 * state, so the translation block must end here */
static void gen_rfsvc(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    gen_helper_rfsvc(cpu_env);
    gen_sync_exception(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* svc is not implemented for now */

/* BookE specific instructions */

/* XXX: not implemented on 440 ? */
static void gen_mfapidi(DisasContext *ctx)
{
    /* XXX: TODO */
    gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
}

/* XXX: not implemented on 440 ?
*/ 5671 static void gen_tlbiva(DisasContext *ctx) 5672 { 5673 #if defined(CONFIG_USER_ONLY) 5674 GEN_PRIV; 5675 #else 5676 TCGv t0; 5677 5678 CHK_SV; 5679 t0 = tcg_temp_new(); 5680 gen_addr_reg_index(ctx, t0); 5681 gen_helper_tlbiva(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5682 tcg_temp_free(t0); 5683 #endif /* defined(CONFIG_USER_ONLY) */ 5684 } 5685 5686 /* All 405 MAC instructions are translated here */ 5687 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3, 5688 int ra, int rb, int rt, int Rc) 5689 { 5690 TCGv t0, t1; 5691 5692 t0 = tcg_temp_local_new(); 5693 t1 = tcg_temp_local_new(); 5694 5695 switch (opc3 & 0x0D) { 5696 case 0x05: 5697 /* macchw - macchw. - macchwo - macchwo. */ 5698 /* macchws - macchws. - macchwso - macchwso. */ 5699 /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */ 5700 /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */ 5701 /* mulchw - mulchw. */ 5702 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]); 5703 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16); 5704 tcg_gen_ext16s_tl(t1, t1); 5705 break; 5706 case 0x04: 5707 /* macchwu - macchwu. - macchwuo - macchwuo. */ 5708 /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */ 5709 /* mulchwu - mulchwu. */ 5710 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]); 5711 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16); 5712 tcg_gen_ext16u_tl(t1, t1); 5713 break; 5714 case 0x01: 5715 /* machhw - machhw. - machhwo - machhwo. */ 5716 /* machhws - machhws. - machhwso - machhwso. */ 5717 /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */ 5718 /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */ 5719 /* mulhhw - mulhhw. */ 5720 tcg_gen_sari_tl(t0, cpu_gpr[ra], 16); 5721 tcg_gen_ext16s_tl(t0, t0); 5722 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16); 5723 tcg_gen_ext16s_tl(t1, t1); 5724 break; 5725 case 0x00: 5726 /* machhwu - machhwu. - machhwuo - machhwuo. */ 5727 /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */ 5728 /* mulhhwu - mulhhwu. 
*/ 5729 tcg_gen_shri_tl(t0, cpu_gpr[ra], 16); 5730 tcg_gen_ext16u_tl(t0, t0); 5731 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16); 5732 tcg_gen_ext16u_tl(t1, t1); 5733 break; 5734 case 0x0D: 5735 /* maclhw - maclhw. - maclhwo - maclhwo. */ 5736 /* maclhws - maclhws. - maclhwso - maclhwso. */ 5737 /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */ 5738 /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */ 5739 /* mullhw - mullhw. */ 5740 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]); 5741 tcg_gen_ext16s_tl(t1, cpu_gpr[rb]); 5742 break; 5743 case 0x0C: 5744 /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */ 5745 /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */ 5746 /* mullhwu - mullhwu. */ 5747 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]); 5748 tcg_gen_ext16u_tl(t1, cpu_gpr[rb]); 5749 break; 5750 } 5751 if (opc2 & 0x04) { 5752 /* (n)multiply-and-accumulate (0x0C / 0x0E) */ 5753 tcg_gen_mul_tl(t1, t0, t1); 5754 if (opc2 & 0x02) { 5755 /* nmultiply-and-accumulate (0x0E) */ 5756 tcg_gen_sub_tl(t0, cpu_gpr[rt], t1); 5757 } else { 5758 /* multiply-and-accumulate (0x0C) */ 5759 tcg_gen_add_tl(t0, cpu_gpr[rt], t1); 5760 } 5761 5762 if (opc3 & 0x12) { 5763 /* Check overflow and/or saturate */ 5764 TCGLabel *l1 = gen_new_label(); 5765 5766 if (opc3 & 0x10) { 5767 /* Start with XER OV disabled, the most likely case */ 5768 tcg_gen_movi_tl(cpu_ov, 0); 5769 } 5770 if (opc3 & 0x01) { 5771 /* Signed */ 5772 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1); 5773 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1); 5774 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0); 5775 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1); 5776 if (opc3 & 0x02) { 5777 /* Saturate */ 5778 tcg_gen_sari_tl(t0, cpu_gpr[rt], 31); 5779 tcg_gen_xori_tl(t0, t0, 0x7fffffff); 5780 } 5781 } else { 5782 /* Unsigned */ 5783 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1); 5784 if (opc3 & 0x02) { 5785 /* Saturate */ 5786 tcg_gen_movi_tl(t0, UINT32_MAX); 5787 } 5788 } 5789 if (opc3 & 0x10) { 5790 /* Check overflow */ 5791 tcg_gen_movi_tl(cpu_ov, 1); 5792 tcg_gen_movi_tl(cpu_so, 1); 5793 } 5794 
gen_set_label(l1); 5795 tcg_gen_mov_tl(cpu_gpr[rt], t0); 5796 } 5797 } else { 5798 tcg_gen_mul_tl(cpu_gpr[rt], t0, t1); 5799 } 5800 tcg_temp_free(t0); 5801 tcg_temp_free(t1); 5802 if (unlikely(Rc) != 0) { 5803 /* Update Rc0 */ 5804 gen_set_Rc0(ctx, cpu_gpr[rt]); 5805 } 5806 } 5807 5808 #define GEN_MAC_HANDLER(name, opc2, opc3) \ 5809 static void glue(gen_, name)(DisasContext *ctx) \ 5810 { \ 5811 gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode), \ 5812 rD(ctx->opcode), Rc(ctx->opcode)); \ 5813 } 5814 5815 /* macchw - macchw. */ 5816 GEN_MAC_HANDLER(macchw, 0x0C, 0x05); 5817 /* macchwo - macchwo. */ 5818 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15); 5819 /* macchws - macchws. */ 5820 GEN_MAC_HANDLER(macchws, 0x0C, 0x07); 5821 /* macchwso - macchwso. */ 5822 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17); 5823 /* macchwsu - macchwsu. */ 5824 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06); 5825 /* macchwsuo - macchwsuo. */ 5826 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16); 5827 /* macchwu - macchwu. */ 5828 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04); 5829 /* macchwuo - macchwuo. */ 5830 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14); 5831 /* machhw - machhw. */ 5832 GEN_MAC_HANDLER(machhw, 0x0C, 0x01); 5833 /* machhwo - machhwo. */ 5834 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11); 5835 /* machhws - machhws. */ 5836 GEN_MAC_HANDLER(machhws, 0x0C, 0x03); 5837 /* machhwso - machhwso. */ 5838 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13); 5839 /* machhwsu - machhwsu. */ 5840 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02); 5841 /* machhwsuo - machhwsuo. */ 5842 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12); 5843 /* machhwu - machhwu. */ 5844 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00); 5845 /* machhwuo - machhwuo. */ 5846 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10); 5847 /* maclhw - maclhw. */ 5848 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D); 5849 /* maclhwo - maclhwo. */ 5850 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D); 5851 /* maclhws - maclhws. */ 5852 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F); 5853 /* maclhwso - maclhwso. 
 */
GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
/* maclhwu - maclhwu. */
GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
/* maclhwuo - maclhwuo. */
GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
/* maclhwsu - maclhwsu. */
GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
/* maclhwsuo - maclhwsuo. */
GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
/* nmacchw - nmacchw. */
GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
/* nmacchwo - nmacchwo. */
GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
/* nmacchws - nmacchws. */
GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
/* nmacchwso - nmacchwso. */
GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
/* nmachhw - nmachhw. */
GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
/* nmachhwo - nmachhwo. */
GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
/* nmachhws - nmachhws. */
GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
/* nmachhwso - nmachhwso. */
GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
/* nmaclhw - nmaclhw. */
GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
/* nmaclhwo - nmaclhwo. */
GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
/* nmaclhws - nmaclhws. */
GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
/* nmaclhwso - nmaclhwso. */
GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);

/* mulchw - mulchw. */
GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
/* mulchwu - mulchwu. */
GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
/* mulhhw - mulhhw. */
GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
/* mulhhwu - mulhhwu. */
GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
/* mullhw - mullhw. */
GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
/* mullhwu - mullhwu. */
GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);

/* mfdcr: read a Device Control Register; the DCR number comes from the
 * SPR field of the opcode (privileged) */
static void gen_mfdcr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv dcrn;

    CHK_SV;
    dcrn = tcg_const_tl(SPR(ctx->opcode));
    gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, dcrn);
    tcg_temp_free(dcrn);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mtdcr: write a Device Control Register (privileged) */
static void gen_mtdcr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv dcrn;

    CHK_SV;
    dcrn = tcg_const_tl(SPR(ctx->opcode));
    gen_helper_store_dcr(cpu_env, dcrn, cpu_gpr[rS(ctx->opcode)]);
    tcg_temp_free(dcrn);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mfdcrx: indexed DCR read, DCR number taken from rA */
/* XXX: not implemented on 440 ? */
static void gen_mfdcrx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
                        cpu_gpr[rA(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mtdcrx */
/* XXX: not implemented on 440 ?
 */
static void gen_mtdcrx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
                         cpu_gpr[rS(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mfdcrux (PPC 460) : user-mode access to DCR */
static void gen_mfdcrux(DisasContext *ctx)
{
    gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
                        cpu_gpr[rA(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
}

/* mtdcrux (PPC 460) : user-mode access to DCR */
static void gen_mtdcrux(DisasContext *ctx)
{
    gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
                         cpu_gpr[rS(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
}

/* dccci */
static void gen_dccci(DisasContext *ctx)
{
    CHK_SV;
    /* interpreted as no-op */
}

/* dcread: data cache read; performs the load (so MMU faults are raised)
 * but returns the EA in rD rather than the loaded value */
static void gen_dcread(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv EA, val;

    CHK_SV;
    gen_set_access_type(ctx, ACCESS_CACHE);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    val = tcg_temp_new();
    /* Load is issued only for its side effects; value is discarded */
    gen_qemu_ld32u(ctx, val, EA);
    tcg_temp_free(val);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
    tcg_temp_free(EA);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* icbt */
static void gen_icbt_40x(DisasContext *ctx)
{
    /* interpreted as no-op */
    /* XXX: specification say this is treated as a load by the MMU
     *      but does not generate any exception
     */
}

/* iccci */
static void gen_iccci(DisasContext *ctx)
{
    CHK_SV;
    /* interpreted as no-op */
}

/* icread */
static void gen_icread(DisasContext *ctx)
{
    CHK_SV;
    /* interpreted as no-op */
}

/* rfci (supervisor only) */
static void gen_rfci_40x(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* Restore CPU state */
    gen_helper_40x_rfci(cpu_env);
    /* Interrupt state changed: the TB must end here */
    gen_sync_exception(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}

static void gen_rfci(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* Restore CPU state */
    gen_helper_rfci(cpu_env);
    gen_sync_exception(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* BookE specific */

/* XXX: not implemented on 440 ? */
static void gen_rfdi(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* Restore CPU state */
    gen_helper_rfdi(cpu_env);
    gen_sync_exception(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* XXX: not implemented on 440 ? */
static void gen_rfmci(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* Restore CPU state */
    gen_helper_rfmci(cpu_env);
    gen_sync_exception(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* TLB management - PowerPC 405 implementation */

/* tlbre: rB selects which word of the TLB entry to read (0 = hi, 1 = lo) */
static void gen_tlbre_40x(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    switch (rB(ctx->opcode)) {
    case 0:
        gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_env,
                                cpu_gpr[rA(ctx->opcode)]);
        break;
    case 1:
        gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_env,
                                cpu_gpr[rA(ctx->opcode)]);
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbsx - tlbsx.
 */
static void gen_tlbsx_40x(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
    if (Rc(ctx->opcode)) {
        /* CR0 = SO, plus EQ when rD != -1 (helper appears to return -1
         * on no match — TODO confirm helper contract) */
        TCGLabel *l1 = gen_new_label();
        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
        tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
        tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
        gen_set_label(l1);
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbwe: rB selects which word of the TLB entry to write (0 = hi, 1 = lo) */
static void gen_tlbwe_40x(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    switch (rB(ctx->opcode)) {
    case 0:
        gen_helper_4xx_tlbwe_hi(cpu_env, cpu_gpr[rA(ctx->opcode)],
                                cpu_gpr[rS(ctx->opcode)]);
        break;
    case 1:
        gen_helper_4xx_tlbwe_lo(cpu_env, cpu_gpr[rA(ctx->opcode)],
                                cpu_gpr[rS(ctx->opcode)]);
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* TLB management - PowerPC 440 implementation */

/* tlbre: rB (0..2) selects the entry word passed to the helper */
static void gen_tlbre_440(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    switch (rB(ctx->opcode)) {
    case 0:
    case 1:
    case 2:
        {
            TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
            gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], cpu_env,
                                 t0, cpu_gpr[rA(ctx->opcode)]);
            tcg_temp_free_i32(t0);
        }
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbsx - tlbsx. */
static void gen_tlbsx_440(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
    if (Rc(ctx->opcode)) {
        /* Same CR0 update scheme as gen_tlbsx_40x above */
        TCGLabel *l1 = gen_new_label();
        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
        tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
        tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
        gen_set_label(l1);
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbwe */
static void gen_tlbwe_440(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    switch (rB(ctx->opcode)) {
    case 0:
    case 1:
    case 2:
        {
            TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
            gen_helper_440_tlbwe(cpu_env, t0, cpu_gpr[rA(ctx->opcode)],
                                 cpu_gpr[rS(ctx->opcode)]);
            tcg_temp_free_i32(t0);
        }
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* TLB management - PowerPC BookE 2.06 implementation */

/* tlbre: all operands come from MAS registers, hence no GPR arguments */
static void gen_tlbre_booke206(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_helper_booke206_tlbre(cpu_env);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbsx - tlbsx.
 */
static void gen_tlbsx_booke206(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    /* EA = (rA ? base : 0) + rB.
     * NOTE(review): the base is read from rD, not rA — preserved as-is;
     * confirm against the e500/BookE 2.06 tlbsx definition. */
    if (rA(ctx->opcode)) {
        t0 = tcg_temp_new();
        tcg_gen_mov_tl(t0, cpu_gpr[rD(ctx->opcode)]);
    } else {
        t0 = tcg_const_tl(0);
    }

    tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]);
    gen_helper_booke206_tlbsx(cpu_env, t0);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbwe: operands come from MAS registers */
static void gen_tlbwe_booke206(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_helper_booke206_tlbwe(cpu_env);
#endif /* defined(CONFIG_USER_ONLY) */
}

static void gen_tlbivax_booke206(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_booke206_tlbivax(cpu_env, t0);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbilx: opcode bits [21:22] select the invalidation scope helper */
static void gen_tlbilx_booke206(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);

    switch((ctx->opcode >> 21) & 0x3) {
    case 0:
        gen_helper_booke206_tlbilx0(cpu_env, t0);
        break;
    case 1:
        gen_helper_booke206_tlbilx1(cpu_env, t0);
        break;
    case 3:
        gen_helper_booke206_tlbilx3(cpu_env, t0);
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }

    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}


/* wrtee: copy rD's MSR_EE bit into MSR */
static void gen_wrtee(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE));
    tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
    tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
    tcg_temp_free(t0);
    /* Stop translation to have a chance to raise an exception
     * if we just set msr_ee to 1
     */
    gen_stop_exception(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* wrteei: set or clear MSR_EE from an immediate opcode bit */
static void gen_wrteei(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    if (ctx->opcode & 0x00008000) {
        tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE));
        /* Stop translation to have a chance to raise an exception */
        gen_stop_exception(ctx);
    } else {
        tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* PowerPC 440 specific instructions */

/* dlmzb: determine leftmost zero byte, done entirely in the helper;
 * the Rc flag is passed so the helper can update CR */
static void gen_dlmzb(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode));
    gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_env,
                     cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
    tcg_temp_free_i32(t0);
}

/* mbar replaces eieio on 440 */
static void gen_mbar(DisasContext *ctx)
{
    /* interpreted as no-op */
}

/* msync replaces sync on 440 */
static void gen_msync_4xx(DisasContext *ctx)
{
    /* interpreted as no-op */
}

/* icbt */
static void gen_icbt_440(DisasContext *ctx)
{
    /* interpreted as no-op */
    /* XXX: specification say this is treated as a load by the MMU
     *      but does not generate any exception
     */
}

/* Embedded.Processor Control */

/* msgclr: clear a doorbell message (hypervisor privileged) */
static void gen_msgclr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_HV;
    /* 64-bit server processors compliant with arch 2.x */
    if (ctx->insns_flags & PPC_SEGMENT_64B) {
        gen_helper_book3s_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]);
    } else {
        gen_helper_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]);
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* msgsnd: send a doorbell message (hypervisor privileged) */
static void gen_msgsnd(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_HV;
    /* 64-bit server processors compliant with arch 2.x */
    if (ctx->insns_flags & PPC_SEGMENT_64B) {
        gen_helper_book3s_msgsnd(cpu_gpr[rB(ctx->opcode)]);
    } else {
        gen_helper_msgsnd(cpu_gpr[rB(ctx->opcode)]);
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

static void gen_msgsync(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_HV;
#endif /* defined(CONFIG_USER_ONLY) */
    /* interpreted as no-op */
}

#if defined(TARGET_PPC64)
/* maddld: rD = low 64 bits of rA * rB + rC */
static void gen_maddld(DisasContext *ctx)
{
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_mul_i64(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_add_i64(cpu_gpr[rD(ctx->opcode)], t1, cpu_gpr[rC(ctx->opcode)]);
    tcg_temp_free_i64(t1);
}

/* maddhd maddhdu */
static void gen_maddhd_maddhdu(DisasContext *ctx)
{
    TCGv_i64 lo = tcg_temp_new_i64();
    TCGv_i64 hi = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    /* Rc bit distinguishes maddhdu (unsigned) from maddhd (signed) */
    if (Rc(ctx->opcode)) {
        tcg_gen_mulu2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)],
                          cpu_gpr[rB(ctx->opcode)]);
        tcg_gen_movi_i64(t1, 0);
    } else {
        tcg_gen_muls2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)],
                          cpu_gpr[rB(ctx->opcode)]);
        /* t1 = sign extension of rC for the 128-bit addend */
        tcg_gen_sari_i64(t1, cpu_gpr[rC(ctx->opcode)], 63);
    }
    /* rD = high half of (hi:lo + t1:rC) */
    tcg_gen_add2_i64(t1, cpu_gpr[rD(ctx->opcode)], lo, hi,
                     cpu_gpr[rC(ctx->opcode)], t1);
    tcg_temp_free_i64(lo);
    tcg_temp_free_i64(hi);
    tcg_temp_free_i64(t1);
}
#endif /* defined(TARGET_PPC64) */

static void gen_tbegin(DisasContext *ctx)
{
    if (unlikely(!ctx->tm_enabled)) {
        gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
        return;
6469 } 6470 gen_helper_tbegin(cpu_env); 6471 } 6472 6473 #define GEN_TM_NOOP(name) \ 6474 static inline void gen_##name(DisasContext *ctx) \ 6475 { \ 6476 if (unlikely(!ctx->tm_enabled)) { \ 6477 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \ 6478 return; \ 6479 } \ 6480 /* Because tbegin always fails in QEMU, these user \ 6481 * space instructions all have a simple implementation: \ 6482 * \ 6483 * CR[0] = 0b0 || MSR[TS] || 0b0 \ 6484 * = 0b0 || 0b00 || 0b0 \ 6485 */ \ 6486 tcg_gen_movi_i32(cpu_crf[0], 0); \ 6487 } 6488 6489 GEN_TM_NOOP(tend); 6490 GEN_TM_NOOP(tabort); 6491 GEN_TM_NOOP(tabortwc); 6492 GEN_TM_NOOP(tabortwci); 6493 GEN_TM_NOOP(tabortdc); 6494 GEN_TM_NOOP(tabortdci); 6495 GEN_TM_NOOP(tsr); 6496 static inline void gen_cp_abort(DisasContext *ctx) 6497 { 6498 // Do Nothing 6499 } 6500 6501 #define GEN_CP_PASTE_NOOP(name) \ 6502 static inline void gen_##name(DisasContext *ctx) \ 6503 { \ 6504 /* Generate invalid exception until \ 6505 * we have an implementation of the copy \ 6506 * paste facility \ 6507 */ \ 6508 gen_invalid(ctx); \ 6509 } 6510 6511 GEN_CP_PASTE_NOOP(copy) 6512 GEN_CP_PASTE_NOOP(paste) 6513 6514 static void gen_tcheck(DisasContext *ctx) 6515 { 6516 if (unlikely(!ctx->tm_enabled)) { 6517 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); 6518 return; 6519 } 6520 /* Because tbegin always fails, the tcheck implementation 6521 * is simple: 6522 * 6523 * CR[CRF] = TDOOMED || MSR[TS] || 0b0 6524 * = 0b1 || 0b00 || 0b0 6525 */ 6526 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0x8); 6527 } 6528 6529 #if defined(CONFIG_USER_ONLY) 6530 #define GEN_TM_PRIV_NOOP(name) \ 6531 static inline void gen_##name(DisasContext *ctx) \ 6532 { \ 6533 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); \ 6534 } 6535 6536 #else 6537 6538 #define GEN_TM_PRIV_NOOP(name) \ 6539 static inline void gen_##name(DisasContext *ctx) \ 6540 { \ 6541 CHK_SV; \ 6542 if (unlikely(!ctx->tm_enabled)) { \ 6543 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \ 6544 
return; \ 6545 } \ 6546 /* Because tbegin always fails, the implementation is \ 6547 * simple: \ 6548 * \ 6549 * CR[0] = 0b0 || MSR[TS] || 0b0 \ 6550 * = 0b0 || 0b00 | 0b0 \ 6551 */ \ 6552 tcg_gen_movi_i32(cpu_crf[0], 0); \ 6553 } 6554 6555 #endif 6556 6557 GEN_TM_PRIV_NOOP(treclaim); 6558 GEN_TM_PRIV_NOOP(trechkpt); 6559 6560 #include "translate/fp-impl.inc.c" 6561 6562 #include "translate/vmx-impl.inc.c" 6563 6564 #include "translate/vsx-impl.inc.c" 6565 6566 #include "translate/dfp-impl.inc.c" 6567 6568 #include "translate/spe-impl.inc.c" 6569 6570 /* Handles lfdp, lxsd, lxssp */ 6571 static void gen_dform39(DisasContext *ctx) 6572 { 6573 switch (ctx->opcode & 0x3) { 6574 case 0: /* lfdp */ 6575 if (ctx->insns_flags2 & PPC2_ISA205) { 6576 return gen_lfdp(ctx); 6577 } 6578 break; 6579 case 2: /* lxsd */ 6580 if (ctx->insns_flags2 & PPC2_ISA300) { 6581 return gen_lxsd(ctx); 6582 } 6583 break; 6584 case 3: /* lxssp */ 6585 if (ctx->insns_flags2 & PPC2_ISA300) { 6586 return gen_lxssp(ctx); 6587 } 6588 break; 6589 } 6590 return gen_invalid(ctx); 6591 } 6592 6593 /* handles stfdp, lxv, stxsd, stxssp lxvx */ 6594 static void gen_dform3D(DisasContext *ctx) 6595 { 6596 if ((ctx->opcode & 3) == 1) { /* DQ-FORM */ 6597 switch (ctx->opcode & 0x7) { 6598 case 1: /* lxv */ 6599 if (ctx->insns_flags2 & PPC2_ISA300) { 6600 return gen_lxv(ctx); 6601 } 6602 break; 6603 case 5: /* stxv */ 6604 if (ctx->insns_flags2 & PPC2_ISA300) { 6605 return gen_stxv(ctx); 6606 } 6607 break; 6608 } 6609 } else { /* DS-FORM */ 6610 switch (ctx->opcode & 0x3) { 6611 case 0: /* stfdp */ 6612 if (ctx->insns_flags2 & PPC2_ISA205) { 6613 return gen_stfdp(ctx); 6614 } 6615 break; 6616 case 2: /* stxsd */ 6617 if (ctx->insns_flags2 & PPC2_ISA300) { 6618 return gen_stxsd(ctx); 6619 } 6620 break; 6621 case 3: /* stxssp */ 6622 if (ctx->insns_flags2 & PPC2_ISA300) { 6623 return gen_stxssp(ctx); 6624 } 6625 break; 6626 } 6627 } 6628 return gen_invalid(ctx); 6629 } 6630 6631 static opcode_t opcodes[] = { 
6632 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE), 6633 GEN_HANDLER(cmp, 0x1F, 0x00, 0x00, 0x00400000, PPC_INTEGER), 6634 GEN_HANDLER(cmpi, 0x0B, 0xFF, 0xFF, 0x00400000, PPC_INTEGER), 6635 GEN_HANDLER(cmpl, 0x1F, 0x00, 0x01, 0x00400001, PPC_INTEGER), 6636 GEN_HANDLER(cmpli, 0x0A, 0xFF, 0xFF, 0x00400000, PPC_INTEGER), 6637 #if defined(TARGET_PPC64) 6638 GEN_HANDLER_E(cmpeqb, 0x1F, 0x00, 0x07, 0x00600000, PPC_NONE, PPC2_ISA300), 6639 #endif 6640 GEN_HANDLER_E(cmpb, 0x1F, 0x1C, 0x0F, 0x00000001, PPC_NONE, PPC2_ISA205), 6641 GEN_HANDLER_E(cmprb, 0x1F, 0x00, 0x06, 0x00400001, PPC_NONE, PPC2_ISA300), 6642 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL), 6643 GEN_HANDLER(addi, 0x0E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6644 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6645 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6646 GEN_HANDLER(addis, 0x0F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6647 GEN_HANDLER_E(addpcis, 0x13, 0x2, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300), 6648 GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER), 6649 GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER), 6650 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER), 6651 GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER), 6652 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6653 #if defined(TARGET_PPC64) 6654 GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B), 6655 #endif 6656 GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER), 6657 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER), 6658 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6659 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6660 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6661 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER), 6662 GEN_HANDLER_E(cnttzw, 0x1F, 0x1A, 0x10, 0x00000000, PPC_NONE, 
PPC2_ISA300), 6663 GEN_HANDLER_E(copy, 0x1F, 0x06, 0x18, 0x03C00001, PPC_NONE, PPC2_ISA300), 6664 GEN_HANDLER_E(cp_abort, 0x1F, 0x06, 0x1A, 0x03FFF801, PPC_NONE, PPC2_ISA300), 6665 GEN_HANDLER_E(paste, 0x1F, 0x06, 0x1C, 0x03C00000, PPC_NONE, PPC2_ISA300), 6666 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER), 6667 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER), 6668 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6669 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6670 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6671 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6672 GEN_HANDLER(popcntb, 0x1F, 0x1A, 0x03, 0x0000F801, PPC_POPCNTB), 6673 GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD), 6674 GEN_HANDLER_E(prtyw, 0x1F, 0x1A, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA205), 6675 #if defined(TARGET_PPC64) 6676 GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD), 6677 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B), 6678 GEN_HANDLER_E(cnttzd, 0x1F, 0x1A, 0x11, 0x00000000, PPC_NONE, PPC2_ISA300), 6679 GEN_HANDLER_E(darn, 0x1F, 0x13, 0x17, 0x001CF801, PPC_NONE, PPC2_ISA300), 6680 GEN_HANDLER_E(prtyd, 0x1F, 0x1A, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA205), 6681 GEN_HANDLER_E(bpermd, 0x1F, 0x1C, 0x07, 0x00000001, PPC_NONE, PPC2_PERM_ISA206), 6682 #endif 6683 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6684 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6685 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6686 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER), 6687 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER), 6688 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER), 6689 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER), 6690 #if defined(TARGET_PPC64) 6691 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B), 6692 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, 
PPC_64B), 6693 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B), 6694 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B), 6695 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B), 6696 GEN_HANDLER2_E(extswsli0, "extswsli", 0x1F, 0x1A, 0x1B, 0x00000000, 6697 PPC_NONE, PPC2_ISA300), 6698 GEN_HANDLER2_E(extswsli1, "extswsli", 0x1F, 0x1B, 0x1B, 0x00000000, 6699 PPC_NONE, PPC2_ISA300), 6700 #endif 6701 #if defined(TARGET_PPC64) 6702 GEN_HANDLER(ld, 0x3A, 0xFF, 0xFF, 0x00000000, PPC_64B), 6703 GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX), 6704 GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B), 6705 #endif 6706 /* handles lfdp, lxsd, lxssp */ 6707 GEN_HANDLER_E(dform39, 0x39, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205), 6708 /* handles stfdp, lxv, stxsd, stxssp, stxv */ 6709 GEN_HANDLER_E(dform3D, 0x3D, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205), 6710 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6711 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6712 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING), 6713 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING), 6714 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING), 6715 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING), 6716 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x01FFF801, PPC_MEM_EIEIO), 6717 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM), 6718 GEN_HANDLER_E(lbarx, 0x1F, 0x14, 0x01, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6719 GEN_HANDLER_E(lharx, 0x1F, 0x14, 0x03, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6720 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES), 6721 GEN_HANDLER_E(lwat, 0x1F, 0x06, 0x12, 0x00000001, PPC_NONE, PPC2_ISA300), 6722 GEN_HANDLER_E(stwat, 0x1F, 0x06, 0x16, 0x00000001, PPC_NONE, PPC2_ISA300), 6723 GEN_HANDLER_E(stbcx_, 0x1F, 0x16, 0x15, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6724 GEN_HANDLER_E(sthcx_, 0x1F, 0x16, 0x16, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6725 
GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES), 6726 #if defined(TARGET_PPC64) 6727 GEN_HANDLER_E(ldat, 0x1F, 0x06, 0x13, 0x00000001, PPC_NONE, PPC2_ISA300), 6728 GEN_HANDLER_E(stdat, 0x1F, 0x06, 0x17, 0x00000001, PPC_NONE, PPC2_ISA300), 6729 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B), 6730 GEN_HANDLER_E(lqarx, 0x1F, 0x14, 0x08, 0, PPC_NONE, PPC2_LSQ_ISA207), 6731 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B), 6732 GEN_HANDLER_E(stqcx_, 0x1F, 0x16, 0x05, 0, PPC_NONE, PPC2_LSQ_ISA207), 6733 #endif 6734 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC), 6735 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT), 6736 GEN_HANDLER_E(wait, 0x1F, 0x1E, 0x00, 0x039FF801, PPC_NONE, PPC2_ISA300), 6737 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 6738 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 6739 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW), 6740 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW), 6741 GEN_HANDLER_E(bctar, 0x13, 0x10, 0x11, 0x0000E000, PPC_NONE, PPC2_BCTAR_ISA207), 6742 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER), 6743 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW), 6744 #if defined(TARGET_PPC64) 6745 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B), 6746 GEN_HANDLER_E(stop, 0x13, 0x12, 0x0b, 0x03FFF801, PPC_NONE, PPC2_ISA300), 6747 GEN_HANDLER_E(doze, 0x13, 0x12, 0x0c, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6748 GEN_HANDLER_E(nap, 0x13, 0x12, 0x0d, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6749 GEN_HANDLER_E(sleep, 0x13, 0x12, 0x0e, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6750 GEN_HANDLER_E(rvwinkle, 0x13, 0x12, 0x0f, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6751 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H), 6752 #endif 6753 GEN_HANDLER(sc, 0x11, 0xFF, 0xFF, 0x03FFF01D, PPC_FLOW), 6754 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW), 6755 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, 
PPC_FLOW), 6756 #if defined(TARGET_PPC64) 6757 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B), 6758 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B), 6759 #endif 6760 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC), 6761 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC), 6762 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC), 6763 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC), 6764 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB), 6765 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC), 6766 #if defined(TARGET_PPC64) 6767 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B), 6768 GEN_HANDLER_E(setb, 0x1F, 0x00, 0x04, 0x0003F801, PPC_NONE, PPC2_ISA300), 6769 GEN_HANDLER_E(mcrxrx, 0x1F, 0x00, 0x12, 0x007FF801, PPC_NONE, PPC2_ISA300), 6770 #endif 6771 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001EF801, PPC_MISC), 6772 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000000, PPC_MISC), 6773 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE), 6774 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE), 6775 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE), 6776 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x00000001, PPC_CACHE), 6777 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x00000001, PPC_CACHE), 6778 GEN_HANDLER_E(dcbtls, 0x1F, 0x06, 0x05, 0x02000001, PPC_BOOKE, PPC2_BOOKE206), 6779 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ), 6780 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC), 6781 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x01800001, PPC_ALTIVEC), 6782 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC), 6783 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI), 6784 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA), 6785 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT), 6786 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT), 6787 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT), 6788 
GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT), 6789 #if defined(TARGET_PPC64) 6790 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B), 6791 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001, 6792 PPC_SEGMENT_64B), 6793 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B), 6794 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001, 6795 PPC_SEGMENT_64B), 6796 GEN_HANDLER2(slbmte, "slbmte", 0x1F, 0x12, 0x0C, 0x001F0001, PPC_SEGMENT_64B), 6797 GEN_HANDLER2(slbmfee, "slbmfee", 0x1F, 0x13, 0x1C, 0x001F0001, PPC_SEGMENT_64B), 6798 GEN_HANDLER2(slbmfev, "slbmfev", 0x1F, 0x13, 0x1A, 0x001F0001, PPC_SEGMENT_64B), 6799 GEN_HANDLER2(slbfee_, "slbfee.", 0x1F, 0x13, 0x1E, 0x001F0000, PPC_SEGMENT_64B), 6800 #endif 6801 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA), 6802 /* XXX Those instructions will need to be handled differently for 6803 * different ISA versions */ 6804 GEN_HANDLER(tlbiel, 0x1F, 0x12, 0x08, 0x001F0001, PPC_MEM_TLBIE), 6805 GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x001F0001, PPC_MEM_TLBIE), 6806 GEN_HANDLER_E(tlbiel, 0x1F, 0x12, 0x08, 0x00100001, PPC_NONE, PPC2_ISA300), 6807 GEN_HANDLER_E(tlbie, 0x1F, 0x12, 0x09, 0x00100001, PPC_NONE, PPC2_ISA300), 6808 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC), 6809 #if defined(TARGET_PPC64) 6810 GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x031FFC01, PPC_SLBI), 6811 GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI), 6812 GEN_HANDLER_E(slbieg, 0x1F, 0x12, 0x0E, 0x001F0001, PPC_NONE, PPC2_ISA300), 6813 GEN_HANDLER_E(slbsync, 0x1F, 0x12, 0x0A, 0x03FFF801, PPC_NONE, PPC2_ISA300), 6814 #endif 6815 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN), 6816 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN), 6817 GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR), 6818 GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR), 6819 GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 
0x0000F800, PPC_POWER_BR), 6820 GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR), 6821 GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR), 6822 GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR), 6823 GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR), 6824 GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR), 6825 GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR), 6826 GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR), 6827 GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR), 6828 GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR), 6829 GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR), 6830 GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR), 6831 GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR), 6832 GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR), 6833 GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR), 6834 GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR), 6835 GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR), 6836 GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR), 6837 GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR), 6838 GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR), 6839 GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR), 6840 GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR), 6841 GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR), 6842 GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR), 6843 GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR), 6844 GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR), 6845 GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR), 6846 GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR), 6847 GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR), 6848 GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, 
PPC_POWER_BR),
GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC),
GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC),
GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC),
GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB),
GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB),
GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB),
GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB),
GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER),
GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER),
GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER),
GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER),
GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER),
/* NOTE(review): the inval mask below has nine hex digits (0x03FFF0001,
 * i.e. 0x3FFF0001) -- out of pattern with the sibling return-from-
 * interrupt masks (rfi uses 0x03FF8001).  Presumably a typo for
 * 0x03FFF001; confirm against the POWER rfsvc encoding before fixing. */
GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER),
GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2),
GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2),
GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2),
GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2),
GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI),
GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA),
GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR),
GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR),
GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX),
GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000,
PPC_DCRX), 6878 GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX), 6879 GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX), 6880 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON), 6881 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON), 6882 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT), 6883 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON), 6884 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON), 6885 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP), 6886 GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206), 6887 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI), 6888 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI), 6889 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB), 6890 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB), 6891 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB), 6892 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE), 6893 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE), 6894 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE), 6895 GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, 6896 PPC_NONE, PPC2_BOOKE206), 6897 GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, 6898 PPC_NONE, PPC2_BOOKE206), 6899 GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, 6900 PPC_NONE, PPC2_BOOKE206), 6901 GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001, 6902 PPC_NONE, PPC2_BOOKE206), 6903 GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001, 6904 PPC_NONE, PPC2_BOOKE206), 6905 GEN_HANDLER2_E(msgsnd, "msgsnd", 0x1F, 0x0E, 0x06, 0x03ff0001, 6906 PPC_NONE, PPC2_PRCNTL), 6907 GEN_HANDLER2_E(msgclr, "msgclr", 0x1F, 0x0E, 0x07, 0x03ff0001, 6908 
PPC_NONE, PPC2_PRCNTL), 6909 GEN_HANDLER2_E(msgsync, "msgsync", 0x1F, 0x16, 0x1B, 0x00000000, 6910 PPC_NONE, PPC2_PRCNTL), 6911 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE), 6912 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE), 6913 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC), 6914 GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801, 6915 PPC_BOOKE, PPC2_BOOKE206), 6916 GEN_HANDLER(msync_4xx, 0x1F, 0x16, 0x12, 0x03FFF801, PPC_BOOKE), 6917 GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001, 6918 PPC_BOOKE, PPC2_BOOKE206), 6919 GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, 6920 PPC_440_SPEC), 6921 GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC), 6922 GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC), 6923 GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC), 6924 GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC), 6925 GEN_HANDLER(vmladduhm, 0x04, 0x11, 0xFF, 0x00000000, PPC_ALTIVEC), 6926 #if defined(TARGET_PPC64) 6927 GEN_HANDLER_E(maddhd_maddhdu, 0x04, 0x18, 0xFF, 0x00000000, PPC_NONE, 6928 PPC2_ISA300), 6929 GEN_HANDLER_E(maddld, 0x04, 0x19, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300), 6930 #endif 6931 6932 #undef GEN_INT_ARITH_ADD 6933 #undef GEN_INT_ARITH_ADD_CONST 6934 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \ 6935 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER), 6936 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \ 6937 add_ca, compute_ca, compute_ov) \ 6938 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER), 6939 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0) 6940 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1) 6941 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0) 6942 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1) 6943 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0) 6944 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1) 6945 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0) 6946 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1) 6947 
GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0) 6948 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1) 6949 6950 #undef GEN_INT_ARITH_DIVW 6951 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \ 6952 GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER) 6953 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0), 6954 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1), 6955 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0), 6956 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1), 6957 GEN_HANDLER_E(divwe, 0x1F, 0x0B, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206), 6958 GEN_HANDLER_E(divweo, 0x1F, 0x0B, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206), 6959 GEN_HANDLER_E(divweu, 0x1F, 0x0B, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206), 6960 GEN_HANDLER_E(divweuo, 0x1F, 0x0B, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206), 6961 GEN_HANDLER_E(modsw, 0x1F, 0x0B, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300), 6962 GEN_HANDLER_E(moduw, 0x1F, 0x0B, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300), 6963 6964 #if defined(TARGET_PPC64) 6965 #undef GEN_INT_ARITH_DIVD 6966 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \ 6967 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B) 6968 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0), 6969 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1), 6970 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0), 6971 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1), 6972 6973 GEN_HANDLER_E(divdeu, 0x1F, 0x09, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206), 6974 GEN_HANDLER_E(divdeuo, 0x1F, 0x09, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206), 6975 GEN_HANDLER_E(divde, 0x1F, 0x09, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206), 6976 GEN_HANDLER_E(divdeo, 0x1F, 0x09, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206), 6977 GEN_HANDLER_E(modsd, 0x1F, 0x09, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300), 6978 GEN_HANDLER_E(modud, 0x1F, 0x09, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300), 6979 6980 #undef GEN_INT_ARITH_MUL_HELPER 6981 #define GEN_INT_ARITH_MUL_HELPER(name, opc3) \ 6982 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B) 6983 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00), 6984 GEN_INT_ARITH_MUL_HELPER(mulhd, 
0x02), 6985 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17), 6986 #endif 6987 6988 #undef GEN_INT_ARITH_SUBF 6989 #undef GEN_INT_ARITH_SUBF_CONST 6990 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \ 6991 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER), 6992 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \ 6993 add_ca, compute_ca, compute_ov) \ 6994 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER), 6995 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0) 6996 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1) 6997 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0) 6998 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1) 6999 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0) 7000 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1) 7001 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0) 7002 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1) 7003 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0) 7004 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1) 7005 7006 #undef GEN_LOGICAL1 7007 #undef GEN_LOGICAL2 7008 #define GEN_LOGICAL2(name, tcg_op, opc, type) \ 7009 GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type) 7010 #define GEN_LOGICAL1(name, tcg_op, opc, type) \ 7011 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type) 7012 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER), 7013 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER), 7014 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER), 7015 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER), 7016 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER), 7017 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER), 7018 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER), 7019 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER), 7020 #if defined(TARGET_PPC64) 7021 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B), 7022 #endif 7023 7024 #if defined(TARGET_PPC64) 7025 #undef GEN_PPC64_R2 7026 #undef GEN_PPC64_R4 7027 #define GEN_PPC64_R2(name, opc1, opc2) \ 7028 GEN_HANDLER2(name##0, 
stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\ 7029 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \ 7030 PPC_64B) 7031 #define GEN_PPC64_R4(name, opc1, opc2) \ 7032 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\ 7033 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \ 7034 PPC_64B), \ 7035 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \ 7036 PPC_64B), \ 7037 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \ 7038 PPC_64B) 7039 GEN_PPC64_R4(rldicl, 0x1E, 0x00), 7040 GEN_PPC64_R4(rldicr, 0x1E, 0x02), 7041 GEN_PPC64_R4(rldic, 0x1E, 0x04), 7042 GEN_PPC64_R2(rldcl, 0x1E, 0x08), 7043 GEN_PPC64_R2(rldcr, 0x1E, 0x09), 7044 GEN_PPC64_R4(rldimi, 0x1E, 0x06), 7045 #endif 7046 7047 #undef GEN_LD 7048 #undef GEN_LDU 7049 #undef GEN_LDUX 7050 #undef GEN_LDX_E 7051 #undef GEN_LDS 7052 #define GEN_LD(name, ldop, opc, type) \ 7053 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type), 7054 #define GEN_LDU(name, ldop, opc, type) \ 7055 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type), 7056 #define GEN_LDUX(name, ldop, opc2, opc3, type) \ 7057 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type), 7058 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \ 7059 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2), 7060 #define GEN_LDS(name, ldop, op, type) \ 7061 GEN_LD(name, ldop, op | 0x20, type) \ 7062 GEN_LDU(name, ldop, op | 0x21, type) \ 7063 GEN_LDUX(name, ldop, 0x17, op | 0x01, type) \ 7064 GEN_LDX(name, ldop, 0x17, op | 0x00, type) 7065 7066 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER) 7067 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER) 7068 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER) 7069 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER) 7070 #if defined(TARGET_PPC64) 7071 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B) 7072 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B) 7073 GEN_LDUX(ld, ld64_i64, 0x15, 0x01, PPC_64B) 7074 
GEN_LDX(ld, ld64_i64, 0x15, 0x00, PPC_64B) 7075 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE) 7076 7077 /* HV/P7 and later only */ 7078 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST) 7079 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x18, PPC_CILDST) 7080 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST) 7081 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST) 7082 #endif 7083 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER) 7084 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER) 7085 7086 #undef GEN_ST 7087 #undef GEN_STU 7088 #undef GEN_STUX 7089 #undef GEN_STX_E 7090 #undef GEN_STS 7091 #define GEN_ST(name, stop, opc, type) \ 7092 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type), 7093 #define GEN_STU(name, stop, opc, type) \ 7094 GEN_HANDLER(stop##u, opc, 0xFF, 0xFF, 0x00000000, type), 7095 #define GEN_STUX(name, stop, opc2, opc3, type) \ 7096 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type), 7097 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \ 7098 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000000, type, type2), 7099 #define GEN_STS(name, stop, op, type) \ 7100 GEN_ST(name, stop, op | 0x20, type) \ 7101 GEN_STU(name, stop, op | 0x21, type) \ 7102 GEN_STUX(name, stop, 0x17, op | 0x01, type) \ 7103 GEN_STX(name, stop, 0x17, op | 0x00, type) 7104 7105 GEN_STS(stb, st8, 0x06, PPC_INTEGER) 7106 GEN_STS(sth, st16, 0x0C, PPC_INTEGER) 7107 GEN_STS(stw, st32, 0x04, PPC_INTEGER) 7108 #if defined(TARGET_PPC64) 7109 GEN_STUX(std, st64_i64, 0x15, 0x05, PPC_64B) 7110 GEN_STX(std, st64_i64, 0x15, 0x04, PPC_64B) 7111 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE) 7112 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST) 7113 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST) 7114 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST) 7115 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST) 7116 #endif 7117 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER) 7118 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER) 7119 7120 
#undef GEN_CRLOGIC 7121 #define GEN_CRLOGIC(name, tcg_op, opc) \ 7122 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER) 7123 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08), 7124 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04), 7125 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09), 7126 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07), 7127 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01), 7128 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E), 7129 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D), 7130 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06), 7131 7132 #undef GEN_MAC_HANDLER 7133 #define GEN_MAC_HANDLER(name, opc2, opc3) \ 7134 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC) 7135 GEN_MAC_HANDLER(macchw, 0x0C, 0x05), 7136 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15), 7137 GEN_MAC_HANDLER(macchws, 0x0C, 0x07), 7138 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17), 7139 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06), 7140 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16), 7141 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04), 7142 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14), 7143 GEN_MAC_HANDLER(machhw, 0x0C, 0x01), 7144 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11), 7145 GEN_MAC_HANDLER(machhws, 0x0C, 0x03), 7146 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13), 7147 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02), 7148 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12), 7149 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00), 7150 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10), 7151 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D), 7152 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D), 7153 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F), 7154 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F), 7155 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C), 7156 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C), 7157 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E), 7158 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E), 7159 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05), 7160 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15), 7161 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07), 7162 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17), 7163 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01), 7164 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11), 7165 
GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03), 7166 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13), 7167 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D), 7168 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D), 7169 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F), 7170 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F), 7171 GEN_MAC_HANDLER(mulchw, 0x08, 0x05), 7172 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04), 7173 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01), 7174 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00), 7175 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D), 7176 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C), 7177 7178 GEN_HANDLER2_E(tbegin, "tbegin", 0x1F, 0x0E, 0x14, 0x01DFF800, \ 7179 PPC_NONE, PPC2_TM), 7180 GEN_HANDLER2_E(tend, "tend", 0x1F, 0x0E, 0x15, 0x01FFF800, \ 7181 PPC_NONE, PPC2_TM), 7182 GEN_HANDLER2_E(tabort, "tabort", 0x1F, 0x0E, 0x1C, 0x03E0F800, \ 7183 PPC_NONE, PPC2_TM), 7184 GEN_HANDLER2_E(tabortwc, "tabortwc", 0x1F, 0x0E, 0x18, 0x00000000, \ 7185 PPC_NONE, PPC2_TM), 7186 GEN_HANDLER2_E(tabortwci, "tabortwci", 0x1F, 0x0E, 0x1A, 0x00000000, \ 7187 PPC_NONE, PPC2_TM), 7188 GEN_HANDLER2_E(tabortdc, "tabortdc", 0x1F, 0x0E, 0x19, 0x00000000, \ 7189 PPC_NONE, PPC2_TM), 7190 GEN_HANDLER2_E(tabortdci, "tabortdci", 0x1F, 0x0E, 0x1B, 0x00000000, \ 7191 PPC_NONE, PPC2_TM), 7192 GEN_HANDLER2_E(tsr, "tsr", 0x1F, 0x0E, 0x17, 0x03DFF800, \ 7193 PPC_NONE, PPC2_TM), 7194 GEN_HANDLER2_E(tcheck, "tcheck", 0x1F, 0x0E, 0x16, 0x007FF800, \ 7195 PPC_NONE, PPC2_TM), 7196 GEN_HANDLER2_E(treclaim, "treclaim", 0x1F, 0x0E, 0x1D, 0x03E0F800, \ 7197 PPC_NONE, PPC2_TM), 7198 GEN_HANDLER2_E(trechkpt, "trechkpt", 0x1F, 0x0E, 0x1F, 0x03FFF800, \ 7199 PPC_NONE, PPC2_TM), 7200 7201 #include "translate/fp-ops.inc.c" 7202 7203 #include "translate/vmx-ops.inc.c" 7204 7205 #include "translate/vsx-ops.inc.c" 7206 7207 #include "translate/dfp-ops.inc.c" 7208 7209 #include "translate/spe-ops.inc.c" 7210 }; 7211 7212 #include "helper_regs.h" 7213 #include "translate_init.inc.c" 7214 7215 /*****************************************************************************/ 7216 /* Misc PowerPC 
helpers */
/*
 * Dump the architected register state of a PowerPC CPU (NIP/LR/CTR/XER,
 * MSR, GPRs, CR, optionally FPRs, and the key SPRs for the current
 * exception/MMU model) to @f.  Used by the monitor "info registers"
 * command and -d cpu logging.
 */
void ppc_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf,
                        int flags)
{
/* Registers printed per output row: 4 GPRs / 4 FPRs */
#define RGPL 4
#define RFPL 4

    PowerPCCPU *cpu = POWERPC_CPU(cs);
    CPUPPCState *env = &cpu->env;
    int i;

    cpu_fprintf(f, "NIP " TARGET_FMT_lx " LR " TARGET_FMT_lx " CTR "
                TARGET_FMT_lx " XER " TARGET_FMT_lx " CPU#%d\n",
                env->nip, env->lr, env->ctr, cpu_read_xer(env),
                cs->cpu_index);
    cpu_fprintf(f, "MSR " TARGET_FMT_lx " HID0 " TARGET_FMT_lx " HF "
                TARGET_FMT_lx " iidx %d didx %d\n",
                env->msr, env->spr[SPR_HID0],
                env->hflags, env->immu_idx, env->dmmu_idx);
#if !defined(NO_TIMER_DUMP)
    /* Time base -- and the decrementer when not in user-only emulation */
    cpu_fprintf(f, "TB %08" PRIu32 " %08" PRIu64
#if !defined(CONFIG_USER_ONLY)
                " DECR %08" PRIu32
#endif
                "\n",
                cpu_ppc_load_tbu(env), cpu_ppc_load_tbl(env)
#if !defined(CONFIG_USER_ONLY)
                , cpu_ppc_load_decr(env)
#endif
                );
#endif
    /* General purpose registers, RGPL per row */
    for (i = 0; i < 32; i++) {
        if ((i & (RGPL - 1)) == 0)
            cpu_fprintf(f, "GPR%02d", i);
        cpu_fprintf(f, " %016" PRIx64, ppc_dump_gpr(env, i));
        if ((i & (RGPL - 1)) == (RGPL - 1))
            cpu_fprintf(f, "\n");
    }
    /* Condition register: first the raw nibble of each CR field... */
    cpu_fprintf(f, "CR ");
    for (i = 0; i < 8; i++)
        cpu_fprintf(f, "%01x", env->crf[i]);
    cpu_fprintf(f, " [");
    /* ...then decoded as L(ess)/G(reater)/E(qual) plus summary Overflow */
    for (i = 0; i < 8; i++) {
        char a = '-';
        if (env->crf[i] & 0x08)
            a = 'L';
        else if (env->crf[i] & 0x04)
            a = 'G';
        else if (env->crf[i] & 0x02)
            a = 'E';
        cpu_fprintf(f, " %c%c", a, env->crf[i] & 0x01 ? 'O' : ' ');
    }
    cpu_fprintf(f, " ] RES " TARGET_FMT_lx "\n",
                env->reserve_addr);

    if (flags & CPU_DUMP_FPU) {
        for (i = 0; i < 32; i++) {
            if ((i & (RFPL - 1)) == 0) {
                cpu_fprintf(f, "FPR%02d", i);
            }
            /* Print the raw 64-bit image of the FP register */
            cpu_fprintf(f, " %016" PRIx64, *((uint64_t *)&env->fpr[i]));
            if ((i & (RFPL - 1)) == (RFPL - 1)) {
                cpu_fprintf(f, "\n");
            }
        }
        cpu_fprintf(f, "FPSCR " TARGET_FMT_lx "\n", env->fpscr);
    }

#if !defined(CONFIG_USER_ONLY)
    cpu_fprintf(f, " SRR0 " TARGET_FMT_lx " SRR1 " TARGET_FMT_lx
                " PVR " TARGET_FMT_lx " VRSAVE " TARGET_FMT_lx "\n",
                env->spr[SPR_SRR0], env->spr[SPR_SRR1],
                env->spr[SPR_PVR], env->spr[SPR_VRSAVE]);

    cpu_fprintf(f, "SPRG0 " TARGET_FMT_lx " SPRG1 " TARGET_FMT_lx
                " SPRG2 " TARGET_FMT_lx " SPRG3 " TARGET_FMT_lx "\n",
                env->spr[SPR_SPRG0], env->spr[SPR_SPRG1],
                env->spr[SPR_SPRG2], env->spr[SPR_SPRG3]);

    cpu_fprintf(f, "SPRG4 " TARGET_FMT_lx " SPRG5 " TARGET_FMT_lx
                " SPRG6 " TARGET_FMT_lx " SPRG7 " TARGET_FMT_lx "\n",
                env->spr[SPR_SPRG4], env->spr[SPR_SPRG5],
                env->spr[SPR_SPRG6], env->spr[SPR_SPRG7]);

#if defined(TARGET_PPC64)
    /* Hypervisor save/restore registers: POWER7/POWER8 models only */
    if (env->excp_model == POWERPC_EXCP_POWER7 ||
        env->excp_model == POWERPC_EXCP_POWER8) {
        cpu_fprintf(f, "HSRR0 " TARGET_FMT_lx " HSRR1 " TARGET_FMT_lx "\n",
                    env->spr[SPR_HSRR0], env->spr[SPR_HSRR1]);
    }
#endif
    if (env->excp_model == POWERPC_EXCP_BOOKE) {
        /* BookE critical / machine-check exception and debug state */
        cpu_fprintf(f, "CSRR0 " TARGET_FMT_lx " CSRR1 " TARGET_FMT_lx
                    " MCSRR0 " TARGET_FMT_lx " MCSRR1 " TARGET_FMT_lx "\n",
                    env->spr[SPR_BOOKE_CSRR0], env->spr[SPR_BOOKE_CSRR1],
                    env->spr[SPR_BOOKE_MCSRR0], env->spr[SPR_BOOKE_MCSRR1]);

        cpu_fprintf(f, " TCR " TARGET_FMT_lx " TSR " TARGET_FMT_lx
                    " ESR " TARGET_FMT_lx " DEAR " TARGET_FMT_lx "\n",
                    env->spr[SPR_BOOKE_TCR], env->spr[SPR_BOOKE_TSR],
                    env->spr[SPR_BOOKE_ESR], env->spr[SPR_BOOKE_DEAR]);

        cpu_fprintf(f, " PIR " TARGET_FMT_lx " DECAR " TARGET_FMT_lx
                    " IVPR " TARGET_FMT_lx " EPCR " TARGET_FMT_lx "\n",
                    env->spr[SPR_BOOKE_PIR], env->spr[SPR_BOOKE_DECAR],
                    env->spr[SPR_BOOKE_IVPR], env->spr[SPR_BOOKE_EPCR]);

        cpu_fprintf(f, " MCSR " TARGET_FMT_lx " SPRG8 " TARGET_FMT_lx
                    " EPR " TARGET_FMT_lx "\n",
                    env->spr[SPR_BOOKE_MCSR], env->spr[SPR_BOOKE_SPRG8],
                    env->spr[SPR_BOOKE_EPR]);

        /* FSL-specific */
        cpu_fprintf(f, " MCAR " TARGET_FMT_lx " PID1 " TARGET_FMT_lx
                    " PID2 " TARGET_FMT_lx " SVR " TARGET_FMT_lx "\n",
                    env->spr[SPR_Exxx_MCAR], env->spr[SPR_BOOKE_PID1],
                    env->spr[SPR_BOOKE_PID2], env->spr[SPR_E500_SVR]);

        /*
         * IVORs are left out as they are large and do not change often --
         * they can be read with "p $ivor0", "p $ivor1", etc.
         */
    }

#if defined(TARGET_PPC64)
    if (env->flags & POWERPC_FLAG_CFAR) {
        cpu_fprintf(f, " CFAR " TARGET_FMT_lx"\n", env->cfar);
    }
#endif

    /* LPCR is only printed when the CPU model actually registers it */
    if (env->spr_cb[SPR_LPCR].name)
        cpu_fprintf(f, " LPCR " TARGET_FMT_lx "\n", env->spr[SPR_LPCR]);

    /* MMU-model specific registers */
    switch (env->mmu_model) {
    case POWERPC_MMU_32B:
    case POWERPC_MMU_601:
    case POWERPC_MMU_SOFT_6xx:
    case POWERPC_MMU_SOFT_74xx:
#if defined(TARGET_PPC64)
    case POWERPC_MMU_64B:
    case POWERPC_MMU_2_03:
    case POWERPC_MMU_2_06:
    case POWERPC_MMU_2_07:
    case POWERPC_MMU_3_00:
#endif
        if (env->spr_cb[SPR_SDR1].name) { /* SDR1 Exists */
            cpu_fprintf(f, " SDR1 " TARGET_FMT_lx " ", env->spr[SPR_SDR1]);
        }
        if (env->spr_cb[SPR_PTCR].name) { /* PTCR Exists */
            cpu_fprintf(f, " PTCR " TARGET_FMT_lx " ", env->spr[SPR_PTCR]);
        }
        cpu_fprintf(f, " DAR " TARGET_FMT_lx " DSISR " TARGET_FMT_lx "\n",
                    env->spr[SPR_DAR], env->spr[SPR_DSISR]);
        break;
    case POWERPC_MMU_BOOKE206:
        cpu_fprintf(f, " MAS0 " TARGET_FMT_lx " MAS1 " TARGET_FMT_lx
                    " MAS2 " TARGET_FMT_lx " MAS3 " TARGET_FMT_lx
                    "\n",
                    env->spr[SPR_BOOKE_MAS0], env->spr[SPR_BOOKE_MAS1],
                    env->spr[SPR_BOOKE_MAS2], env->spr[SPR_BOOKE_MAS3]);

        cpu_fprintf(f, " MAS4 " TARGET_FMT_lx " MAS6 " TARGET_FMT_lx
                    " MAS7 " TARGET_FMT_lx " PID " TARGET_FMT_lx "\n",
                    env->spr[SPR_BOOKE_MAS4], env->spr[SPR_BOOKE_MAS6],
                    env->spr[SPR_BOOKE_MAS7], env->spr[SPR_BOOKE_PID]);

        cpu_fprintf(f, "MMUCFG " TARGET_FMT_lx " TLB0CFG " TARGET_FMT_lx
                    " TLB1CFG " TARGET_FMT_lx "\n",
                    env->spr[SPR_MMUCFG], env->spr[SPR_BOOKE_TLB0CFG],
                    env->spr[SPR_BOOKE_TLB1CFG]);
        break;
    default:
        break;
    }
#endif

#undef RGPL
#undef RFPL
}

/*
 * Dump per-opcode execution counters (compiled in only with
 * DO_PPC_STATISTICS) by walking the up-to-three-level opcode
 * dispatch tables rooted at env->opcodes.
 */
void ppc_cpu_dump_statistics(CPUState *cs, FILE *f,
                             fprintf_function cpu_fprintf, int flags)
{
#if defined(DO_PPC_STATISTICS)
    PowerPCCPU *cpu = POWERPC_CPU(cs);
    opc_handler_t **t1, **t2, **t3, *handler;
    int op1, op2, op3;

    t1 = cpu->env.opcodes;
    for (op1 = 0; op1 < 64; op1++) {
        handler = t1[op1];
        if (is_indirect_opcode(handler)) {
            /* Opcode with a secondary (and possibly tertiary) table */
            t2 = ind_table(handler);
            for (op2 = 0; op2 < 32; op2++) {
                handler = t2[op2];
                if (is_indirect_opcode(handler)) {
                    t3 = ind_table(handler);
                    for (op3 = 0; op3 < 32; op3++) {
                        handler = t3[op3];
                        if (handler->count == 0)
                            continue;
                        cpu_fprintf(f, "%02x %02x %02x (%02x %04d) %16s: "
                                    "%016" PRIx64 " %" PRId64 "\n",
                                    op1, op2, op3, op1, (op3 << 5) | op2,
                                    handler->oname,
                                    handler->count, handler->count);
                    }
                } else {
                    if (handler->count == 0)
                        continue;
                    cpu_fprintf(f, "%02x %02x (%02x %04d) %16s: "
                                "%016" PRIx64 " %" PRId64 "\n",
                                op1, op2, op1, op2, handler->oname,
                                handler->count, handler->count);
                }
            }
        } else {
            if (handler->count == 0)
                continue;
            cpu_fprintf(f, "%02x (%02x ) %16s: %016" PRIx64
                        " %" PRId64 "\n",
                        op1, op1, handler->oname,
                        handler->count, handler->count);
        }
    }
#endif 7441 } 7442 7443 static void ppc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs) 7444 { 7445 DisasContext *ctx = container_of(dcbase, DisasContext, base); 7446 CPUPPCState *env = cs->env_ptr; 7447 int bound; 7448 7449 ctx->exception = POWERPC_EXCP_NONE; 7450 ctx->spr_cb = env->spr_cb; 7451 ctx->pr = msr_pr; 7452 ctx->mem_idx = env->dmmu_idx; 7453 ctx->dr = msr_dr; 7454 #if !defined(CONFIG_USER_ONLY) 7455 ctx->hv = msr_hv || !env->has_hv_mode; 7456 #endif 7457 ctx->insns_flags = env->insns_flags; 7458 ctx->insns_flags2 = env->insns_flags2; 7459 ctx->access_type = -1; 7460 ctx->need_access_type = !(env->mmu_model & POWERPC_MMU_64B); 7461 ctx->le_mode = !!(env->hflags & (1 << MSR_LE)); 7462 ctx->default_tcg_memop_mask = ctx->le_mode ? MO_LE : MO_BE; 7463 ctx->flags = env->flags; 7464 #if defined(TARGET_PPC64) 7465 ctx->sf_mode = msr_is_64bit(env, env->msr); 7466 ctx->has_cfar = !!(env->flags & POWERPC_FLAG_CFAR); 7467 #endif 7468 ctx->lazy_tlb_flush = env->mmu_model == POWERPC_MMU_32B 7469 || env->mmu_model == POWERPC_MMU_601 7470 || (env->mmu_model & POWERPC_MMU_64B); 7471 7472 ctx->fpu_enabled = !!msr_fp; 7473 if ((env->flags & POWERPC_FLAG_SPE) && msr_spe) 7474 ctx->spe_enabled = !!msr_spe; 7475 else 7476 ctx->spe_enabled = false; 7477 if ((env->flags & POWERPC_FLAG_VRE) && msr_vr) 7478 ctx->altivec_enabled = !!msr_vr; 7479 else 7480 ctx->altivec_enabled = false; 7481 if ((env->flags & POWERPC_FLAG_VSX) && msr_vsx) { 7482 ctx->vsx_enabled = !!msr_vsx; 7483 } else { 7484 ctx->vsx_enabled = false; 7485 } 7486 #if defined(TARGET_PPC64) 7487 if ((env->flags & POWERPC_FLAG_TM) && msr_tm) { 7488 ctx->tm_enabled = !!msr_tm; 7489 } else { 7490 ctx->tm_enabled = false; 7491 } 7492 #endif 7493 ctx->gtse = !!(env->spr[SPR_LPCR] & LPCR_GTSE); 7494 if ((env->flags & POWERPC_FLAG_SE) && msr_se) 7495 ctx->singlestep_enabled = CPU_SINGLE_STEP; 7496 else 7497 ctx->singlestep_enabled = 0; 7498 if ((env->flags & POWERPC_FLAG_BE) && msr_be) 7499 
ctx->singlestep_enabled |= CPU_BRANCH_STEP; 7500 if ((env->flags & POWERPC_FLAG_DE) && msr_de) { 7501 ctx->singlestep_enabled = 0; 7502 target_ulong dbcr0 = env->spr[SPR_BOOKE_DBCR0]; 7503 if (dbcr0 & DBCR0_ICMP) { 7504 ctx->singlestep_enabled |= CPU_SINGLE_STEP; 7505 } 7506 if (dbcr0 & DBCR0_BRT) { 7507 ctx->singlestep_enabled |= CPU_BRANCH_STEP; 7508 } 7509 7510 } 7511 if (unlikely(ctx->base.singlestep_enabled)) { 7512 ctx->singlestep_enabled |= GDBSTUB_SINGLE_STEP; 7513 } 7514 #if defined (DO_SINGLE_STEP) && 0 7515 /* Single step trace mode */ 7516 msr_se = 1; 7517 #endif 7518 7519 bound = -(ctx->base.pc_first | TARGET_PAGE_MASK) / 4; 7520 ctx->base.max_insns = MIN(ctx->base.max_insns, bound); 7521 } 7522 7523 static void ppc_tr_tb_start(DisasContextBase *db, CPUState *cs) 7524 { 7525 } 7526 7527 static void ppc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs) 7528 { 7529 tcg_gen_insn_start(dcbase->pc_next); 7530 } 7531 7532 static bool ppc_tr_breakpoint_check(DisasContextBase *dcbase, CPUState *cs, 7533 const CPUBreakpoint *bp) 7534 { 7535 DisasContext *ctx = container_of(dcbase, DisasContext, base); 7536 7537 gen_debug_exception(ctx); 7538 dcbase->is_jmp = DISAS_NORETURN; 7539 /* The address covered by the breakpoint must be included in 7540 [tb->pc, tb->pc + tb->size) in order to for it to be 7541 properly cleared -- thus we increment the PC here so that 7542 the logic setting tb->size below does the right thing. 
*/ 7543 ctx->base.pc_next += 4; 7544 return true; 7545 } 7546 7547 static void ppc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs) 7548 { 7549 DisasContext *ctx = container_of(dcbase, DisasContext, base); 7550 CPUPPCState *env = cs->env_ptr; 7551 opc_handler_t **table, *handler; 7552 7553 LOG_DISAS("----------------\n"); 7554 LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n", 7555 ctx->base.pc_next, ctx->mem_idx, (int)msr_ir); 7556 7557 if (unlikely(need_byteswap(ctx))) { 7558 ctx->opcode = bswap32(cpu_ldl_code(env, ctx->base.pc_next)); 7559 } else { 7560 ctx->opcode = cpu_ldl_code(env, ctx->base.pc_next); 7561 } 7562 LOG_DISAS("translate opcode %08x (%02x %02x %02x %02x) (%s)\n", 7563 ctx->opcode, opc1(ctx->opcode), opc2(ctx->opcode), 7564 opc3(ctx->opcode), opc4(ctx->opcode), 7565 ctx->le_mode ? "little" : "big"); 7566 ctx->base.pc_next += 4; 7567 table = env->opcodes; 7568 handler = table[opc1(ctx->opcode)]; 7569 if (is_indirect_opcode(handler)) { 7570 table = ind_table(handler); 7571 handler = table[opc2(ctx->opcode)]; 7572 if (is_indirect_opcode(handler)) { 7573 table = ind_table(handler); 7574 handler = table[opc3(ctx->opcode)]; 7575 if (is_indirect_opcode(handler)) { 7576 table = ind_table(handler); 7577 handler = table[opc4(ctx->opcode)]; 7578 } 7579 } 7580 } 7581 /* Is opcode *REALLY* valid ? 
*/ 7582 if (unlikely(handler->handler == &gen_invalid)) { 7583 qemu_log_mask(LOG_GUEST_ERROR, "invalid/unsupported opcode: " 7584 "%02x - %02x - %02x - %02x (%08x) " 7585 TARGET_FMT_lx " %d\n", 7586 opc1(ctx->opcode), opc2(ctx->opcode), 7587 opc3(ctx->opcode), opc4(ctx->opcode), 7588 ctx->opcode, ctx->base.pc_next - 4, (int)msr_ir); 7589 } else { 7590 uint32_t inval; 7591 7592 if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE) 7593 && Rc(ctx->opcode))) { 7594 inval = handler->inval2; 7595 } else { 7596 inval = handler->inval1; 7597 } 7598 7599 if (unlikely((ctx->opcode & inval) != 0)) { 7600 qemu_log_mask(LOG_GUEST_ERROR, "invalid bits: %08x for opcode: " 7601 "%02x - %02x - %02x - %02x (%08x) " 7602 TARGET_FMT_lx "\n", ctx->opcode & inval, 7603 opc1(ctx->opcode), opc2(ctx->opcode), 7604 opc3(ctx->opcode), opc4(ctx->opcode), 7605 ctx->opcode, ctx->base.pc_next - 4); 7606 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 7607 ctx->base.is_jmp = DISAS_NORETURN; 7608 return; 7609 } 7610 } 7611 (*(handler->handler))(ctx); 7612 #if defined(DO_PPC_STATISTICS) 7613 handler->count++; 7614 #endif 7615 /* Check trace mode exceptions */ 7616 if (unlikely(ctx->singlestep_enabled & CPU_SINGLE_STEP && 7617 (ctx->base.pc_next <= 0x100 || ctx->base.pc_next > 0xF00) && 7618 ctx->exception != POWERPC_SYSCALL && 7619 ctx->exception != POWERPC_EXCP_TRAP && 7620 ctx->exception != POWERPC_EXCP_BRANCH)) { 7621 uint32_t excp = gen_prep_dbgex(ctx, POWERPC_EXCP_TRACE); 7622 if (excp != POWERPC_EXCP_NONE) 7623 gen_exception_nip(ctx, excp, ctx->base.pc_next); 7624 } 7625 7626 if (tcg_check_temp_count()) { 7627 qemu_log("Opcode %02x %02x %02x %02x (%08x) leaked " 7628 "temporaries\n", opc1(ctx->opcode), opc2(ctx->opcode), 7629 opc3(ctx->opcode), opc4(ctx->opcode), ctx->opcode); 7630 } 7631 7632 ctx->base.is_jmp = ctx->exception == POWERPC_EXCP_NONE ? 
7633 DISAS_NEXT : DISAS_NORETURN; 7634 } 7635 7636 static void ppc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs) 7637 { 7638 DisasContext *ctx = container_of(dcbase, DisasContext, base); 7639 7640 if (ctx->exception == POWERPC_EXCP_NONE) { 7641 gen_goto_tb(ctx, 0, ctx->base.pc_next); 7642 } else if (ctx->exception != POWERPC_EXCP_BRANCH) { 7643 if (unlikely(ctx->base.singlestep_enabled)) { 7644 gen_debug_exception(ctx); 7645 } 7646 /* Generate the return instruction */ 7647 tcg_gen_exit_tb(NULL, 0); 7648 } 7649 } 7650 7651 static void ppc_tr_disas_log(const DisasContextBase *dcbase, CPUState *cs) 7652 { 7653 qemu_log("IN: %s\n", lookup_symbol(dcbase->pc_first)); 7654 log_target_disas(cs, dcbase->pc_first, dcbase->tb->size); 7655 } 7656 7657 static const TranslatorOps ppc_tr_ops = { 7658 .init_disas_context = ppc_tr_init_disas_context, 7659 .tb_start = ppc_tr_tb_start, 7660 .insn_start = ppc_tr_insn_start, 7661 .breakpoint_check = ppc_tr_breakpoint_check, 7662 .translate_insn = ppc_tr_translate_insn, 7663 .tb_stop = ppc_tr_tb_stop, 7664 .disas_log = ppc_tr_disas_log, 7665 }; 7666 7667 void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb) 7668 { 7669 DisasContext ctx; 7670 7671 translator_loop(&ppc_tr_ops, &ctx.base, cs, tb); 7672 } 7673 7674 void restore_state_to_opc(CPUPPCState *env, TranslationBlock *tb, 7675 target_ulong *data) 7676 { 7677 env->nip = data[0]; 7678 } 7679