/*
 *  PowerPC emulation for qemu: main translation routines.
 *
 *  Copyright (c) 2003-2007 Jocelyn Mayer
 *  Copyright (C) 2011 Freescale Semiconductor, Inc.
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */

#include "qemu/osdep.h"
#include "cpu.h"
#include "internal.h"
#include "disas/disas.h"
#include "exec/exec-all.h"
#include "tcg-op.h"
#include "qemu/host-utils.h"
#include "exec/cpu_ldst.h"

#include "exec/helper-proto.h"
#include "exec/helper-gen.h"

#include "trace-tcg.h"
#include "exec/log.h"


/* Single-stepping flavours recorded in DisasContext.singlestep_enabled */
#define CPU_SINGLE_STEP 0x1
#define CPU_BRANCH_STEP 0x2
#define GDBSTUB_SINGLE_STEP 0x4

/* Include definitions for instructions classes and implementations flags */
//#define PPC_DEBUG_DISAS
//#define DO_PPC_STATISTICS

#ifdef PPC_DEBUG_DISAS
#  define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
#else
#  define LOG_DISAS(...) do { } while (0)
#endif
/*****************************************************************************/
/* Code translation helpers                                                  */

/* global register indexes */
static TCGv_env cpu_env;
/* Backing storage for the register-name strings handed to
 * tcg_global_mem_new*(); sized exactly for each name family
 * (10 one-digit + 22 two-digit names per 32-entry bank). */
static char cpu_reg_names[10*3 + 22*4 /* GPR */
    + 10*4 + 22*5 /* SPE GPRh */
    + 10*4 + 22*5 /* FPR */
    + 2*(10*6 + 22*7) /* AVRh, AVRl */
    + 10*5 + 22*6 /* VSR */
    + 8*5 /* CRF */];
static TCGv cpu_gpr[32];
static TCGv cpu_gprh[32];
static TCGv_i64 cpu_fpr[32];
static TCGv_i64 cpu_avrh[32], cpu_avrl[32];
static TCGv_i64 cpu_vsr[32];
static TCGv_i32 cpu_crf[8];
static TCGv cpu_nip;
static TCGv cpu_msr;
static TCGv cpu_ctr;
static TCGv cpu_lr;
#if defined(TARGET_PPC64)
static TCGv cpu_cfar;
#endif
static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca;
static TCGv cpu_reserve;
static TCGv cpu_fpscr;
static TCGv_i32 cpu_access_type;

#include "exec/gen-icount.h"

/* Register the TCG globals that mirror CPUPPCState fields.
 * Idempotent: guarded by done_init so repeated calls are no-ops.
 * The p/cpu_reg_names_size pointer arithmetic must stay in lockstep
 * with the printf widths of each name ("crf%d" is always 5 bytes
 * including NUL; the others are 1 byte wider for i >= 10). */
void ppc_translate_init(void)
{
    int i;
    char* p;
    size_t cpu_reg_names_size;
    static int done_init = 0;

    if (done_init)
        return;

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
    tcg_ctx.tcg_env = cpu_env;

    p = cpu_reg_names;
    cpu_reg_names_size = sizeof(cpu_reg_names);

    for (i = 0; i < 8; i++) {
        snprintf(p, cpu_reg_names_size, "crf%d", i);
        cpu_crf[i] = tcg_global_mem_new_i32(cpu_env,
                                            offsetof(CPUPPCState, crf[i]), p);
        p += 5;
        cpu_reg_names_size -= 5;
    }

    for (i = 0; i < 32; i++) {
        snprintf(p, cpu_reg_names_size, "r%d", i);
        cpu_gpr[i] = tcg_global_mem_new(cpu_env,
                                        offsetof(CPUPPCState, gpr[i]), p);
        p += (i < 10) ? 3 : 4;
        cpu_reg_names_size -= (i < 10) ? 3 : 4;
        snprintf(p, cpu_reg_names_size, "r%dH", i);
        cpu_gprh[i] = tcg_global_mem_new(cpu_env,
                                         offsetof(CPUPPCState, gprh[i]), p);
        p += (i < 10) ? 4 : 5;
        cpu_reg_names_size -= (i < 10) ? 4 : 5;

        snprintf(p, cpu_reg_names_size, "fp%d", i);
        cpu_fpr[i] = tcg_global_mem_new_i64(cpu_env,
                                            offsetof(CPUPPCState, fpr[i]), p);
        p += (i < 10) ? 4 : 5;
        cpu_reg_names_size -= (i < 10) ? 4 : 5;

        /* AVR halves: avr[i].u64[0] is the architecturally-high half on
         * big-endian hosts, u64[1] on little-endian hosts. */
        snprintf(p, cpu_reg_names_size, "avr%dH", i);
#ifdef HOST_WORDS_BIGENDIAN
        cpu_avrh[i] = tcg_global_mem_new_i64(cpu_env,
                                             offsetof(CPUPPCState, avr[i].u64[0]), p);
#else
        cpu_avrh[i] = tcg_global_mem_new_i64(cpu_env,
                                             offsetof(CPUPPCState, avr[i].u64[1]), p);
#endif
        p += (i < 10) ? 6 : 7;
        cpu_reg_names_size -= (i < 10) ? 6 : 7;

        snprintf(p, cpu_reg_names_size, "avr%dL", i);
#ifdef HOST_WORDS_BIGENDIAN
        cpu_avrl[i] = tcg_global_mem_new_i64(cpu_env,
                                             offsetof(CPUPPCState, avr[i].u64[1]), p);
#else
        cpu_avrl[i] = tcg_global_mem_new_i64(cpu_env,
                                             offsetof(CPUPPCState, avr[i].u64[0]), p);
#endif
        p += (i < 10) ? 6 : 7;
        cpu_reg_names_size -= (i < 10) ? 6 : 7;
        snprintf(p, cpu_reg_names_size, "vsr%d", i);
        cpu_vsr[i] = tcg_global_mem_new_i64(cpu_env,
                                            offsetof(CPUPPCState, vsr[i]), p);
        p += (i < 10) ? 5 : 6;
        cpu_reg_names_size -= (i < 10) ? 5 : 6;
    }

    cpu_nip = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, nip), "nip");

    cpu_msr = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, msr), "msr");

    cpu_ctr = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, ctr), "ctr");

    cpu_lr = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, lr), "lr");

#if defined(TARGET_PPC64)
    cpu_cfar = tcg_global_mem_new(cpu_env,
                                  offsetof(CPUPPCState, cfar), "cfar");
#endif

    cpu_xer = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, xer), "xer");
    cpu_so = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, so), "SO");
    cpu_ov = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, ov), "OV");
    cpu_ca = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, ca), "CA");

    cpu_reserve = tcg_global_mem_new(cpu_env,
                                     offsetof(CPUPPCState, reserve_addr),
                                     "reserve_addr");

    cpu_fpscr = tcg_global_mem_new(cpu_env,
                                   offsetof(CPUPPCState, fpscr), "fpscr");

    cpu_access_type = tcg_global_mem_new_i32(cpu_env,
                                             offsetof(CPUPPCState, access_type),
                                             "access_type");

    done_init = 1;
}

/* internal defines */
/* Per-translation-block decoder state, filled from the CPU state when a
 * TB starts and consulted by every gen_* handler below. */
struct DisasContext {
    struct TranslationBlock *tb;
    target_ulong nip;        /* address of the instruction FOLLOWING the
                              * one being translated */
    uint32_t opcode;
    uint32_t exception;
    /* Routine used to access memory */
    bool pr, hv, dr, le_mode;
    bool lazy_tlb_flush;
    bool need_access_type;
    int mem_idx;
    int access_type;
    /* Translation flags */
    TCGMemOp default_tcg_memop_mask;
#if defined(TARGET_PPC64)
    bool sf_mode;            /* 64-bit (sixty-four) mode */
    bool has_cfar;
#endif
    bool fpu_enabled;
    bool altivec_enabled;
    bool vsx_enabled;
    bool spe_enabled;
    bool tm_enabled;
    ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
    int singlestep_enabled;
    uint64_t insns_flags;
    uint64_t insns_flags2;
};

/* Return true iff byteswap is needed in a scalar memop */
static inline bool need_byteswap(const DisasContext *ctx)
{
#if defined(TARGET_WORDS_BIGENDIAN)
    return ctx->le_mode;
#else
    return !ctx->le_mode;
#endif
}

/* True when active word size < size of target_long. */
#ifdef TARGET_PPC64
#  define NARROW_MODE(C)  (!(C)->sf_mode)
#else
#  define NARROW_MODE(C)  0
#endif

struct opc_handler_t {
    /* invalid bits for instruction 1 (Rc(opcode) == 0) */
    uint32_t inval1;
    /* invalid bits for instruction 2 (Rc(opcode) == 1) */
    uint32_t inval2;
    /* instruction type */
    uint64_t type;
    /* extended instruction type */
    uint64_t type2;
    /* handler */
    void (*handler)(DisasContext *ctx);
#if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU)
    const char *oname;
#endif
#if defined(DO_PPC_STATISTICS)
    uint64_t count;
#endif
};

/* Emit a store of access_type only when it differs from the cached value. */
static inline void gen_set_access_type(DisasContext *ctx, int access_type)
{
    if (ctx->need_access_type && ctx->access_type != access_type) {
        tcg_gen_movi_i32(cpu_access_type, access_type);
        ctx->access_type = access_type;
    }
}

/* Emit an update of the architectural PC (nip), truncated in 32-bit mode. */
static inline void gen_update_nip(DisasContext *ctx, target_ulong nip)
{
    if (NARROW_MODE(ctx)) {
        nip = (uint32_t)nip;
    }
    tcg_gen_movi_tl(cpu_nip, nip);
}

/* Raise exception 'excp' carrying an auxiliary error code. */
static void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error)
{
    TCGv_i32 t0, t1;

    /* These are all synchronous exceptions, we set the PC back to
     * the faulting instruction
     */
    if (ctx->exception == POWERPC_EXCP_NONE) {
        gen_update_nip(ctx, ctx->nip - 4);
    }
    t0 = tcg_const_i32(excp);
    t1 = tcg_const_i32(error);
    gen_helper_raise_exception_err(cpu_env, t0, t1);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    ctx->exception = (excp);
}

/* Raise exception 'excp' with no error code. */
static void gen_exception(DisasContext *ctx, uint32_t excp)
{
    TCGv_i32 t0;

    /* These are all synchronous exceptions, we set the PC back to
     * the faulting instruction
     */
    if (ctx->exception == POWERPC_EXCP_NONE) {
        gen_update_nip(ctx, ctx->nip - 4);
    }
    t0 = tcg_const_i32(excp);
    gen_helper_raise_exception(cpu_env, t0);
    tcg_temp_free_i32(t0);
    ctx->exception = (excp);
}

/* Raise exception 'excp' with the PC explicitly set to 'nip'. */
static void gen_exception_nip(DisasContext *ctx, uint32_t excp,
                              target_ulong nip)
{
    TCGv_i32 t0;

    gen_update_nip(ctx, nip);
    t0 = tcg_const_i32(excp);
    gen_helper_raise_exception(cpu_env, t0);
    tcg_temp_free_i32(t0);
    ctx->exception = (excp);
}

/* Raise EXCP_DEBUG for gdbstub / single-step support. */
static void gen_debug_exception(DisasContext *ctx)
{
    TCGv_i32 t0;

    /* These are all synchronous exceptions, we set the PC back to
     * the faulting instruction
     */
    if ((ctx->exception != POWERPC_EXCP_BRANCH) &&
        (ctx->exception != POWERPC_EXCP_SYNC)) {
        gen_update_nip(ctx, ctx->nip);
    }
    t0 = tcg_const_i32(EXCP_DEBUG);
    gen_helper_raise_exception(cpu_env, t0);
    tcg_temp_free_i32(t0);
}

static inline void gen_inval_exception(DisasContext *ctx, uint32_t error)
{
    /* Will be converted to program check if needed */
    gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_INVAL | error);
}

static inline void gen_priv_exception(DisasContext *ctx, uint32_t error)
{
    gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_PRIV | error);
}

static inline void gen_hvpriv_exception(DisasContext *ctx, uint32_t error)
{
    /* Will be converted to program check if needed */
    gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_PRIV | error);
}

/* Stop translation */
static inline void gen_stop_exception(DisasContext *ctx)
{
    gen_update_nip(ctx, ctx->nip);
    ctx->exception = POWERPC_EXCP_STOP;
}

#ifndef CONFIG_USER_ONLY
/* No need to update nip here, as execution flow will change */
static inline void gen_sync_exception(DisasContext *ctx)
{
    ctx->exception = POWERPC_EXCP_SYNC;
}
#endif

/* Convenience wrappers that expand to the GEN_OPCODE* table-entry
 * initializers defined further below. */
#define GEN_HANDLER(name, opc1, opc2, opc3, inval, type)                      \
GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE)

#define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2)             \
GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2)

#define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type)               \
GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE)

#define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2)      \
GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2)

#define GEN_HANDLER_E_2(name, opc1, opc2, opc3, opc4, inval, type, type2)     \
GEN_OPCODE3(name, opc1, opc2, opc3, opc4, inval, type, type2)

#define GEN_HANDLER2_E_2(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) \
GEN_OPCODE4(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2)

/* One entry in the opcode dispatch table. */
typedef struct opcode_t {
    unsigned char opc1, opc2, opc3, opc4;
#if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
    unsigned char pad[4];
#endif
    opc_handler_t handler;
    const char *oname;
} opcode_t;

/* Helpers for priv. check */
#define GEN_PRIV                                                \
    do {                                                        \
        gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); return; \
    } while (0)

#if defined(CONFIG_USER_ONLY)
/* User mode: every privileged instruction traps. */
#define CHK_HV GEN_PRIV
#define CHK_SV GEN_PRIV
#define CHK_HVRM GEN_PRIV
#else
/* Hypervisor-privileged */
#define CHK_HV                                          \
    do {                                                \
        if (unlikely(ctx->pr || !ctx->hv)) {            \
            GEN_PRIV;                                   \
        }                                               \
    } while (0)
/* Supervisor-privileged */
#define CHK_SV                   \
    do {                         \
        if (unlikely(ctx->pr)) { \
            GEN_PRIV;            \
        }                        \
    } while (0)
/* Hypervisor real-mode only (DR must be off) */
#define CHK_HVRM                                            \
    do {                                                    \
        if (unlikely(ctx->pr || !ctx->hv || ctx->dr)) {     \
            GEN_PRIV;                                       \
        }                                                   \
    } while (0)
#endif

#define CHK_NONE


/*****************************************************************************/
/***                           Instruction decoding                        ***/
/* Generate an inline extractor for an unsigned bitfield of the opcode. */
#define EXTRACT_HELPER(name, shift, nb)                                       \
static inline uint32_t name(uint32_t opcode)                                  \
{                                                                             \
    return (opcode >> (shift)) & ((1 << (nb)) - 1);                           \
}

/* Same, but the extracted field is sign-extended from 16 bits. */
#define EXTRACT_SHELPER(name, shift, nb)                                      \
static inline int32_t name(uint32_t opcode)                                   \
{                                                                             \
    return (int16_t)((opcode >> (shift)) & ((1 << (nb)) - 1));                \
}

/* Extractor for a field split in two parts (VSX extended register numbers). */
#define EXTRACT_HELPER_SPLIT(name, shift1, nb1, shift2, nb2)                  \
static inline uint32_t name(uint32_t opcode)                                  \
{                                                                             \
    return (((opcode >> (shift1)) & ((1 << (nb1)) - 1)) << nb2) |             \
            ((opcode >> (shift2)) & ((1 << (nb2)) - 1));                      \
}

/* Extractor for the three-part signed displacement of DX-form insns. */
#define EXTRACT_HELPER_DXFORM(name,                                           \
                              d0_bits, shift_op_d0, shift_d0,                 \
                              d1_bits, shift_op_d1, shift_d1,                 \
                              d2_bits, shift_op_d2, shift_d2)                 \
static inline int16_t name(uint32_t opcode)                                   \
{                                                                             \
    return                                                                    \
        (((opcode >> (shift_op_d0)) & ((1 << (d0_bits)) - 1)) << (shift_d0)) | \
        (((opcode >> (shift_op_d1)) & ((1 << (d1_bits)) - 1)) << (shift_d1)) | \
        (((opcode >> (shift_op_d2)) & ((1 << (d2_bits)) - 1)) << (shift_d2)); \
}


/* Opcode part 1 */
EXTRACT_HELPER(opc1, 26, 6);
/* Opcode part 2 */
EXTRACT_HELPER(opc2, 1, 5);
/* Opcode part 3 */
EXTRACT_HELPER(opc3, 6, 5); 466 /* Opcode part 4 */ 467 EXTRACT_HELPER(opc4, 16, 5); 468 /* Update Cr0 flags */ 469 EXTRACT_HELPER(Rc, 0, 1); 470 /* Update Cr6 flags (Altivec) */ 471 EXTRACT_HELPER(Rc21, 10, 1); 472 /* Destination */ 473 EXTRACT_HELPER(rD, 21, 5); 474 /* Source */ 475 EXTRACT_HELPER(rS, 21, 5); 476 /* First operand */ 477 EXTRACT_HELPER(rA, 16, 5); 478 /* Second operand */ 479 EXTRACT_HELPER(rB, 11, 5); 480 /* Third operand */ 481 EXTRACT_HELPER(rC, 6, 5); 482 /*** Get CRn ***/ 483 EXTRACT_HELPER(crfD, 23, 3); 484 EXTRACT_HELPER(crfS, 18, 3); 485 EXTRACT_HELPER(crbD, 21, 5); 486 EXTRACT_HELPER(crbA, 16, 5); 487 EXTRACT_HELPER(crbB, 11, 5); 488 /* SPR / TBL */ 489 EXTRACT_HELPER(_SPR, 11, 10); 490 static inline uint32_t SPR(uint32_t opcode) 491 { 492 uint32_t sprn = _SPR(opcode); 493 494 return ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5); 495 } 496 /*** Get constants ***/ 497 /* 16 bits signed immediate value */ 498 EXTRACT_SHELPER(SIMM, 0, 16); 499 /* 16 bits unsigned immediate value */ 500 EXTRACT_HELPER(UIMM, 0, 16); 501 /* 5 bits signed immediate value */ 502 EXTRACT_HELPER(SIMM5, 16, 5); 503 /* 5 bits signed immediate value */ 504 EXTRACT_HELPER(UIMM5, 16, 5); 505 /* 4 bits unsigned immediate value */ 506 EXTRACT_HELPER(UIMM4, 16, 4); 507 /* Bit count */ 508 EXTRACT_HELPER(NB, 11, 5); 509 /* Shift count */ 510 EXTRACT_HELPER(SH, 11, 5); 511 /* Vector shift count */ 512 EXTRACT_HELPER(VSH, 6, 4); 513 /* Mask start */ 514 EXTRACT_HELPER(MB, 6, 5); 515 /* Mask end */ 516 EXTRACT_HELPER(ME, 1, 5); 517 /* Trap operand */ 518 EXTRACT_HELPER(TO, 21, 5); 519 520 EXTRACT_HELPER(CRM, 12, 8); 521 522 #ifndef CONFIG_USER_ONLY 523 EXTRACT_HELPER(SR, 16, 4); 524 #endif 525 526 /* mtfsf/mtfsfi */ 527 EXTRACT_HELPER(FPBF, 23, 3); 528 EXTRACT_HELPER(FPIMM, 12, 4); 529 EXTRACT_HELPER(FPL, 25, 1); 530 EXTRACT_HELPER(FPFLM, 17, 8); 531 EXTRACT_HELPER(FPW, 16, 1); 532 533 /* addpcis */ 534 EXTRACT_HELPER_DXFORM(DX, 10, 6, 6, 5, 16, 1, 1, 0, 0) 535 #if 
defined(TARGET_PPC64) 536 /* darn */ 537 EXTRACT_HELPER(L, 16, 2); 538 #endif 539 540 /*** Jump target decoding ***/ 541 /* Immediate address */ 542 static inline target_ulong LI(uint32_t opcode) 543 { 544 return (opcode >> 0) & 0x03FFFFFC; 545 } 546 547 static inline uint32_t BD(uint32_t opcode) 548 { 549 return (opcode >> 0) & 0xFFFC; 550 } 551 552 EXTRACT_HELPER(BO, 21, 5); 553 EXTRACT_HELPER(BI, 16, 5); 554 /* Absolute/relative address */ 555 EXTRACT_HELPER(AA, 1, 1); 556 /* Link */ 557 EXTRACT_HELPER(LK, 0, 1); 558 559 /* DFP Z22-form */ 560 EXTRACT_HELPER(DCM, 10, 6) 561 562 /* DFP Z23-form */ 563 EXTRACT_HELPER(RMC, 9, 2) 564 565 EXTRACT_HELPER_SPLIT(xT, 0, 1, 21, 5); 566 EXTRACT_HELPER_SPLIT(xS, 0, 1, 21, 5); 567 EXTRACT_HELPER_SPLIT(xA, 2, 1, 16, 5); 568 EXTRACT_HELPER_SPLIT(xB, 1, 1, 11, 5); 569 EXTRACT_HELPER_SPLIT(xC, 3, 1, 6, 5); 570 EXTRACT_HELPER(DM, 8, 2); 571 EXTRACT_HELPER(UIM, 16, 2); 572 EXTRACT_HELPER(SHW, 8, 2); 573 EXTRACT_HELPER(SP, 19, 2); 574 EXTRACT_HELPER(IMM8, 11, 8); 575 576 /*****************************************************************************/ 577 /* PowerPC instructions table */ 578 579 #if defined(DO_PPC_STATISTICS) 580 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \ 581 { \ 582 .opc1 = op1, \ 583 .opc2 = op2, \ 584 .opc3 = op3, \ 585 .opc4 = 0xff, \ 586 .handler = { \ 587 .inval1 = invl, \ 588 .type = _typ, \ 589 .type2 = _typ2, \ 590 .handler = &gen_##name, \ 591 .oname = stringify(name), \ 592 }, \ 593 .oname = stringify(name), \ 594 } 595 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \ 596 { \ 597 .opc1 = op1, \ 598 .opc2 = op2, \ 599 .opc3 = op3, \ 600 .opc4 = 0xff, \ 601 .handler = { \ 602 .inval1 = invl1, \ 603 .inval2 = invl2, \ 604 .type = _typ, \ 605 .type2 = _typ2, \ 606 .handler = &gen_##name, \ 607 .oname = stringify(name), \ 608 }, \ 609 .oname = stringify(name), \ 610 } 611 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \ 612 { \ 613 .opc1 = op1, \ 614 
.opc2 = op2, \ 615 .opc3 = op3, \ 616 .opc4 = 0xff, \ 617 .handler = { \ 618 .inval1 = invl, \ 619 .type = _typ, \ 620 .type2 = _typ2, \ 621 .handler = &gen_##name, \ 622 .oname = onam, \ 623 }, \ 624 .oname = onam, \ 625 } 626 #define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2) \ 627 { \ 628 .opc1 = op1, \ 629 .opc2 = op2, \ 630 .opc3 = op3, \ 631 .opc4 = op4, \ 632 .handler = { \ 633 .inval1 = invl, \ 634 .type = _typ, \ 635 .type2 = _typ2, \ 636 .handler = &gen_##name, \ 637 .oname = stringify(name), \ 638 }, \ 639 .oname = stringify(name), \ 640 } 641 #define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2) \ 642 { \ 643 .opc1 = op1, \ 644 .opc2 = op2, \ 645 .opc3 = op3, \ 646 .opc4 = op4, \ 647 .handler = { \ 648 .inval1 = invl, \ 649 .type = _typ, \ 650 .type2 = _typ2, \ 651 .handler = &gen_##name, \ 652 .oname = onam, \ 653 }, \ 654 .oname = onam, \ 655 } 656 #else 657 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \ 658 { \ 659 .opc1 = op1, \ 660 .opc2 = op2, \ 661 .opc3 = op3, \ 662 .opc4 = 0xff, \ 663 .handler = { \ 664 .inval1 = invl, \ 665 .type = _typ, \ 666 .type2 = _typ2, \ 667 .handler = &gen_##name, \ 668 }, \ 669 .oname = stringify(name), \ 670 } 671 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \ 672 { \ 673 .opc1 = op1, \ 674 .opc2 = op2, \ 675 .opc3 = op3, \ 676 .opc4 = 0xff, \ 677 .handler = { \ 678 .inval1 = invl1, \ 679 .inval2 = invl2, \ 680 .type = _typ, \ 681 .type2 = _typ2, \ 682 .handler = &gen_##name, \ 683 }, \ 684 .oname = stringify(name), \ 685 } 686 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \ 687 { \ 688 .opc1 = op1, \ 689 .opc2 = op2, \ 690 .opc3 = op3, \ 691 .opc4 = 0xff, \ 692 .handler = { \ 693 .inval1 = invl, \ 694 .type = _typ, \ 695 .type2 = _typ2, \ 696 .handler = &gen_##name, \ 697 }, \ 698 .oname = onam, \ 699 } 700 #define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2) \ 701 { \ 702 .opc1 = op1, \ 703 .opc2 = op2, \ 704 
.opc3 = op3, \ 705 .opc4 = op4, \ 706 .handler = { \ 707 .inval1 = invl, \ 708 .type = _typ, \ 709 .type2 = _typ2, \ 710 .handler = &gen_##name, \ 711 }, \ 712 .oname = stringify(name), \ 713 } 714 #define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2) \ 715 { \ 716 .opc1 = op1, \ 717 .opc2 = op2, \ 718 .opc3 = op3, \ 719 .opc4 = op4, \ 720 .handler = { \ 721 .inval1 = invl, \ 722 .type = _typ, \ 723 .type2 = _typ2, \ 724 .handler = &gen_##name, \ 725 }, \ 726 .oname = onam, \ 727 } 728 #endif 729 730 /* SPR load/store helpers */ 731 static inline void gen_load_spr(TCGv t, int reg) 732 { 733 tcg_gen_ld_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg])); 734 } 735 736 static inline void gen_store_spr(int reg, TCGv t) 737 { 738 tcg_gen_st_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg])); 739 } 740 741 /* Invalid instruction */ 742 static void gen_invalid(DisasContext *ctx) 743 { 744 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 745 } 746 747 static opc_handler_t invalid_handler = { 748 .inval1 = 0xFFFFFFFF, 749 .inval2 = 0xFFFFFFFF, 750 .type = PPC_NONE, 751 .type2 = PPC_NONE, 752 .handler = gen_invalid, 753 }; 754 755 /*** Integer comparison ***/ 756 757 static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf) 758 { 759 TCGv t0 = tcg_temp_new(); 760 TCGv_i32 t1 = tcg_temp_new_i32(); 761 762 tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so); 763 764 tcg_gen_setcond_tl((s ? TCG_COND_LT: TCG_COND_LTU), t0, arg0, arg1); 765 tcg_gen_trunc_tl_i32(t1, t0); 766 tcg_gen_shli_i32(t1, t1, CRF_LT); 767 tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t1); 768 769 tcg_gen_setcond_tl((s ? 
TCG_COND_GT: TCG_COND_GTU), t0, arg0, arg1); 770 tcg_gen_trunc_tl_i32(t1, t0); 771 tcg_gen_shli_i32(t1, t1, CRF_GT); 772 tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t1); 773 774 tcg_gen_setcond_tl(TCG_COND_EQ, t0, arg0, arg1); 775 tcg_gen_trunc_tl_i32(t1, t0); 776 tcg_gen_shli_i32(t1, t1, CRF_EQ); 777 tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t1); 778 779 tcg_temp_free(t0); 780 tcg_temp_free_i32(t1); 781 } 782 783 static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf) 784 { 785 TCGv t0 = tcg_const_tl(arg1); 786 gen_op_cmp(arg0, t0, s, crf); 787 tcg_temp_free(t0); 788 } 789 790 static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf) 791 { 792 TCGv t0, t1; 793 t0 = tcg_temp_new(); 794 t1 = tcg_temp_new(); 795 if (s) { 796 tcg_gen_ext32s_tl(t0, arg0); 797 tcg_gen_ext32s_tl(t1, arg1); 798 } else { 799 tcg_gen_ext32u_tl(t0, arg0); 800 tcg_gen_ext32u_tl(t1, arg1); 801 } 802 gen_op_cmp(t0, t1, s, crf); 803 tcg_temp_free(t1); 804 tcg_temp_free(t0); 805 } 806 807 static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf) 808 { 809 TCGv t0 = tcg_const_tl(arg1); 810 gen_op_cmp32(arg0, t0, s, crf); 811 tcg_temp_free(t0); 812 } 813 814 static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg) 815 { 816 if (NARROW_MODE(ctx)) { 817 gen_op_cmpi32(reg, 0, 1, 0); 818 } else { 819 gen_op_cmpi(reg, 0, 1, 0); 820 } 821 } 822 823 /* cmp */ 824 static void gen_cmp(DisasContext *ctx) 825 { 826 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) { 827 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 828 1, crfD(ctx->opcode)); 829 } else { 830 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 831 1, crfD(ctx->opcode)); 832 } 833 } 834 835 /* cmpi */ 836 static void gen_cmpi(DisasContext *ctx) 837 { 838 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) { 839 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode), 840 1, crfD(ctx->opcode)); 841 } else { 842 
gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode), 843 1, crfD(ctx->opcode)); 844 } 845 } 846 847 /* cmpl */ 848 static void gen_cmpl(DisasContext *ctx) 849 { 850 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) { 851 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 852 0, crfD(ctx->opcode)); 853 } else { 854 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 855 0, crfD(ctx->opcode)); 856 } 857 } 858 859 /* cmpli */ 860 static void gen_cmpli(DisasContext *ctx) 861 { 862 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) { 863 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode), 864 0, crfD(ctx->opcode)); 865 } else { 866 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode), 867 0, crfD(ctx->opcode)); 868 } 869 } 870 871 /* cmprb - range comparison: isupper, isaplha, islower*/ 872 static void gen_cmprb(DisasContext *ctx) 873 { 874 TCGv_i32 src1 = tcg_temp_new_i32(); 875 TCGv_i32 src2 = tcg_temp_new_i32(); 876 TCGv_i32 src2lo = tcg_temp_new_i32(); 877 TCGv_i32 src2hi = tcg_temp_new_i32(); 878 TCGv_i32 crf = cpu_crf[crfD(ctx->opcode)]; 879 880 tcg_gen_trunc_tl_i32(src1, cpu_gpr[rA(ctx->opcode)]); 881 tcg_gen_trunc_tl_i32(src2, cpu_gpr[rB(ctx->opcode)]); 882 883 tcg_gen_andi_i32(src1, src1, 0xFF); 884 tcg_gen_ext8u_i32(src2lo, src2); 885 tcg_gen_shri_i32(src2, src2, 8); 886 tcg_gen_ext8u_i32(src2hi, src2); 887 888 tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1); 889 tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi); 890 tcg_gen_and_i32(crf, src2lo, src2hi); 891 892 if (ctx->opcode & 0x00200000) { 893 tcg_gen_shri_i32(src2, src2, 8); 894 tcg_gen_ext8u_i32(src2lo, src2); 895 tcg_gen_shri_i32(src2, src2, 8); 896 tcg_gen_ext8u_i32(src2hi, src2); 897 tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1); 898 tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi); 899 tcg_gen_and_i32(src2lo, src2lo, src2hi); 900 tcg_gen_or_i32(crf, crf, src2lo); 901 } 902 tcg_gen_shli_i32(crf, crf, 
CRF_GT); 903 tcg_temp_free_i32(src1); 904 tcg_temp_free_i32(src2); 905 tcg_temp_free_i32(src2lo); 906 tcg_temp_free_i32(src2hi); 907 } 908 909 #if defined(TARGET_PPC64) 910 /* cmpeqb */ 911 static void gen_cmpeqb(DisasContext *ctx) 912 { 913 gen_helper_cmpeqb(cpu_crf[crfD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 914 cpu_gpr[rB(ctx->opcode)]); 915 } 916 #endif 917 918 /* isel (PowerPC 2.03 specification) */ 919 static void gen_isel(DisasContext *ctx) 920 { 921 uint32_t bi = rC(ctx->opcode); 922 uint32_t mask = 0x08 >> (bi & 0x03); 923 TCGv t0 = tcg_temp_new(); 924 TCGv zr; 925 926 tcg_gen_extu_i32_tl(t0, cpu_crf[bi >> 2]); 927 tcg_gen_andi_tl(t0, t0, mask); 928 929 zr = tcg_const_tl(0); 930 tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rD(ctx->opcode)], t0, zr, 931 rA(ctx->opcode) ? cpu_gpr[rA(ctx->opcode)] : zr, 932 cpu_gpr[rB(ctx->opcode)]); 933 tcg_temp_free(zr); 934 tcg_temp_free(t0); 935 } 936 937 /* cmpb: PowerPC 2.05 specification */ 938 static void gen_cmpb(DisasContext *ctx) 939 { 940 gen_helper_cmpb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 941 cpu_gpr[rB(ctx->opcode)]); 942 } 943 944 /*** Integer arithmetic ***/ 945 946 static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0, 947 TCGv arg1, TCGv arg2, int sub) 948 { 949 TCGv t0 = tcg_temp_new(); 950 951 tcg_gen_xor_tl(cpu_ov, arg0, arg2); 952 tcg_gen_xor_tl(t0, arg1, arg2); 953 if (sub) { 954 tcg_gen_and_tl(cpu_ov, cpu_ov, t0); 955 } else { 956 tcg_gen_andc_tl(cpu_ov, cpu_ov, t0); 957 } 958 tcg_temp_free(t0); 959 if (NARROW_MODE(ctx)) { 960 tcg_gen_ext32s_tl(cpu_ov, cpu_ov); 961 } 962 tcg_gen_shri_tl(cpu_ov, cpu_ov, TARGET_LONG_BITS - 1); 963 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 964 } 965 966 /* Common add function */ 967 static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1, 968 TCGv arg2, bool add_ca, bool compute_ca, 969 bool compute_ov, bool compute_rc0) 970 { 971 TCGv t0 = ret; 972 973 if (compute_ca || compute_ov) { 974 t0 = tcg_temp_new(); 975 } 976 977 if 
(compute_ca) { 978 if (NARROW_MODE(ctx)) { 979 /* Caution: a non-obvious corner case of the spec is that we 980 must produce the *entire* 64-bit addition, but produce the 981 carry into bit 32. */ 982 TCGv t1 = tcg_temp_new(); 983 tcg_gen_xor_tl(t1, arg1, arg2); /* add without carry */ 984 tcg_gen_add_tl(t0, arg1, arg2); 985 if (add_ca) { 986 tcg_gen_add_tl(t0, t0, cpu_ca); 987 } 988 tcg_gen_xor_tl(cpu_ca, t0, t1); /* bits changed w/ carry */ 989 tcg_temp_free(t1); 990 tcg_gen_shri_tl(cpu_ca, cpu_ca, 32); /* extract bit 32 */ 991 tcg_gen_andi_tl(cpu_ca, cpu_ca, 1); 992 } else { 993 TCGv zero = tcg_const_tl(0); 994 if (add_ca) { 995 tcg_gen_add2_tl(t0, cpu_ca, arg1, zero, cpu_ca, zero); 996 tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, arg2, zero); 997 } else { 998 tcg_gen_add2_tl(t0, cpu_ca, arg1, zero, arg2, zero); 999 } 1000 tcg_temp_free(zero); 1001 } 1002 } else { 1003 tcg_gen_add_tl(t0, arg1, arg2); 1004 if (add_ca) { 1005 tcg_gen_add_tl(t0, t0, cpu_ca); 1006 } 1007 } 1008 1009 if (compute_ov) { 1010 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0); 1011 } 1012 if (unlikely(compute_rc0)) { 1013 gen_set_Rc0(ctx, t0); 1014 } 1015 1016 if (!TCGV_EQUAL(t0, ret)) { 1017 tcg_gen_mov_tl(ret, t0); 1018 tcg_temp_free(t0); 1019 } 1020 } 1021 /* Add functions with two operands */ 1022 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \ 1023 static void glue(gen_, name)(DisasContext *ctx) \ 1024 { \ 1025 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \ 1026 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 1027 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 1028 } 1029 /* Add functions with one operand and one immediate */ 1030 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \ 1031 add_ca, compute_ca, compute_ov) \ 1032 static void glue(gen_, name)(DisasContext *ctx) \ 1033 { \ 1034 TCGv t0 = tcg_const_tl(const_val); \ 1035 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \ 1036 cpu_gpr[rA(ctx->opcode)], t0, \ 1037 add_ca, compute_ca, 
compute_ov, Rc(ctx->opcode)); \ 1038 tcg_temp_free(t0); \ 1039 } 1040 1041 /* add add. addo addo. */ 1042 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0) 1043 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1) 1044 /* addc addc. addco addco. */ 1045 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0) 1046 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1) 1047 /* adde adde. addeo addeo. */ 1048 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0) 1049 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1) 1050 /* addme addme. addmeo addmeo. */ 1051 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0) 1052 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1) 1053 /* addze addze. addzeo addzeo.*/ 1054 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0) 1055 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1) 1056 /* addi */ 1057 static void gen_addi(DisasContext *ctx) 1058 { 1059 target_long simm = SIMM(ctx->opcode); 1060 1061 if (rA(ctx->opcode) == 0) { 1062 /* li case */ 1063 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm); 1064 } else { 1065 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], 1066 cpu_gpr[rA(ctx->opcode)], simm); 1067 } 1068 } 1069 /* addic addic.*/ 1070 static inline void gen_op_addic(DisasContext *ctx, bool compute_rc0) 1071 { 1072 TCGv c = tcg_const_tl(SIMM(ctx->opcode)); 1073 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1074 c, 0, 1, 0, compute_rc0); 1075 tcg_temp_free(c); 1076 } 1077 1078 static void gen_addic(DisasContext *ctx) 1079 { 1080 gen_op_addic(ctx, 0); 1081 } 1082 1083 static void gen_addic_(DisasContext *ctx) 1084 { 1085 gen_op_addic(ctx, 1); 1086 } 1087 1088 /* addis */ 1089 static void gen_addis(DisasContext *ctx) 1090 { 1091 target_long simm = SIMM(ctx->opcode); 1092 1093 if (rA(ctx->opcode) == 0) { 1094 /* lis case */ 1095 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm << 16); 1096 } else { 1097 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], 1098 cpu_gpr[rA(ctx->opcode)], simm << 16); 1099 } 1100 } 1101 1102 /* addpcis */ 1103 static void gen_addpcis(DisasContext *ctx) 1104 { 1105 target_long 
d = DX(ctx->opcode); 1106 1107 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], ctx->nip + (d << 16)); 1108 } 1109 1110 static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1, 1111 TCGv arg2, int sign, int compute_ov) 1112 { 1113 TCGv_i32 t0 = tcg_temp_new_i32(); 1114 TCGv_i32 t1 = tcg_temp_new_i32(); 1115 TCGv_i32 t2 = tcg_temp_new_i32(); 1116 TCGv_i32 t3 = tcg_temp_new_i32(); 1117 1118 tcg_gen_trunc_tl_i32(t0, arg1); 1119 tcg_gen_trunc_tl_i32(t1, arg2); 1120 if (sign) { 1121 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN); 1122 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1); 1123 tcg_gen_and_i32(t2, t2, t3); 1124 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0); 1125 tcg_gen_or_i32(t2, t2, t3); 1126 tcg_gen_movi_i32(t3, 0); 1127 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1); 1128 tcg_gen_div_i32(t3, t0, t1); 1129 tcg_gen_extu_i32_tl(ret, t3); 1130 } else { 1131 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t1, 0); 1132 tcg_gen_movi_i32(t3, 0); 1133 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1); 1134 tcg_gen_divu_i32(t3, t0, t1); 1135 tcg_gen_extu_i32_tl(ret, t3); 1136 } 1137 if (compute_ov) { 1138 tcg_gen_extu_i32_tl(cpu_ov, t2); 1139 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 1140 } 1141 tcg_temp_free_i32(t0); 1142 tcg_temp_free_i32(t1); 1143 tcg_temp_free_i32(t2); 1144 tcg_temp_free_i32(t3); 1145 1146 if (unlikely(Rc(ctx->opcode) != 0)) 1147 gen_set_Rc0(ctx, ret); 1148 } 1149 /* Div functions */ 1150 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \ 1151 static void glue(gen_, name)(DisasContext *ctx) \ 1152 { \ 1153 gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)], \ 1154 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 1155 sign, compute_ov); \ 1156 } 1157 /* divwu divwu. divwuo divwuo. */ 1158 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0); 1159 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1); 1160 /* divw divw. divwo divwo. */ 1161 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0); 1162 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1); 1163 1164 /* div[wd]eu[o][.] 
*/
/* Extended divide: the heavy lifting (including OV) is done in a helper;
 * compute_ov is passed to it as a flag. */
#define GEN_DIVE(name, hlpr, compute_ov)                                      \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 t0 = tcg_const_i32(compute_ov);                                  \
    gen_helper_##hlpr(cpu_gpr[rD(ctx->opcode)], cpu_env,                      \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); \
    tcg_temp_free_i32(t0);                                                    \
    if (unlikely(Rc(ctx->opcode) != 0)) {                                     \
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);                           \
    }                                                                         \
}

GEN_DIVE(divweu, divweu, 0);
GEN_DIVE(divweuo, divweu, 1);
GEN_DIVE(divwe, divwe, 0);
GEN_DIVE(divweo, divwe, 1);

#if defined(TARGET_PPC64)
/* 64-bit analogue of gen_op_arith_divw; same undefined-case handling
 * (INT64_MIN / -1 and x / 0 are forced through a divisor of 1). */
static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign, int compute_ov)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv_i64 t2 = tcg_temp_new_i64();
    TCGv_i64 t3 = tcg_temp_new_i64();

    tcg_gen_mov_i64(t0, arg1);
    tcg_gen_mov_i64(t1, arg2);
    if (sign) {
        /* t2 = 1 iff result undefined: INT64_MIN / -1 or x / 0 */
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i64(t2, t2, t3);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i64(t2, t2, t3);
        tcg_gen_movi_i64(t3, 0);
        /* force divisor to 1 in the undefined cases (no host trap) */
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_div_i64(ret, t0, t1);
    } else {
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t1, 0);
        tcg_gen_movi_i64(t3, 0);
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_divu_i64(ret, t0, t1);
    }
    if (compute_ov) {
        /* t2 still holds the "result undefined" flag */
        tcg_gen_mov_tl(cpu_ov, t2);
        tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
    tcg_temp_free_i64(t3);

    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, ret);
}

#define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov)                      \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_divd(ctx,
cpu_gpr[rD(ctx->opcode)],               \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      sign, compute_ov);                                      \
}
/* divdu divdu. divduo divduo. */
GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
/* divd divd. divdo divdo. */
GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0);
GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1);

GEN_DIVE(divdeu, divdeu, 0);
GEN_DIVE(divdeuo, divdeu, 1);
GEN_DIVE(divde, divde, 0);
GEN_DIVE(divdeo, divde, 1);
#endif

/* Common 32-bit modulo used by moduw/modsw.  The architecturally
 * undefined cases (x % 0 and, for modsw, INT_MIN % -1) are routed
 * through a divisor of 1 so the host remainder cannot trap. */
static inline void gen_op_arith_modw(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg1);
    tcg_gen_trunc_tl_i32(t1, arg2);
    if (sign) {
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_i32 t3 = tcg_temp_new_i32();
        /* t2 = 1 iff result undefined: INT_MIN % -1 or x % 0 */
        tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i32(t2, t2, t3);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i32(t2, t2, t3);
        tcg_gen_movi_i32(t3, 0);
        /* force divisor to t2 (== 1) when the result is undefined */
        tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_rem_i32(t3, t0, t1);
        /* modsw sign-extends the 32-bit remainder into the target reg */
        tcg_gen_ext_i32_tl(ret, t3);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        TCGv_i32 t2 = tcg_const_i32(1);
        TCGv_i32 t3 = tcg_const_i32(0);
        /* divisor 0 -> substitute 1 to avoid a host trap */
        tcg_gen_movcond_i32(TCG_COND_EQ, t1, t1, t3, t2, t1);
        tcg_gen_remu_i32(t3, t0, t1);
        tcg_gen_extu_i32_tl(ret, t3);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    }
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
}

#define GEN_INT_ARITH_MODW(name, opc3, sign)                                  \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_modw(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      sign);                                                  \
}

GEN_INT_ARITH_MODW(moduw, 0x08, 0);
GEN_INT_ARITH_MODW(modsw, 0x18, 1);

#if defined(TARGET_PPC64)
/* 64-bit modulo used by modud/modsd; same undefined-case handling as
 * gen_op_arith_modw (divisor forced to 1 so the host op cannot trap). */
static inline void gen_op_arith_modd(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_mov_i64(t0, arg1);
    tcg_gen_mov_i64(t1, arg2);
    if (sign) {
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i64 t3 = tcg_temp_new_i64();
        /* t2 = 1 iff result undefined: INT64_MIN % -1 or x % 0 */
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i64(t2, t2, t3);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i64(t2, t2, t3);
        tcg_gen_movi_i64(t3, 0);
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_rem_i64(ret, t0, t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        TCGv_i64 t2 = tcg_const_i64(1);
        TCGv_i64 t3 = tcg_const_i64(0);
        /* divisor 0 -> substitute 1 to avoid a host trap */
        tcg_gen_movcond_i64(TCG_COND_EQ, t1, t1, t3, t2, t1);
        tcg_gen_remu_i64(ret, t0, t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

#define GEN_INT_ARITH_MODD(name, opc3, sign)                                  \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_modd(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      sign);                                                  \
}

GEN_INT_ARITH_MODD(modud, 0x08, 0);
GEN_INT_ARITH_MODD(modsd, 0x18, 1);
#endif

/* mulhw mulhw.
*/ 1334 static void gen_mulhw(DisasContext *ctx) 1335 { 1336 TCGv_i32 t0 = tcg_temp_new_i32(); 1337 TCGv_i32 t1 = tcg_temp_new_i32(); 1338 1339 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); 1340 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); 1341 tcg_gen_muls2_i32(t0, t1, t0, t1); 1342 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1); 1343 tcg_temp_free_i32(t0); 1344 tcg_temp_free_i32(t1); 1345 if (unlikely(Rc(ctx->opcode) != 0)) 1346 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1347 } 1348 1349 /* mulhwu mulhwu. */ 1350 static void gen_mulhwu(DisasContext *ctx) 1351 { 1352 TCGv_i32 t0 = tcg_temp_new_i32(); 1353 TCGv_i32 t1 = tcg_temp_new_i32(); 1354 1355 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); 1356 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); 1357 tcg_gen_mulu2_i32(t0, t1, t0, t1); 1358 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1); 1359 tcg_temp_free_i32(t0); 1360 tcg_temp_free_i32(t1); 1361 if (unlikely(Rc(ctx->opcode) != 0)) 1362 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1363 } 1364 1365 /* mullw mullw. */ 1366 static void gen_mullw(DisasContext *ctx) 1367 { 1368 #if defined(TARGET_PPC64) 1369 TCGv_i64 t0, t1; 1370 t0 = tcg_temp_new_i64(); 1371 t1 = tcg_temp_new_i64(); 1372 tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]); 1373 tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]); 1374 tcg_gen_mul_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); 1375 tcg_temp_free(t0); 1376 tcg_temp_free(t1); 1377 #else 1378 tcg_gen_mul_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1379 cpu_gpr[rB(ctx->opcode)]); 1380 #endif 1381 if (unlikely(Rc(ctx->opcode) != 0)) 1382 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1383 } 1384 1385 /* mullwo mullwo. 
*/
/* mullwo: mullw that also sets XER[OV] on signed 32-bit overflow. */
static void gen_mullwo(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_muls2_i32(t0, t1, t0, t1);
#if defined(TARGET_PPC64)
    /* keep the full 64-bit product in rD on a 64-bit target */
    tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
#else
    tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], t0);
#endif

    /* OV iff the high half differs from the sign-extension of the low half */
    tcg_gen_sari_i32(t0, t0, 31);
    tcg_gen_setcond_i32(TCG_COND_NE, t0, t0, t1);
    tcg_gen_extu_i32_tl(cpu_ov, t0);
    tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);

    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* mulli: rD = rA * SIMM */
static void gen_mulli(DisasContext *ctx)
{
    tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                    SIMM(ctx->opcode));
}

#if defined(TARGET_PPC64)
/* mulhd mulhd.: rD = high 64 bits of the signed 64x64 product. */
static void gen_mulhd(DisasContext *ctx)
{
    TCGv lo = tcg_temp_new();
    tcg_gen_muls2_tl(lo, cpu_gpr[rD(ctx->opcode)],
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    tcg_temp_free(lo);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}

/* mulhdu mulhdu.: unsigned variant of mulhd. */
static void gen_mulhdu(DisasContext *ctx)
{
    TCGv lo = tcg_temp_new();
    tcg_gen_mulu2_tl(lo, cpu_gpr[rD(ctx->opcode)],
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    tcg_temp_free(lo);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}

/* mulld mulld.
*/
static void gen_mulld(DisasContext *ctx)
{
    tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                   cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* mulldo mulldo.: mulld that also sets XER[OV] on signed overflow. */
static void gen_mulldo(DisasContext *ctx)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_muls2_i64(t0, t1, cpu_gpr[rA(ctx->opcode)],
                      cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], t0);

    /* OV iff the high half differs from the sign-extension of the low half */
    tcg_gen_sari_i64(t0, t0, 63);
    tcg_gen_setcond_i64(TCG_COND_NE, cpu_ov, t0, t1);
    tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}
#endif

/* Common subf function: ret = ~arg1 + arg2 (+ CA), optionally computing
 * XER[CA], XER[OV] and CR0.  Covers subf/subfc/subfe and the immediate
 * forms; neg and subfic also funnel through here. */
static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, bool add_ca, bool compute_ca,
                                     bool compute_ov, bool compute_rc0)
{
    TCGv t0 = ret;

    if (compute_ca || compute_ov) {
        /* work in a temp: ret may alias arg1/arg2 which are still needed */
        t0 = tcg_temp_new();
    }

    if (compute_ca) {
        /* dest = ~arg1 + arg2 [+ ca].  */
        if (NARROW_MODE(ctx)) {
            /* Caution: a non-obvious corner case of the spec is that we
               must produce the *entire* 64-bit addition, but produce the
               carry into bit 32.
             */
            TCGv inv1 = tcg_temp_new();
            TCGv t1 = tcg_temp_new();
            tcg_gen_not_tl(inv1, arg1);
            if (add_ca) {
                tcg_gen_add_tl(t0, arg2, cpu_ca);
            } else {
                /* without CA-in, subf's ~arg1 + arg2 + 1 */
                tcg_gen_addi_tl(t0, arg2, 1);
            }
            tcg_gen_xor_tl(t1, arg2, inv1);        /* add without carry */
            tcg_gen_add_tl(t0, t0, inv1);
            tcg_temp_free(inv1);
            tcg_gen_xor_tl(cpu_ca, t0, t1);        /* bits changed w/ carry */
            tcg_temp_free(t1);
            tcg_gen_shri_tl(cpu_ca, cpu_ca, 32);   /* extract bit 32 */
            tcg_gen_andi_tl(cpu_ca, cpu_ca, 1);
        } else if (add_ca) {
            TCGv zero, inv1 = tcg_temp_new();
            tcg_gen_not_tl(inv1, arg1);
            zero = tcg_const_tl(0);
            /* two carrying adds; carry-out accumulates into cpu_ca */
            tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero);
            tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero);
            tcg_temp_free(zero);
            tcg_temp_free(inv1);
        } else {
            /* plain subtract: CA set iff no borrow, i.e. arg2 >= arg1 */
            tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1);
            tcg_gen_sub_tl(t0, arg2, arg1);
        }
    } else if (add_ca) {
        /* Since we're ignoring carry-out, we can simplify the
           standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1.
         */
        tcg_gen_sub_tl(t0, arg2, arg1);
        tcg_gen_add_tl(t0, t0, cpu_ca);
        tcg_gen_subi_tl(t0, t0, 1);
    } else {
        tcg_gen_sub_tl(t0, arg2, arg1);
    }

    if (compute_ov) {
        gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
    }
    if (unlikely(compute_rc0)) {
        gen_set_Rc0(ctx, t0);
    }

    if (!TCGV_EQUAL(t0, ret)) {
        tcg_gen_mov_tl(ret, t0);
        tcg_temp_free(t0);
    }
}
/* Sub functions with Two operands functions */
#define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov)        \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      add_ca, compute_ca, compute_ov, Rc(ctx->opcode));       \
}
/* Sub functions with one operand and one immediate */
#define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val,                       \
                                 add_ca, compute_ca, compute_ov)              \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    TCGv t0 = tcg_const_tl(const_val);                                        \
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], t0,                           \
                      add_ca, compute_ca, compute_ov, Rc(ctx->opcode));       \
    tcg_temp_free(t0);                                                        \
}
/* subf  subf.  subfo  subfo. */
GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
/* subfc  subfc.  subfco  subfco. */
GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
/* subfe  subfe.  subfeo  subfeo. */
GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
/* subfme  subfme.  subfmeo  subfmeo. */
GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
/* subfze subfze.
subfzeo subfzeo.*/
GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)

/* subfic: rD = SIMM - rA, always computing CA (no record form). */
static void gen_subfic(DisasContext *ctx)
{
    TCGv c = tcg_const_tl(SIMM(ctx->opcode));
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                      c, 0, 1, 0, 0);
    tcg_temp_free(c);
}

/* neg neg. nego nego.: rD = 0 - rA via the common subf path. */
static inline void gen_op_arith_neg(DisasContext *ctx, bool compute_ov)
{
    TCGv zero = tcg_const_tl(0);
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                      zero, 0, 0, compute_ov, Rc(ctx->opcode));
    tcg_temp_free(zero);
}

static void gen_neg(DisasContext *ctx)
{
    gen_op_arith_neg(ctx, 0);
}

static void gen_nego(DisasContext *ctx)
{
    gen_op_arith_neg(ctx, 1);
}

/*** Integer logical ***/
/* Two-source logical op: rA = rS <op> rB, optional CR0 update. */
#define GEN_LOGICAL2(name, tcg_op, opc, type)                                 \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],                \
           cpu_gpr[rB(ctx->opcode)]);                                         \
    if (unlikely(Rc(ctx->opcode) != 0))                                       \
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);                           \
}

/* One-source logical op: rA = <op>(rS), optional CR0 update. */
#define GEN_LOGICAL1(name, tcg_op, opc, type)                                 \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);               \
    if (unlikely(Rc(ctx->opcode) != 0))                                       \
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);                           \
}

/* and & and. */
GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER);
/* andc & andc. */
GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER);

/* andi.: always updates CR0 (there is no non-record form). */
static void gen_andi_(DisasContext *ctx)
{
    tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                    UIMM(ctx->opcode));
    gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* andis.
*/
static void gen_andis_(DisasContext *ctx)
{
    tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                    UIMM(ctx->opcode) << 16);
    gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* cntlzw: rA = count of leading zeros in the low 32 bits of rS. */
static void gen_cntlzw(DisasContext *ctx)
{
    TCGv_i32 t = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_clzi_i32(t, t, 32);      /* zero input yields 32 */
    tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t);
    tcg_temp_free_i32(t);

    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* cnttzw: rA = count of trailing zeros in the low 32 bits of rS. */
static void gen_cnttzw(DisasContext *ctx)
{
    TCGv_i32 t = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_ctzi_i32(t, t, 32);      /* zero input yields 32 */
    tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t);
    tcg_temp_free_i32(t);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* eqv & eqv. */
GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER);
/* extsb & extsb. */
GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER);
/* extsh & extsh. */
GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER);
/* nand & nand. */
GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER);
/* nor & nor. */
GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER);

#if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
/* Halt this vCPU and end the TB, used for the "or rx,rx,rx" hints. */
static void gen_pause(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_const_i32(0);
    /* cpu_env points at env inside PowerPCCPU; step back to CPUState.halted */
    tcg_gen_st_i32(t0, cpu_env,
                   -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
    tcg_temp_free_i32(t0);

    /* Stop translation, this gives other CPUs a chance to run */
    gen_exception_nip(ctx, EXCP_HLT, ctx->nip);
}
#endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */

/* or & or.
*/
/* or & or.: also decodes the "or rx,rx,rx" thread-priority hint forms. */
static void gen_or(DisasContext *ctx)
{
    int rs, ra, rb;

    rs = rS(ctx->opcode);
    ra = rA(ctx->opcode);
    rb = rB(ctx->opcode);
    /* Optimisation for mr. ri case */
    if (rs != ra || rs != rb) {
        if (rs != rb)
            tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
        else
            tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);   /* mr */
        if (unlikely(Rc(ctx->opcode) != 0))
            gen_set_Rc0(ctx, cpu_gpr[ra]);
    } else if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rs]);
#if defined(TARGET_PPC64)
    } else if (rs != 0) { /* 0 is nop */
        /* "or rx,rx,rx": the register number selects a priority hint;
         * map it to the PPR[11:13] priority field value. */
        int prio = 0;

        switch (rs) {
        case 1:
            /* Set process priority to low */
            prio = 2;
            break;
        case 6:
            /* Set process priority to medium-low */
            prio = 3;
            break;
        case 2:
            /* Set process priority to normal */
            prio = 4;
            break;
#if !defined(CONFIG_USER_ONLY)
        case 31:
            if (!ctx->pr) {
                /* Set process priority to very low (supervisor only) */
                prio = 1;
            }
            break;
        case 5:
            if (!ctx->pr) {
                /* Set process priority to medium-high (supervisor only) */
                prio = 5;
            }
            break;
        case 3:
            if (!ctx->pr) {
                /* Set process priority to high (supervisor only) */
                prio = 6;
            }
            break;
        case 7:
            if (ctx->hv && !ctx->pr) {
                /* Set process priority to very high (hypervisor only) */
                prio = 7;
            }
            break;
#endif
        default:
            break;
        }
        if (prio) {
            TCGv t0 = tcg_temp_new();
            gen_load_spr(t0, SPR_PPR);
            /* replace the 3-bit priority field of PPR */
            tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
            tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
            gen_store_spr(SPR_PPR, t0);
            tcg_temp_free(t0);
        }
#if !defined(CONFIG_USER_ONLY)
        /* Pause out of TCG otherwise spin loops with smt_low eat too much
         * CPU and the kernel hangs.  This applies to all encodings other
         * than no-op, e.g., miso(rs=26), yield(27), mdoio(29), mdoom(30),
         * and all currently undefined.
         */
        gen_pause(ctx);
#endif
#endif
    }
}
/* orc & orc. */
GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);

/* xor & xor. */
static void gen_xor(DisasContext *ctx)
{
    /* Optimisation for "set to zero" case */
    if (rS(ctx->opcode) != rB(ctx->opcode))
        tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                       cpu_gpr[rB(ctx->opcode)]);
    else
        tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* ori: rA = rS | UIMM; "ori 0,0,0" is the canonical no-op. */
static void gen_ori(DisasContext *ctx)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        return;
    }
    tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
}

/* oris: rA = rS | (UIMM << 16) */
static void gen_oris(DisasContext *ctx)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        /* NOP */
        return;
    }
    tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                   uimm << 16);
}

/* xori: rA = rS ^ UIMM */
static void gen_xori(DisasContext *ctx)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        /* NOP */
        return;
    }
    tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
}

/* xoris: rA = rS ^ (UIMM << 16) */
static void gen_xoris(DisasContext *ctx)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        /* NOP */
        return;
    }
    tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                    uimm << 16);
}

/* popcntb : PowerPC 2.03 specification */
static void gen_popcntb(DisasContext *ctx)
{
    gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
}

/* popcntw: per-word population count (helper) */
static void
gen_popcntw(DisasContext *ctx)
{
    gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
}

#if defined(TARGET_PPC64)
/* popcntd: PowerPC 2.06 specification */
static void gen_popcntd(DisasContext *ctx)
{
    gen_helper_popcntd(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
}
#endif

/* prtyw: PowerPC 2.05 specification */
static void gen_prtyw(DisasContext *ctx)
{
    TCGv ra = cpu_gpr[rA(ctx->opcode)];
    TCGv rs = cpu_gpr[rS(ctx->opcode)];
    TCGv t0 = tcg_temp_new();
    /* xor-fold the four bytes of each word so the parity of the bytes'
     * low-order bits lands in bit 0 of the word */
    tcg_gen_shri_tl(t0, rs, 16);
    tcg_gen_xor_tl(ra, rs, t0);
    tcg_gen_shri_tl(t0, ra, 8);
    tcg_gen_xor_tl(ra, ra, t0);
    /* keep bit 0 of each word; the constant truncates to 1 on 32-bit */
    tcg_gen_andi_tl(ra, ra, (target_ulong)0x100000001ULL);
    tcg_temp_free(t0);
}

#if defined(TARGET_PPC64)
/* prtyd: PowerPC 2.05 specification */
static void gen_prtyd(DisasContext *ctx)
{
    TCGv ra = cpu_gpr[rA(ctx->opcode)];
    TCGv rs = cpu_gpr[rS(ctx->opcode)];
    TCGv t0 = tcg_temp_new();
    /* xor-fold all eight bytes of the doubleword down to bit 0 */
    tcg_gen_shri_tl(t0, rs, 32);
    tcg_gen_xor_tl(ra, rs, t0);
    tcg_gen_shri_tl(t0, ra, 16);
    tcg_gen_xor_tl(ra, ra, t0);
    tcg_gen_shri_tl(t0, ra, 8);
    tcg_gen_xor_tl(ra, ra, t0);
    tcg_gen_andi_tl(ra, ra, 1);
    tcg_temp_free(t0);
}
#endif

#if defined(TARGET_PPC64)
/* bpermd: bit permute doubleword (gather done in the helper). */
static void gen_bpermd(DisasContext *ctx)
{
    gen_helper_bpermd(cpu_gpr[rA(ctx->opcode)],
                      cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
}
#endif

#if defined(TARGET_PPC64)
/* extsw & extsw.
*/
GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B);

/* cntlzd: rA = count of leading zeros of the 64-bit rS. */
static void gen_cntlzd(DisasContext *ctx)
{
    tcg_gen_clzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* cnttzd: rA = count of trailing zeros of the 64-bit rS. */
static void gen_cnttzd(DisasContext *ctx)
{
    tcg_gen_ctzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* darn: deliver a random number; the L field selects the form. */
static void gen_darn(DisasContext *ctx)
{
    int l = L(ctx->opcode);

    if (l == 0) {
        gen_helper_darn32(cpu_gpr[rD(ctx->opcode)]);
    } else if (l <= 2) {
        /* Return 64-bit random for both CRN and RRN */
        gen_helper_darn64(cpu_gpr[rD(ctx->opcode)]);
    } else {
        /* reserved L value: all-ones is the architected failure value */
        tcg_gen_movi_i64(cpu_gpr[rD(ctx->opcode)], -1);
    }
}
#endif

/*** Integer rotate ***/

/* rlwimi & rlwimi.
*/
/* Rotate-left-word-immediate then mask-insert into rA. */
static void gen_rlwimi(DisasContext *ctx)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    uint32_t sh = SH(ctx->opcode);
    uint32_t mb = MB(ctx->opcode);
    uint32_t me = ME(ctx->opcode);

    if (sh == (31-me) && mb <= me) {
        /* contiguous mask aligned with the rotate amount: the whole
         * insn reduces to a plain bit-field deposit */
        tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
    } else {
        target_ulong mask;
        TCGv t1;

#if defined(TARGET_PPC64)
        /* MASK() counts IBM-style from the MSB of the 64-bit register */
        mb += 32;
        me += 32;
#endif
        mask = MASK(mb, me);

        t1 = tcg_temp_new();
        if (mask <= 0xffffffffu) {
            TCGv_i32 t0 = tcg_temp_new_i32();
            tcg_gen_trunc_tl_i32(t0, t_rs);
            tcg_gen_rotli_i32(t0, t0, sh);
            tcg_gen_extu_i32_tl(t1, t0);
            tcg_temp_free_i32(t0);
        } else {
#if defined(TARGET_PPC64)
            /* duplicate the word so a 64-bit rotate matches the
             * "rotate the 32-bit value" semantics of rlwimi */
            tcg_gen_deposit_i64(t1, t_rs, t_rs, 32, 32);
            tcg_gen_rotli_i64(t1, t1, sh);
#else
            g_assert_not_reached();
#endif
        }

        tcg_gen_andi_tl(t1, t1, mask);
        tcg_gen_andi_tl(t_ra, t_ra, ~mask);
        tcg_gen_or_tl(t_ra, t_ra, t1);
        tcg_temp_free(t1);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}

/* rlwinm & rlwinm.
*/
/* Rotate-left-word-immediate then AND with MASK(mb, me). */
static void gen_rlwinm(DisasContext *ctx)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    int sh = SH(ctx->opcode);
    int mb = MB(ctx->opcode);
    int me = ME(ctx->opcode);
    int len = me - mb + 1;
    int rsh = (32 - sh) & 31;

    if (sh != 0 && len > 0 && me == (31 - sh)) {
        /* shift-left-and-clear: a zero-extending deposit */
        tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
    } else if (me == 31 && rsh + len <= 32) {
        /* shift-right-and-mask: a bit-field extract */
        tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
    } else {
        target_ulong mask;
#if defined(TARGET_PPC64)
        /* MASK() counts IBM-style from the MSB of the 64-bit register */
        mb += 32;
        me += 32;
#endif
        mask = MASK(mb, me);
        if (sh == 0) {
            tcg_gen_andi_tl(t_ra, t_rs, mask);
        } else if (mask <= 0xffffffffu) {
            TCGv_i32 t0 = tcg_temp_new_i32();
            tcg_gen_trunc_tl_i32(t0, t_rs);
            tcg_gen_rotli_i32(t0, t0, sh);
            tcg_gen_andi_i32(t0, t0, mask);
            tcg_gen_extu_i32_tl(t_ra, t0);
            tcg_temp_free_i32(t0);
        } else {
#if defined(TARGET_PPC64)
            /* duplicate the word so the 64-bit rotate matches the
             * 32-bit rotate semantics */
            tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
            tcg_gen_rotli_i64(t_ra, t_ra, sh);
            tcg_gen_andi_i64(t_ra, t_ra, mask);
#else
            g_assert_not_reached();
#endif
        }
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}

/* rlwnm & rlwnm.
*/
/* Rotate-left-word by the low 5 bits of rB, then AND with MASK(mb, me). */
static void gen_rlwnm(DisasContext *ctx)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
    uint32_t mb = MB(ctx->opcode);
    uint32_t me = ME(ctx->opcode);
    target_ulong mask;

#if defined(TARGET_PPC64)
    /* MASK() counts IBM-style from the MSB of the 64-bit register */
    mb += 32;
    me += 32;
#endif
    mask = MASK(mb, me);

    if (mask <= 0xffffffffu) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(t0, t_rb);
        tcg_gen_trunc_tl_i32(t1, t_rs);
        tcg_gen_andi_i32(t0, t0, 0x1f);
        tcg_gen_rotl_i32(t1, t1, t0);
        tcg_gen_extu_i32_tl(t_ra, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else {
#if defined(TARGET_PPC64)
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_andi_i64(t0, t_rb, 0x1f);
        /* duplicate the word so the 64-bit rotate matches the 32-bit one */
        tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
        tcg_gen_rotl_i64(t_ra, t_ra, t0);
        tcg_temp_free_i64(t0);
#else
        g_assert_not_reached();
#endif
    }

    tcg_gen_andi_tl(t_ra, t_ra, mask);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}

#if defined(TARGET_PPC64)
/* Emit the gen_<name>{0,1} wrappers for 64-bit rotates whose mask
 * field carries one extra high bit in the opcode. */
#define GEN_PPC64_R2(name, opc1, opc2)                                        \
static void glue(gen_, name##0)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 0);                                                       \
}                                                                             \
                                                                              \
static void glue(gen_, name##1)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 1);                                                       \
}
/* Same, for insns with two extra opcode bits (mask high bit + sh high bit). */
#define GEN_PPC64_R4(name, opc1, opc2)                                        \
static void glue(gen_, name##0)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 0, 0);                                                    \
}                                                                             \
                                                                              \
static void glue(gen_, name##1)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 0, 1);                                                    \
}                                                                             \
                                                                              \
static void glue(gen_, name##2)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 1, 0);                                                    \
}                                                                             \
                                                                              \
static void glue(gen_, name##3)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 1, 1);                                                    \
}

/* Common 64-bit rotate-immediate-and-mask body for rldicl/rldicr/rldic. */
static void
gen_rldinm(DisasContext *ctx, int mb, int me, int sh)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    int len = me - mb + 1;
    int rsh = (64 - sh) & 63;

    if (sh != 0 && len > 0 && me == (63 - sh)) {
        /* shift-left-and-clear: a zero-extending deposit */
        tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
    } else if (me == 63 && rsh + len <= 64) {
        /* shift-right-and-mask: a bit-field extract */
        tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
    } else {
        tcg_gen_rotli_tl(t_ra, t_rs, sh);
        tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}

/* rldicl - rldicl. */
static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn)
{
    uint32_t sh, mb;

    /* mbn/shn are the extra high bits split out of the opcode */
    sh = SH(ctx->opcode) | (shn << 5);
    mb = MB(ctx->opcode) | (mbn << 5);
    gen_rldinm(ctx, mb, 63, sh);
}
GEN_PPC64_R4(rldicl, 0x1E, 0x00);

/* rldicr - rldicr. */
static inline void gen_rldicr(DisasContext *ctx, int men, int shn)
{
    uint32_t sh, me;

    sh = SH(ctx->opcode) | (shn << 5);
    /* for rldicr the "me" value is encoded in the MB field position */
    me = MB(ctx->opcode) | (men << 5);
    gen_rldinm(ctx, 0, me, sh);
}
GEN_PPC64_R4(rldicr, 0x1E, 0x02);

/* rldic - rldic.
*/
static inline void gen_rldic(DisasContext *ctx, int mbn, int shn)
{
    uint32_t sh, mb;

    sh = SH(ctx->opcode) | (shn << 5);
    mb = MB(ctx->opcode) | (mbn << 5);
    gen_rldinm(ctx, mb, 63 - sh, sh);
}
GEN_PPC64_R4(rldic, 0x1E, 0x04);

/* Rotate left by the low 6 bits of rB, then AND with MASK(mb, me);
 * common body for rldcl/rldcr. */
static void gen_rldnm(DisasContext *ctx, int mb, int me)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
    TCGv t0;

    t0 = tcg_temp_new();
    tcg_gen_andi_tl(t0, t_rb, 0x3f);
    tcg_gen_rotl_tl(t_ra, t_rs, t0);
    tcg_temp_free(t0);

    tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}

/* rldcl - rldcl. */
static inline void gen_rldcl(DisasContext *ctx, int mbn)
{
    uint32_t mb;

    mb = MB(ctx->opcode) | (mbn << 5);
    gen_rldnm(ctx, mb, 63);
}
GEN_PPC64_R2(rldcl, 0x1E, 0x08);

/* rldcr - rldcr. */
static inline void gen_rldcr(DisasContext *ctx, int men)
{
    uint32_t me;

    /* for rldcr the "me" value is encoded in the MB field position */
    me = MB(ctx->opcode) | (men << 5);
    gen_rldnm(ctx, 0, me);
}
GEN_PPC64_R2(rldcr, 0x1E, 0x09);

/* rldimi - rldimi.
*/
/* Rotate-left-doubleword-immediate then mask-insert into rA. */
static void gen_rldimi(DisasContext *ctx, int mbn, int shn)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    uint32_t sh = SH(ctx->opcode) | (shn << 5);
    uint32_t mb = MB(ctx->opcode) | (mbn << 5);
    uint32_t me = 63 - sh;

    if (mb <= me) {
        /* contiguous mask: plain bit-field deposit */
        tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
    } else {
        /* wrap-around mask: rotate, mask, merge by hand */
        target_ulong mask = MASK(mb, me);
        TCGv t1 = tcg_temp_new();

        tcg_gen_rotli_tl(t1, t_rs, sh);
        tcg_gen_andi_tl(t1, t1, mask);
        tcg_gen_andi_tl(t_ra, t_ra, ~mask);
        tcg_gen_or_tl(t_ra, t_ra, t1);
        tcg_temp_free(t1);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}
GEN_PPC64_R4(rldimi, 0x1E, 0x06);
#endif

/*** Integer shift ***/

/* slw & slw. */
static void gen_slw(DisasContext *ctx)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    /* AND rS with a mask that is 0 when rB >= 0x20 */
#if defined(TARGET_PPC64)
    /* shift rB's bit 5 into the sign position, then smear it */
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
    tcg_gen_sari_tl(t0, t0, 0x3f);
#else
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
    tcg_gen_sari_tl(t0, t0, 0x1f);
#endif
    tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
    tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    /* result is a 32-bit quantity: clear the upper half */
    tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* sraw & sraw. */
static void gen_sraw(DisasContext *ctx)
{
    /* helper also computes XER[CA] */
    gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], cpu_env,
                    cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* srawi & srawi.
*/ 2264 static void gen_srawi(DisasContext *ctx) 2265 { 2266 int sh = SH(ctx->opcode); 2267 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 2268 TCGv src = cpu_gpr[rS(ctx->opcode)]; 2269 if (sh == 0) { 2270 tcg_gen_ext32s_tl(dst, src); 2271 tcg_gen_movi_tl(cpu_ca, 0); 2272 } else { 2273 TCGv t0; 2274 tcg_gen_ext32s_tl(dst, src); 2275 tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1); 2276 t0 = tcg_temp_new(); 2277 tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1); 2278 tcg_gen_and_tl(cpu_ca, cpu_ca, t0); 2279 tcg_temp_free(t0); 2280 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0); 2281 tcg_gen_sari_tl(dst, dst, sh); 2282 } 2283 if (unlikely(Rc(ctx->opcode) != 0)) { 2284 gen_set_Rc0(ctx, dst); 2285 } 2286 } 2287 2288 /* srw & srw. */ 2289 static void gen_srw(DisasContext *ctx) 2290 { 2291 TCGv t0, t1; 2292 2293 t0 = tcg_temp_new(); 2294 /* AND rS with a mask that is 0 when rB >= 0x20 */ 2295 #if defined(TARGET_PPC64) 2296 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a); 2297 tcg_gen_sari_tl(t0, t0, 0x3f); 2298 #else 2299 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a); 2300 tcg_gen_sari_tl(t0, t0, 0x1f); 2301 #endif 2302 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 2303 tcg_gen_ext32u_tl(t0, t0); 2304 t1 = tcg_temp_new(); 2305 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f); 2306 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 2307 tcg_temp_free(t1); 2308 tcg_temp_free(t0); 2309 if (unlikely(Rc(ctx->opcode) != 0)) 2310 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2311 } 2312 2313 #if defined(TARGET_PPC64) 2314 /* sld & sld. 
*/ 2315 static void gen_sld(DisasContext *ctx) 2316 { 2317 TCGv t0, t1; 2318 2319 t0 = tcg_temp_new(); 2320 /* AND rS with a mask that is 0 when rB >= 0x40 */ 2321 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39); 2322 tcg_gen_sari_tl(t0, t0, 0x3f); 2323 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 2324 t1 = tcg_temp_new(); 2325 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f); 2326 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 2327 tcg_temp_free(t1); 2328 tcg_temp_free(t0); 2329 if (unlikely(Rc(ctx->opcode) != 0)) 2330 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2331 } 2332 2333 /* srad & srad. */ 2334 static void gen_srad(DisasContext *ctx) 2335 { 2336 gen_helper_srad(cpu_gpr[rA(ctx->opcode)], cpu_env, 2337 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2338 if (unlikely(Rc(ctx->opcode) != 0)) 2339 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2340 } 2341 /* sradi & sradi. */ 2342 static inline void gen_sradi(DisasContext *ctx, int n) 2343 { 2344 int sh = SH(ctx->opcode) + (n << 5); 2345 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 2346 TCGv src = cpu_gpr[rS(ctx->opcode)]; 2347 if (sh == 0) { 2348 tcg_gen_mov_tl(dst, src); 2349 tcg_gen_movi_tl(cpu_ca, 0); 2350 } else { 2351 TCGv t0; 2352 tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1); 2353 t0 = tcg_temp_new(); 2354 tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1); 2355 tcg_gen_and_tl(cpu_ca, cpu_ca, t0); 2356 tcg_temp_free(t0); 2357 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0); 2358 tcg_gen_sari_tl(dst, src, sh); 2359 } 2360 if (unlikely(Rc(ctx->opcode) != 0)) { 2361 gen_set_Rc0(ctx, dst); 2362 } 2363 } 2364 2365 static void gen_sradi0(DisasContext *ctx) 2366 { 2367 gen_sradi(ctx, 0); 2368 } 2369 2370 static void gen_sradi1(DisasContext *ctx) 2371 { 2372 gen_sradi(ctx, 1); 2373 } 2374 2375 /* extswsli & extswsli. 
*/ 2376 static inline void gen_extswsli(DisasContext *ctx, int n) 2377 { 2378 int sh = SH(ctx->opcode) + (n << 5); 2379 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 2380 TCGv src = cpu_gpr[rS(ctx->opcode)]; 2381 2382 tcg_gen_ext32s_tl(dst, src); 2383 tcg_gen_shli_tl(dst, dst, sh); 2384 if (unlikely(Rc(ctx->opcode) != 0)) { 2385 gen_set_Rc0(ctx, dst); 2386 } 2387 } 2388 2389 static void gen_extswsli0(DisasContext *ctx) 2390 { 2391 gen_extswsli(ctx, 0); 2392 } 2393 2394 static void gen_extswsli1(DisasContext *ctx) 2395 { 2396 gen_extswsli(ctx, 1); 2397 } 2398 2399 /* srd & srd. */ 2400 static void gen_srd(DisasContext *ctx) 2401 { 2402 TCGv t0, t1; 2403 2404 t0 = tcg_temp_new(); 2405 /* AND rS with a mask that is 0 when rB >= 0x40 */ 2406 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39); 2407 tcg_gen_sari_tl(t0, t0, 0x3f); 2408 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 2409 t1 = tcg_temp_new(); 2410 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f); 2411 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 2412 tcg_temp_free(t1); 2413 tcg_temp_free(t0); 2414 if (unlikely(Rc(ctx->opcode) != 0)) 2415 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2416 } 2417 #endif 2418 2419 /*** Addressing modes ***/ 2420 /* Register indirect with immediate index : EA = (rA|0) + SIMM */ 2421 static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA, 2422 target_long maskl) 2423 { 2424 target_long simm = SIMM(ctx->opcode); 2425 2426 simm &= ~maskl; 2427 if (rA(ctx->opcode) == 0) { 2428 if (NARROW_MODE(ctx)) { 2429 simm = (uint32_t)simm; 2430 } 2431 tcg_gen_movi_tl(EA, simm); 2432 } else if (likely(simm != 0)) { 2433 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm); 2434 if (NARROW_MODE(ctx)) { 2435 tcg_gen_ext32u_tl(EA, EA); 2436 } 2437 } else { 2438 if (NARROW_MODE(ctx)) { 2439 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]); 2440 } else { 2441 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]); 2442 } 2443 } 2444 } 2445 2446 static inline void gen_addr_reg_index(DisasContext 
*ctx, TCGv EA) 2447 { 2448 if (rA(ctx->opcode) == 0) { 2449 if (NARROW_MODE(ctx)) { 2450 tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]); 2451 } else { 2452 tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]); 2453 } 2454 } else { 2455 tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2456 if (NARROW_MODE(ctx)) { 2457 tcg_gen_ext32u_tl(EA, EA); 2458 } 2459 } 2460 } 2461 2462 static inline void gen_addr_register(DisasContext *ctx, TCGv EA) 2463 { 2464 if (rA(ctx->opcode) == 0) { 2465 tcg_gen_movi_tl(EA, 0); 2466 } else if (NARROW_MODE(ctx)) { 2467 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]); 2468 } else { 2469 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]); 2470 } 2471 } 2472 2473 static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1, 2474 target_long val) 2475 { 2476 tcg_gen_addi_tl(ret, arg1, val); 2477 if (NARROW_MODE(ctx)) { 2478 tcg_gen_ext32u_tl(ret, ret); 2479 } 2480 } 2481 2482 static inline void gen_check_align(DisasContext *ctx, TCGv EA, int mask) 2483 { 2484 TCGLabel *l1 = gen_new_label(); 2485 TCGv t0 = tcg_temp_new(); 2486 TCGv_i32 t1, t2; 2487 tcg_gen_andi_tl(t0, EA, mask); 2488 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 2489 t1 = tcg_const_i32(POWERPC_EXCP_ALIGN); 2490 t2 = tcg_const_i32(ctx->opcode & 0x03FF0000); 2491 gen_update_nip(ctx, ctx->nip - 4); 2492 gen_helper_raise_exception_err(cpu_env, t1, t2); 2493 tcg_temp_free_i32(t1); 2494 tcg_temp_free_i32(t2); 2495 gen_set_label(l1); 2496 tcg_temp_free(t0); 2497 } 2498 2499 static inline void gen_align_no_le(DisasContext *ctx) 2500 { 2501 gen_exception_err(ctx, POWERPC_EXCP_ALIGN, 2502 (ctx->opcode & 0x03FF0000) | POWERPC_EXCP_ALIGN_LE); 2503 } 2504 2505 /*** Integer load ***/ 2506 #define DEF_MEMOP(op) ((op) | ctx->default_tcg_memop_mask) 2507 #define BSWAP_MEMOP(op) ((op) | (ctx->default_tcg_memop_mask ^ MO_BSWAP)) 2508 2509 #define GEN_QEMU_LOAD_TL(ldop, op) \ 2510 static void glue(gen_qemu_, ldop)(DisasContext *ctx, \ 2511 TCGv val, \ 2512 TCGv addr) \ 2513 { \ 
2514 tcg_gen_qemu_ld_tl(val, addr, ctx->mem_idx, op); \ 2515 } 2516 2517 GEN_QEMU_LOAD_TL(ld8u, DEF_MEMOP(MO_UB)) 2518 GEN_QEMU_LOAD_TL(ld16u, DEF_MEMOP(MO_UW)) 2519 GEN_QEMU_LOAD_TL(ld16s, DEF_MEMOP(MO_SW)) 2520 GEN_QEMU_LOAD_TL(ld32u, DEF_MEMOP(MO_UL)) 2521 GEN_QEMU_LOAD_TL(ld32s, DEF_MEMOP(MO_SL)) 2522 2523 GEN_QEMU_LOAD_TL(ld16ur, BSWAP_MEMOP(MO_UW)) 2524 GEN_QEMU_LOAD_TL(ld32ur, BSWAP_MEMOP(MO_UL)) 2525 2526 #define GEN_QEMU_LOAD_64(ldop, op) \ 2527 static void glue(gen_qemu_, glue(ldop, _i64))(DisasContext *ctx, \ 2528 TCGv_i64 val, \ 2529 TCGv addr) \ 2530 { \ 2531 tcg_gen_qemu_ld_i64(val, addr, ctx->mem_idx, op); \ 2532 } 2533 2534 GEN_QEMU_LOAD_64(ld8u, DEF_MEMOP(MO_UB)) 2535 GEN_QEMU_LOAD_64(ld16u, DEF_MEMOP(MO_UW)) 2536 GEN_QEMU_LOAD_64(ld32u, DEF_MEMOP(MO_UL)) 2537 GEN_QEMU_LOAD_64(ld32s, DEF_MEMOP(MO_SL)) 2538 GEN_QEMU_LOAD_64(ld64, DEF_MEMOP(MO_Q)) 2539 2540 #if defined(TARGET_PPC64) 2541 GEN_QEMU_LOAD_64(ld64ur, BSWAP_MEMOP(MO_Q)) 2542 #endif 2543 2544 #define GEN_QEMU_STORE_TL(stop, op) \ 2545 static void glue(gen_qemu_, stop)(DisasContext *ctx, \ 2546 TCGv val, \ 2547 TCGv addr) \ 2548 { \ 2549 tcg_gen_qemu_st_tl(val, addr, ctx->mem_idx, op); \ 2550 } 2551 2552 GEN_QEMU_STORE_TL(st8, DEF_MEMOP(MO_UB)) 2553 GEN_QEMU_STORE_TL(st16, DEF_MEMOP(MO_UW)) 2554 GEN_QEMU_STORE_TL(st32, DEF_MEMOP(MO_UL)) 2555 2556 GEN_QEMU_STORE_TL(st16r, BSWAP_MEMOP(MO_UW)) 2557 GEN_QEMU_STORE_TL(st32r, BSWAP_MEMOP(MO_UL)) 2558 2559 #define GEN_QEMU_STORE_64(stop, op) \ 2560 static void glue(gen_qemu_, glue(stop, _i64))(DisasContext *ctx, \ 2561 TCGv_i64 val, \ 2562 TCGv addr) \ 2563 { \ 2564 tcg_gen_qemu_st_i64(val, addr, ctx->mem_idx, op); \ 2565 } 2566 2567 GEN_QEMU_STORE_64(st8, DEF_MEMOP(MO_UB)) 2568 GEN_QEMU_STORE_64(st16, DEF_MEMOP(MO_UW)) 2569 GEN_QEMU_STORE_64(st32, DEF_MEMOP(MO_UL)) 2570 GEN_QEMU_STORE_64(st64, DEF_MEMOP(MO_Q)) 2571 2572 #if defined(TARGET_PPC64) 2573 GEN_QEMU_STORE_64(st64r, BSWAP_MEMOP(MO_Q)) 2574 #endif 2575 2576 #define GEN_LD(name, ldop, opc, 
type) \ 2577 static void glue(gen_, name)(DisasContext *ctx) \ 2578 { \ 2579 TCGv EA; \ 2580 gen_set_access_type(ctx, ACCESS_INT); \ 2581 EA = tcg_temp_new(); \ 2582 gen_addr_imm_index(ctx, EA, 0); \ 2583 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 2584 tcg_temp_free(EA); \ 2585 } 2586 2587 #define GEN_LDU(name, ldop, opc, type) \ 2588 static void glue(gen_, name##u)(DisasContext *ctx) \ 2589 { \ 2590 TCGv EA; \ 2591 if (unlikely(rA(ctx->opcode) == 0 || \ 2592 rA(ctx->opcode) == rD(ctx->opcode))) { \ 2593 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 2594 return; \ 2595 } \ 2596 gen_set_access_type(ctx, ACCESS_INT); \ 2597 EA = tcg_temp_new(); \ 2598 if (type == PPC_64B) \ 2599 gen_addr_imm_index(ctx, EA, 0x03); \ 2600 else \ 2601 gen_addr_imm_index(ctx, EA, 0); \ 2602 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 2603 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 2604 tcg_temp_free(EA); \ 2605 } 2606 2607 #define GEN_LDUX(name, ldop, opc2, opc3, type) \ 2608 static void glue(gen_, name##ux)(DisasContext *ctx) \ 2609 { \ 2610 TCGv EA; \ 2611 if (unlikely(rA(ctx->opcode) == 0 || \ 2612 rA(ctx->opcode) == rD(ctx->opcode))) { \ 2613 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 2614 return; \ 2615 } \ 2616 gen_set_access_type(ctx, ACCESS_INT); \ 2617 EA = tcg_temp_new(); \ 2618 gen_addr_reg_index(ctx, EA); \ 2619 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 2620 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 2621 tcg_temp_free(EA); \ 2622 } 2623 2624 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \ 2625 static void glue(gen_, name##x)(DisasContext *ctx) \ 2626 { \ 2627 TCGv EA; \ 2628 chk; \ 2629 gen_set_access_type(ctx, ACCESS_INT); \ 2630 EA = tcg_temp_new(); \ 2631 gen_addr_reg_index(ctx, EA); \ 2632 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 2633 tcg_temp_free(EA); \ 2634 } 2635 2636 #define GEN_LDX(name, ldop, opc2, opc3, type) \ 2637 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_NONE) 
2638 2639 #define GEN_LDX_HVRM(name, ldop, opc2, opc3, type) \ 2640 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_HVRM) 2641 2642 #define GEN_LDS(name, ldop, op, type) \ 2643 GEN_LD(name, ldop, op | 0x20, type); \ 2644 GEN_LDU(name, ldop, op | 0x21, type); \ 2645 GEN_LDUX(name, ldop, 0x17, op | 0x01, type); \ 2646 GEN_LDX(name, ldop, 0x17, op | 0x00, type) 2647 2648 /* lbz lbzu lbzux lbzx */ 2649 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER); 2650 /* lha lhau lhaux lhax */ 2651 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER); 2652 /* lhz lhzu lhzux lhzx */ 2653 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER); 2654 /* lwz lwzu lwzux lwzx */ 2655 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER); 2656 #if defined(TARGET_PPC64) 2657 /* lwaux */ 2658 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B); 2659 /* lwax */ 2660 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B); 2661 /* ldux */ 2662 GEN_LDUX(ld, ld64_i64, 0x15, 0x01, PPC_64B); 2663 /* ldx */ 2664 GEN_LDX(ld, ld64_i64, 0x15, 0x00, PPC_64B); 2665 2666 /* CI load/store variants */ 2667 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST) 2668 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x15, PPC_CILDST) 2669 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST) 2670 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST) 2671 2672 static void gen_ld(DisasContext *ctx) 2673 { 2674 TCGv EA; 2675 if (Rc(ctx->opcode)) { 2676 if (unlikely(rA(ctx->opcode) == 0 || 2677 rA(ctx->opcode) == rD(ctx->opcode))) { 2678 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2679 return; 2680 } 2681 } 2682 gen_set_access_type(ctx, ACCESS_INT); 2683 EA = tcg_temp_new(); 2684 gen_addr_imm_index(ctx, EA, 0x03); 2685 if (ctx->opcode & 0x02) { 2686 /* lwa (lwau is undefined) */ 2687 gen_qemu_ld32s(ctx, cpu_gpr[rD(ctx->opcode)], EA); 2688 } else { 2689 /* ld - ldu */ 2690 gen_qemu_ld64_i64(ctx, cpu_gpr[rD(ctx->opcode)], EA); 2691 } 2692 if (Rc(ctx->opcode)) 2693 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); 2694 tcg_temp_free(EA); 2695 } 2696 2697 /* lq */ 2698 static void gen_lq(DisasContext *ctx) 
2699 { 2700 int ra, rd; 2701 TCGv EA; 2702 2703 /* lq is a legal user mode instruction starting in ISA 2.07 */ 2704 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2705 bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2706 2707 if (!legal_in_user_mode && ctx->pr) { 2708 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); 2709 return; 2710 } 2711 2712 if (!le_is_supported && ctx->le_mode) { 2713 gen_align_no_le(ctx); 2714 return; 2715 } 2716 ra = rA(ctx->opcode); 2717 rd = rD(ctx->opcode); 2718 if (unlikely((rd & 1) || rd == ra)) { 2719 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2720 return; 2721 } 2722 2723 gen_set_access_type(ctx, ACCESS_INT); 2724 EA = tcg_temp_new(); 2725 gen_addr_imm_index(ctx, EA, 0x0F); 2726 2727 /* We only need to swap high and low halves. gen_qemu_ld64_i64 does 2728 necessary 64-bit byteswap already. */ 2729 if (unlikely(ctx->le_mode)) { 2730 gen_qemu_ld64_i64(ctx, cpu_gpr[rd + 1], EA); 2731 gen_addr_add(ctx, EA, EA, 8); 2732 gen_qemu_ld64_i64(ctx, cpu_gpr[rd], EA); 2733 } else { 2734 gen_qemu_ld64_i64(ctx, cpu_gpr[rd], EA); 2735 gen_addr_add(ctx, EA, EA, 8); 2736 gen_qemu_ld64_i64(ctx, cpu_gpr[rd + 1], EA); 2737 } 2738 tcg_temp_free(EA); 2739 } 2740 #endif 2741 2742 /*** Integer store ***/ 2743 #define GEN_ST(name, stop, opc, type) \ 2744 static void glue(gen_, name)(DisasContext *ctx) \ 2745 { \ 2746 TCGv EA; \ 2747 gen_set_access_type(ctx, ACCESS_INT); \ 2748 EA = tcg_temp_new(); \ 2749 gen_addr_imm_index(ctx, EA, 0); \ 2750 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2751 tcg_temp_free(EA); \ 2752 } 2753 2754 #define GEN_STU(name, stop, opc, type) \ 2755 static void glue(gen_, stop##u)(DisasContext *ctx) \ 2756 { \ 2757 TCGv EA; \ 2758 if (unlikely(rA(ctx->opcode) == 0)) { \ 2759 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 2760 return; \ 2761 } \ 2762 gen_set_access_type(ctx, ACCESS_INT); \ 2763 EA = tcg_temp_new(); \ 2764 if (type == PPC_64B) \ 2765 
gen_addr_imm_index(ctx, EA, 0x03); \ 2766 else \ 2767 gen_addr_imm_index(ctx, EA, 0); \ 2768 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2769 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 2770 tcg_temp_free(EA); \ 2771 } 2772 2773 #define GEN_STUX(name, stop, opc2, opc3, type) \ 2774 static void glue(gen_, name##ux)(DisasContext *ctx) \ 2775 { \ 2776 TCGv EA; \ 2777 if (unlikely(rA(ctx->opcode) == 0)) { \ 2778 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 2779 return; \ 2780 } \ 2781 gen_set_access_type(ctx, ACCESS_INT); \ 2782 EA = tcg_temp_new(); \ 2783 gen_addr_reg_index(ctx, EA); \ 2784 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2785 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 2786 tcg_temp_free(EA); \ 2787 } 2788 2789 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \ 2790 static void glue(gen_, name##x)(DisasContext *ctx) \ 2791 { \ 2792 TCGv EA; \ 2793 chk; \ 2794 gen_set_access_type(ctx, ACCESS_INT); \ 2795 EA = tcg_temp_new(); \ 2796 gen_addr_reg_index(ctx, EA); \ 2797 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2798 tcg_temp_free(EA); \ 2799 } 2800 #define GEN_STX(name, stop, opc2, opc3, type) \ 2801 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_NONE) 2802 2803 #define GEN_STX_HVRM(name, stop, opc2, opc3, type) \ 2804 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_HVRM) 2805 2806 #define GEN_STS(name, stop, op, type) \ 2807 GEN_ST(name, stop, op | 0x20, type); \ 2808 GEN_STU(name, stop, op | 0x21, type); \ 2809 GEN_STUX(name, stop, 0x17, op | 0x01, type); \ 2810 GEN_STX(name, stop, 0x17, op | 0x00, type) 2811 2812 /* stb stbu stbux stbx */ 2813 GEN_STS(stb, st8, 0x06, PPC_INTEGER); 2814 /* sth sthu sthux sthx */ 2815 GEN_STS(sth, st16, 0x0C, PPC_INTEGER); 2816 /* stw stwu stwux stwx */ 2817 GEN_STS(stw, st32, 0x04, PPC_INTEGER); 2818 #if defined(TARGET_PPC64) 2819 GEN_STUX(std, st64_i64, 0x15, 0x05, PPC_64B); 2820 GEN_STX(std, st64_i64, 0x15, 0x04, PPC_64B); 2821 GEN_STX_HVRM(stdcix, 
st64_i64, 0x15, 0x1f, PPC_CILDST) 2822 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST) 2823 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST) 2824 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST) 2825 2826 static void gen_std(DisasContext *ctx) 2827 { 2828 int rs; 2829 TCGv EA; 2830 2831 rs = rS(ctx->opcode); 2832 if ((ctx->opcode & 0x3) == 0x2) { /* stq */ 2833 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2834 bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2835 2836 if (!(ctx->insns_flags & PPC_64BX)) { 2837 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2838 } 2839 2840 if (!legal_in_user_mode && ctx->pr) { 2841 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); 2842 return; 2843 } 2844 2845 if (!le_is_supported && ctx->le_mode) { 2846 gen_align_no_le(ctx); 2847 return; 2848 } 2849 2850 if (unlikely(rs & 1)) { 2851 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2852 return; 2853 } 2854 gen_set_access_type(ctx, ACCESS_INT); 2855 EA = tcg_temp_new(); 2856 gen_addr_imm_index(ctx, EA, 0x03); 2857 2858 /* We only need to swap high and low halves. gen_qemu_st64_i64 does 2859 necessary 64-bit byteswap already. 
*/ 2860 if (unlikely(ctx->le_mode)) { 2861 gen_qemu_st64_i64(ctx, cpu_gpr[rs + 1], EA); 2862 gen_addr_add(ctx, EA, EA, 8); 2863 gen_qemu_st64_i64(ctx, cpu_gpr[rs], EA); 2864 } else { 2865 gen_qemu_st64_i64(ctx, cpu_gpr[rs], EA); 2866 gen_addr_add(ctx, EA, EA, 8); 2867 gen_qemu_st64_i64(ctx, cpu_gpr[rs + 1], EA); 2868 } 2869 tcg_temp_free(EA); 2870 } else { 2871 /* std / stdu*/ 2872 if (Rc(ctx->opcode)) { 2873 if (unlikely(rA(ctx->opcode) == 0)) { 2874 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2875 return; 2876 } 2877 } 2878 gen_set_access_type(ctx, ACCESS_INT); 2879 EA = tcg_temp_new(); 2880 gen_addr_imm_index(ctx, EA, 0x03); 2881 gen_qemu_st64_i64(ctx, cpu_gpr[rs], EA); 2882 if (Rc(ctx->opcode)) 2883 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); 2884 tcg_temp_free(EA); 2885 } 2886 } 2887 #endif 2888 /*** Integer load and store with byte reverse ***/ 2889 2890 /* lhbrx */ 2891 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER); 2892 2893 /* lwbrx */ 2894 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER); 2895 2896 #if defined(TARGET_PPC64) 2897 /* ldbrx */ 2898 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE); 2899 /* stdbrx */ 2900 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE); 2901 #endif /* TARGET_PPC64 */ 2902 2903 /* sthbrx */ 2904 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER); 2905 /* stwbrx */ 2906 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER); 2907 2908 /*** Integer load and store multiple ***/ 2909 2910 /* lmw */ 2911 static void gen_lmw(DisasContext *ctx) 2912 { 2913 TCGv t0; 2914 TCGv_i32 t1; 2915 2916 if (ctx->le_mode) { 2917 gen_align_no_le(ctx); 2918 return; 2919 } 2920 gen_set_access_type(ctx, ACCESS_INT); 2921 t0 = tcg_temp_new(); 2922 t1 = tcg_const_i32(rD(ctx->opcode)); 2923 gen_addr_imm_index(ctx, t0, 0); 2924 gen_helper_lmw(cpu_env, t0, t1); 2925 tcg_temp_free(t0); 2926 tcg_temp_free_i32(t1); 2927 } 2928 2929 /* stmw */ 2930 static void gen_stmw(DisasContext *ctx) 2931 { 2932 TCGv t0; 2933 
TCGv_i32 t1; 2934 2935 if (ctx->le_mode) { 2936 gen_align_no_le(ctx); 2937 return; 2938 } 2939 gen_set_access_type(ctx, ACCESS_INT); 2940 t0 = tcg_temp_new(); 2941 t1 = tcg_const_i32(rS(ctx->opcode)); 2942 gen_addr_imm_index(ctx, t0, 0); 2943 gen_helper_stmw(cpu_env, t0, t1); 2944 tcg_temp_free(t0); 2945 tcg_temp_free_i32(t1); 2946 } 2947 2948 /*** Integer load and store strings ***/ 2949 2950 /* lswi */ 2951 /* PowerPC32 specification says we must generate an exception if 2952 * rA is in the range of registers to be loaded. 2953 * In an other hand, IBM says this is valid, but rA won't be loaded. 2954 * For now, I'll follow the spec... 2955 */ 2956 static void gen_lswi(DisasContext *ctx) 2957 { 2958 TCGv t0; 2959 TCGv_i32 t1, t2; 2960 int nb = NB(ctx->opcode); 2961 int start = rD(ctx->opcode); 2962 int ra = rA(ctx->opcode); 2963 int nr; 2964 2965 if (ctx->le_mode) { 2966 gen_align_no_le(ctx); 2967 return; 2968 } 2969 if (nb == 0) 2970 nb = 32; 2971 nr = (nb + 3) / 4; 2972 if (unlikely(lsw_reg_in_range(start, nr, ra))) { 2973 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX); 2974 return; 2975 } 2976 gen_set_access_type(ctx, ACCESS_INT); 2977 t0 = tcg_temp_new(); 2978 gen_addr_register(ctx, t0); 2979 t1 = tcg_const_i32(nb); 2980 t2 = tcg_const_i32(start); 2981 gen_helper_lsw(cpu_env, t0, t1, t2); 2982 tcg_temp_free(t0); 2983 tcg_temp_free_i32(t1); 2984 tcg_temp_free_i32(t2); 2985 } 2986 2987 /* lswx */ 2988 static void gen_lswx(DisasContext *ctx) 2989 { 2990 TCGv t0; 2991 TCGv_i32 t1, t2, t3; 2992 2993 if (ctx->le_mode) { 2994 gen_align_no_le(ctx); 2995 return; 2996 } 2997 gen_set_access_type(ctx, ACCESS_INT); 2998 t0 = tcg_temp_new(); 2999 gen_addr_reg_index(ctx, t0); 3000 t1 = tcg_const_i32(rD(ctx->opcode)); 3001 t2 = tcg_const_i32(rA(ctx->opcode)); 3002 t3 = tcg_const_i32(rB(ctx->opcode)); 3003 gen_helper_lswx(cpu_env, t0, t1, t2, t3); 3004 tcg_temp_free(t0); 3005 tcg_temp_free_i32(t1); 3006 tcg_temp_free_i32(t2); 3007 tcg_temp_free_i32(t3); 3008 } 3009 3010 /* 
stswi */ 3011 static void gen_stswi(DisasContext *ctx) 3012 { 3013 TCGv t0; 3014 TCGv_i32 t1, t2; 3015 int nb = NB(ctx->opcode); 3016 3017 if (ctx->le_mode) { 3018 gen_align_no_le(ctx); 3019 return; 3020 } 3021 gen_set_access_type(ctx, ACCESS_INT); 3022 t0 = tcg_temp_new(); 3023 gen_addr_register(ctx, t0); 3024 if (nb == 0) 3025 nb = 32; 3026 t1 = tcg_const_i32(nb); 3027 t2 = tcg_const_i32(rS(ctx->opcode)); 3028 gen_helper_stsw(cpu_env, t0, t1, t2); 3029 tcg_temp_free(t0); 3030 tcg_temp_free_i32(t1); 3031 tcg_temp_free_i32(t2); 3032 } 3033 3034 /* stswx */ 3035 static void gen_stswx(DisasContext *ctx) 3036 { 3037 TCGv t0; 3038 TCGv_i32 t1, t2; 3039 3040 if (ctx->le_mode) { 3041 gen_align_no_le(ctx); 3042 return; 3043 } 3044 gen_set_access_type(ctx, ACCESS_INT); 3045 t0 = tcg_temp_new(); 3046 gen_addr_reg_index(ctx, t0); 3047 t1 = tcg_temp_new_i32(); 3048 tcg_gen_trunc_tl_i32(t1, cpu_xer); 3049 tcg_gen_andi_i32(t1, t1, 0x7F); 3050 t2 = tcg_const_i32(rS(ctx->opcode)); 3051 gen_helper_stsw(cpu_env, t0, t1, t2); 3052 tcg_temp_free(t0); 3053 tcg_temp_free_i32(t1); 3054 tcg_temp_free_i32(t2); 3055 } 3056 3057 /*** Memory synchronisation ***/ 3058 /* eieio */ 3059 static void gen_eieio(DisasContext *ctx) 3060 { 3061 } 3062 3063 #if !defined(CONFIG_USER_ONLY) 3064 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) 3065 { 3066 TCGv_i32 t; 3067 TCGLabel *l; 3068 3069 if (!ctx->lazy_tlb_flush) { 3070 return; 3071 } 3072 l = gen_new_label(); 3073 t = tcg_temp_new_i32(); 3074 tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, tlb_need_flush)); 3075 tcg_gen_brcondi_i32(TCG_COND_EQ, t, 0, l); 3076 if (global) { 3077 gen_helper_check_tlb_flush_global(cpu_env); 3078 } else { 3079 gen_helper_check_tlb_flush_local(cpu_env); 3080 } 3081 gen_set_label(l); 3082 tcg_temp_free_i32(t); 3083 } 3084 #else 3085 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) { } 3086 #endif 3087 3088 /* isync */ 3089 static void gen_isync(DisasContext *ctx) 3090 { 3091 
/* 3092 * We need to check for a pending TLB flush. This can only happen in 3093 * kernel mode however so check MSR_PR 3094 */ 3095 if (!ctx->pr) { 3096 gen_check_tlb_flush(ctx, false); 3097 } 3098 gen_stop_exception(ctx); 3099 } 3100 3101 #define MEMOP_GET_SIZE(x) (1 << ((x) & MO_SIZE)) 3102 3103 #define LARX(name, memop) \ 3104 static void gen_##name(DisasContext *ctx) \ 3105 { \ 3106 TCGv t0; \ 3107 TCGv gpr = cpu_gpr[rD(ctx->opcode)]; \ 3108 int len = MEMOP_GET_SIZE(memop); \ 3109 gen_set_access_type(ctx, ACCESS_RES); \ 3110 t0 = tcg_temp_local_new(); \ 3111 gen_addr_reg_index(ctx, t0); \ 3112 if ((len) > 1) { \ 3113 gen_check_align(ctx, t0, (len)-1); \ 3114 } \ 3115 tcg_gen_qemu_ld_tl(gpr, t0, ctx->mem_idx, memop); \ 3116 tcg_gen_mov_tl(cpu_reserve, t0); \ 3117 tcg_gen_st_tl(gpr, cpu_env, offsetof(CPUPPCState, reserve_val)); \ 3118 tcg_temp_free(t0); \ 3119 } 3120 3121 /* lwarx */ 3122 LARX(lbarx, DEF_MEMOP(MO_UB)) 3123 LARX(lharx, DEF_MEMOP(MO_UW)) 3124 LARX(lwarx, DEF_MEMOP(MO_UL)) 3125 3126 #if defined(CONFIG_USER_ONLY) 3127 static void gen_conditional_store(DisasContext *ctx, TCGv EA, 3128 int reg, int memop) 3129 { 3130 TCGv t0 = tcg_temp_new(); 3131 3132 tcg_gen_st_tl(EA, cpu_env, offsetof(CPUPPCState, reserve_ea)); 3133 tcg_gen_movi_tl(t0, (MEMOP_GET_SIZE(memop) << 5) | reg); 3134 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, reserve_info)); 3135 tcg_temp_free(t0); 3136 gen_exception_err(ctx, POWERPC_EXCP_STCX, 0); 3137 } 3138 #else 3139 static void gen_conditional_store(DisasContext *ctx, TCGv EA, 3140 int reg, int memop) 3141 { 3142 TCGLabel *l1; 3143 3144 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 3145 l1 = gen_new_label(); 3146 tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, l1); 3147 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ); 3148 tcg_gen_qemu_st_tl(cpu_gpr[reg], EA, ctx->mem_idx, memop); 3149 gen_set_label(l1); 3150 tcg_gen_movi_tl(cpu_reserve, -1); 3151 } 3152 #endif 3153 3154 #define STCX(name, memop) \ 3155 static void 
gen_##name(DisasContext *ctx) \ 3156 { \ 3157 TCGv t0; \ 3158 int len = MEMOP_GET_SIZE(memop); \ 3159 gen_set_access_type(ctx, ACCESS_RES); \ 3160 t0 = tcg_temp_local_new(); \ 3161 gen_addr_reg_index(ctx, t0); \ 3162 if (len > 1) { \ 3163 gen_check_align(ctx, t0, (len) - 1); \ 3164 } \ 3165 gen_conditional_store(ctx, t0, rS(ctx->opcode), memop); \ 3166 tcg_temp_free(t0); \ 3167 } 3168 3169 STCX(stbcx_, DEF_MEMOP(MO_UB)) 3170 STCX(sthcx_, DEF_MEMOP(MO_UW)) 3171 STCX(stwcx_, DEF_MEMOP(MO_UL)) 3172 3173 #if defined(TARGET_PPC64) 3174 /* ldarx */ 3175 LARX(ldarx, DEF_MEMOP(MO_Q)) 3176 /* stdcx. */ 3177 STCX(stdcx_, DEF_MEMOP(MO_Q)) 3178 3179 /* lqarx */ 3180 static void gen_lqarx(DisasContext *ctx) 3181 { 3182 TCGv EA; 3183 int rd = rD(ctx->opcode); 3184 TCGv gpr1, gpr2; 3185 3186 if (unlikely((rd & 1) || (rd == rA(ctx->opcode)) || 3187 (rd == rB(ctx->opcode)))) { 3188 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3189 return; 3190 } 3191 3192 gen_set_access_type(ctx, ACCESS_RES); 3193 EA = tcg_temp_local_new(); 3194 gen_addr_reg_index(ctx, EA); 3195 gen_check_align(ctx, EA, 15); 3196 if (unlikely(ctx->le_mode)) { 3197 gpr1 = cpu_gpr[rd+1]; 3198 gpr2 = cpu_gpr[rd]; 3199 } else { 3200 gpr1 = cpu_gpr[rd]; 3201 gpr2 = cpu_gpr[rd+1]; 3202 } 3203 tcg_gen_qemu_ld_i64(gpr1, EA, ctx->mem_idx, DEF_MEMOP(MO_Q)); 3204 tcg_gen_mov_tl(cpu_reserve, EA); 3205 gen_addr_add(ctx, EA, EA, 8); 3206 tcg_gen_qemu_ld_i64(gpr2, EA, ctx->mem_idx, DEF_MEMOP(MO_Q)); 3207 3208 tcg_gen_st_tl(gpr1, cpu_env, offsetof(CPUPPCState, reserve_val)); 3209 tcg_gen_st_tl(gpr2, cpu_env, offsetof(CPUPPCState, reserve_val2)); 3210 tcg_temp_free(EA); 3211 } 3212 3213 /* stqcx. 
*/ 3214 static void gen_stqcx_(DisasContext *ctx) 3215 { 3216 TCGv EA; 3217 int reg = rS(ctx->opcode); 3218 int len = 16; 3219 #if !defined(CONFIG_USER_ONLY) 3220 TCGLabel *l1; 3221 TCGv gpr1, gpr2; 3222 #endif 3223 3224 if (unlikely((rD(ctx->opcode) & 1))) { 3225 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3226 return; 3227 } 3228 gen_set_access_type(ctx, ACCESS_RES); 3229 EA = tcg_temp_local_new(); 3230 gen_addr_reg_index(ctx, EA); 3231 if (len > 1) { 3232 gen_check_align(ctx, EA, (len) - 1); 3233 } 3234 3235 #if defined(CONFIG_USER_ONLY) 3236 gen_conditional_store(ctx, EA, reg, 16); 3237 #else 3238 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 3239 l1 = gen_new_label(); 3240 tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, l1); 3241 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ); 3242 3243 if (unlikely(ctx->le_mode)) { 3244 gpr1 = cpu_gpr[reg + 1]; 3245 gpr2 = cpu_gpr[reg]; 3246 } else { 3247 gpr1 = cpu_gpr[reg]; 3248 gpr2 = cpu_gpr[reg + 1]; 3249 } 3250 tcg_gen_qemu_st_tl(gpr1, EA, ctx->mem_idx, DEF_MEMOP(MO_Q)); 3251 gen_addr_add(ctx, EA, EA, 8); 3252 tcg_gen_qemu_st_tl(gpr2, EA, ctx->mem_idx, DEF_MEMOP(MO_Q)); 3253 3254 gen_set_label(l1); 3255 tcg_gen_movi_tl(cpu_reserve, -1); 3256 #endif 3257 tcg_temp_free(EA); 3258 } 3259 3260 #endif /* defined(TARGET_PPC64) */ 3261 3262 /* sync */ 3263 static void gen_sync(DisasContext *ctx) 3264 { 3265 uint32_t l = (ctx->opcode >> 21) & 3; 3266 3267 /* 3268 * We may need to check for a pending TLB flush. 3269 * 3270 * We do this on ptesync (l == 2) on ppc64 and any sync pn ppc32. 3271 * 3272 * Additionally, this can only happen in kernel mode however so 3273 * check MSR_PR as well. 
3274 */ 3275 if (((l == 2) || !(ctx->insns_flags & PPC_64B)) && !ctx->pr) { 3276 gen_check_tlb_flush(ctx, true); 3277 } 3278 } 3279 3280 /* wait */ 3281 static void gen_wait(DisasContext *ctx) 3282 { 3283 TCGv_i32 t0 = tcg_const_i32(1); 3284 tcg_gen_st_i32(t0, cpu_env, 3285 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted)); 3286 tcg_temp_free_i32(t0); 3287 /* Stop translation, as the CPU is supposed to sleep from now */ 3288 gen_exception_nip(ctx, EXCP_HLT, ctx->nip); 3289 } 3290 3291 #if defined(TARGET_PPC64) 3292 static void gen_doze(DisasContext *ctx) 3293 { 3294 #if defined(CONFIG_USER_ONLY) 3295 GEN_PRIV; 3296 #else 3297 TCGv_i32 t; 3298 3299 CHK_HV; 3300 t = tcg_const_i32(PPC_PM_DOZE); 3301 gen_helper_pminsn(cpu_env, t); 3302 tcg_temp_free_i32(t); 3303 gen_stop_exception(ctx); 3304 #endif /* defined(CONFIG_USER_ONLY) */ 3305 } 3306 3307 static void gen_nap(DisasContext *ctx) 3308 { 3309 #if defined(CONFIG_USER_ONLY) 3310 GEN_PRIV; 3311 #else 3312 TCGv_i32 t; 3313 3314 CHK_HV; 3315 t = tcg_const_i32(PPC_PM_NAP); 3316 gen_helper_pminsn(cpu_env, t); 3317 tcg_temp_free_i32(t); 3318 gen_stop_exception(ctx); 3319 #endif /* defined(CONFIG_USER_ONLY) */ 3320 } 3321 3322 static void gen_sleep(DisasContext *ctx) 3323 { 3324 #if defined(CONFIG_USER_ONLY) 3325 GEN_PRIV; 3326 #else 3327 TCGv_i32 t; 3328 3329 CHK_HV; 3330 t = tcg_const_i32(PPC_PM_SLEEP); 3331 gen_helper_pminsn(cpu_env, t); 3332 tcg_temp_free_i32(t); 3333 gen_stop_exception(ctx); 3334 #endif /* defined(CONFIG_USER_ONLY) */ 3335 } 3336 3337 static void gen_rvwinkle(DisasContext *ctx) 3338 { 3339 #if defined(CONFIG_USER_ONLY) 3340 GEN_PRIV; 3341 #else 3342 TCGv_i32 t; 3343 3344 CHK_HV; 3345 t = tcg_const_i32(PPC_PM_RVWINKLE); 3346 gen_helper_pminsn(cpu_env, t); 3347 tcg_temp_free_i32(t); 3348 gen_stop_exception(ctx); 3349 #endif /* defined(CONFIG_USER_ONLY) */ 3350 } 3351 #endif /* #if defined(TARGET_PPC64) */ 3352 3353 static inline void gen_update_cfar(DisasContext *ctx, target_ulong nip) 3354 { 3355 
#if defined(TARGET_PPC64) 3356 if (ctx->has_cfar) 3357 tcg_gen_movi_tl(cpu_cfar, nip); 3358 #endif 3359 } 3360 3361 static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest) 3362 { 3363 if (unlikely(ctx->singlestep_enabled)) { 3364 return false; 3365 } 3366 3367 #ifndef CONFIG_USER_ONLY 3368 return (ctx->tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK); 3369 #else 3370 return true; 3371 #endif 3372 } 3373 3374 /*** Branch ***/ 3375 static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest) 3376 { 3377 if (NARROW_MODE(ctx)) { 3378 dest = (uint32_t) dest; 3379 } 3380 if (use_goto_tb(ctx, dest)) { 3381 tcg_gen_goto_tb(n); 3382 tcg_gen_movi_tl(cpu_nip, dest & ~3); 3383 tcg_gen_exit_tb((uintptr_t)ctx->tb + n); 3384 } else { 3385 tcg_gen_movi_tl(cpu_nip, dest & ~3); 3386 if (unlikely(ctx->singlestep_enabled)) { 3387 if ((ctx->singlestep_enabled & 3388 (CPU_BRANCH_STEP | CPU_SINGLE_STEP)) && 3389 (ctx->exception == POWERPC_EXCP_BRANCH || 3390 ctx->exception == POWERPC_EXCP_TRACE)) { 3391 gen_exception_nip(ctx, POWERPC_EXCP_TRACE, dest); 3392 } 3393 if (ctx->singlestep_enabled & GDBSTUB_SINGLE_STEP) { 3394 gen_debug_exception(ctx); 3395 } 3396 } 3397 tcg_gen_exit_tb(0); 3398 } 3399 } 3400 3401 static inline void gen_setlr(DisasContext *ctx, target_ulong nip) 3402 { 3403 if (NARROW_MODE(ctx)) { 3404 nip = (uint32_t)nip; 3405 } 3406 tcg_gen_movi_tl(cpu_lr, nip); 3407 } 3408 3409 /* b ba bl bla */ 3410 static void gen_b(DisasContext *ctx) 3411 { 3412 target_ulong li, target; 3413 3414 ctx->exception = POWERPC_EXCP_BRANCH; 3415 /* sign extend LI */ 3416 li = LI(ctx->opcode); 3417 li = (li ^ 0x02000000) - 0x02000000; 3418 if (likely(AA(ctx->opcode) == 0)) { 3419 target = ctx->nip + li - 4; 3420 } else { 3421 target = li; 3422 } 3423 if (LK(ctx->opcode)) { 3424 gen_setlr(ctx, ctx->nip); 3425 } 3426 gen_update_cfar(ctx, ctx->nip - 4); 3427 gen_goto_tb(ctx, 0, target); 3428 } 3429 3430 #define BCOND_IM 0 3431 #define BCOND_LR 1 3432 #define 
BCOND_CTR 2 3433 #define BCOND_TAR 3 3434 3435 static inline void gen_bcond(DisasContext *ctx, int type) 3436 { 3437 uint32_t bo = BO(ctx->opcode); 3438 TCGLabel *l1; 3439 TCGv target; 3440 3441 ctx->exception = POWERPC_EXCP_BRANCH; 3442 if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) { 3443 target = tcg_temp_local_new(); 3444 if (type == BCOND_CTR) 3445 tcg_gen_mov_tl(target, cpu_ctr); 3446 else if (type == BCOND_TAR) 3447 gen_load_spr(target, SPR_TAR); 3448 else 3449 tcg_gen_mov_tl(target, cpu_lr); 3450 } else { 3451 TCGV_UNUSED(target); 3452 } 3453 if (LK(ctx->opcode)) 3454 gen_setlr(ctx, ctx->nip); 3455 l1 = gen_new_label(); 3456 if ((bo & 0x4) == 0) { 3457 /* Decrement and test CTR */ 3458 TCGv temp = tcg_temp_new(); 3459 if (unlikely(type == BCOND_CTR)) { 3460 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3461 return; 3462 } 3463 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1); 3464 if (NARROW_MODE(ctx)) { 3465 tcg_gen_ext32u_tl(temp, cpu_ctr); 3466 } else { 3467 tcg_gen_mov_tl(temp, cpu_ctr); 3468 } 3469 if (bo & 0x2) { 3470 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1); 3471 } else { 3472 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1); 3473 } 3474 tcg_temp_free(temp); 3475 } 3476 if ((bo & 0x10) == 0) { 3477 /* Test CR */ 3478 uint32_t bi = BI(ctx->opcode); 3479 uint32_t mask = 0x08 >> (bi & 0x03); 3480 TCGv_i32 temp = tcg_temp_new_i32(); 3481 3482 if (bo & 0x8) { 3483 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask); 3484 tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1); 3485 } else { 3486 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask); 3487 tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1); 3488 } 3489 tcg_temp_free_i32(temp); 3490 } 3491 gen_update_cfar(ctx, ctx->nip - 4); 3492 if (type == BCOND_IM) { 3493 target_ulong li = (target_long)((int16_t)(BD(ctx->opcode))); 3494 if (likely(AA(ctx->opcode) == 0)) { 3495 gen_goto_tb(ctx, 0, ctx->nip + li - 4); 3496 } else { 3497 gen_goto_tb(ctx, 0, li); 3498 } 3499 if ((bo & 0x14) != 0x14) { 3500 gen_set_label(l1); 
3501 gen_goto_tb(ctx, 1, ctx->nip); 3502 } 3503 } else { 3504 if (NARROW_MODE(ctx)) { 3505 tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3); 3506 } else { 3507 tcg_gen_andi_tl(cpu_nip, target, ~3); 3508 } 3509 tcg_gen_exit_tb(0); 3510 if ((bo & 0x14) != 0x14) { 3511 gen_set_label(l1); 3512 gen_update_nip(ctx, ctx->nip); 3513 tcg_gen_exit_tb(0); 3514 } 3515 } 3516 if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) { 3517 tcg_temp_free(target); 3518 } 3519 } 3520 3521 static void gen_bc(DisasContext *ctx) 3522 { 3523 gen_bcond(ctx, BCOND_IM); 3524 } 3525 3526 static void gen_bcctr(DisasContext *ctx) 3527 { 3528 gen_bcond(ctx, BCOND_CTR); 3529 } 3530 3531 static void gen_bclr(DisasContext *ctx) 3532 { 3533 gen_bcond(ctx, BCOND_LR); 3534 } 3535 3536 static void gen_bctar(DisasContext *ctx) 3537 { 3538 gen_bcond(ctx, BCOND_TAR); 3539 } 3540 3541 /*** Condition register logical ***/ 3542 #define GEN_CRLOGIC(name, tcg_op, opc) \ 3543 static void glue(gen_, name)(DisasContext *ctx) \ 3544 { \ 3545 uint8_t bitmask; \ 3546 int sh; \ 3547 TCGv_i32 t0, t1; \ 3548 sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03); \ 3549 t0 = tcg_temp_new_i32(); \ 3550 if (sh > 0) \ 3551 tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh); \ 3552 else if (sh < 0) \ 3553 tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh); \ 3554 else \ 3555 tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]); \ 3556 t1 = tcg_temp_new_i32(); \ 3557 sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03); \ 3558 if (sh > 0) \ 3559 tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh); \ 3560 else if (sh < 0) \ 3561 tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh); \ 3562 else \ 3563 tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]); \ 3564 tcg_op(t0, t0, t1); \ 3565 bitmask = 0x08 >> (crbD(ctx->opcode) & 0x03); \ 3566 tcg_gen_andi_i32(t0, t0, bitmask); \ 3567 tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask); \ 3568 
tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1); \ 3569 tcg_temp_free_i32(t0); \ 3570 tcg_temp_free_i32(t1); \ 3571 } 3572 3573 /* crand */ 3574 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08); 3575 /* crandc */ 3576 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04); 3577 /* creqv */ 3578 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09); 3579 /* crnand */ 3580 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07); 3581 /* crnor */ 3582 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01); 3583 /* cror */ 3584 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E); 3585 /* crorc */ 3586 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D); 3587 /* crxor */ 3588 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06); 3589 3590 /* mcrf */ 3591 static void gen_mcrf(DisasContext *ctx) 3592 { 3593 tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]); 3594 } 3595 3596 /*** System linkage ***/ 3597 3598 /* rfi (supervisor only) */ 3599 static void gen_rfi(DisasContext *ctx) 3600 { 3601 #if defined(CONFIG_USER_ONLY) 3602 GEN_PRIV; 3603 #else 3604 /* This instruction doesn't exist anymore on 64-bit server 3605 * processors compliant with arch 2.x 3606 */ 3607 if (ctx->insns_flags & PPC_SEGMENT_64B) { 3608 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3609 return; 3610 } 3611 /* Restore CPU state */ 3612 CHK_SV; 3613 gen_update_cfar(ctx, ctx->nip - 4); 3614 gen_helper_rfi(cpu_env); 3615 gen_sync_exception(ctx); 3616 #endif 3617 } 3618 3619 #if defined(TARGET_PPC64) 3620 static void gen_rfid(DisasContext *ctx) 3621 { 3622 #if defined(CONFIG_USER_ONLY) 3623 GEN_PRIV; 3624 #else 3625 /* Restore CPU state */ 3626 CHK_SV; 3627 gen_update_cfar(ctx, ctx->nip - 4); 3628 gen_helper_rfid(cpu_env); 3629 gen_sync_exception(ctx); 3630 #endif 3631 } 3632 3633 static void gen_hrfid(DisasContext *ctx) 3634 { 3635 #if defined(CONFIG_USER_ONLY) 3636 GEN_PRIV; 3637 #else 3638 /* Restore CPU state */ 3639 CHK_HV; 3640 gen_helper_hrfid(cpu_env); 3641 gen_sync_exception(ctx); 3642 #endif 3643 } 3644 #endif 3645 3646 /* sc */ 3647 #if 
defined(CONFIG_USER_ONLY) 3648 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER 3649 #else 3650 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL 3651 #endif 3652 static void gen_sc(DisasContext *ctx) 3653 { 3654 uint32_t lev; 3655 3656 lev = (ctx->opcode >> 5) & 0x7F; 3657 gen_exception_err(ctx, POWERPC_SYSCALL, lev); 3658 } 3659 3660 /*** Trap ***/ 3661 3662 /* Check for unconditional traps (always or never) */ 3663 static bool check_unconditional_trap(DisasContext *ctx) 3664 { 3665 /* Trap never */ 3666 if (TO(ctx->opcode) == 0) { 3667 return true; 3668 } 3669 /* Trap always */ 3670 if (TO(ctx->opcode) == 31) { 3671 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_TRAP); 3672 return true; 3673 } 3674 return false; 3675 } 3676 3677 /* tw */ 3678 static void gen_tw(DisasContext *ctx) 3679 { 3680 TCGv_i32 t0; 3681 3682 if (check_unconditional_trap(ctx)) { 3683 return; 3684 } 3685 t0 = tcg_const_i32(TO(ctx->opcode)); 3686 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 3687 t0); 3688 tcg_temp_free_i32(t0); 3689 } 3690 3691 /* twi */ 3692 static void gen_twi(DisasContext *ctx) 3693 { 3694 TCGv t0; 3695 TCGv_i32 t1; 3696 3697 if (check_unconditional_trap(ctx)) { 3698 return; 3699 } 3700 t0 = tcg_const_tl(SIMM(ctx->opcode)); 3701 t1 = tcg_const_i32(TO(ctx->opcode)); 3702 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1); 3703 tcg_temp_free(t0); 3704 tcg_temp_free_i32(t1); 3705 } 3706 3707 #if defined(TARGET_PPC64) 3708 /* td */ 3709 static void gen_td(DisasContext *ctx) 3710 { 3711 TCGv_i32 t0; 3712 3713 if (check_unconditional_trap(ctx)) { 3714 return; 3715 } 3716 t0 = tcg_const_i32(TO(ctx->opcode)); 3717 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 3718 t0); 3719 tcg_temp_free_i32(t0); 3720 } 3721 3722 /* tdi */ 3723 static void gen_tdi(DisasContext *ctx) 3724 { 3725 TCGv t0; 3726 TCGv_i32 t1; 3727 3728 if (check_unconditional_trap(ctx)) { 3729 return; 3730 } 3731 t0 = 
tcg_const_tl(SIMM(ctx->opcode)); 3732 t1 = tcg_const_i32(TO(ctx->opcode)); 3733 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1); 3734 tcg_temp_free(t0); 3735 tcg_temp_free_i32(t1); 3736 } 3737 #endif 3738 3739 /*** Processor control ***/ 3740 3741 static void gen_read_xer(TCGv dst) 3742 { 3743 TCGv t0 = tcg_temp_new(); 3744 TCGv t1 = tcg_temp_new(); 3745 TCGv t2 = tcg_temp_new(); 3746 tcg_gen_mov_tl(dst, cpu_xer); 3747 tcg_gen_shli_tl(t0, cpu_so, XER_SO); 3748 tcg_gen_shli_tl(t1, cpu_ov, XER_OV); 3749 tcg_gen_shli_tl(t2, cpu_ca, XER_CA); 3750 tcg_gen_or_tl(t0, t0, t1); 3751 tcg_gen_or_tl(dst, dst, t2); 3752 tcg_gen_or_tl(dst, dst, t0); 3753 tcg_temp_free(t0); 3754 tcg_temp_free(t1); 3755 tcg_temp_free(t2); 3756 } 3757 3758 static void gen_write_xer(TCGv src) 3759 { 3760 tcg_gen_andi_tl(cpu_xer, src, 3761 ~((1u << XER_SO) | (1u << XER_OV) | (1u << XER_CA))); 3762 tcg_gen_shri_tl(cpu_so, src, XER_SO); 3763 tcg_gen_shri_tl(cpu_ov, src, XER_OV); 3764 tcg_gen_shri_tl(cpu_ca, src, XER_CA); 3765 tcg_gen_andi_tl(cpu_so, cpu_so, 1); 3766 tcg_gen_andi_tl(cpu_ov, cpu_ov, 1); 3767 tcg_gen_andi_tl(cpu_ca, cpu_ca, 1); 3768 } 3769 3770 /* mcrxr */ 3771 static void gen_mcrxr(DisasContext *ctx) 3772 { 3773 TCGv_i32 t0 = tcg_temp_new_i32(); 3774 TCGv_i32 t1 = tcg_temp_new_i32(); 3775 TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)]; 3776 3777 tcg_gen_trunc_tl_i32(t0, cpu_so); 3778 tcg_gen_trunc_tl_i32(t1, cpu_ov); 3779 tcg_gen_trunc_tl_i32(dst, cpu_ca); 3780 tcg_gen_shli_i32(t0, t0, 3); 3781 tcg_gen_shli_i32(t1, t1, 2); 3782 tcg_gen_shli_i32(dst, dst, 1); 3783 tcg_gen_or_i32(dst, dst, t0); 3784 tcg_gen_or_i32(dst, dst, t1); 3785 tcg_temp_free_i32(t0); 3786 tcg_temp_free_i32(t1); 3787 3788 tcg_gen_movi_tl(cpu_so, 0); 3789 tcg_gen_movi_tl(cpu_ov, 0); 3790 tcg_gen_movi_tl(cpu_ca, 0); 3791 } 3792 3793 /* mfcr mfocrf */ 3794 static void gen_mfcr(DisasContext *ctx) 3795 { 3796 uint32_t crm, crn; 3797 3798 if (likely(ctx->opcode & 0x00100000)) { 3799 crm = CRM(ctx->opcode); 3800 if 
(likely(crm && ((crm & (crm - 1)) == 0))) { 3801 crn = ctz32 (crm); 3802 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]); 3803 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], 3804 cpu_gpr[rD(ctx->opcode)], crn * 4); 3805 } 3806 } else { 3807 TCGv_i32 t0 = tcg_temp_new_i32(); 3808 tcg_gen_mov_i32(t0, cpu_crf[0]); 3809 tcg_gen_shli_i32(t0, t0, 4); 3810 tcg_gen_or_i32(t0, t0, cpu_crf[1]); 3811 tcg_gen_shli_i32(t0, t0, 4); 3812 tcg_gen_or_i32(t0, t0, cpu_crf[2]); 3813 tcg_gen_shli_i32(t0, t0, 4); 3814 tcg_gen_or_i32(t0, t0, cpu_crf[3]); 3815 tcg_gen_shli_i32(t0, t0, 4); 3816 tcg_gen_or_i32(t0, t0, cpu_crf[4]); 3817 tcg_gen_shli_i32(t0, t0, 4); 3818 tcg_gen_or_i32(t0, t0, cpu_crf[5]); 3819 tcg_gen_shli_i32(t0, t0, 4); 3820 tcg_gen_or_i32(t0, t0, cpu_crf[6]); 3821 tcg_gen_shli_i32(t0, t0, 4); 3822 tcg_gen_or_i32(t0, t0, cpu_crf[7]); 3823 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0); 3824 tcg_temp_free_i32(t0); 3825 } 3826 } 3827 3828 /* mfmsr */ 3829 static void gen_mfmsr(DisasContext *ctx) 3830 { 3831 CHK_SV; 3832 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr); 3833 } 3834 3835 static void spr_noaccess(DisasContext *ctx, int gprn, int sprn) 3836 { 3837 #if 0 3838 sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5); 3839 printf("ERROR: try to access SPR %d !\n", sprn); 3840 #endif 3841 } 3842 #define SPR_NOACCESS (&spr_noaccess) 3843 3844 /* mfspr */ 3845 static inline void gen_op_mfspr(DisasContext *ctx) 3846 { 3847 void (*read_cb)(DisasContext *ctx, int gprn, int sprn); 3848 uint32_t sprn = SPR(ctx->opcode); 3849 3850 #if defined(CONFIG_USER_ONLY) 3851 read_cb = ctx->spr_cb[sprn].uea_read; 3852 #else 3853 if (ctx->pr) { 3854 read_cb = ctx->spr_cb[sprn].uea_read; 3855 } else if (ctx->hv) { 3856 read_cb = ctx->spr_cb[sprn].hea_read; 3857 } else { 3858 read_cb = ctx->spr_cb[sprn].oea_read; 3859 } 3860 #endif 3861 if (likely(read_cb != NULL)) { 3862 if (likely(read_cb != SPR_NOACCESS)) { 3863 (*read_cb)(ctx, rD(ctx->opcode), sprn); 3864 } else { 3865 /* 
Privilege exception */ 3866 /* This is a hack to avoid warnings when running Linux: 3867 * this OS breaks the PowerPC virtualisation model, 3868 * allowing userland application to read the PVR 3869 */ 3870 if (sprn != SPR_PVR) { 3871 fprintf(stderr, "Trying to read privileged spr %d (0x%03x) at " 3872 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4); 3873 if (qemu_log_separate()) { 3874 qemu_log("Trying to read privileged spr %d (0x%03x) at " 3875 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4); 3876 } 3877 } 3878 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG); 3879 } 3880 } else { 3881 /* ISA 2.07 defines these as no-ops */ 3882 if ((ctx->insns_flags2 & PPC2_ISA207S) && 3883 (sprn >= 808 && sprn <= 811)) { 3884 /* This is a nop */ 3885 return; 3886 } 3887 /* Not defined */ 3888 fprintf(stderr, "Trying to read invalid spr %d (0x%03x) at " 3889 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4); 3890 if (qemu_log_separate()) { 3891 qemu_log("Trying to read invalid spr %d (0x%03x) at " 3892 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4); 3893 } 3894 3895 /* The behaviour depends on MSR:PR and SPR# bit 0x10, 3896 * it can generate a priv, a hv emu or a no-op 3897 */ 3898 if (sprn & 0x10) { 3899 if (ctx->pr) { 3900 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 3901 } 3902 } else { 3903 if (ctx->pr || sprn == 0 || sprn == 4 || sprn == 5 || sprn == 6) { 3904 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 3905 } 3906 } 3907 } 3908 } 3909 3910 static void gen_mfspr(DisasContext *ctx) 3911 { 3912 gen_op_mfspr(ctx); 3913 } 3914 3915 /* mftb */ 3916 static void gen_mftb(DisasContext *ctx) 3917 { 3918 gen_op_mfspr(ctx); 3919 } 3920 3921 /* mtcrf mtocrf*/ 3922 static void gen_mtcrf(DisasContext *ctx) 3923 { 3924 uint32_t crm, crn; 3925 3926 crm = CRM(ctx->opcode); 3927 if (likely((ctx->opcode & 0x00100000))) { 3928 if (crm && ((crm & (crm - 1)) == 0)) { 3929 TCGv_i32 temp = tcg_temp_new_i32(); 3930 crn = ctz32 (crm); 3931 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]); 3932 
tcg_gen_shri_i32(temp, temp, crn * 4); 3933 tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf); 3934 tcg_temp_free_i32(temp); 3935 } 3936 } else { 3937 TCGv_i32 temp = tcg_temp_new_i32(); 3938 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]); 3939 for (crn = 0 ; crn < 8 ; crn++) { 3940 if (crm & (1 << crn)) { 3941 tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4); 3942 tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf); 3943 } 3944 } 3945 tcg_temp_free_i32(temp); 3946 } 3947 } 3948 3949 /* mtmsr */ 3950 #if defined(TARGET_PPC64) 3951 static void gen_mtmsrd(DisasContext *ctx) 3952 { 3953 CHK_SV; 3954 3955 #if !defined(CONFIG_USER_ONLY) 3956 if (ctx->opcode & 0x00010000) { 3957 /* Special form that does not need any synchronisation */ 3958 TCGv t0 = tcg_temp_new(); 3959 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE)); 3960 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE))); 3961 tcg_gen_or_tl(cpu_msr, cpu_msr, t0); 3962 tcg_temp_free(t0); 3963 } else { 3964 /* XXX: we need to update nip before the store 3965 * if we enter power saving mode, we will exit the loop 3966 * directly from ppc_store_msr 3967 */ 3968 gen_update_nip(ctx, ctx->nip); 3969 gen_helper_store_msr(cpu_env, cpu_gpr[rS(ctx->opcode)]); 3970 /* Must stop the translation as machine state (may have) changed */ 3971 /* Note that mtmsr is not always defined as context-synchronizing */ 3972 gen_stop_exception(ctx); 3973 } 3974 #endif /* !defined(CONFIG_USER_ONLY) */ 3975 } 3976 #endif /* defined(TARGET_PPC64) */ 3977 3978 static void gen_mtmsr(DisasContext *ctx) 3979 { 3980 CHK_SV; 3981 3982 #if !defined(CONFIG_USER_ONLY) 3983 if (ctx->opcode & 0x00010000) { 3984 /* Special form that does not need any synchronisation */ 3985 TCGv t0 = tcg_temp_new(); 3986 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE)); 3987 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE))); 3988 
tcg_gen_or_tl(cpu_msr, cpu_msr, t0); 3989 tcg_temp_free(t0); 3990 } else { 3991 TCGv msr = tcg_temp_new(); 3992 3993 /* XXX: we need to update nip before the store 3994 * if we enter power saving mode, we will exit the loop 3995 * directly from ppc_store_msr 3996 */ 3997 gen_update_nip(ctx, ctx->nip); 3998 #if defined(TARGET_PPC64) 3999 tcg_gen_deposit_tl(msr, cpu_msr, cpu_gpr[rS(ctx->opcode)], 0, 32); 4000 #else 4001 tcg_gen_mov_tl(msr, cpu_gpr[rS(ctx->opcode)]); 4002 #endif 4003 gen_helper_store_msr(cpu_env, msr); 4004 tcg_temp_free(msr); 4005 /* Must stop the translation as machine state (may have) changed */ 4006 /* Note that mtmsr is not always defined as context-synchronizing */ 4007 gen_stop_exception(ctx); 4008 } 4009 #endif 4010 } 4011 4012 /* mtspr */ 4013 static void gen_mtspr(DisasContext *ctx) 4014 { 4015 void (*write_cb)(DisasContext *ctx, int sprn, int gprn); 4016 uint32_t sprn = SPR(ctx->opcode); 4017 4018 #if defined(CONFIG_USER_ONLY) 4019 write_cb = ctx->spr_cb[sprn].uea_write; 4020 #else 4021 if (ctx->pr) { 4022 write_cb = ctx->spr_cb[sprn].uea_write; 4023 } else if (ctx->hv) { 4024 write_cb = ctx->spr_cb[sprn].hea_write; 4025 } else { 4026 write_cb = ctx->spr_cb[sprn].oea_write; 4027 } 4028 #endif 4029 if (likely(write_cb != NULL)) { 4030 if (likely(write_cb != SPR_NOACCESS)) { 4031 (*write_cb)(ctx, sprn, rS(ctx->opcode)); 4032 } else { 4033 /* Privilege exception */ 4034 fprintf(stderr, "Trying to write privileged spr %d (0x%03x) at " 4035 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4); 4036 if (qemu_log_separate()) { 4037 qemu_log("Trying to write privileged spr %d (0x%03x) at " 4038 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4); 4039 } 4040 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG); 4041 } 4042 } else { 4043 /* ISA 2.07 defines these as no-ops */ 4044 if ((ctx->insns_flags2 & PPC2_ISA207S) && 4045 (sprn >= 808 && sprn <= 811)) { 4046 /* This is a nop */ 4047 return; 4048 } 4049 4050 /* Not defined */ 4051 if (qemu_log_separate()) { 4052 
qemu_log("Trying to write invalid spr %d (0x%03x) at " 4053 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4); 4054 } 4055 fprintf(stderr, "Trying to write invalid spr %d (0x%03x) at " 4056 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4); 4057 4058 4059 /* The behaviour depends on MSR:PR and SPR# bit 0x10, 4060 * it can generate a priv, a hv emu or a no-op 4061 */ 4062 if (sprn & 0x10) { 4063 if (ctx->pr) { 4064 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 4065 } 4066 } else { 4067 if (ctx->pr || sprn == 0) { 4068 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 4069 } 4070 } 4071 } 4072 } 4073 4074 #if defined(TARGET_PPC64) 4075 /* setb */ 4076 static void gen_setb(DisasContext *ctx) 4077 { 4078 TCGv_i32 t0 = tcg_temp_new_i32(); 4079 TCGv_i32 t8 = tcg_temp_new_i32(); 4080 TCGv_i32 tm1 = tcg_temp_new_i32(); 4081 int crf = crfS(ctx->opcode); 4082 4083 tcg_gen_setcondi_i32(TCG_COND_GEU, t0, cpu_crf[crf], 4); 4084 tcg_gen_movi_i32(t8, 8); 4085 tcg_gen_movi_i32(tm1, -1); 4086 tcg_gen_movcond_i32(TCG_COND_GEU, t0, cpu_crf[crf], t8, tm1, t0); 4087 tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0); 4088 4089 tcg_temp_free_i32(t0); 4090 tcg_temp_free_i32(t8); 4091 tcg_temp_free_i32(tm1); 4092 } 4093 #endif 4094 4095 /*** Cache management ***/ 4096 4097 /* dcbf */ 4098 static void gen_dcbf(DisasContext *ctx) 4099 { 4100 /* XXX: specification says this is treated as a load by the MMU */ 4101 TCGv t0; 4102 gen_set_access_type(ctx, ACCESS_CACHE); 4103 t0 = tcg_temp_new(); 4104 gen_addr_reg_index(ctx, t0); 4105 gen_qemu_ld8u(ctx, t0, t0); 4106 tcg_temp_free(t0); 4107 } 4108 4109 /* dcbi (Supervisor only) */ 4110 static void gen_dcbi(DisasContext *ctx) 4111 { 4112 #if defined(CONFIG_USER_ONLY) 4113 GEN_PRIV; 4114 #else 4115 TCGv EA, val; 4116 4117 CHK_SV; 4118 EA = tcg_temp_new(); 4119 gen_set_access_type(ctx, ACCESS_CACHE); 4120 gen_addr_reg_index(ctx, EA); 4121 val = tcg_temp_new(); 4122 /* XXX: specification says this should be treated as a store by the MMU */ 4123 
gen_qemu_ld8u(ctx, val, EA); 4124 gen_qemu_st8(ctx, val, EA); 4125 tcg_temp_free(val); 4126 tcg_temp_free(EA); 4127 #endif /* defined(CONFIG_USER_ONLY) */ 4128 } 4129 4130 /* dcdst */ 4131 static void gen_dcbst(DisasContext *ctx) 4132 { 4133 /* XXX: specification say this is treated as a load by the MMU */ 4134 TCGv t0; 4135 gen_set_access_type(ctx, ACCESS_CACHE); 4136 t0 = tcg_temp_new(); 4137 gen_addr_reg_index(ctx, t0); 4138 gen_qemu_ld8u(ctx, t0, t0); 4139 tcg_temp_free(t0); 4140 } 4141 4142 /* dcbt */ 4143 static void gen_dcbt(DisasContext *ctx) 4144 { 4145 /* interpreted as no-op */ 4146 /* XXX: specification say this is treated as a load by the MMU 4147 * but does not generate any exception 4148 */ 4149 } 4150 4151 /* dcbtst */ 4152 static void gen_dcbtst(DisasContext *ctx) 4153 { 4154 /* interpreted as no-op */ 4155 /* XXX: specification say this is treated as a load by the MMU 4156 * but does not generate any exception 4157 */ 4158 } 4159 4160 /* dcbtls */ 4161 static void gen_dcbtls(DisasContext *ctx) 4162 { 4163 /* Always fails locking the cache */ 4164 TCGv t0 = tcg_temp_new(); 4165 gen_load_spr(t0, SPR_Exxx_L1CSR0); 4166 tcg_gen_ori_tl(t0, t0, L1CSR0_CUL); 4167 gen_store_spr(SPR_Exxx_L1CSR0, t0); 4168 tcg_temp_free(t0); 4169 } 4170 4171 /* dcbz */ 4172 static void gen_dcbz(DisasContext *ctx) 4173 { 4174 TCGv tcgv_addr; 4175 TCGv_i32 tcgv_op; 4176 4177 gen_set_access_type(ctx, ACCESS_CACHE); 4178 tcgv_addr = tcg_temp_new(); 4179 tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000); 4180 gen_addr_reg_index(ctx, tcgv_addr); 4181 gen_helper_dcbz(cpu_env, tcgv_addr, tcgv_op); 4182 tcg_temp_free(tcgv_addr); 4183 tcg_temp_free_i32(tcgv_op); 4184 } 4185 4186 /* dst / dstt */ 4187 static void gen_dst(DisasContext *ctx) 4188 { 4189 if (rA(ctx->opcode) == 0) { 4190 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 4191 } else { 4192 /* interpreted as no-op */ 4193 } 4194 } 4195 4196 /* dstst /dststt */ 4197 static void gen_dstst(DisasContext *ctx) 4198 { 4199 if 
(rA(ctx->opcode) == 0) { 4200 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 4201 } else { 4202 /* interpreted as no-op */ 4203 } 4204 4205 } 4206 4207 /* dss / dssall */ 4208 static void gen_dss(DisasContext *ctx) 4209 { 4210 /* interpreted as no-op */ 4211 } 4212 4213 /* icbi */ 4214 static void gen_icbi(DisasContext *ctx) 4215 { 4216 TCGv t0; 4217 gen_set_access_type(ctx, ACCESS_CACHE); 4218 t0 = tcg_temp_new(); 4219 gen_addr_reg_index(ctx, t0); 4220 gen_helper_icbi(cpu_env, t0); 4221 tcg_temp_free(t0); 4222 } 4223 4224 /* Optional: */ 4225 /* dcba */ 4226 static void gen_dcba(DisasContext *ctx) 4227 { 4228 /* interpreted as no-op */ 4229 /* XXX: specification say this is treated as a store by the MMU 4230 * but does not generate any exception 4231 */ 4232 } 4233 4234 /*** Segment register manipulation ***/ 4235 /* Supervisor only: */ 4236 4237 /* mfsr */ 4238 static void gen_mfsr(DisasContext *ctx) 4239 { 4240 #if defined(CONFIG_USER_ONLY) 4241 GEN_PRIV; 4242 #else 4243 TCGv t0; 4244 4245 CHK_SV; 4246 t0 = tcg_const_tl(SR(ctx->opcode)); 4247 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 4248 tcg_temp_free(t0); 4249 #endif /* defined(CONFIG_USER_ONLY) */ 4250 } 4251 4252 /* mfsrin */ 4253 static void gen_mfsrin(DisasContext *ctx) 4254 { 4255 #if defined(CONFIG_USER_ONLY) 4256 GEN_PRIV; 4257 #else 4258 TCGv t0; 4259 4260 CHK_SV; 4261 t0 = tcg_temp_new(); 4262 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28); 4263 tcg_gen_andi_tl(t0, t0, 0xF); 4264 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 4265 tcg_temp_free(t0); 4266 #endif /* defined(CONFIG_USER_ONLY) */ 4267 } 4268 4269 /* mtsr */ 4270 static void gen_mtsr(DisasContext *ctx) 4271 { 4272 #if defined(CONFIG_USER_ONLY) 4273 GEN_PRIV; 4274 #else 4275 TCGv t0; 4276 4277 CHK_SV; 4278 t0 = tcg_const_tl(SR(ctx->opcode)); 4279 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 4280 tcg_temp_free(t0); 4281 #endif /* defined(CONFIG_USER_ONLY) */ 4282 } 4283 4284 /* mtsrin */ 
4285 static void gen_mtsrin(DisasContext *ctx) 4286 { 4287 #if defined(CONFIG_USER_ONLY) 4288 GEN_PRIV; 4289 #else 4290 TCGv t0; 4291 CHK_SV; 4292 4293 t0 = tcg_temp_new(); 4294 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28); 4295 tcg_gen_andi_tl(t0, t0, 0xF); 4296 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rD(ctx->opcode)]); 4297 tcg_temp_free(t0); 4298 #endif /* defined(CONFIG_USER_ONLY) */ 4299 } 4300 4301 #if defined(TARGET_PPC64) 4302 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */ 4303 4304 /* mfsr */ 4305 static void gen_mfsr_64b(DisasContext *ctx) 4306 { 4307 #if defined(CONFIG_USER_ONLY) 4308 GEN_PRIV; 4309 #else 4310 TCGv t0; 4311 4312 CHK_SV; 4313 t0 = tcg_const_tl(SR(ctx->opcode)); 4314 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 4315 tcg_temp_free(t0); 4316 #endif /* defined(CONFIG_USER_ONLY) */ 4317 } 4318 4319 /* mfsrin */ 4320 static void gen_mfsrin_64b(DisasContext *ctx) 4321 { 4322 #if defined(CONFIG_USER_ONLY) 4323 GEN_PRIV; 4324 #else 4325 TCGv t0; 4326 4327 CHK_SV; 4328 t0 = tcg_temp_new(); 4329 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28); 4330 tcg_gen_andi_tl(t0, t0, 0xF); 4331 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 4332 tcg_temp_free(t0); 4333 #endif /* defined(CONFIG_USER_ONLY) */ 4334 } 4335 4336 /* mtsr */ 4337 static void gen_mtsr_64b(DisasContext *ctx) 4338 { 4339 #if defined(CONFIG_USER_ONLY) 4340 GEN_PRIV; 4341 #else 4342 TCGv t0; 4343 4344 CHK_SV; 4345 t0 = tcg_const_tl(SR(ctx->opcode)); 4346 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 4347 tcg_temp_free(t0); 4348 #endif /* defined(CONFIG_USER_ONLY) */ 4349 } 4350 4351 /* mtsrin */ 4352 static void gen_mtsrin_64b(DisasContext *ctx) 4353 { 4354 #if defined(CONFIG_USER_ONLY) 4355 GEN_PRIV; 4356 #else 4357 TCGv t0; 4358 4359 CHK_SV; 4360 t0 = tcg_temp_new(); 4361 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28); 4362 tcg_gen_andi_tl(t0, t0, 0xF); 4363 gen_helper_store_sr(cpu_env, t0, 
cpu_gpr[rS(ctx->opcode)]); 4364 tcg_temp_free(t0); 4365 #endif /* defined(CONFIG_USER_ONLY) */ 4366 } 4367 4368 /* slbmte */ 4369 static void gen_slbmte(DisasContext *ctx) 4370 { 4371 #if defined(CONFIG_USER_ONLY) 4372 GEN_PRIV; 4373 #else 4374 CHK_SV; 4375 4376 gen_helper_store_slb(cpu_env, cpu_gpr[rB(ctx->opcode)], 4377 cpu_gpr[rS(ctx->opcode)]); 4378 #endif /* defined(CONFIG_USER_ONLY) */ 4379 } 4380 4381 static void gen_slbmfee(DisasContext *ctx) 4382 { 4383 #if defined(CONFIG_USER_ONLY) 4384 GEN_PRIV; 4385 #else 4386 CHK_SV; 4387 4388 gen_helper_load_slb_esid(cpu_gpr[rS(ctx->opcode)], cpu_env, 4389 cpu_gpr[rB(ctx->opcode)]); 4390 #endif /* defined(CONFIG_USER_ONLY) */ 4391 } 4392 4393 static void gen_slbmfev(DisasContext *ctx) 4394 { 4395 #if defined(CONFIG_USER_ONLY) 4396 GEN_PRIV; 4397 #else 4398 CHK_SV; 4399 4400 gen_helper_load_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env, 4401 cpu_gpr[rB(ctx->opcode)]); 4402 #endif /* defined(CONFIG_USER_ONLY) */ 4403 } 4404 4405 static void gen_slbfee_(DisasContext *ctx) 4406 { 4407 #if defined(CONFIG_USER_ONLY) 4408 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); 4409 #else 4410 TCGLabel *l1, *l2; 4411 4412 if (unlikely(ctx->pr)) { 4413 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); 4414 return; 4415 } 4416 gen_helper_find_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env, 4417 cpu_gpr[rB(ctx->opcode)]); 4418 l1 = gen_new_label(); 4419 l2 = gen_new_label(); 4420 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 4421 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rS(ctx->opcode)], -1, l1); 4422 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ); 4423 tcg_gen_br(l2); 4424 gen_set_label(l1); 4425 tcg_gen_movi_tl(cpu_gpr[rS(ctx->opcode)], 0); 4426 gen_set_label(l2); 4427 #endif 4428 } 4429 #endif /* defined(TARGET_PPC64) */ 4430 4431 /*** Lookaside buffer management ***/ 4432 /* Optional & supervisor only: */ 4433 4434 /* tlbia */ 4435 static void gen_tlbia(DisasContext *ctx) 4436 { 4437 #if defined(CONFIG_USER_ONLY) 4438 GEN_PRIV; 4439 #else 
4440 CHK_HV; 4441 4442 gen_helper_tlbia(cpu_env); 4443 #endif /* defined(CONFIG_USER_ONLY) */ 4444 } 4445 4446 /* tlbiel */ 4447 static void gen_tlbiel(DisasContext *ctx) 4448 { 4449 #if defined(CONFIG_USER_ONLY) 4450 GEN_PRIV; 4451 #else 4452 CHK_SV; 4453 4454 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]); 4455 #endif /* defined(CONFIG_USER_ONLY) */ 4456 } 4457 4458 /* tlbie */ 4459 static void gen_tlbie(DisasContext *ctx) 4460 { 4461 #if defined(CONFIG_USER_ONLY) 4462 GEN_PRIV; 4463 #else 4464 TCGv_i32 t1; 4465 CHK_HV; 4466 4467 if (NARROW_MODE(ctx)) { 4468 TCGv t0 = tcg_temp_new(); 4469 tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]); 4470 gen_helper_tlbie(cpu_env, t0); 4471 tcg_temp_free(t0); 4472 } else { 4473 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]); 4474 } 4475 t1 = tcg_temp_new_i32(); 4476 tcg_gen_ld_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush)); 4477 tcg_gen_ori_i32(t1, t1, TLB_NEED_GLOBAL_FLUSH); 4478 tcg_gen_st_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush)); 4479 tcg_temp_free_i32(t1); 4480 #endif /* defined(CONFIG_USER_ONLY) */ 4481 } 4482 4483 /* tlbsync */ 4484 static void gen_tlbsync(DisasContext *ctx) 4485 { 4486 #if defined(CONFIG_USER_ONLY) 4487 GEN_PRIV; 4488 #else 4489 CHK_HV; 4490 4491 /* BookS does both ptesync and tlbsync make tlbsync a nop for server */ 4492 if (ctx->insns_flags & PPC_BOOKE) { 4493 gen_check_tlb_flush(ctx, true); 4494 } 4495 #endif /* defined(CONFIG_USER_ONLY) */ 4496 } 4497 4498 #if defined(TARGET_PPC64) 4499 /* slbia */ 4500 static void gen_slbia(DisasContext *ctx) 4501 { 4502 #if defined(CONFIG_USER_ONLY) 4503 GEN_PRIV; 4504 #else 4505 CHK_SV; 4506 4507 gen_helper_slbia(cpu_env); 4508 #endif /* defined(CONFIG_USER_ONLY) */ 4509 } 4510 4511 /* slbie */ 4512 static void gen_slbie(DisasContext *ctx) 4513 { 4514 #if defined(CONFIG_USER_ONLY) 4515 GEN_PRIV; 4516 #else 4517 CHK_SV; 4518 4519 gen_helper_slbie(cpu_env, cpu_gpr[rB(ctx->opcode)]); 4520 #endif /* defined(CONFIG_USER_ONLY) */ 4521 
} 4522 #endif /* defined(TARGET_PPC64) */ 4523 4524 /*** External control ***/ 4525 /* Optional: */ 4526 4527 /* eciwx */ 4528 static void gen_eciwx(DisasContext *ctx) 4529 { 4530 TCGv t0; 4531 /* Should check EAR[E] ! */ 4532 gen_set_access_type(ctx, ACCESS_EXT); 4533 t0 = tcg_temp_new(); 4534 gen_addr_reg_index(ctx, t0); 4535 gen_check_align(ctx, t0, 0x03); 4536 gen_qemu_ld32u(ctx, cpu_gpr[rD(ctx->opcode)], t0); 4537 tcg_temp_free(t0); 4538 } 4539 4540 /* ecowx */ 4541 static void gen_ecowx(DisasContext *ctx) 4542 { 4543 TCGv t0; 4544 /* Should check EAR[E] ! */ 4545 gen_set_access_type(ctx, ACCESS_EXT); 4546 t0 = tcg_temp_new(); 4547 gen_addr_reg_index(ctx, t0); 4548 gen_check_align(ctx, t0, 0x03); 4549 gen_qemu_st32(ctx, cpu_gpr[rD(ctx->opcode)], t0); 4550 tcg_temp_free(t0); 4551 } 4552 4553 /* PowerPC 601 specific instructions */ 4554 4555 /* abs - abs. */ 4556 static void gen_abs(DisasContext *ctx) 4557 { 4558 TCGLabel *l1 = gen_new_label(); 4559 TCGLabel *l2 = gen_new_label(); 4560 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l1); 4561 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4562 tcg_gen_br(l2); 4563 gen_set_label(l1); 4564 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4565 gen_set_label(l2); 4566 if (unlikely(Rc(ctx->opcode) != 0)) 4567 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4568 } 4569 4570 /* abso - abso. 
*/
/* abso - abso. : rD = |rA|, setting XER[OV]/[SO] when rA == INT_MIN
 * (the only value whose absolute value overflows). */
static void gen_abso(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    TCGLabel *l3 = gen_new_label();
    /* Start with XER OV disabled, the most likely case */
    tcg_gen_movi_tl(cpu_ov, 0);
    tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l2);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[rA(ctx->opcode)], 0x80000000, l1);
    tcg_gen_movi_tl(cpu_ov, 1);
    tcg_gen_movi_tl(cpu_so, 1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_br(l3);
    gen_set_label(l2);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    gen_set_label(l3);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* clcs - cache line compute size (601); handled entirely in the helper */
static void gen_clcs(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_const_i32(rA(ctx->opcode));
    gen_helper_clcs(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free_i32(t0);
    /* Rc=1 sets CR0 to an undefined state */
}

/* div - div. : 601 divide using MQ; helper handles MQ and corner cases */
static void gen_div(DisasContext *ctx)
{
    gen_helper_div(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)],
                   cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* divo - divo. : as div but also updates XER[OV]/[SO] (in the helper) */
static void gen_divo(DisasContext *ctx)
{
    gen_helper_divo(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)],
                    cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* divs - divs. : 601 divide short, helper-based */
static void gen_divs(DisasContext *ctx)
{
    gen_helper_divs(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)],
                    cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* divso - divso. : divs with XER[OV]/[SO] update (in the helper) */
static void gen_divso(DisasContext *ctx)
{
    gen_helper_divso(cpu_gpr[rD(ctx->opcode)], cpu_env,
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* doz - doz. : difference or zero: rD = (rB > rA) ? rB - rA : 0 */
static void gen_doz(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1);
    tcg_gen_sub_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
    gen_set_label(l2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* dozo - dozo. : doz with signed-subtract overflow detection into XER */
static void gen_dozo(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    /* Start with XER OV disabled, the most likely case */
    tcg_gen_movi_tl(cpu_ov, 0);
    tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1);
    tcg_gen_sub_tl(t0, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    /* overflow iff operands' signs differ and result sign differs from rB's:
     * t1 = rB ^ rA, t2 = rA ^ result, overflow = t1 & ~t2 has sign bit set */
    tcg_gen_xor_tl(t1, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_xor_tl(t2, cpu_gpr[rA(ctx->opcode)], t0);
    tcg_gen_andc_tl(t1, t1, t2);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
    tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
    tcg_gen_movi_tl(cpu_ov, 1);
    tcg_gen_movi_tl(cpu_so, 1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
    gen_set_label(l2);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* dozi : difference or zero immediate: rD = (SIMM > rA) ? SIMM - rA : 0 */
static void
gen_dozi(DisasContext *ctx)
{
    target_long simm = SIMM(ctx->opcode);
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_LT, cpu_gpr[rA(ctx->opcode)], simm, l1);
    tcg_gen_subfi_tl(cpu_gpr[rD(ctx->opcode)], simm, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
    gen_set_label(l2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* lscbx - lscbx. : load string and compare byte indexed (601).
 * The helper does the load/compare; its result (bytes transferred) goes
 * into the low 7 bits of XER. */
static void gen_lscbx(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode));
    TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode));
    TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode));

    gen_addr_reg_index(ctx, t0);
    gen_helper_lscbx(t0, cpu_env, t0, t1, t2, t3);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
    tcg_temp_free_i32(t3);
    /* Merge the byte count into XER[0:6] */
    tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F);
    tcg_gen_or_tl(cpu_xer, cpu_xer, t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, t0);
    tcg_temp_free(t0);
}

/* maskg - maskg. : generate mask from bit positions in rS and rB (601) */
static void gen_maskg(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    TCGv t3 = tcg_temp_new();
    tcg_gen_movi_tl(t3, 0xFFFFFFFF);
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_andi_tl(t1, cpu_gpr[rS(ctx->opcode)], 0x1F);
    tcg_gen_addi_tl(t2, t0, 1);
    tcg_gen_shr_tl(t2, t3, t2);
    tcg_gen_shr_tl(t3, t3, t1);
    tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], t2, t3);
    /* Wrap-around mask (start > stop) is the complement */
    tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
    tcg_gen_neg_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    gen_set_label(l1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    tcg_temp_free(t3);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* maskir - maskir. : insert rS into rA under mask rB:
 * rA = (rS & rB) | (rA & ~rB) */
static void gen_maskir(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_and_tl(t0, cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* mul - mul.
*/
/* mul - mul. : 601 multiply: rD = high word of rA*rB, MQ = low word */
static void gen_mul(DisasContext *ctx)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv t2 = tcg_temp_new();
    tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mul_i64(t0, t0, t1);
    /* low 32 bits -> MQ */
    tcg_gen_trunc_i64_tl(t2, t0);
    gen_store_spr(SPR_MQ, t2);
    /* high 32 bits -> rD */
    tcg_gen_shri_i64(t1, t0, 32);
    tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* mulo - mulo. : mul with XER[OV]/[SO] set when the 64-bit product does
 * not fit in a signed 32-bit word */
static void gen_mulo(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv t2 = tcg_temp_new();
    /* Start with XER OV disabled, the most likely case */
    tcg_gen_movi_tl(cpu_ov, 0);
    tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mul_i64(t0, t0, t1);
    tcg_gen_trunc_i64_tl(t2, t0);
    gen_store_spr(SPR_MQ, t2);
    tcg_gen_shri_i64(t1, t0, 32);
    tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
    /* Overflow iff the product differs from its own 32-bit sign-extension */
    tcg_gen_ext32s_i64(t1, t0);
    tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1);
    tcg_gen_movi_tl(cpu_ov, 1);
    tcg_gen_movi_tl(cpu_so, 1);
    gen_set_label(l1);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* nabs - nabs. : rD = -|rA| (never overflows) */
static void gen_nabs(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    gen_set_label(l2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* nabso - nabso. : rD = -|rA|; always clears XER[OV] */
static void gen_nabso(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    gen_set_label(l2);
    /* nabs never overflows */
    tcg_gen_movi_tl(cpu_ov, 0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* rlmi - rlmi. : rotate left then insert under the MB..ME mask:
 * rA = (rotl(rS, rB & 0x1F) & MASK) | (rA & ~MASK) */
static void gen_rlmi(DisasContext *ctx)
{
    uint32_t mb = MB(ctx->opcode);
    uint32_t me = ME(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    tcg_gen_andi_tl(t0, t0, MASK(mb, me));
    tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~MASK(mb, me));
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* rrib - rrib.
*/
/* rrib - rrib. : rotate right and insert bit: bit 0 of rS is rotated right
 * by (rB & 0x1F) and inserted into rA at that position */
static void gen_rrib(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
    /* t1 = single-bit mask at the target position */
    tcg_gen_movi_tl(t1, 0x80000000);
    tcg_gen_shr_tl(t1, t1, t0);
    tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    tcg_gen_and_tl(t0, t0, t1);
    tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], t1);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* sle - sle. : shift left extended: rA = rS << n, MQ = rotl(rS, n) */
static void gen_sle(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_subfi_tl(t1, 32, t1);
    tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_or_tl(t1, t0, t1);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    gen_store_spr(SPR_MQ, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* sleq - sleq. : shift left extended with MQ: rotate rS left, merge with
 * previous MQ under the shift mask, update MQ with the rotated value */
static void gen_sleq(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_movi_tl(t2, 0xFFFFFFFF);
    tcg_gen_shl_tl(t2, t2, t0);
    tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    gen_load_spr(t1, SPR_MQ);
    gen_store_spr(SPR_MQ, t0);
    tcg_gen_and_tl(t0, t0, t2);
    tcg_gen_andc_tl(t1, t1, t2);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* sliq - sliq. : shift left immediate with MQ: rA = rS << sh,
 * MQ = rotl(rS, sh) */
static void gen_sliq(DisasContext *ctx)
{
    int sh = SH(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_shli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
    tcg_gen_shri_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
    tcg_gen_or_tl(t1, t0, t1);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    gen_store_spr(SPR_MQ, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* slliq - slliq. : shift left long immediate with MQ: merge rotl(rS, sh)
 * with previous MQ under the shift mask */
static void gen_slliq(DisasContext *ctx)
{
    int sh = SH(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
    gen_load_spr(t1, SPR_MQ);
    gen_store_spr(SPR_MQ, t0);
    tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU << sh));
    tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU << sh));
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* sllq - sllq.
*/
/* sllq - sllq. : shift left long with MQ.  If rB bit 0x20 is set the
 * result comes entirely from MQ; otherwise the shifted rS is merged with
 * MQ under the shift mask.  Local temps: values live across brcond. */
static void gen_sllq(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    TCGv t0 = tcg_temp_local_new();
    TCGv t1 = tcg_temp_local_new();
    TCGv t2 = tcg_temp_local_new();
    tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_movi_tl(t1, 0xFFFFFFFF);
    tcg_gen_shl_tl(t1, t1, t2);
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
    /* shift amount >= 32: result is MQ masked by the shift mask */
    gen_load_spr(t0, SPR_MQ);
    tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    /* shift amount < 32: merge shifted rS with MQ outside the mask */
    tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
    gen_load_spr(t2, SPR_MQ);
    tcg_gen_andc_tl(t1, t2, t1);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    gen_set_label(l2);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* slq - slq. : shift left with MQ: MQ = rotl(rS, n); rA = rS << n,
 * or 0 when rB bit 0x20 (shift >= 32) is set */
static void gen_slq(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_subfi_tl(t1, 32, t1);
    tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_or_tl(t1, t0, t1);
    gen_store_spr(SPR_MQ, t1);
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
    tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
    gen_set_label(l1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* sraiq - sraiq.
*/ 5001 static void gen_sraiq(DisasContext *ctx) 5002 { 5003 int sh = SH(ctx->opcode); 5004 TCGLabel *l1 = gen_new_label(); 5005 TCGv t0 = tcg_temp_new(); 5006 TCGv t1 = tcg_temp_new(); 5007 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5008 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 5009 tcg_gen_or_tl(t0, t0, t1); 5010 gen_store_spr(SPR_MQ, t0); 5011 tcg_gen_movi_tl(cpu_ca, 0); 5012 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1); 5013 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1); 5014 tcg_gen_movi_tl(cpu_ca, 1); 5015 gen_set_label(l1); 5016 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh); 5017 tcg_temp_free(t0); 5018 tcg_temp_free(t1); 5019 if (unlikely(Rc(ctx->opcode) != 0)) 5020 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5021 } 5022 5023 /* sraq - sraq. */ 5024 static void gen_sraq(DisasContext *ctx) 5025 { 5026 TCGLabel *l1 = gen_new_label(); 5027 TCGLabel *l2 = gen_new_label(); 5028 TCGv t0 = tcg_temp_new(); 5029 TCGv t1 = tcg_temp_local_new(); 5030 TCGv t2 = tcg_temp_local_new(); 5031 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 5032 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 5033 tcg_gen_sar_tl(t1, cpu_gpr[rS(ctx->opcode)], t2); 5034 tcg_gen_subfi_tl(t2, 32, t2); 5035 tcg_gen_shl_tl(t2, cpu_gpr[rS(ctx->opcode)], t2); 5036 tcg_gen_or_tl(t0, t0, t2); 5037 gen_store_spr(SPR_MQ, t0); 5038 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 5039 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l1); 5040 tcg_gen_mov_tl(t2, cpu_gpr[rS(ctx->opcode)]); 5041 tcg_gen_sari_tl(t1, cpu_gpr[rS(ctx->opcode)], 31); 5042 gen_set_label(l1); 5043 tcg_temp_free(t0); 5044 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t1); 5045 tcg_gen_movi_tl(cpu_ca, 0); 5046 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2); 5047 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l2); 5048 tcg_gen_movi_tl(cpu_ca, 1); 5049 gen_set_label(l2); 5050 tcg_temp_free(t1); 5051 tcg_temp_free(t2); 5052 if (unlikely(Rc(ctx->opcode) != 0)) 5053 gen_set_Rc0(ctx, 
cpu_gpr[rA(ctx->opcode)]); 5054 } 5055 5056 /* sre - sre. */ 5057 static void gen_sre(DisasContext *ctx) 5058 { 5059 TCGv t0 = tcg_temp_new(); 5060 TCGv t1 = tcg_temp_new(); 5061 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5062 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5063 tcg_gen_subfi_tl(t1, 32, t1); 5064 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 5065 tcg_gen_or_tl(t1, t0, t1); 5066 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5067 gen_store_spr(SPR_MQ, t1); 5068 tcg_temp_free(t0); 5069 tcg_temp_free(t1); 5070 if (unlikely(Rc(ctx->opcode) != 0)) 5071 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5072 } 5073 5074 /* srea - srea. */ 5075 static void gen_srea(DisasContext *ctx) 5076 { 5077 TCGv t0 = tcg_temp_new(); 5078 TCGv t1 = tcg_temp_new(); 5079 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5080 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5081 gen_store_spr(SPR_MQ, t0); 5082 tcg_gen_sar_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t1); 5083 tcg_temp_free(t0); 5084 tcg_temp_free(t1); 5085 if (unlikely(Rc(ctx->opcode) != 0)) 5086 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5087 } 5088 5089 /* sreq */ 5090 static void gen_sreq(DisasContext *ctx) 5091 { 5092 TCGv t0 = tcg_temp_new(); 5093 TCGv t1 = tcg_temp_new(); 5094 TCGv t2 = tcg_temp_new(); 5095 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5096 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 5097 tcg_gen_shr_tl(t1, t1, t0); 5098 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 5099 gen_load_spr(t2, SPR_MQ); 5100 gen_store_spr(SPR_MQ, t0); 5101 tcg_gen_and_tl(t0, t0, t1); 5102 tcg_gen_andc_tl(t2, t2, t1); 5103 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t2); 5104 tcg_temp_free(t0); 5105 tcg_temp_free(t1); 5106 tcg_temp_free(t2); 5107 if (unlikely(Rc(ctx->opcode) != 0)) 5108 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5109 } 5110 5111 /* sriq */ 5112 static void gen_sriq(DisasContext *ctx) 5113 { 5114 int sh = SH(ctx->opcode); 5115 TCGv t0 = tcg_temp_new(); 5116 TCGv t1 
= tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
    tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
    tcg_gen_or_tl(t1, t0, t1);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    gen_store_spr(SPR_MQ, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* srliq : shift right long immediate with MQ: merge rotr(rS, sh) with
 * previous MQ under the right-shift mask */
static void gen_srliq(DisasContext *ctx)
{
    int sh = SH(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_rotri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
    gen_load_spr(t1, SPR_MQ);
    gen_store_spr(SPR_MQ, t0);
    tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU >> sh));
    tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU >> sh));
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* srlq : shift right long with MQ.  If rB bit 0x20 is set the result
 * comes from MQ masked; otherwise the shifted rS is merged with MQ.
 * Local temps: values live across brcond. */
static void gen_srlq(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    TCGv t0 = tcg_temp_local_new();
    TCGv t1 = tcg_temp_local_new();
    TCGv t2 = tcg_temp_local_new();
    tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_movi_tl(t1, 0xFFFFFFFF);
    tcg_gen_shr_tl(t2, t1, t2);
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
    /* shift amount >= 32: result is MQ under the mask */
    gen_load_spr(t0, SPR_MQ);
    tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
    tcg_gen_br(l2);
    gen_set_label(l1);
    /* shift amount < 32: merge shifted rS with MQ outside the mask */
    tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
    tcg_gen_and_tl(t0, t0, t2);
    gen_load_spr(t1, SPR_MQ);
    tcg_gen_andc_tl(t1, t1, t2);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    gen_set_label(l2);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx,
cpu_gpr[rA(ctx->opcode)]); 5174 } 5175 5176 /* srq */ 5177 static void gen_srq(DisasContext *ctx) 5178 { 5179 TCGLabel *l1 = gen_new_label(); 5180 TCGv t0 = tcg_temp_new(); 5181 TCGv t1 = tcg_temp_new(); 5182 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5183 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5184 tcg_gen_subfi_tl(t1, 32, t1); 5185 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 5186 tcg_gen_or_tl(t1, t0, t1); 5187 gen_store_spr(SPR_MQ, t1); 5188 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20); 5189 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5190 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 5191 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 5192 gen_set_label(l1); 5193 tcg_temp_free(t0); 5194 tcg_temp_free(t1); 5195 if (unlikely(Rc(ctx->opcode) != 0)) 5196 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5197 } 5198 5199 /* PowerPC 602 specific instructions */ 5200 5201 /* dsa */ 5202 static void gen_dsa(DisasContext *ctx) 5203 { 5204 /* XXX: TODO */ 5205 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5206 } 5207 5208 /* esa */ 5209 static void gen_esa(DisasContext *ctx) 5210 { 5211 /* XXX: TODO */ 5212 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5213 } 5214 5215 /* mfrom */ 5216 static void gen_mfrom(DisasContext *ctx) 5217 { 5218 #if defined(CONFIG_USER_ONLY) 5219 GEN_PRIV; 5220 #else 5221 CHK_SV; 5222 gen_helper_602_mfrom(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5223 #endif /* defined(CONFIG_USER_ONLY) */ 5224 } 5225 5226 /* 602 - 603 - G2 TLB management */ 5227 5228 /* tlbld */ 5229 static void gen_tlbld_6xx(DisasContext *ctx) 5230 { 5231 #if defined(CONFIG_USER_ONLY) 5232 GEN_PRIV; 5233 #else 5234 CHK_SV; 5235 gen_helper_6xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5236 #endif /* defined(CONFIG_USER_ONLY) */ 5237 } 5238 5239 /* tlbli */ 5240 static void gen_tlbli_6xx(DisasContext *ctx) 5241 { 5242 #if defined(CONFIG_USER_ONLY) 5243 GEN_PRIV; 5244 #else 5245 CHK_SV; 5246 gen_helper_6xx_tlbi(cpu_env, 
cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* 74xx TLB management */

/* tlbld : load data TLB entry for the EA in rB; supervisor-only */
static void gen_tlbld_74xx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_helper_74xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbli : load instruction TLB entry for the EA in rB; supervisor-only */
static void gen_tlbli_74xx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_helper_74xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* POWER instructions not in PowerPC 601 */

/* clf */
static void gen_clf(DisasContext *ctx)
{
    /* Cache line flush: implemented as no-op */
}

/* cli */
static void gen_cli(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    /* Cache line invalidate: privileged and treated as no-op */
    CHK_SV;
#endif /* defined(CONFIG_USER_ONLY) */
}

/* dclst */
static void gen_dclst(DisasContext *ctx)
{
    /* Data cache line store: treated as no-op */
}

/* mfsri : move from segment register indirect: rD = SR[EA[0:3]];
 * supervisor-only.  Also copies the result to rA when rA != 0, rD. */
static void gen_mfsri(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    int ra = rA(ctx->opcode);
    int rd = rD(ctx->opcode);
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    /* segment register index = top 4 bits of the EA */
    tcg_gen_shri_tl(t0, t0, 28);
    tcg_gen_andi_tl(t0, t0, 0xF);
    gen_helper_load_sr(cpu_gpr[rd], cpu_env, t0);
    tcg_temp_free(t0);
    if (ra != 0 && ra != rd)
        tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rd]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* rac : real address compute; supervisor-only, helper-based */
static void gen_rac(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
gen_helper_rac(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5331 tcg_temp_free(t0); 5332 #endif /* defined(CONFIG_USER_ONLY) */ 5333 } 5334 5335 static void gen_rfsvc(DisasContext *ctx) 5336 { 5337 #if defined(CONFIG_USER_ONLY) 5338 GEN_PRIV; 5339 #else 5340 CHK_SV; 5341 5342 gen_helper_rfsvc(cpu_env); 5343 gen_sync_exception(ctx); 5344 #endif /* defined(CONFIG_USER_ONLY) */ 5345 } 5346 5347 /* svc is not implemented for now */ 5348 5349 /* BookE specific instructions */ 5350 5351 /* XXX: not implemented on 440 ? */ 5352 static void gen_mfapidi(DisasContext *ctx) 5353 { 5354 /* XXX: TODO */ 5355 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5356 } 5357 5358 /* XXX: not implemented on 440 ? */ 5359 static void gen_tlbiva(DisasContext *ctx) 5360 { 5361 #if defined(CONFIG_USER_ONLY) 5362 GEN_PRIV; 5363 #else 5364 TCGv t0; 5365 5366 CHK_SV; 5367 t0 = tcg_temp_new(); 5368 gen_addr_reg_index(ctx, t0); 5369 gen_helper_tlbiva(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5370 tcg_temp_free(t0); 5371 #endif /* defined(CONFIG_USER_ONLY) */ 5372 } 5373 5374 /* All 405 MAC instructions are translated here */ 5375 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3, 5376 int ra, int rb, int rt, int Rc) 5377 { 5378 TCGv t0, t1; 5379 5380 t0 = tcg_temp_local_new(); 5381 t1 = tcg_temp_local_new(); 5382 5383 switch (opc3 & 0x0D) { 5384 case 0x05: 5385 /* macchw - macchw. - macchwo - macchwo. */ 5386 /* macchws - macchws. - macchwso - macchwso. */ 5387 /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */ 5388 /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */ 5389 /* mulchw - mulchw. */ 5390 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]); 5391 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16); 5392 tcg_gen_ext16s_tl(t1, t1); 5393 break; 5394 case 0x04: 5395 /* macchwu - macchwu. - macchwuo - macchwuo. */ 5396 /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */ 5397 /* mulchwu - mulchwu. 
*/
        /* unsigned: low half of ra * high half of rb */
        tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
        tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
        tcg_gen_ext16u_tl(t1, t1);
        break;
    case 0x01:
        /* machhw - machhw. - machhwo - machhwo. */
        /* machhws - machhws. - machhwso - machhwso. */
        /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */
        /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */
        /* mulhhw - mulhhw. */
        /* signed: high halves of ra and rb */
        tcg_gen_sari_tl(t0, cpu_gpr[ra], 16);
        tcg_gen_ext16s_tl(t0, t0);
        tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
        tcg_gen_ext16s_tl(t1, t1);
        break;
    case 0x00:
        /* machhwu - machhwu. - machhwuo - machhwuo. */
        /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */
        /* mulhhwu - mulhhwu. */
        /* unsigned: high halves of ra and rb */
        tcg_gen_shri_tl(t0, cpu_gpr[ra], 16);
        tcg_gen_ext16u_tl(t0, t0);
        tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
        tcg_gen_ext16u_tl(t1, t1);
        break;
    case 0x0D:
        /* maclhw - maclhw. - maclhwo - maclhwo. */
        /* maclhws - maclhws. - maclhwso - maclhwso. */
        /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */
        /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */
        /* mullhw - mullhw. */
        /* signed: low halves of ra and rb */
        tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
        tcg_gen_ext16s_tl(t1, cpu_gpr[rb]);
        break;
    case 0x0C:
        /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */
        /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */
        /* mullhwu - mullhwu. */
        /* unsigned: low halves of ra and rb */
        tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
        tcg_gen_ext16u_tl(t1, cpu_gpr[rb]);
        break;
    }
    if (opc2 & 0x04) {
        /* (n)multiply-and-accumulate (0x0C / 0x0E) */
        tcg_gen_mul_tl(t1, t0, t1);
        if (opc2 & 0x02) {
            /* nmultiply-and-accumulate (0x0E) */
            tcg_gen_sub_tl(t0, cpu_gpr[rt], t1);
        } else {
            /* multiply-and-accumulate (0x0C) */
            tcg_gen_add_tl(t0, cpu_gpr[rt], t1);
        }

        if (opc3 & 0x12) {
            /* Check overflow and/or saturate */
            TCGLabel *l1 = gen_new_label();

            if (opc3 & 0x10) {
                /* Start with XER OV disabled, the most likely case */
                tcg_gen_movi_tl(cpu_ov, 0);
            }
            if (opc3 & 0x01) {
                /* Signed */
                tcg_gen_xor_tl(t1, cpu_gpr[rt], t1);
                tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
                tcg_gen_xor_tl(t1, cpu_gpr[rt], t0);
                tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1);
                if (opc3 & 0x02) {
                    /* Saturate to INT32_MIN/INT32_MAX by the accumulator sign */
                    tcg_gen_sari_tl(t0, cpu_gpr[rt], 31);
                    tcg_gen_xori_tl(t0, t0, 0x7fffffff);
                }
            } else {
                /* Unsigned */
                tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
                if (opc3 & 0x02) {
                    /* Saturate */
                    tcg_gen_movi_tl(t0, UINT32_MAX);
                }
            }
            if (opc3 & 0x10) {
                /* Check overflow */
                tcg_gen_movi_tl(cpu_ov, 1);
                tcg_gen_movi_tl(cpu_so, 1);
            }
            gen_set_label(l1);
            tcg_gen_mov_tl(cpu_gpr[rt], t0);
        }
    } else {
        tcg_gen_mul_tl(cpu_gpr[rt], t0, t1);
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    /* NOTE(review): `unlikely(Rc) != 0` works (unlikely() preserves the
     * value) but `unlikely(Rc != 0)` was presumably intended. */
    if (unlikely(Rc) != 0) {
        /* Update Rc0 */
        gen_set_Rc0(ctx, cpu_gpr[rt]);
    }
}

/* Emit one handler per 405 MAC opcode, all delegating to
 * gen_405_mulladd_insn with the opcode-specific opc2/opc3 selectors. */
#define GEN_MAC_HANDLER(name, opc2, opc3)                                     \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode),   \
                         rD(ctx->opcode), Rc(ctx->opcode));                   \
}

/* macchw - macchw. */
GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
/* macchwo - macchwo.
*/
GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
/* macchws - macchws. */
GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
/* macchwso - macchwso. */
GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
/* macchwsu - macchwsu. */
GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
/* macchwsuo - macchwsuo. */
GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
/* macchwu - macchwu. */
GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
/* macchwuo - macchwuo. */
GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
/* machhw - machhw. */
GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
/* machhwo - machhwo. */
GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
/* machhws - machhws. */
GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
/* machhwso - machhwso. */
GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
/* machhwsu - machhwsu. */
GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
/* machhwsuo - machhwsuo. */
GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
/* machhwu - machhwu. */
GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
/* machhwuo - machhwuo. */
GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
/* maclhw - maclhw. */
GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
/* maclhwo - maclhwo. */
GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
/* maclhws - maclhws. */
GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
/* maclhwso - maclhwso. */
GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
/* maclhwu - maclhwu. */
GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
/* maclhwuo - maclhwuo. */
GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
/* maclhwsu - maclhwsu. */
GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
/* maclhwsuo - maclhwsuo. */
GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
/* nmacchw - nmacchw. */
GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
/* nmacchwo - nmacchwo. */
GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
/* nmacchws - nmacchws. */
GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
/* nmacchwso - nmacchwso. */
GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
/* nmachhw - nmachhw. */
GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
/* nmachhwo - nmachhwo. */
GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
/* nmachhws - nmachhws. */
GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
/* nmachhwso - nmachhwso. */
GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
/* nmaclhw - nmaclhw. */
GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
/* nmaclhwo - nmaclhwo. */
GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
/* nmaclhws - nmaclhws. */
GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
/* nmaclhwso - nmaclhwso. */
GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);

/* mulchw - mulchw. */
GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
/* mulchwu - mulchwu. */
GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
/* mulhhw - mulhhw. */
GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
/* mulhhwu - mulhhwu. */
GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
/* mullhw - mullhw. */
GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
/* mullhwu - mullhwu. */
GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);

/* mfdcr : move from device control register SPR(opcode); supervisor-only */
static void gen_mfdcr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv dcrn;

    CHK_SV;
    dcrn = tcg_const_tl(SPR(ctx->opcode));
    gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, dcrn);
    tcg_temp_free(dcrn);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mtdcr : move to device control register SPR(opcode); supervisor-only */
static void gen_mtdcr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv dcrn;

    CHK_SV;
    dcrn = tcg_const_tl(SPR(ctx->opcode));
    gen_helper_store_dcr(cpu_env, dcrn, cpu_gpr[rS(ctx->opcode)]);
    tcg_temp_free(dcrn);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mfdcrx */
/* XXX: not implemented on 440 ?
 */
static void gen_mfdcrx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
                        cpu_gpr[rA(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mtdcrx: indexed variant, DCR number taken from GPR[rA]. */
/* XXX: not implemented on 440 ? */
static void gen_mtdcrx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
                        cpu_gpr[rS(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mfdcrux (PPC 460) : user-mode access to DCR; no privilege check. */
static void gen_mfdcrux(DisasContext *ctx)
{
    gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
                        cpu_gpr[rA(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
}

/* mtdcrux (PPC 460) : user-mode access to DCR; no privilege check. */
static void gen_mtdcrux(DisasContext *ctx)
{
    gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
                         cpu_gpr[rS(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
}

/* dccci */
static void gen_dccci(DisasContext *ctx)
{
    CHK_SV;
    /* interpreted as no-op */
}

/* dcread: the load is emitted only for its MMU side effects; the loaded
 * value is discarded and rD receives the effective address instead. */
static void gen_dcread(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv EA, val;

    CHK_SV;
    gen_set_access_type(ctx, ACCESS_CACHE);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    val = tcg_temp_new();
    gen_qemu_ld32u(ctx, val, EA);
    tcg_temp_free(val);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
    tcg_temp_free(EA);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* icbt */
static void gen_icbt_40x(DisasContext *ctx)
{
    /* interpreted as no-op */
    /* XXX: specification says this is treated as a load by the MMU
     *      but does not generate any exception
     */
}

/* iccci */
static void gen_iccci(DisasContext *ctx)
{
    CHK_SV;
    /* interpreted as no-op */
}

/* icread */
static void gen_icread(DisasContext *ctx)
{
    CHK_SV;
    /* interpreted as no-op */
}

/* rfci (supervisor only) - 40x variant. */
static void gen_rfci_40x(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* Restore CPU state */
    gen_helper_40x_rfci(cpu_env);
    gen_sync_exception(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}

static void gen_rfci(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* Restore CPU state */
    gen_helper_rfci(cpu_env);
    gen_sync_exception(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* BookE specific */

/* XXX: not implemented on 440 ? */
static void gen_rfdi(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* Restore CPU state */
    gen_helper_rfdi(cpu_env);
    gen_sync_exception(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* XXX: not implemented on 440 ?
 */
static void gen_rfmci(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* Restore CPU state */
    gen_helper_rfmci(cpu_env);
    gen_sync_exception(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* TLB management - PowerPC 405 implementation */

/* tlbre: rB selects which word of the TLB entry is read (0 = hi, 1 = lo). */
static void gen_tlbre_40x(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    switch (rB(ctx->opcode)) {
    case 0:
        gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_env,
                                cpu_gpr[rA(ctx->opcode)]);
        break;
    case 1:
        gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_env,
                                cpu_gpr[rA(ctx->opcode)]);
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbsx - tlbsx. */
static void gen_tlbsx_40x(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
    if (Rc(ctx->opcode)) {
        TCGLabel *l1 = gen_new_label();
        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
        /* CR0[EQ] is set unless rD came back as -1 (presumably the
         * helper's "no matching entry" result - confirm in helper) */
        tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
        tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
        gen_set_label(l1);
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbwe: rB selects which word of the TLB entry is written (0 = hi, 1 = lo). */
static void gen_tlbwe_40x(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    switch (rB(ctx->opcode)) {
    case 0:
        gen_helper_4xx_tlbwe_hi(cpu_env, cpu_gpr[rA(ctx->opcode)],
                                cpu_gpr[rS(ctx->opcode)]);
        break;
    case 1:
        gen_helper_4xx_tlbwe_lo(cpu_env, cpu_gpr[rA(ctx->opcode)],
                                cpu_gpr[rS(ctx->opcode)]);
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* TLB management - PowerPC 440 implementation */

/* tlbre: on 440 rB (0..2) selects one of three entry words. */
static void gen_tlbre_440(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    switch (rB(ctx->opcode)) {
    case 0:
    case 1:
    case 2:
        {
            TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
            gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], cpu_env,
                                 t0, cpu_gpr[rA(ctx->opcode)]);
            tcg_temp_free_i32(t0);
        }
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbsx - tlbsx. */
static void gen_tlbsx_440(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
    if (Rc(ctx->opcode)) {
        TCGLabel *l1 = gen_new_label();
        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
        /* CR0[EQ] is set unless rD came back as -1 (no match) */
        tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
        tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
        gen_set_label(l1);
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbwe */
static void gen_tlbwe_440(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    switch (rB(ctx->opcode)) {
    case 0:
    case 1:
    case 2:
        {
            TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
            gen_helper_440_tlbwe(cpu_env, t0, cpu_gpr[rA(ctx->opcode)],
                                 cpu_gpr[rS(ctx->opcode)]);
            tcg_temp_free_i32(t0);
        }
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* TLB management -
PowerPC BookE 2.06 implementation */ 5915 5916 /* tlbre */ 5917 static void gen_tlbre_booke206(DisasContext *ctx) 5918 { 5919 #if defined(CONFIG_USER_ONLY) 5920 GEN_PRIV; 5921 #else 5922 CHK_SV; 5923 gen_helper_booke206_tlbre(cpu_env); 5924 #endif /* defined(CONFIG_USER_ONLY) */ 5925 } 5926 5927 /* tlbsx - tlbsx. */ 5928 static void gen_tlbsx_booke206(DisasContext *ctx) 5929 { 5930 #if defined(CONFIG_USER_ONLY) 5931 GEN_PRIV; 5932 #else 5933 TCGv t0; 5934 5935 CHK_SV; 5936 if (rA(ctx->opcode)) { 5937 t0 = tcg_temp_new(); 5938 tcg_gen_mov_tl(t0, cpu_gpr[rD(ctx->opcode)]); 5939 } else { 5940 t0 = tcg_const_tl(0); 5941 } 5942 5943 tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]); 5944 gen_helper_booke206_tlbsx(cpu_env, t0); 5945 tcg_temp_free(t0); 5946 #endif /* defined(CONFIG_USER_ONLY) */ 5947 } 5948 5949 /* tlbwe */ 5950 static void gen_tlbwe_booke206(DisasContext *ctx) 5951 { 5952 #if defined(CONFIG_USER_ONLY) 5953 GEN_PRIV; 5954 #else 5955 CHK_SV; 5956 gen_helper_booke206_tlbwe(cpu_env); 5957 #endif /* defined(CONFIG_USER_ONLY) */ 5958 } 5959 5960 static void gen_tlbivax_booke206(DisasContext *ctx) 5961 { 5962 #if defined(CONFIG_USER_ONLY) 5963 GEN_PRIV; 5964 #else 5965 TCGv t0; 5966 5967 CHK_SV; 5968 t0 = tcg_temp_new(); 5969 gen_addr_reg_index(ctx, t0); 5970 gen_helper_booke206_tlbivax(cpu_env, t0); 5971 tcg_temp_free(t0); 5972 #endif /* defined(CONFIG_USER_ONLY) */ 5973 } 5974 5975 static void gen_tlbilx_booke206(DisasContext *ctx) 5976 { 5977 #if defined(CONFIG_USER_ONLY) 5978 GEN_PRIV; 5979 #else 5980 TCGv t0; 5981 5982 CHK_SV; 5983 t0 = tcg_temp_new(); 5984 gen_addr_reg_index(ctx, t0); 5985 5986 switch((ctx->opcode >> 21) & 0x3) { 5987 case 0: 5988 gen_helper_booke206_tlbilx0(cpu_env, t0); 5989 break; 5990 case 1: 5991 gen_helper_booke206_tlbilx1(cpu_env, t0); 5992 break; 5993 case 3: 5994 gen_helper_booke206_tlbilx3(cpu_env, t0); 5995 break; 5996 default: 5997 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5998 break; 5999 } 6000 6001 
tcg_temp_free(t0); 6002 #endif /* defined(CONFIG_USER_ONLY) */ 6003 } 6004 6005 6006 /* wrtee */ 6007 static void gen_wrtee(DisasContext *ctx) 6008 { 6009 #if defined(CONFIG_USER_ONLY) 6010 GEN_PRIV; 6011 #else 6012 TCGv t0; 6013 6014 CHK_SV; 6015 t0 = tcg_temp_new(); 6016 tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE)); 6017 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE)); 6018 tcg_gen_or_tl(cpu_msr, cpu_msr, t0); 6019 tcg_temp_free(t0); 6020 /* Stop translation to have a chance to raise an exception 6021 * if we just set msr_ee to 1 6022 */ 6023 gen_stop_exception(ctx); 6024 #endif /* defined(CONFIG_USER_ONLY) */ 6025 } 6026 6027 /* wrteei */ 6028 static void gen_wrteei(DisasContext *ctx) 6029 { 6030 #if defined(CONFIG_USER_ONLY) 6031 GEN_PRIV; 6032 #else 6033 CHK_SV; 6034 if (ctx->opcode & 0x00008000) { 6035 tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE)); 6036 /* Stop translation to have a chance to raise an exception */ 6037 gen_stop_exception(ctx); 6038 } else { 6039 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE)); 6040 } 6041 #endif /* defined(CONFIG_USER_ONLY) */ 6042 } 6043 6044 /* PowerPC 440 specific instructions */ 6045 6046 /* dlmzb */ 6047 static void gen_dlmzb(DisasContext *ctx) 6048 { 6049 TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode)); 6050 gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_env, 6051 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); 6052 tcg_temp_free_i32(t0); 6053 } 6054 6055 /* mbar replaces eieio on 440 */ 6056 static void gen_mbar(DisasContext *ctx) 6057 { 6058 /* interpreted as no-op */ 6059 } 6060 6061 /* msync replaces sync on 440 */ 6062 static void gen_msync_4xx(DisasContext *ctx) 6063 { 6064 /* interpreted as no-op */ 6065 } 6066 6067 /* icbt */ 6068 static void gen_icbt_440(DisasContext *ctx) 6069 { 6070 /* interpreted as no-op */ 6071 /* XXX: specification say this is treated as a load by the MMU 6072 * but does not generate any exception 6073 */ 6074 } 6075 6076 /* Embedded.Processor Control */ 6077 
6078 static void gen_msgclr(DisasContext *ctx) 6079 { 6080 #if defined(CONFIG_USER_ONLY) 6081 GEN_PRIV; 6082 #else 6083 CHK_SV; 6084 gen_helper_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]); 6085 #endif /* defined(CONFIG_USER_ONLY) */ 6086 } 6087 6088 static void gen_msgsnd(DisasContext *ctx) 6089 { 6090 #if defined(CONFIG_USER_ONLY) 6091 GEN_PRIV; 6092 #else 6093 CHK_SV; 6094 gen_helper_msgsnd(cpu_gpr[rB(ctx->opcode)]); 6095 #endif /* defined(CONFIG_USER_ONLY) */ 6096 } 6097 6098 6099 #if defined(TARGET_PPC64) 6100 static void gen_maddld(DisasContext *ctx) 6101 { 6102 TCGv_i64 t1 = tcg_temp_new_i64(); 6103 6104 tcg_gen_mul_i64(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 6105 tcg_gen_add_i64(cpu_gpr[rD(ctx->opcode)], t1, cpu_gpr[rC(ctx->opcode)]); 6106 tcg_temp_free_i64(t1); 6107 } 6108 6109 /* maddhd maddhdu */ 6110 static void gen_maddhd_maddhdu(DisasContext *ctx) 6111 { 6112 TCGv_i64 lo = tcg_temp_new_i64(); 6113 TCGv_i64 hi = tcg_temp_new_i64(); 6114 TCGv_i64 t1 = tcg_temp_new_i64(); 6115 6116 if (Rc(ctx->opcode)) { 6117 tcg_gen_mulu2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)], 6118 cpu_gpr[rB(ctx->opcode)]); 6119 tcg_gen_movi_i64(t1, 0); 6120 } else { 6121 tcg_gen_muls2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)], 6122 cpu_gpr[rB(ctx->opcode)]); 6123 tcg_gen_sari_i64(t1, cpu_gpr[rC(ctx->opcode)], 63); 6124 } 6125 tcg_gen_add2_i64(t1, cpu_gpr[rD(ctx->opcode)], lo, hi, 6126 cpu_gpr[rC(ctx->opcode)], t1); 6127 tcg_temp_free_i64(lo); 6128 tcg_temp_free_i64(hi); 6129 tcg_temp_free_i64(t1); 6130 } 6131 #endif /* defined(TARGET_PPC64) */ 6132 6133 static void gen_tbegin(DisasContext *ctx) 6134 { 6135 if (unlikely(!ctx->tm_enabled)) { 6136 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); 6137 return; 6138 } 6139 gen_helper_tbegin(cpu_env); 6140 } 6141 6142 #define GEN_TM_NOOP(name) \ 6143 static inline void gen_##name(DisasContext *ctx) \ 6144 { \ 6145 if (unlikely(!ctx->tm_enabled)) { \ 6146 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \ 6147 return; \ 6148 } 
\ 6149 /* Because tbegin always fails in QEMU, these user \ 6150 * space instructions all have a simple implementation: \ 6151 * \ 6152 * CR[0] = 0b0 || MSR[TS] || 0b0 \ 6153 * = 0b0 || 0b00 || 0b0 \ 6154 */ \ 6155 tcg_gen_movi_i32(cpu_crf[0], 0); \ 6156 } 6157 6158 GEN_TM_NOOP(tend); 6159 GEN_TM_NOOP(tabort); 6160 GEN_TM_NOOP(tabortwc); 6161 GEN_TM_NOOP(tabortwci); 6162 GEN_TM_NOOP(tabortdc); 6163 GEN_TM_NOOP(tabortdci); 6164 GEN_TM_NOOP(tsr); 6165 6166 static void gen_tcheck(DisasContext *ctx) 6167 { 6168 if (unlikely(!ctx->tm_enabled)) { 6169 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); 6170 return; 6171 } 6172 /* Because tbegin always fails, the tcheck implementation 6173 * is simple: 6174 * 6175 * CR[CRF] = TDOOMED || MSR[TS] || 0b0 6176 * = 0b1 || 0b00 || 0b0 6177 */ 6178 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0x8); 6179 } 6180 6181 #if defined(CONFIG_USER_ONLY) 6182 #define GEN_TM_PRIV_NOOP(name) \ 6183 static inline void gen_##name(DisasContext *ctx) \ 6184 { \ 6185 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); \ 6186 } 6187 6188 #else 6189 6190 #define GEN_TM_PRIV_NOOP(name) \ 6191 static inline void gen_##name(DisasContext *ctx) \ 6192 { \ 6193 CHK_SV; \ 6194 if (unlikely(!ctx->tm_enabled)) { \ 6195 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \ 6196 return; \ 6197 } \ 6198 /* Because tbegin always fails, the implementation is \ 6199 * simple: \ 6200 * \ 6201 * CR[0] = 0b0 || MSR[TS] || 0b0 \ 6202 * = 0b0 || 0b00 | 0b0 \ 6203 */ \ 6204 tcg_gen_movi_i32(cpu_crf[0], 0); \ 6205 } 6206 6207 #endif 6208 6209 GEN_TM_PRIV_NOOP(treclaim); 6210 GEN_TM_PRIV_NOOP(trechkpt); 6211 6212 #include "translate/fp-impl.inc.c" 6213 6214 #include "translate/vmx-impl.inc.c" 6215 6216 #include "translate/vsx-impl.inc.c" 6217 6218 #include "translate/dfp-impl.inc.c" 6219 6220 #include "translate/spe-impl.inc.c" 6221 6222 static opcode_t opcodes[] = { 6223 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE), 6224 GEN_HANDLER(cmp, 0x1F, 0x00, 0x00, 
0x00400000, PPC_INTEGER), 6225 GEN_HANDLER(cmpi, 0x0B, 0xFF, 0xFF, 0x00400000, PPC_INTEGER), 6226 GEN_HANDLER(cmpl, 0x1F, 0x00, 0x01, 0x00400001, PPC_INTEGER), 6227 GEN_HANDLER(cmpli, 0x0A, 0xFF, 0xFF, 0x00400000, PPC_INTEGER), 6228 #if defined(TARGET_PPC64) 6229 GEN_HANDLER_E(cmpeqb, 0x1F, 0x00, 0x07, 0x00600000, PPC_NONE, PPC2_ISA300), 6230 #endif 6231 GEN_HANDLER_E(cmpb, 0x1F, 0x1C, 0x0F, 0x00000001, PPC_NONE, PPC2_ISA205), 6232 GEN_HANDLER_E(cmprb, 0x1F, 0x00, 0x06, 0x00400001, PPC_NONE, PPC2_ISA300), 6233 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL), 6234 GEN_HANDLER(addi, 0x0E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6235 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6236 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6237 GEN_HANDLER(addis, 0x0F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6238 GEN_HANDLER_E(addpcis, 0x13, 0x2, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300), 6239 GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER), 6240 GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER), 6241 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER), 6242 GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER), 6243 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6244 #if defined(TARGET_PPC64) 6245 GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B), 6246 #endif 6247 GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER), 6248 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER), 6249 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6250 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6251 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6252 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER), 6253 GEN_HANDLER_E(cnttzw, 0x1F, 0x1A, 0x10, 0x00000000, PPC_NONE, PPC2_ISA300), 6254 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER), 6255 GEN_HANDLER(xor, 0x1F, 0x1C, 
0x09, 0x00000000, PPC_INTEGER), 6256 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6257 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6258 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6259 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6260 GEN_HANDLER(popcntb, 0x1F, 0x1A, 0x03, 0x0000F801, PPC_POPCNTB), 6261 GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD), 6262 GEN_HANDLER_E(prtyw, 0x1F, 0x1A, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA205), 6263 #if defined(TARGET_PPC64) 6264 GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD), 6265 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B), 6266 GEN_HANDLER_E(cnttzd, 0x1F, 0x1A, 0x11, 0x00000000, PPC_NONE, PPC2_ISA300), 6267 GEN_HANDLER_E(darn, 0x1F, 0x13, 0x17, 0x001CF801, PPC_NONE, PPC2_ISA300), 6268 GEN_HANDLER_E(prtyd, 0x1F, 0x1A, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA205), 6269 GEN_HANDLER_E(bpermd, 0x1F, 0x1C, 0x07, 0x00000001, PPC_NONE, PPC2_PERM_ISA206), 6270 #endif 6271 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6272 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6273 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6274 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER), 6275 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER), 6276 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER), 6277 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER), 6278 #if defined(TARGET_PPC64) 6279 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B), 6280 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B), 6281 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B), 6282 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B), 6283 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B), 6284 GEN_HANDLER2_E(extswsli0, "extswsli", 0x1F, 0x1A, 0x1B, 0x00000000, 6285 PPC_NONE, PPC2_ISA300), 6286 GEN_HANDLER2_E(extswsli1, 
"extswsli", 0x1F, 0x1B, 0x1B, 0x00000000, 6287 PPC_NONE, PPC2_ISA300), 6288 #endif 6289 #if defined(TARGET_PPC64) 6290 GEN_HANDLER(ld, 0x3A, 0xFF, 0xFF, 0x00000000, PPC_64B), 6291 GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX), 6292 GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B), 6293 #endif 6294 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6295 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6296 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING), 6297 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING), 6298 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING), 6299 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING), 6300 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x03FFF801, PPC_MEM_EIEIO), 6301 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM), 6302 GEN_HANDLER_E(lbarx, 0x1F, 0x14, 0x01, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6303 GEN_HANDLER_E(lharx, 0x1F, 0x14, 0x03, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6304 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES), 6305 GEN_HANDLER_E(stbcx_, 0x1F, 0x16, 0x15, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6306 GEN_HANDLER_E(sthcx_, 0x1F, 0x16, 0x16, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6307 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES), 6308 #if defined(TARGET_PPC64) 6309 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B), 6310 GEN_HANDLER_E(lqarx, 0x1F, 0x14, 0x08, 0, PPC_NONE, PPC2_LSQ_ISA207), 6311 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B), 6312 GEN_HANDLER_E(stqcx_, 0x1F, 0x16, 0x05, 0, PPC_NONE, PPC2_LSQ_ISA207), 6313 #endif 6314 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC), 6315 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT), 6316 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 6317 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 6318 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW), 6319 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 
0x00000000, PPC_FLOW), 6320 GEN_HANDLER_E(bctar, 0x13, 0x10, 0x11, 0x0000E000, PPC_NONE, PPC2_BCTAR_ISA207), 6321 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER), 6322 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW), 6323 #if defined(TARGET_PPC64) 6324 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B), 6325 GEN_HANDLER_E(doze, 0x13, 0x12, 0x0c, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6326 GEN_HANDLER_E(nap, 0x13, 0x12, 0x0d, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6327 GEN_HANDLER_E(sleep, 0x13, 0x12, 0x0e, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6328 GEN_HANDLER_E(rvwinkle, 0x13, 0x12, 0x0f, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6329 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H), 6330 #endif 6331 GEN_HANDLER(sc, 0x11, 0xFF, 0xFF, 0x03FFF01D, PPC_FLOW), 6332 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW), 6333 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 6334 #if defined(TARGET_PPC64) 6335 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B), 6336 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B), 6337 #endif 6338 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC), 6339 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC), 6340 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC), 6341 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC), 6342 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB), 6343 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC), 6344 #if defined(TARGET_PPC64) 6345 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B), 6346 GEN_HANDLER_E(setb, 0x1F, 0x00, 0x04, 0x0003F801, PPC_NONE, PPC2_ISA300), 6347 #endif 6348 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001EF801, PPC_MISC), 6349 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000000, PPC_MISC), 6350 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE), 6351 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE), 6352 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE), 6353 
GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x00000001, PPC_CACHE), 6354 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x00000001, PPC_CACHE), 6355 GEN_HANDLER_E(dcbtls, 0x1F, 0x06, 0x05, 0x02000001, PPC_BOOKE, PPC2_BOOKE206), 6356 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ), 6357 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC), 6358 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x02000001, PPC_ALTIVEC), 6359 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC), 6360 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI), 6361 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA), 6362 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT), 6363 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT), 6364 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT), 6365 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT), 6366 #if defined(TARGET_PPC64) 6367 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B), 6368 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001, 6369 PPC_SEGMENT_64B), 6370 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B), 6371 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001, 6372 PPC_SEGMENT_64B), 6373 GEN_HANDLER2(slbmte, "slbmte", 0x1F, 0x12, 0x0C, 0x001F0001, PPC_SEGMENT_64B), 6374 GEN_HANDLER2(slbmfee, "slbmfee", 0x1F, 0x13, 0x1C, 0x001F0001, PPC_SEGMENT_64B), 6375 GEN_HANDLER2(slbmfev, "slbmfev", 0x1F, 0x13, 0x1A, 0x001F0001, PPC_SEGMENT_64B), 6376 GEN_HANDLER2(slbfee_, "slbfee.", 0x1F, 0x13, 0x1E, 0x001F0000, PPC_SEGMENT_64B), 6377 #endif 6378 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA), 6379 /* XXX Those instructions will need to be handled differently for 6380 * different ISA versions */ 6381 GEN_HANDLER(tlbiel, 0x1F, 0x12, 0x08, 0x001F0001, PPC_MEM_TLBIE), 6382 GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x001F0001, PPC_MEM_TLBIE), 6383 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 
0x03FFF801, PPC_MEM_TLBSYNC), 6384 #if defined(TARGET_PPC64) 6385 GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x031FFC01, PPC_SLBI), 6386 GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI), 6387 #endif 6388 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN), 6389 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN), 6390 GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR), 6391 GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR), 6392 GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR), 6393 GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR), 6394 GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR), 6395 GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR), 6396 GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR), 6397 GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR), 6398 GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR), 6399 GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR), 6400 GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR), 6401 GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR), 6402 GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR), 6403 GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR), 6404 GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR), 6405 GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR), 6406 GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR), 6407 GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR), 6408 GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR), 6409 GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR), 6410 GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR), 6411 GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR), 6412 GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR), 6413 GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR), 6414 GEN_HANDLER(slq, 0x1F, 
0x18, 0x04, 0x00000000, PPC_POWER_BR), 6415 GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR), 6416 GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR), 6417 GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR), 6418 GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR), 6419 GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR), 6420 GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR), 6421 GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR), 6422 GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR), 6423 GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR), 6424 GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC), 6425 GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC), 6426 GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC), 6427 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB), 6428 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB), 6429 GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB), 6430 GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB), 6431 GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER), 6432 GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER), 6433 GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER), 6434 GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER), 6435 GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER), 6436 GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER), 6437 GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 6438 GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 6439 GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2), 6440 GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2), 6441 GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 6442 GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 6443 GEN_HANDLER(stfqux, 
0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2), 6444 GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2), 6445 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI), 6446 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA), 6447 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR), 6448 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR), 6449 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX), 6450 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX), 6451 GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX), 6452 GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX), 6453 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON), 6454 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON), 6455 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT), 6456 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON), 6457 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON), 6458 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP), 6459 GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206), 6460 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI), 6461 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI), 6462 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB), 6463 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB), 6464 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB), 6465 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE), 6466 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE), 6467 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE), 6468 GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, 6469 PPC_NONE, PPC2_BOOKE206), 6470 GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, 6471 PPC_NONE, 
PPC2_BOOKE206), 6472 GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, 6473 PPC_NONE, PPC2_BOOKE206), 6474 GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001, 6475 PPC_NONE, PPC2_BOOKE206), 6476 GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001, 6477 PPC_NONE, PPC2_BOOKE206), 6478 GEN_HANDLER2_E(msgsnd, "msgsnd", 0x1F, 0x0E, 0x06, 0x03ff0001, 6479 PPC_NONE, PPC2_PRCNTL), 6480 GEN_HANDLER2_E(msgclr, "msgclr", 0x1F, 0x0E, 0x07, 0x03ff0001, 6481 PPC_NONE, PPC2_PRCNTL), 6482 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE), 6483 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE), 6484 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC), 6485 GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801, 6486 PPC_BOOKE, PPC2_BOOKE206), 6487 GEN_HANDLER(msync_4xx, 0x1F, 0x16, 0x12, 0x03FFF801, PPC_BOOKE), 6488 GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001, 6489 PPC_BOOKE, PPC2_BOOKE206), 6490 GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC), 6491 GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC), 6492 GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC), 6493 GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC), 6494 GEN_HANDLER(vmladduhm, 0x04, 0x11, 0xFF, 0x00000000, PPC_ALTIVEC), 6495 #if defined(TARGET_PPC64) 6496 GEN_HANDLER_E(maddhd_maddhdu, 0x04, 0x18, 0xFF, 0x00000000, PPC_NONE, 6497 PPC2_ISA300), 6498 GEN_HANDLER_E(maddld, 0x04, 0x19, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300), 6499 #endif 6500 6501 #undef GEN_INT_ARITH_ADD 6502 #undef GEN_INT_ARITH_ADD_CONST 6503 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \ 6504 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER), 6505 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \ 6506 add_ca, compute_ca, compute_ov) \ 6507 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER), 6508 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0) 6509 
GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1) 6510 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0) 6511 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1) 6512 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0) 6513 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1) 6514 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0) 6515 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1) 6516 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0) 6517 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1) 6518 6519 #undef GEN_INT_ARITH_DIVW 6520 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \ 6521 GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER) 6522 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0), 6523 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1), 6524 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0), 6525 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1), 6526 GEN_HANDLER_E(divwe, 0x1F, 0x0B, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206), 6527 GEN_HANDLER_E(divweo, 0x1F, 0x0B, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206), 6528 GEN_HANDLER_E(divweu, 0x1F, 0x0B, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206), 6529 GEN_HANDLER_E(divweuo, 0x1F, 0x0B, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206), 6530 GEN_HANDLER_E(modsw, 0x1F, 0x0B, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300), 6531 GEN_HANDLER_E(moduw, 0x1F, 0x0B, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300), 6532 6533 #if defined(TARGET_PPC64) 6534 #undef GEN_INT_ARITH_DIVD 6535 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \ 6536 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B) 6537 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0), 6538 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1), 6539 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0), 6540 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1), 6541 6542 GEN_HANDLER_E(divdeu, 0x1F, 0x09, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206), 6543 GEN_HANDLER_E(divdeuo, 0x1F, 0x09, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206), 6544 GEN_HANDLER_E(divde, 0x1F, 0x09, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206), 6545 GEN_HANDLER_E(divdeo, 0x1F, 0x09, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206), 6546 GEN_HANDLER_E(modsd, 0x1F, 0x09, 0x18, 0x00000001, PPC_NONE, 
PPC2_ISA300), 6547 GEN_HANDLER_E(modud, 0x1F, 0x09, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300), 6548 6549 #undef GEN_INT_ARITH_MUL_HELPER 6550 #define GEN_INT_ARITH_MUL_HELPER(name, opc3) \ 6551 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B) 6552 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00), 6553 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02), 6554 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17), 6555 #endif 6556 6557 #undef GEN_INT_ARITH_SUBF 6558 #undef GEN_INT_ARITH_SUBF_CONST 6559 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \ 6560 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER), 6561 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \ 6562 add_ca, compute_ca, compute_ov) \ 6563 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER), 6564 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0) 6565 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1) 6566 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0) 6567 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1) 6568 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0) 6569 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1) 6570 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0) 6571 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1) 6572 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0) 6573 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1) 6574 6575 #undef GEN_LOGICAL1 6576 #undef GEN_LOGICAL2 6577 #define GEN_LOGICAL2(name, tcg_op, opc, type) \ 6578 GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type) 6579 #define GEN_LOGICAL1(name, tcg_op, opc, type) \ 6580 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type) 6581 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER), 6582 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER), 6583 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER), 6584 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER), 6585 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER), 6586 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER), 6587 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER), 6588 
GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER), 6589 #if defined(TARGET_PPC64) 6590 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B), 6591 #endif 6592 6593 #if defined(TARGET_PPC64) 6594 #undef GEN_PPC64_R2 6595 #undef GEN_PPC64_R4 6596 #define GEN_PPC64_R2(name, opc1, opc2) \ 6597 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\ 6598 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \ 6599 PPC_64B) 6600 #define GEN_PPC64_R4(name, opc1, opc2) \ 6601 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\ 6602 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \ 6603 PPC_64B), \ 6604 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \ 6605 PPC_64B), \ 6606 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \ 6607 PPC_64B) 6608 GEN_PPC64_R4(rldicl, 0x1E, 0x00), 6609 GEN_PPC64_R4(rldicr, 0x1E, 0x02), 6610 GEN_PPC64_R4(rldic, 0x1E, 0x04), 6611 GEN_PPC64_R2(rldcl, 0x1E, 0x08), 6612 GEN_PPC64_R2(rldcr, 0x1E, 0x09), 6613 GEN_PPC64_R4(rldimi, 0x1E, 0x06), 6614 #endif 6615 6616 #undef GEN_LD 6617 #undef GEN_LDU 6618 #undef GEN_LDUX 6619 #undef GEN_LDX_E 6620 #undef GEN_LDS 6621 #define GEN_LD(name, ldop, opc, type) \ 6622 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type), 6623 #define GEN_LDU(name, ldop, opc, type) \ 6624 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type), 6625 #define GEN_LDUX(name, ldop, opc2, opc3, type) \ 6626 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type), 6627 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \ 6628 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2), 6629 #define GEN_LDS(name, ldop, op, type) \ 6630 GEN_LD(name, ldop, op | 0x20, type) \ 6631 GEN_LDU(name, ldop, op | 0x21, type) \ 6632 GEN_LDUX(name, ldop, 0x17, op | 0x01, type) \ 6633 GEN_LDX(name, ldop, 0x17, op | 0x00, type) 6634 6635 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER) 
6636 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER) 6637 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER) 6638 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER) 6639 #if defined(TARGET_PPC64) 6640 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B) 6641 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B) 6642 GEN_LDUX(ld, ld64_i64, 0x15, 0x01, PPC_64B) 6643 GEN_LDX(ld, ld64_i64, 0x15, 0x00, PPC_64B) 6644 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE) 6645 6646 /* HV/P7 and later only */ 6647 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST) 6648 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x18, PPC_CILDST) 6649 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST) 6650 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST) 6651 #endif 6652 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER) 6653 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER) 6654 6655 #undef GEN_ST 6656 #undef GEN_STU 6657 #undef GEN_STUX 6658 #undef GEN_STX_E 6659 #undef GEN_STS 6660 #define GEN_ST(name, stop, opc, type) \ 6661 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type), 6662 #define GEN_STU(name, stop, opc, type) \ 6663 GEN_HANDLER(stop##u, opc, 0xFF, 0xFF, 0x00000000, type), 6664 #define GEN_STUX(name, stop, opc2, opc3, type) \ 6665 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type), 6666 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \ 6667 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2), 6668 #define GEN_STS(name, stop, op, type) \ 6669 GEN_ST(name, stop, op | 0x20, type) \ 6670 GEN_STU(name, stop, op | 0x21, type) \ 6671 GEN_STUX(name, stop, 0x17, op | 0x01, type) \ 6672 GEN_STX(name, stop, 0x17, op | 0x00, type) 6673 6674 GEN_STS(stb, st8, 0x06, PPC_INTEGER) 6675 GEN_STS(sth, st16, 0x0C, PPC_INTEGER) 6676 GEN_STS(stw, st32, 0x04, PPC_INTEGER) 6677 #if defined(TARGET_PPC64) 6678 GEN_STUX(std, st64_i64, 0x15, 0x05, PPC_64B) 6679 GEN_STX(std, st64_i64, 0x15, 0x04, PPC_64B) 6680 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE) 6681 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 
0x1f, PPC_CILDST) 6682 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST) 6683 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST) 6684 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST) 6685 #endif 6686 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER) 6687 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER) 6688 6689 #undef GEN_CRLOGIC 6690 #define GEN_CRLOGIC(name, tcg_op, opc) \ 6691 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER) 6692 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08), 6693 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04), 6694 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09), 6695 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07), 6696 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01), 6697 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E), 6698 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D), 6699 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06), 6700 6701 #undef GEN_MAC_HANDLER 6702 #define GEN_MAC_HANDLER(name, opc2, opc3) \ 6703 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC) 6704 GEN_MAC_HANDLER(macchw, 0x0C, 0x05), 6705 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15), 6706 GEN_MAC_HANDLER(macchws, 0x0C, 0x07), 6707 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17), 6708 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06), 6709 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16), 6710 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04), 6711 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14), 6712 GEN_MAC_HANDLER(machhw, 0x0C, 0x01), 6713 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11), 6714 GEN_MAC_HANDLER(machhws, 0x0C, 0x03), 6715 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13), 6716 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02), 6717 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12), 6718 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00), 6719 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10), 6720 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D), 6721 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D), 6722 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F), 6723 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F), 6724 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C), 6725 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C), 6726 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E), 6727 
GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E), 6728 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05), 6729 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15), 6730 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07), 6731 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17), 6732 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01), 6733 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11), 6734 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03), 6735 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13), 6736 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D), 6737 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D), 6738 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F), 6739 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F), 6740 GEN_MAC_HANDLER(mulchw, 0x08, 0x05), 6741 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04), 6742 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01), 6743 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00), 6744 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D), 6745 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C), 6746 6747 GEN_HANDLER2_E(tbegin, "tbegin", 0x1F, 0x0E, 0x14, 0x01DFF800, \ 6748 PPC_NONE, PPC2_TM), 6749 GEN_HANDLER2_E(tend, "tend", 0x1F, 0x0E, 0x15, 0x01FFF800, \ 6750 PPC_NONE, PPC2_TM), 6751 GEN_HANDLER2_E(tabort, "tabort", 0x1F, 0x0E, 0x1C, 0x03E0F800, \ 6752 PPC_NONE, PPC2_TM), 6753 GEN_HANDLER2_E(tabortwc, "tabortwc", 0x1F, 0x0E, 0x18, 0x00000000, \ 6754 PPC_NONE, PPC2_TM), 6755 GEN_HANDLER2_E(tabortwci, "tabortwci", 0x1F, 0x0E, 0x1A, 0x00000000, \ 6756 PPC_NONE, PPC2_TM), 6757 GEN_HANDLER2_E(tabortdc, "tabortdc", 0x1F, 0x0E, 0x19, 0x00000000, \ 6758 PPC_NONE, PPC2_TM), 6759 GEN_HANDLER2_E(tabortdci, "tabortdci", 0x1F, 0x0E, 0x1B, 0x00000000, \ 6760 PPC_NONE, PPC2_TM), 6761 GEN_HANDLER2_E(tsr, "tsr", 0x1F, 0x0E, 0x17, 0x03DFF800, \ 6762 PPC_NONE, PPC2_TM), 6763 GEN_HANDLER2_E(tcheck, "tcheck", 0x1F, 0x0E, 0x16, 0x007FF800, \ 6764 PPC_NONE, PPC2_TM), 6765 GEN_HANDLER2_E(treclaim, "treclaim", 0x1F, 0x0E, 0x1D, 0x03E0F800, \ 6766 PPC_NONE, PPC2_TM), 6767 GEN_HANDLER2_E(trechkpt, "trechkpt", 0x1F, 0x0E, 0x1F, 0x03FFF800, \ 6768 PPC_NONE, PPC2_TM), 6769 6770 #include "translate/fp-ops.inc.c" 6771 6772 #include "translate/vmx-ops.inc.c" 6773 6774 #include 
"translate/vsx-ops.inc.c" 6775 6776 #include "translate/dfp-ops.inc.c" 6777 6778 #include "translate/spe-ops.inc.c" 6779 }; 6780 6781 #include "helper_regs.h" 6782 #include "translate_init.c" 6783 6784 /*****************************************************************************/ 6785 /* Misc PowerPC helpers */ 6786 void ppc_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf, 6787 int flags) 6788 { 6789 #define RGPL 4 6790 #define RFPL 4 6791 6792 PowerPCCPU *cpu = POWERPC_CPU(cs); 6793 CPUPPCState *env = &cpu->env; 6794 int i; 6795 6796 cpu_fprintf(f, "NIP " TARGET_FMT_lx " LR " TARGET_FMT_lx " CTR " 6797 TARGET_FMT_lx " XER " TARGET_FMT_lx " CPU#%d\n", 6798 env->nip, env->lr, env->ctr, cpu_read_xer(env), 6799 cs->cpu_index); 6800 cpu_fprintf(f, "MSR " TARGET_FMT_lx " HID0 " TARGET_FMT_lx " HF " 6801 TARGET_FMT_lx " iidx %d didx %d\n", 6802 env->msr, env->spr[SPR_HID0], 6803 env->hflags, env->immu_idx, env->dmmu_idx); 6804 #if !defined(NO_TIMER_DUMP) 6805 cpu_fprintf(f, "TB %08" PRIu32 " %08" PRIu64 6806 #if !defined(CONFIG_USER_ONLY) 6807 " DECR %08" PRIu32 6808 #endif 6809 "\n", 6810 cpu_ppc_load_tbu(env), cpu_ppc_load_tbl(env) 6811 #if !defined(CONFIG_USER_ONLY) 6812 , cpu_ppc_load_decr(env) 6813 #endif 6814 ); 6815 #endif 6816 for (i = 0; i < 32; i++) { 6817 if ((i & (RGPL - 1)) == 0) 6818 cpu_fprintf(f, "GPR%02d", i); 6819 cpu_fprintf(f, " %016" PRIx64, ppc_dump_gpr(env, i)); 6820 if ((i & (RGPL - 1)) == (RGPL - 1)) 6821 cpu_fprintf(f, "\n"); 6822 } 6823 cpu_fprintf(f, "CR "); 6824 for (i = 0; i < 8; i++) 6825 cpu_fprintf(f, "%01x", env->crf[i]); 6826 cpu_fprintf(f, " ["); 6827 for (i = 0; i < 8; i++) { 6828 char a = '-'; 6829 if (env->crf[i] & 0x08) 6830 a = 'L'; 6831 else if (env->crf[i] & 0x04) 6832 a = 'G'; 6833 else if (env->crf[i] & 0x02) 6834 a = 'E'; 6835 cpu_fprintf(f, " %c%c", a, env->crf[i] & 0x01 ? 
'O' : ' '); 6836 } 6837 cpu_fprintf(f, " ] RES " TARGET_FMT_lx "\n", 6838 env->reserve_addr); 6839 for (i = 0; i < 32; i++) { 6840 if ((i & (RFPL - 1)) == 0) 6841 cpu_fprintf(f, "FPR%02d", i); 6842 cpu_fprintf(f, " %016" PRIx64, *((uint64_t *)&env->fpr[i])); 6843 if ((i & (RFPL - 1)) == (RFPL - 1)) 6844 cpu_fprintf(f, "\n"); 6845 } 6846 cpu_fprintf(f, "FPSCR " TARGET_FMT_lx "\n", env->fpscr); 6847 #if !defined(CONFIG_USER_ONLY) 6848 cpu_fprintf(f, " SRR0 " TARGET_FMT_lx " SRR1 " TARGET_FMT_lx 6849 " PVR " TARGET_FMT_lx " VRSAVE " TARGET_FMT_lx "\n", 6850 env->spr[SPR_SRR0], env->spr[SPR_SRR1], 6851 env->spr[SPR_PVR], env->spr[SPR_VRSAVE]); 6852 6853 cpu_fprintf(f, "SPRG0 " TARGET_FMT_lx " SPRG1 " TARGET_FMT_lx 6854 " SPRG2 " TARGET_FMT_lx " SPRG3 " TARGET_FMT_lx "\n", 6855 env->spr[SPR_SPRG0], env->spr[SPR_SPRG1], 6856 env->spr[SPR_SPRG2], env->spr[SPR_SPRG3]); 6857 6858 cpu_fprintf(f, "SPRG4 " TARGET_FMT_lx " SPRG5 " TARGET_FMT_lx 6859 " SPRG6 " TARGET_FMT_lx " SPRG7 " TARGET_FMT_lx "\n", 6860 env->spr[SPR_SPRG4], env->spr[SPR_SPRG5], 6861 env->spr[SPR_SPRG6], env->spr[SPR_SPRG7]); 6862 6863 #if defined(TARGET_PPC64) 6864 if (env->excp_model == POWERPC_EXCP_POWER7 || 6865 env->excp_model == POWERPC_EXCP_POWER8) { 6866 cpu_fprintf(f, "HSRR0 " TARGET_FMT_lx " HSRR1 " TARGET_FMT_lx "\n", 6867 env->spr[SPR_HSRR0], env->spr[SPR_HSRR1]); 6868 } 6869 #endif 6870 if (env->excp_model == POWERPC_EXCP_BOOKE) { 6871 cpu_fprintf(f, "CSRR0 " TARGET_FMT_lx " CSRR1 " TARGET_FMT_lx 6872 " MCSRR0 " TARGET_FMT_lx " MCSRR1 " TARGET_FMT_lx "\n", 6873 env->spr[SPR_BOOKE_CSRR0], env->spr[SPR_BOOKE_CSRR1], 6874 env->spr[SPR_BOOKE_MCSRR0], env->spr[SPR_BOOKE_MCSRR1]); 6875 6876 cpu_fprintf(f, " TCR " TARGET_FMT_lx " TSR " TARGET_FMT_lx 6877 " ESR " TARGET_FMT_lx " DEAR " TARGET_FMT_lx "\n", 6878 env->spr[SPR_BOOKE_TCR], env->spr[SPR_BOOKE_TSR], 6879 env->spr[SPR_BOOKE_ESR], env->spr[SPR_BOOKE_DEAR]); 6880 6881 cpu_fprintf(f, " PIR " TARGET_FMT_lx " DECAR " TARGET_FMT_lx 6882 " IVPR " 
TARGET_FMT_lx " EPCR " TARGET_FMT_lx "\n", 6883 env->spr[SPR_BOOKE_PIR], env->spr[SPR_BOOKE_DECAR], 6884 env->spr[SPR_BOOKE_IVPR], env->spr[SPR_BOOKE_EPCR]); 6885 6886 cpu_fprintf(f, " MCSR " TARGET_FMT_lx " SPRG8 " TARGET_FMT_lx 6887 " EPR " TARGET_FMT_lx "\n", 6888 env->spr[SPR_BOOKE_MCSR], env->spr[SPR_BOOKE_SPRG8], 6889 env->spr[SPR_BOOKE_EPR]); 6890 6891 /* FSL-specific */ 6892 cpu_fprintf(f, " MCAR " TARGET_FMT_lx " PID1 " TARGET_FMT_lx 6893 " PID2 " TARGET_FMT_lx " SVR " TARGET_FMT_lx "\n", 6894 env->spr[SPR_Exxx_MCAR], env->spr[SPR_BOOKE_PID1], 6895 env->spr[SPR_BOOKE_PID2], env->spr[SPR_E500_SVR]); 6896 6897 /* 6898 * IVORs are left out as they are large and do not change often -- 6899 * they can be read with "p $ivor0", "p $ivor1", etc. 6900 */ 6901 } 6902 6903 #if defined(TARGET_PPC64) 6904 if (env->flags & POWERPC_FLAG_CFAR) { 6905 cpu_fprintf(f, " CFAR " TARGET_FMT_lx"\n", env->cfar); 6906 } 6907 #endif 6908 6909 switch (env->mmu_model) { 6910 case POWERPC_MMU_32B: 6911 case POWERPC_MMU_601: 6912 case POWERPC_MMU_SOFT_6xx: 6913 case POWERPC_MMU_SOFT_74xx: 6914 #if defined(TARGET_PPC64) 6915 case POWERPC_MMU_64B: 6916 case POWERPC_MMU_2_03: 6917 case POWERPC_MMU_2_06: 6918 case POWERPC_MMU_2_06a: 6919 case POWERPC_MMU_2_07: 6920 case POWERPC_MMU_2_07a: 6921 #endif 6922 cpu_fprintf(f, " SDR1 " TARGET_FMT_lx " DAR " TARGET_FMT_lx 6923 " DSISR " TARGET_FMT_lx "\n", env->spr[SPR_SDR1], 6924 env->spr[SPR_DAR], env->spr[SPR_DSISR]); 6925 break; 6926 case POWERPC_MMU_BOOKE206: 6927 cpu_fprintf(f, " MAS0 " TARGET_FMT_lx " MAS1 " TARGET_FMT_lx 6928 " MAS2 " TARGET_FMT_lx " MAS3 " TARGET_FMT_lx "\n", 6929 env->spr[SPR_BOOKE_MAS0], env->spr[SPR_BOOKE_MAS1], 6930 env->spr[SPR_BOOKE_MAS2], env->spr[SPR_BOOKE_MAS3]); 6931 6932 cpu_fprintf(f, " MAS4 " TARGET_FMT_lx " MAS6 " TARGET_FMT_lx 6933 " MAS7 " TARGET_FMT_lx " PID " TARGET_FMT_lx "\n", 6934 env->spr[SPR_BOOKE_MAS4], env->spr[SPR_BOOKE_MAS6], 6935 env->spr[SPR_BOOKE_MAS7], env->spr[SPR_BOOKE_PID]); 6936 6937 
cpu_fprintf(f, "MMUCFG " TARGET_FMT_lx " TLB0CFG " TARGET_FMT_lx 6938 " TLB1CFG " TARGET_FMT_lx "\n", 6939 env->spr[SPR_MMUCFG], env->spr[SPR_BOOKE_TLB0CFG], 6940 env->spr[SPR_BOOKE_TLB1CFG]); 6941 break; 6942 default: 6943 break; 6944 } 6945 #endif 6946 6947 #undef RGPL 6948 #undef RFPL 6949 } 6950 6951 void ppc_cpu_dump_statistics(CPUState *cs, FILE*f, 6952 fprintf_function cpu_fprintf, int flags) 6953 { 6954 #if defined(DO_PPC_STATISTICS) 6955 PowerPCCPU *cpu = POWERPC_CPU(cs); 6956 opc_handler_t **t1, **t2, **t3, *handler; 6957 int op1, op2, op3; 6958 6959 t1 = cpu->env.opcodes; 6960 for (op1 = 0; op1 < 64; op1++) { 6961 handler = t1[op1]; 6962 if (is_indirect_opcode(handler)) { 6963 t2 = ind_table(handler); 6964 for (op2 = 0; op2 < 32; op2++) { 6965 handler = t2[op2]; 6966 if (is_indirect_opcode(handler)) { 6967 t3 = ind_table(handler); 6968 for (op3 = 0; op3 < 32; op3++) { 6969 handler = t3[op3]; 6970 if (handler->count == 0) 6971 continue; 6972 cpu_fprintf(f, "%02x %02x %02x (%02x %04d) %16s: " 6973 "%016" PRIx64 " %" PRId64 "\n", 6974 op1, op2, op3, op1, (op3 << 5) | op2, 6975 handler->oname, 6976 handler->count, handler->count); 6977 } 6978 } else { 6979 if (handler->count == 0) 6980 continue; 6981 cpu_fprintf(f, "%02x %02x (%02x %04d) %16s: " 6982 "%016" PRIx64 " %" PRId64 "\n", 6983 op1, op2, op1, op2, handler->oname, 6984 handler->count, handler->count); 6985 } 6986 } 6987 } else { 6988 if (handler->count == 0) 6989 continue; 6990 cpu_fprintf(f, "%02x (%02x ) %16s: %016" PRIx64 6991 " %" PRId64 "\n", 6992 op1, op1, handler->oname, 6993 handler->count, handler->count); 6994 } 6995 } 6996 #endif 6997 } 6998 6999 /*****************************************************************************/ 7000 void gen_intermediate_code(CPUPPCState *env, struct TranslationBlock *tb) 7001 { 7002 PowerPCCPU *cpu = ppc_env_get_cpu(env); 7003 CPUState *cs = CPU(cpu); 7004 DisasContext ctx, *ctxp = &ctx; 7005 opc_handler_t **table, *handler; 7006 target_ulong pc_start; 7007 
int num_insns; 7008 int max_insns; 7009 7010 pc_start = tb->pc; 7011 ctx.nip = pc_start; 7012 ctx.tb = tb; 7013 ctx.exception = POWERPC_EXCP_NONE; 7014 ctx.spr_cb = env->spr_cb; 7015 ctx.pr = msr_pr; 7016 ctx.mem_idx = env->dmmu_idx; 7017 ctx.dr = msr_dr; 7018 #if !defined(CONFIG_USER_ONLY) 7019 ctx.hv = msr_hv || !env->has_hv_mode; 7020 #endif 7021 ctx.insns_flags = env->insns_flags; 7022 ctx.insns_flags2 = env->insns_flags2; 7023 ctx.access_type = -1; 7024 ctx.need_access_type = !(env->mmu_model & POWERPC_MMU_64B); 7025 ctx.le_mode = !!(env->hflags & (1 << MSR_LE)); 7026 ctx.default_tcg_memop_mask = ctx.le_mode ? MO_LE : MO_BE; 7027 #if defined(TARGET_PPC64) 7028 ctx.sf_mode = msr_is_64bit(env, env->msr); 7029 ctx.has_cfar = !!(env->flags & POWERPC_FLAG_CFAR); 7030 #endif 7031 if (env->mmu_model == POWERPC_MMU_32B || 7032 env->mmu_model == POWERPC_MMU_601 || 7033 (env->mmu_model & POWERPC_MMU_64B)) 7034 ctx.lazy_tlb_flush = true; 7035 7036 ctx.fpu_enabled = !!msr_fp; 7037 if ((env->flags & POWERPC_FLAG_SPE) && msr_spe) 7038 ctx.spe_enabled = !!msr_spe; 7039 else 7040 ctx.spe_enabled = false; 7041 if ((env->flags & POWERPC_FLAG_VRE) && msr_vr) 7042 ctx.altivec_enabled = !!msr_vr; 7043 else 7044 ctx.altivec_enabled = false; 7045 if ((env->flags & POWERPC_FLAG_VSX) && msr_vsx) { 7046 ctx.vsx_enabled = !!msr_vsx; 7047 } else { 7048 ctx.vsx_enabled = false; 7049 } 7050 #if defined(TARGET_PPC64) 7051 if ((env->flags & POWERPC_FLAG_TM) && msr_tm) { 7052 ctx.tm_enabled = !!msr_tm; 7053 } else { 7054 ctx.tm_enabled = false; 7055 } 7056 #endif 7057 if ((env->flags & POWERPC_FLAG_SE) && msr_se) 7058 ctx.singlestep_enabled = CPU_SINGLE_STEP; 7059 else 7060 ctx.singlestep_enabled = 0; 7061 if ((env->flags & POWERPC_FLAG_BE) && msr_be) 7062 ctx.singlestep_enabled |= CPU_BRANCH_STEP; 7063 if (unlikely(cs->singlestep_enabled)) { 7064 ctx.singlestep_enabled |= GDBSTUB_SINGLE_STEP; 7065 } 7066 #if defined (DO_SINGLE_STEP) && 0 7067 /* Single step trace mode */ 7068 msr_se = 1; 
7069 #endif 7070 num_insns = 0; 7071 max_insns = tb->cflags & CF_COUNT_MASK; 7072 if (max_insns == 0) { 7073 max_insns = CF_COUNT_MASK; 7074 } 7075 if (max_insns > TCG_MAX_INSNS) { 7076 max_insns = TCG_MAX_INSNS; 7077 } 7078 7079 gen_tb_start(tb); 7080 tcg_clear_temp_count(); 7081 /* Set env in case of segfault during code fetch */ 7082 while (ctx.exception == POWERPC_EXCP_NONE && !tcg_op_buf_full()) { 7083 tcg_gen_insn_start(ctx.nip); 7084 num_insns++; 7085 7086 if (unlikely(cpu_breakpoint_test(cs, ctx.nip, BP_ANY))) { 7087 gen_debug_exception(ctxp); 7088 /* The address covered by the breakpoint must be included in 7089 [tb->pc, tb->pc + tb->size) in order to for it to be 7090 properly cleared -- thus we increment the PC here so that 7091 the logic setting tb->size below does the right thing. */ 7092 ctx.nip += 4; 7093 break; 7094 } 7095 7096 LOG_DISAS("----------------\n"); 7097 LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n", 7098 ctx.nip, ctx.mem_idx, (int)msr_ir); 7099 if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) 7100 gen_io_start(); 7101 if (unlikely(need_byteswap(&ctx))) { 7102 ctx.opcode = bswap32(cpu_ldl_code(env, ctx.nip)); 7103 } else { 7104 ctx.opcode = cpu_ldl_code(env, ctx.nip); 7105 } 7106 LOG_DISAS("translate opcode %08x (%02x %02x %02x %02x) (%s)\n", 7107 ctx.opcode, opc1(ctx.opcode), opc2(ctx.opcode), 7108 opc3(ctx.opcode), opc4(ctx.opcode), 7109 ctx.le_mode ? "little" : "big"); 7110 ctx.nip += 4; 7111 table = env->opcodes; 7112 handler = table[opc1(ctx.opcode)]; 7113 if (is_indirect_opcode(handler)) { 7114 table = ind_table(handler); 7115 handler = table[opc2(ctx.opcode)]; 7116 if (is_indirect_opcode(handler)) { 7117 table = ind_table(handler); 7118 handler = table[opc3(ctx.opcode)]; 7119 if (is_indirect_opcode(handler)) { 7120 table = ind_table(handler); 7121 handler = table[opc4(ctx.opcode)]; 7122 } 7123 } 7124 } 7125 /* Is opcode *REALLY* valid ? 
*/ 7126 if (unlikely(handler->handler == &gen_invalid)) { 7127 qemu_log_mask(LOG_GUEST_ERROR, "invalid/unsupported opcode: " 7128 "%02x - %02x - %02x - %02x (%08x) " 7129 TARGET_FMT_lx " %d\n", 7130 opc1(ctx.opcode), opc2(ctx.opcode), 7131 opc3(ctx.opcode), opc4(ctx.opcode), 7132 ctx.opcode, ctx.nip - 4, (int)msr_ir); 7133 } else { 7134 uint32_t inval; 7135 7136 if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE) && Rc(ctx.opcode))) { 7137 inval = handler->inval2; 7138 } else { 7139 inval = handler->inval1; 7140 } 7141 7142 if (unlikely((ctx.opcode & inval) != 0)) { 7143 qemu_log_mask(LOG_GUEST_ERROR, "invalid bits: %08x for opcode: " 7144 "%02x - %02x - %02x - %02x (%08x) " 7145 TARGET_FMT_lx "\n", ctx.opcode & inval, 7146 opc1(ctx.opcode), opc2(ctx.opcode), 7147 opc3(ctx.opcode), opc4(ctx.opcode), 7148 ctx.opcode, ctx.nip - 4); 7149 gen_inval_exception(ctxp, POWERPC_EXCP_INVAL_INVAL); 7150 break; 7151 } 7152 } 7153 (*(handler->handler))(&ctx); 7154 #if defined(DO_PPC_STATISTICS) 7155 handler->count++; 7156 #endif 7157 /* Check trace mode exceptions */ 7158 if (unlikely(ctx.singlestep_enabled & CPU_SINGLE_STEP && 7159 (ctx.nip <= 0x100 || ctx.nip > 0xF00) && 7160 ctx.exception != POWERPC_SYSCALL && 7161 ctx.exception != POWERPC_EXCP_TRAP && 7162 ctx.exception != POWERPC_EXCP_BRANCH)) { 7163 gen_exception_nip(ctxp, POWERPC_EXCP_TRACE, ctx.nip); 7164 } else if (unlikely(((ctx.nip & (TARGET_PAGE_SIZE - 1)) == 0) || 7165 (cs->singlestep_enabled) || 7166 singlestep || 7167 num_insns >= max_insns)) { 7168 /* if we reach a page boundary or are single stepping, stop 7169 * generation 7170 */ 7171 break; 7172 } 7173 if (tcg_check_temp_count()) { 7174 fprintf(stderr, "Opcode %02x %02x %02x %02x (%08x) leaked " 7175 "temporaries\n", opc1(ctx.opcode), opc2(ctx.opcode), 7176 opc3(ctx.opcode), opc4(ctx.opcode), ctx.opcode); 7177 exit(1); 7178 } 7179 } 7180 if (tb->cflags & CF_LAST_IO) 7181 gen_io_end(); 7182 if (ctx.exception == POWERPC_EXCP_NONE) { 7183 
gen_goto_tb(&ctx, 0, ctx.nip); 7184 } else if (ctx.exception != POWERPC_EXCP_BRANCH) { 7185 if (unlikely(cs->singlestep_enabled)) { 7186 gen_debug_exception(ctxp); 7187 } 7188 /* Generate the return instruction */ 7189 tcg_gen_exit_tb(0); 7190 } 7191 gen_tb_end(tb, num_insns); 7192 7193 tb->size = ctx.nip - pc_start; 7194 tb->icount = num_insns; 7195 7196 #if defined(DEBUG_DISAS) 7197 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM) 7198 && qemu_log_in_addr_range(pc_start)) { 7199 int flags; 7200 flags = env->bfd_mach; 7201 flags |= ctx.le_mode << 16; 7202 qemu_log_lock(); 7203 qemu_log("IN: %s\n", lookup_symbol(pc_start)); 7204 log_target_disas(cs, pc_start, ctx.nip - pc_start, flags); 7205 qemu_log("\n"); 7206 qemu_log_unlock(); 7207 } 7208 #endif 7209 } 7210 7211 void restore_state_to_opc(CPUPPCState *env, TranslationBlock *tb, 7212 target_ulong *data) 7213 { 7214 env->nip = data[0]; 7215 } 7216