1 /* 2 * PowerPC emulation for qemu: main translation routines. 3 * 4 * Copyright (c) 2003-2007 Jocelyn Mayer 5 * Copyright (C) 2011 Freescale Semiconductor, Inc. 6 * 7 * This library is free software; you can redistribute it and/or 8 * modify it under the terms of the GNU Lesser General Public 9 * License as published by the Free Software Foundation; either 10 * version 2 of the License, or (at your option) any later version. 11 * 12 * This library is distributed in the hope that it will be useful, 13 * but WITHOUT ANY WARRANTY; without even the implied warranty of 14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 15 * Lesser General Public License for more details. 16 * 17 * You should have received a copy of the GNU Lesser General Public 18 * License along with this library; if not, see <http://www.gnu.org/licenses/>. 19 */ 20 21 #include "qemu/osdep.h" 22 #include "cpu.h" 23 #include "internal.h" 24 #include "disas/disas.h" 25 #include "exec/exec-all.h" 26 #include "tcg-op.h" 27 #include "qemu/host-utils.h" 28 #include "exec/cpu_ldst.h" 29 30 #include "exec/helper-proto.h" 31 #include "exec/helper-gen.h" 32 33 #include "trace-tcg.h" 34 #include "exec/log.h" 35 36 37 #define CPU_SINGLE_STEP 0x1 38 #define CPU_BRANCH_STEP 0x2 39 #define GDBSTUB_SINGLE_STEP 0x4 40 41 /* Include definitions for instructions classes and implementations flags */ 42 //#define PPC_DEBUG_DISAS 43 //#define DO_PPC_STATISTICS 44 45 #ifdef PPC_DEBUG_DISAS 46 # define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__) 47 #else 48 # define LOG_DISAS(...) do { } while (0) 49 #endif 50 /*****************************************************************************/ 51 /* Code translation helpers */ 52 53 /* global register indexes */ 54 static TCGv_env cpu_env; 55 static char cpu_reg_names[10*3 + 22*4 /* GPR */ 56 + 10*4 + 22*5 /* SPE GPRh */ 57 + 10*4 + 22*5 /* FPR */ 58 + 2*(10*6 + 22*7) /* AVRh, AVRl */ 59 + 10*5 + 22*6 /* VSR */ 60 + 8*5 /* CRF */]; 61 static TCGv cpu_gpr[32]; 62 static TCGv cpu_gprh[32]; 63 static TCGv_i64 cpu_fpr[32]; 64 static TCGv_i64 cpu_avrh[32], cpu_avrl[32]; 65 static TCGv_i64 cpu_vsr[32]; 66 static TCGv_i32 cpu_crf[8]; 67 static TCGv cpu_nip; 68 static TCGv cpu_msr; 69 static TCGv cpu_ctr; 70 static TCGv cpu_lr; 71 #if defined(TARGET_PPC64) 72 static TCGv cpu_cfar; 73 #endif 74 static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca; 75 static TCGv cpu_reserve; 76 static TCGv cpu_fpscr; 77 static TCGv_i32 cpu_access_type; 78 79 #include "exec/gen-icount.h" 80 81 void ppc_translate_init(void) 82 { 83 int i; 84 char* p; 85 size_t cpu_reg_names_size; 86 static int done_init = 0; 87 88 if (done_init) 89 return; 90 91 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env"); 92 tcg_ctx.tcg_env = cpu_env; 93 94 p = cpu_reg_names; 95 cpu_reg_names_size = sizeof(cpu_reg_names); 96 97 for (i = 0; i < 8; i++) { 98 snprintf(p, cpu_reg_names_size, "crf%d", i); 99 cpu_crf[i] = tcg_global_mem_new_i32(cpu_env, 100 offsetof(CPUPPCState, crf[i]), p); 101 p += 5; 102 cpu_reg_names_size -= 5; 103 } 104 105 for (i = 0; i < 32; i++) { 106 snprintf(p, cpu_reg_names_size, "r%d", i); 107 cpu_gpr[i] = tcg_global_mem_new(cpu_env, 108 offsetof(CPUPPCState, gpr[i]), p); 109 p += (i < 10) ? 3 : 4; 110 cpu_reg_names_size -= (i < 10) ? 3 : 4; 111 snprintf(p, cpu_reg_names_size, "r%dH", i); 112 cpu_gprh[i] = tcg_global_mem_new(cpu_env, 113 offsetof(CPUPPCState, gprh[i]), p); 114 p += (i < 10) ? 4 : 5; 115 cpu_reg_names_size -= (i < 10) ? 
4 : 5; 116 117 snprintf(p, cpu_reg_names_size, "fp%d", i); 118 cpu_fpr[i] = tcg_global_mem_new_i64(cpu_env, 119 offsetof(CPUPPCState, fpr[i]), p); 120 p += (i < 10) ? 4 : 5; 121 cpu_reg_names_size -= (i < 10) ? 4 : 5; 122 123 snprintf(p, cpu_reg_names_size, "avr%dH", i); 124 #ifdef HOST_WORDS_BIGENDIAN 125 cpu_avrh[i] = tcg_global_mem_new_i64(cpu_env, 126 offsetof(CPUPPCState, avr[i].u64[0]), p); 127 #else 128 cpu_avrh[i] = tcg_global_mem_new_i64(cpu_env, 129 offsetof(CPUPPCState, avr[i].u64[1]), p); 130 #endif 131 p += (i < 10) ? 6 : 7; 132 cpu_reg_names_size -= (i < 10) ? 6 : 7; 133 134 snprintf(p, cpu_reg_names_size, "avr%dL", i); 135 #ifdef HOST_WORDS_BIGENDIAN 136 cpu_avrl[i] = tcg_global_mem_new_i64(cpu_env, 137 offsetof(CPUPPCState, avr[i].u64[1]), p); 138 #else 139 cpu_avrl[i] = tcg_global_mem_new_i64(cpu_env, 140 offsetof(CPUPPCState, avr[i].u64[0]), p); 141 #endif 142 p += (i < 10) ? 6 : 7; 143 cpu_reg_names_size -= (i < 10) ? 6 : 7; 144 snprintf(p, cpu_reg_names_size, "vsr%d", i); 145 cpu_vsr[i] = tcg_global_mem_new_i64(cpu_env, 146 offsetof(CPUPPCState, vsr[i]), p); 147 p += (i < 10) ? 5 : 6; 148 cpu_reg_names_size -= (i < 10) ? 5 : 6; 149 } 150 151 cpu_nip = tcg_global_mem_new(cpu_env, 152 offsetof(CPUPPCState, nip), "nip"); 153 154 cpu_msr = tcg_global_mem_new(cpu_env, 155 offsetof(CPUPPCState, msr), "msr"); 156 157 cpu_ctr = tcg_global_mem_new(cpu_env, 158 offsetof(CPUPPCState, ctr), "ctr"); 159 160 cpu_lr = tcg_global_mem_new(cpu_env, 161 offsetof(CPUPPCState, lr), "lr"); 162 163 #if defined(TARGET_PPC64) 164 cpu_cfar = tcg_global_mem_new(cpu_env, 165 offsetof(CPUPPCState, cfar), "cfar"); 166 #endif 167 168 cpu_xer = tcg_global_mem_new(cpu_env, 169 offsetof(CPUPPCState, xer), "xer"); 170 cpu_so = tcg_global_mem_new(cpu_env, 171 offsetof(CPUPPCState, so), "SO"); 172 cpu_ov = tcg_global_mem_new(cpu_env, 173 offsetof(CPUPPCState, ov), "OV"); 174 cpu_ca = tcg_global_mem_new(cpu_env, 175 offsetof(CPUPPCState, ca), "CA"); 176 177 cpu_reserve = tcg_global_mem_new(cpu_env, 178 offsetof(CPUPPCState, reserve_addr), 179 "reserve_addr"); 180 181 cpu_fpscr = tcg_global_mem_new(cpu_env, 182 offsetof(CPUPPCState, fpscr), "fpscr"); 183 184 cpu_access_type = tcg_global_mem_new_i32(cpu_env, 185 offsetof(CPUPPCState, access_type), "access_type"); 186 187 done_init = 1; 188 } 189 190 /* internal defines */ 191 struct DisasContext { 192 struct TranslationBlock *tb; 193 target_ulong nip; 194 uint32_t opcode; 195 uint32_t exception; 196 /* Routine used to access memory */ 197 bool pr, hv, dr, le_mode; 198 bool lazy_tlb_flush; 199 bool need_access_type; 200 int mem_idx; 201 int access_type; 202 /* Translation flags */ 203 TCGMemOp default_tcg_memop_mask; 204 #if defined(TARGET_PPC64) 205 bool sf_mode; 206 bool has_cfar; 207 #endif 208 bool fpu_enabled; 209 bool altivec_enabled; 210 bool vsx_enabled; 211 bool spe_enabled; 212 bool tm_enabled; 213 ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */ 214 int singlestep_enabled; 215 uint64_t insns_flags; 216 uint64_t insns_flags2; 217 }; 218 219 /* Return true iff byteswap is needed in a scalar memop */ 220 static inline bool need_byteswap(const DisasContext *ctx) 221 { 222 #if defined(TARGET_WORDS_BIGENDIAN) 223 return ctx->le_mode; 224 #else 225 return !ctx->le_mode; 226 #endif 227 } 228 229 /* True when active word size < size of target_long. 
*/ 230 #ifdef TARGET_PPC64 231 # define NARROW_MODE(C) (!(C)->sf_mode) 232 #else 233 # define NARROW_MODE(C) 0 234 #endif 235 236 struct opc_handler_t { 237 /* invalid bits for instruction 1 (Rc(opcode) == 0) */ 238 uint32_t inval1; 239 /* invalid bits for instruction 2 (Rc(opcode) == 1) */ 240 uint32_t inval2; 241 /* instruction type */ 242 uint64_t type; 243 /* extended instruction type */ 244 uint64_t type2; 245 /* handler */ 246 void (*handler)(DisasContext *ctx); 247 #if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU) 248 const char *oname; 249 #endif 250 #if defined(DO_PPC_STATISTICS) 251 uint64_t count; 252 #endif 253 }; 254 255 static inline void gen_set_access_type(DisasContext *ctx, int access_type) 256 { 257 if (ctx->need_access_type && ctx->access_type != access_type) { 258 tcg_gen_movi_i32(cpu_access_type, access_type); 259 ctx->access_type = access_type; 260 } 261 } 262 263 static inline void gen_update_nip(DisasContext *ctx, target_ulong nip) 264 { 265 if (NARROW_MODE(ctx)) { 266 nip = (uint32_t)nip; 267 } 268 tcg_gen_movi_tl(cpu_nip, nip); 269 } 270 271 static void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error) 272 { 273 TCGv_i32 t0, t1; 274 275 /* These are all synchronous exceptions, we set the PC back to 276 * the faulting instruction 277 */ 278 if (ctx->exception == POWERPC_EXCP_NONE) { 279 gen_update_nip(ctx, ctx->nip - 4); 280 } 281 t0 = tcg_const_i32(excp); 282 t1 = tcg_const_i32(error); 283 gen_helper_raise_exception_err(cpu_env, t0, t1); 284 tcg_temp_free_i32(t0); 285 tcg_temp_free_i32(t1); 286 ctx->exception = (excp); 287 } 288 289 static void gen_exception(DisasContext *ctx, uint32_t excp) 290 { 291 TCGv_i32 t0; 292 293 /* These are all synchronous exceptions, we set the PC back to 294 * the faulting instruction 295 */ 296 if (ctx->exception == POWERPC_EXCP_NONE) { 297 gen_update_nip(ctx, ctx->nip - 4); 298 } 299 t0 = tcg_const_i32(excp); 300 gen_helper_raise_exception(cpu_env, t0); 301 tcg_temp_free_i32(t0); 302 ctx->exception = (excp); 303 } 304 305 static void gen_exception_nip(DisasContext *ctx, uint32_t excp, 306 target_ulong nip) 307 { 308 TCGv_i32 t0; 309 310 gen_update_nip(ctx, nip); 311 t0 = tcg_const_i32(excp); 312 gen_helper_raise_exception(cpu_env, t0); 313 tcg_temp_free_i32(t0); 314 ctx->exception = (excp); 315 } 316 317 static void gen_debug_exception(DisasContext *ctx) 318 { 319 TCGv_i32 t0; 320 321 /* These are all synchronous exceptions, we set the PC back to 322 * the faulting instruction 323 */ 324 if ((ctx->exception != POWERPC_EXCP_BRANCH) && 325 (ctx->exception != POWERPC_EXCP_SYNC)) { 326 gen_update_nip(ctx, ctx->nip); 327 } 328 t0 = tcg_const_i32(EXCP_DEBUG); 329 gen_helper_raise_exception(cpu_env, t0); 330 tcg_temp_free_i32(t0); 331 } 332 333 static inline void gen_inval_exception(DisasContext *ctx, uint32_t error) 334 { 335 /* Will be converted to program check if needed */ 336 gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_INVAL | error); 337 } 338 339 static inline void gen_priv_exception(DisasContext *ctx, uint32_t error) 340 { 341 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_PRIV | error); 342 } 343 344 static inline void gen_hvpriv_exception(DisasContext *ctx, uint32_t error) 345 { 346 /* Will be converted to program check if needed */ 347 gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_PRIV | error); 348 } 349 350 /* Stop translation */ 351 static inline void gen_stop_exception(DisasContext *ctx) 352 { 353 gen_update_nip(ctx, ctx->nip); 354 ctx->exception = POWERPC_EXCP_STOP; 
355 } 356 357 #ifndef CONFIG_USER_ONLY 358 /* No need to update nip here, as execution flow will change */ 359 static inline void gen_sync_exception(DisasContext *ctx) 360 { 361 ctx->exception = POWERPC_EXCP_SYNC; 362 } 363 #endif 364 365 #define GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \ 366 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE) 367 368 #define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2) \ 369 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2) 370 371 #define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type) \ 372 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE) 373 374 #define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2) \ 375 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2) 376 377 #define GEN_HANDLER_E_2(name, opc1, opc2, opc3, opc4, inval, type, type2) \ 378 GEN_OPCODE3(name, opc1, opc2, opc3, opc4, inval, type, type2) 379 380 #define GEN_HANDLER2_E_2(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) \ 381 GEN_OPCODE4(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) 382 383 typedef struct opcode_t { 384 unsigned char opc1, opc2, opc3, opc4; 385 #if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */ 386 unsigned char pad[4]; 387 #endif 388 opc_handler_t handler; 389 const char *oname; 390 } opcode_t; 391 392 /* Helpers for priv. check */ 393 #define GEN_PRIV \ 394 do { \ 395 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); return; \ 396 } while (0) 397 398 #if defined(CONFIG_USER_ONLY) 399 #define CHK_HV GEN_PRIV 400 #define CHK_SV GEN_PRIV 401 #define CHK_HVRM GEN_PRIV 402 #else 403 #define CHK_HV \ 404 do { \ 405 if (unlikely(ctx->pr || !ctx->hv)) { \ 406 GEN_PRIV; \ 407 } \ 408 } while (0) 409 #define CHK_SV \ 410 do { \ 411 if (unlikely(ctx->pr)) { \ 412 GEN_PRIV; \ 413 } \ 414 } while (0) 415 #define CHK_HVRM \ 416 do { \ 417 if (unlikely(ctx->pr || !ctx->hv || ctx->dr)) { \ 418 GEN_PRIV; \ 419 } \ 420 } while (0) 421 #endif 422 423 #define CHK_NONE 424 425 426 /*****************************************************************************/ 427 /*** Instruction decoding ***/ 428 #define EXTRACT_HELPER(name, shift, nb) \ 429 static inline uint32_t name(uint32_t opcode) \ 430 { \ 431 return (opcode >> (shift)) & ((1 << (nb)) - 1); \ 432 } 433 434 #define EXTRACT_SHELPER(name, shift, nb) \ 435 static inline int32_t name(uint32_t opcode) \ 436 { \ 437 return (int16_t)((opcode >> (shift)) & ((1 << (nb)) - 1)); \ 438 } 439 440 #define EXTRACT_HELPER_SPLIT(name, shift1, nb1, shift2, nb2) \ 441 static inline uint32_t name(uint32_t opcode) \ 442 { \ 443 return (((opcode >> (shift1)) & ((1 << (nb1)) - 1)) << nb2) | \ 444 ((opcode >> (shift2)) & ((1 << (nb2)) - 1)); \ 445 } 446 447 #define EXTRACT_HELPER_DXFORM(name, \ 448 d0_bits, shift_op_d0, shift_d0, \ 449 d1_bits, shift_op_d1, shift_d1, \ 450 d2_bits, shift_op_d2, shift_d2) \ 451 static inline int16_t name(uint32_t opcode) \ 452 { \ 453 return \ 454 (((opcode >> (shift_op_d0)) & ((1 << (d0_bits)) - 1)) << (shift_d0)) | \ 455 (((opcode >> (shift_op_d1)) & ((1 << (d1_bits)) - 1)) << (shift_d1)) | \ 456 (((opcode >> (shift_op_d2)) & ((1 << (d2_bits)) - 1)) << (shift_d2)); \ 457 } 458 459 460 /* Opcode part 1 */ 461 EXTRACT_HELPER(opc1, 26, 6); 462 /* Opcode part 2 */ 463 EXTRACT_HELPER(opc2, 1, 5); 464 /* Opcode part 3 */ 465 EXTRACT_HELPER(opc3, 6, 5); 466 /* Opcode part 4 */ 467 EXTRACT_HELPER(opc4, 16, 5); 468 /* Update Cr0 flags */ 469 EXTRACT_HELPER(Rc, 0, 1); 470 /* Update Cr6 flags (Altivec) */ 471 EXTRACT_HELPER(Rc21, 10, 
1); 472 /* Destination */ 473 EXTRACT_HELPER(rD, 21, 5); 474 /* Source */ 475 EXTRACT_HELPER(rS, 21, 5); 476 /* First operand */ 477 EXTRACT_HELPER(rA, 16, 5); 478 /* Second operand */ 479 EXTRACT_HELPER(rB, 11, 5); 480 /* Third operand */ 481 EXTRACT_HELPER(rC, 6, 5); 482 /*** Get CRn ***/ 483 EXTRACT_HELPER(crfD, 23, 3); 484 EXTRACT_HELPER(crfS, 18, 3); 485 EXTRACT_HELPER(crbD, 21, 5); 486 EXTRACT_HELPER(crbA, 16, 5); 487 EXTRACT_HELPER(crbB, 11, 5); 488 /* SPR / TBL */ 489 EXTRACT_HELPER(_SPR, 11, 10); 490 static inline uint32_t SPR(uint32_t opcode) 491 { 492 uint32_t sprn = _SPR(opcode); 493 494 return ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5); 495 } 496 /*** Get constants ***/ 497 /* 16 bits signed immediate value */ 498 EXTRACT_SHELPER(SIMM, 0, 16); 499 /* 16 bits unsigned immediate value */ 500 EXTRACT_HELPER(UIMM, 0, 16); 501 /* 5 bits signed immediate value */ 502 EXTRACT_HELPER(SIMM5, 16, 5); 503 /* 5 bits signed immediate value */ 504 EXTRACT_HELPER(UIMM5, 16, 5); 505 /* 4 bits unsigned immediate value */ 506 EXTRACT_HELPER(UIMM4, 16, 4); 507 /* Bit count */ 508 EXTRACT_HELPER(NB, 11, 5); 509 /* Shift count */ 510 EXTRACT_HELPER(SH, 11, 5); 511 /* Vector shift count */ 512 EXTRACT_HELPER(VSH, 6, 4); 513 /* Mask start */ 514 EXTRACT_HELPER(MB, 6, 5); 515 /* Mask end */ 516 EXTRACT_HELPER(ME, 1, 5); 517 /* Trap operand */ 518 EXTRACT_HELPER(TO, 21, 5); 519 520 EXTRACT_HELPER(CRM, 12, 8); 521 522 #ifndef CONFIG_USER_ONLY 523 EXTRACT_HELPER(SR, 16, 4); 524 #endif 525 526 /* mtfsf/mtfsfi */ 527 EXTRACT_HELPER(FPBF, 23, 3); 528 EXTRACT_HELPER(FPIMM, 12, 4); 529 EXTRACT_HELPER(FPL, 25, 1); 530 EXTRACT_HELPER(FPFLM, 17, 8); 531 EXTRACT_HELPER(FPW, 16, 1); 532 533 /* addpcis */ 534 EXTRACT_HELPER_DXFORM(DX, 10, 6, 6, 5, 16, 1, 1, 0, 0) 535 #if defined(TARGET_PPC64) 536 /* darn */ 537 EXTRACT_HELPER(L, 16, 2); 538 #endif 539 540 /*** Jump target decoding ***/ 541 /* Immediate address */ 542 static inline target_ulong LI(uint32_t opcode) 543 { 544 return (opcode >> 0) & 0x03FFFFFC; 545 } 546 547 static inline uint32_t BD(uint32_t opcode) 548 { 549 return (opcode >> 0) & 0xFFFC; 550 } 551 552 EXTRACT_HELPER(BO, 21, 5); 553 EXTRACT_HELPER(BI, 16, 5); 554 /* Absolute/relative address */ 555 EXTRACT_HELPER(AA, 1, 1); 556 /* Link */ 557 EXTRACT_HELPER(LK, 0, 1); 558 559 /* DFP Z22-form */ 560 EXTRACT_HELPER(DCM, 10, 6) 561 562 /* DFP Z23-form */ 563 EXTRACT_HELPER(RMC, 9, 2) 564 565 EXTRACT_HELPER_SPLIT(xT, 0, 1, 21, 5); 566 EXTRACT_HELPER_SPLIT(xS, 0, 1, 21, 5); 567 EXTRACT_HELPER_SPLIT(xA, 2, 1, 16, 5); 568 EXTRACT_HELPER_SPLIT(xB, 1, 1, 11, 5); 569 EXTRACT_HELPER_SPLIT(xC, 3, 1, 6, 5); 570 EXTRACT_HELPER(DM, 8, 2); 571 EXTRACT_HELPER(UIM, 16, 2); 572 EXTRACT_HELPER(SHW, 8, 2); 573 EXTRACT_HELPER(SP, 19, 2); 574 EXTRACT_HELPER(IMM8, 11, 8); 575 576 /*****************************************************************************/ 577 /* PowerPC instructions table */ 578 579 #if defined(DO_PPC_STATISTICS) 580 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \ 581 { \ 582 .opc1 = op1, \ 583 .opc2 = op2, \ 584 .opc3 = op3, \ 585 .opc4 = 0xff, \ 586 .handler = { \ 587 .inval1 = invl, \ 588 .type = _typ, \ 589 .type2 = _typ2, \ 590 .handler = &gen_##name, \ 591 .oname = stringify(name), \ 592 }, \ 593 .oname = stringify(name), \ 594 } 595 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \ 596 { \ 597 .opc1 = op1, \ 598 .opc2 = op2, \ 599 .opc3 = op3, \ 600 .opc4 = 0xff, \ 601 .handler = { \ 602 .inval1 = invl1, \ 603 .inval2 = invl2, \ 604 .type = _typ, \ 605 .type2 
= _typ2, \ 606 .handler = &gen_##name, \ 607 .oname = stringify(name), \ 608 }, \ 609 .oname = stringify(name), \ 610 } 611 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \ 612 { \ 613 .opc1 = op1, \ 614 .opc2 = op2, \ 615 .opc3 = op3, \ 616 .opc4 = 0xff, \ 617 .handler = { \ 618 .inval1 = invl, \ 619 .type = _typ, \ 620 .type2 = _typ2, \ 621 .handler = &gen_##name, \ 622 .oname = onam, \ 623 }, \ 624 .oname = onam, \ 625 } 626 #define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2) \ 627 { \ 628 .opc1 = op1, \ 629 .opc2 = op2, \ 630 .opc3 = op3, \ 631 .opc4 = op4, \ 632 .handler = { \ 633 .inval1 = invl, \ 634 .type = _typ, \ 635 .type2 = _typ2, \ 636 .handler = &gen_##name, \ 637 .oname = stringify(name), \ 638 }, \ 639 .oname = stringify(name), \ 640 } 641 #define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2) \ 642 { \ 643 .opc1 = op1, \ 644 .opc2 = op2, \ 645 .opc3 = op3, \ 646 .opc4 = op4, \ 647 .handler = { \ 648 .inval1 = invl, \ 649 .type = _typ, \ 650 .type2 = _typ2, \ 651 .handler = &gen_##name, \ 652 .oname = onam, \ 653 }, \ 654 .oname = onam, \ 655 } 656 #else 657 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \ 658 { \ 659 .opc1 = op1, \ 660 .opc2 = op2, \ 661 .opc3 = op3, \ 662 .opc4 = 0xff, \ 663 .handler = { \ 664 .inval1 = invl, \ 665 .type = _typ, \ 666 .type2 = _typ2, \ 667 .handler = &gen_##name, \ 668 }, \ 669 .oname = stringify(name), \ 670 } 671 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \ 672 { \ 673 .opc1 = op1, \ 674 .opc2 = op2, \ 675 .opc3 = op3, \ 676 .opc4 = 0xff, \ 677 .handler = { \ 678 .inval1 = invl1, \ 679 .inval2 = invl2, \ 680 .type = _typ, \ 681 .type2 = _typ2, \ 682 .handler = &gen_##name, \ 683 }, \ 684 .oname = stringify(name), \ 685 } 686 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \ 687 { \ 688 .opc1 = op1, \ 689 .opc2 = op2, \ 690 .opc3 = op3, \ 691 .opc4 = 0xff, \ 692 .handler = { \ 693 .inval1 = invl, \ 694 .type = _typ, \ 695 .type2 = _typ2, \ 696 .handler = &gen_##name, \ 697 }, \ 698 .oname = onam, \ 699 } 700 #define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2) \ 701 { \ 702 .opc1 = op1, \ 703 .opc2 = op2, \ 704 .opc3 = op3, \ 705 .opc4 = op4, \ 706 .handler = { \ 707 .inval1 = invl, \ 708 .type = _typ, \ 709 .type2 = _typ2, \ 710 .handler = &gen_##name, \ 711 }, \ 712 .oname = stringify(name), \ 713 } 714 #define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2) \ 715 { \ 716 .opc1 = op1, \ 717 .opc2 = op2, \ 718 .opc3 = op3, \ 719 .opc4 = op4, \ 720 .handler = { \ 721 .inval1 = invl, \ 722 .type = _typ, \ 723 .type2 = _typ2, \ 724 .handler = &gen_##name, \ 725 }, \ 726 .oname = onam, \ 727 } 728 #endif 729 730 /* SPR load/store helpers */ 731 static inline void gen_load_spr(TCGv t, int reg) 732 { 733 tcg_gen_ld_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg])); 734 } 735 736 static inline void gen_store_spr(int reg, TCGv t) 737 { 738 tcg_gen_st_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg])); 739 } 740 741 /* Invalid instruction */ 742 static void gen_invalid(DisasContext *ctx) 743 { 744 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 745 } 746 747 static opc_handler_t invalid_handler = { 748 .inval1 = 0xFFFFFFFF, 749 .inval2 = 0xFFFFFFFF, 750 .type = PPC_NONE, 751 .type2 = PPC_NONE, 752 .handler = gen_invalid, 753 }; 754 755 /*** Integer comparison ***/ 756 757 static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf) 758 { 759 TCGv t0 = tcg_temp_new(); 760 TCGv_i32 t1 = tcg_temp_new_i32(); 761 
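    /* Build the 4-bit CR field as LT | GT | EQ | SO: seed it with the
     * current SO copy, then OR in exactly one of LT, GT or EQ from the
     * setcond results below.
     */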
    tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so);

    tcg_gen_setcond_tl((s ? TCG_COND_LT: TCG_COND_LTU), t0, arg0, arg1);
    tcg_gen_trunc_tl_i32(t1, t0);
    tcg_gen_shli_i32(t1, t1, CRF_LT);
    tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t1);

    tcg_gen_setcond_tl((s ? TCG_COND_GT: TCG_COND_GTU), t0, arg0, arg1);
    tcg_gen_trunc_tl_i32(t1, t0);
    tcg_gen_shli_i32(t1, t1, CRF_GT);
    tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t1);

    tcg_gen_setcond_tl(TCG_COND_EQ, t0, arg0, arg1);
    tcg_gen_trunc_tl_i32(t1, t0);
    tcg_gen_shli_i32(t1, t1, CRF_EQ);
    tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t1);

    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}

static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
{
    TCGv t0 = tcg_const_tl(arg1);
    gen_op_cmp(arg0, t0, s, crf);
    tcg_temp_free(t0);
}

static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
{
    TCGv t0, t1;
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    if (s) {
        tcg_gen_ext32s_tl(t0, arg0);
        tcg_gen_ext32s_tl(t1, arg1);
    } else {
        tcg_gen_ext32u_tl(t0, arg0);
        tcg_gen_ext32u_tl(t1, arg1);
    }
    gen_op_cmp(t0, t1, s, crf);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
}

static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
{
    TCGv t0 = tcg_const_tl(arg1);
    gen_op_cmp32(arg0, t0, s, crf);
    tcg_temp_free(t0);
}

static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
{
    if (NARROW_MODE(ctx)) {
        gen_op_cmpi32(reg, 0, 1, 0);
    } else {
        gen_op_cmpi(reg, 0, 1, 0);
    }
}

/* cmp */
static void gen_cmp(DisasContext *ctx)
{
    if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
        gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                   1, crfD(ctx->opcode));
    } else {
        gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                     1, crfD(ctx->opcode));
    }
}

/* cmpi */
static void gen_cmpi(DisasContext *ctx)
{
    if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
        gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
                    1, crfD(ctx->opcode));
    } else {
        gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
                      1, crfD(ctx->opcode));
    }
}

/* cmpl */
static void gen_cmpl(DisasContext *ctx)
{
    if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
        gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                   0, crfD(ctx->opcode));
    } else {
        gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                     0, crfD(ctx->opcode));
    }
}

/* cmpli */
static void gen_cmpli(DisasContext *ctx)
{
    if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
        gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
                    0, crfD(ctx->opcode));
    } else {
        gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
                      0, crfD(ctx->opcode));
    }
}

/* cmprb - range comparison: isupper, isalpha, islower */
static void gen_cmprb(DisasContext *ctx)
{
    TCGv_i32 src1 = tcg_temp_new_i32();
    TCGv_i32 src2 = tcg_temp_new_i32();
    TCGv_i32 src2lo = tcg_temp_new_i32();
    TCGv_i32 src2hi = tcg_temp_new_i32();
    TCGv_i32 crf = cpu_crf[crfD(ctx->opcode)];

    tcg_gen_trunc_tl_i32(src1, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_trunc_tl_i32(src2, cpu_gpr[rB(ctx->opcode)]);

    tcg_gen_andi_i32(src1, src1, 0xFF);
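    /* RB packs each compare range as two bytes, with the lower bound in the
     * lower-order byte; extract the first range and check
     * lo <= src1 && src1 <= hi.
     */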
tcg_gen_ext8u_i32(src2lo, src2); 885 tcg_gen_shri_i32(src2, src2, 8); 886 tcg_gen_ext8u_i32(src2hi, src2); 887 888 tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1); 889 tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi); 890 tcg_gen_and_i32(crf, src2lo, src2hi); 891 892 if (ctx->opcode & 0x00200000) { 893 tcg_gen_shri_i32(src2, src2, 8); 894 tcg_gen_ext8u_i32(src2lo, src2); 895 tcg_gen_shri_i32(src2, src2, 8); 896 tcg_gen_ext8u_i32(src2hi, src2); 897 tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1); 898 tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi); 899 tcg_gen_and_i32(src2lo, src2lo, src2hi); 900 tcg_gen_or_i32(crf, crf, src2lo); 901 } 902 tcg_gen_shli_i32(crf, crf, CRF_GT); 903 tcg_temp_free_i32(src1); 904 tcg_temp_free_i32(src2); 905 tcg_temp_free_i32(src2lo); 906 tcg_temp_free_i32(src2hi); 907 } 908 909 #if defined(TARGET_PPC64) 910 /* cmpeqb */ 911 static void gen_cmpeqb(DisasContext *ctx) 912 { 913 gen_helper_cmpeqb(cpu_crf[crfD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 914 cpu_gpr[rB(ctx->opcode)]); 915 } 916 #endif 917 918 /* isel (PowerPC 2.03 specification) */ 919 static void gen_isel(DisasContext *ctx) 920 { 921 uint32_t bi = rC(ctx->opcode); 922 uint32_t mask = 0x08 >> (bi & 0x03); 923 TCGv t0 = tcg_temp_new(); 924 TCGv zr; 925 926 tcg_gen_extu_i32_tl(t0, cpu_crf[bi >> 2]); 927 tcg_gen_andi_tl(t0, t0, mask); 928 929 zr = tcg_const_tl(0); 930 tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rD(ctx->opcode)], t0, zr, 931 rA(ctx->opcode) ? cpu_gpr[rA(ctx->opcode)] : zr, 932 cpu_gpr[rB(ctx->opcode)]); 933 tcg_temp_free(zr); 934 tcg_temp_free(t0); 935 } 936 937 /* cmpb: PowerPC 2.05 specification */ 938 static void gen_cmpb(DisasContext *ctx) 939 { 940 gen_helper_cmpb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 941 cpu_gpr[rB(ctx->opcode)]); 942 } 943 944 /*** Integer arithmetic ***/ 945 946 static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0, 947 TCGv arg1, TCGv arg2, int sub) 948 { 949 TCGv t0 = tcg_temp_new(); 950 951 tcg_gen_xor_tl(cpu_ov, arg0, arg2); 952 tcg_gen_xor_tl(t0, arg1, arg2); 953 if (sub) { 954 tcg_gen_and_tl(cpu_ov, cpu_ov, t0); 955 } else { 956 tcg_gen_andc_tl(cpu_ov, cpu_ov, t0); 957 } 958 tcg_temp_free(t0); 959 if (NARROW_MODE(ctx)) { 960 tcg_gen_ext32s_tl(cpu_ov, cpu_ov); 961 } 962 tcg_gen_shri_tl(cpu_ov, cpu_ov, TARGET_LONG_BITS - 1); 963 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 964 } 965 966 /* Common add function */ 967 static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1, 968 TCGv arg2, bool add_ca, bool compute_ca, 969 bool compute_ov, bool compute_rc0) 970 { 971 TCGv t0 = ret; 972 973 if (compute_ca || compute_ov) { 974 t0 = tcg_temp_new(); 975 } 976 977 if (compute_ca) { 978 if (NARROW_MODE(ctx)) { 979 /* Caution: a non-obvious corner case of the spec is that we 980 must produce the *entire* 64-bit addition, but produce the 981 carry into bit 32. 
             */
            TCGv t1 = tcg_temp_new();
            tcg_gen_xor_tl(t1, arg1, arg2);        /* add without carry */
            tcg_gen_add_tl(t0, arg1, arg2);
            if (add_ca) {
                tcg_gen_add_tl(t0, t0, cpu_ca);
            }
            tcg_gen_xor_tl(cpu_ca, t0, t1);        /* bits changed w/ carry */
            tcg_temp_free(t1);
            tcg_gen_shri_tl(cpu_ca, cpu_ca, 32);   /* extract bit 32 */
            tcg_gen_andi_tl(cpu_ca, cpu_ca, 1);
        } else {
            TCGv zero = tcg_const_tl(0);
            if (add_ca) {
                tcg_gen_add2_tl(t0, cpu_ca, arg1, zero, cpu_ca, zero);
                tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, arg2, zero);
            } else {
                tcg_gen_add2_tl(t0, cpu_ca, arg1, zero, arg2, zero);
            }
            tcg_temp_free(zero);
        }
    } else {
        tcg_gen_add_tl(t0, arg1, arg2);
        if (add_ca) {
            tcg_gen_add_tl(t0, t0, cpu_ca);
        }
    }

    if (compute_ov) {
        gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
    }
    if (unlikely(compute_rc0)) {
        gen_set_Rc0(ctx, t0);
    }

    if (!TCGV_EQUAL(t0, ret)) {
        tcg_gen_mov_tl(ret, t0);
        tcg_temp_free(t0);
    }
}
/* Add functions with two operands */
#define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov)         \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)],                           \
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],      \
                     add_ca, compute_ca, compute_ov, Rc(ctx->opcode));        \
}
/* Add functions with one operand and one immediate */
#define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val,                        \
                                add_ca, compute_ca, compute_ov)               \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    TCGv t0 = tcg_const_tl(const_val);                                        \
    gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)],                           \
                     cpu_gpr[rA(ctx->opcode)], t0,                            \
                     add_ca, compute_ca, compute_ov, Rc(ctx->opcode));        \
    tcg_temp_free(t0);                                                        \
}

/* add add. addo addo. */
GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
/* addc addc. addco addco. */
GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
/* adde adde. addeo addeo. */
GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
/* addme addme. addmeo addmeo. */
GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
/* addze addze.
addzeo addzeo.*/ 1054 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0) 1055 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1) 1056 /* addi */ 1057 static void gen_addi(DisasContext *ctx) 1058 { 1059 target_long simm = SIMM(ctx->opcode); 1060 1061 if (rA(ctx->opcode) == 0) { 1062 /* li case */ 1063 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm); 1064 } else { 1065 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], 1066 cpu_gpr[rA(ctx->opcode)], simm); 1067 } 1068 } 1069 /* addic addic.*/ 1070 static inline void gen_op_addic(DisasContext *ctx, bool compute_rc0) 1071 { 1072 TCGv c = tcg_const_tl(SIMM(ctx->opcode)); 1073 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1074 c, 0, 1, 0, compute_rc0); 1075 tcg_temp_free(c); 1076 } 1077 1078 static void gen_addic(DisasContext *ctx) 1079 { 1080 gen_op_addic(ctx, 0); 1081 } 1082 1083 static void gen_addic_(DisasContext *ctx) 1084 { 1085 gen_op_addic(ctx, 1); 1086 } 1087 1088 /* addis */ 1089 static void gen_addis(DisasContext *ctx) 1090 { 1091 target_long simm = SIMM(ctx->opcode); 1092 1093 if (rA(ctx->opcode) == 0) { 1094 /* lis case */ 1095 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm << 16); 1096 } else { 1097 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], 1098 cpu_gpr[rA(ctx->opcode)], simm << 16); 1099 } 1100 } 1101 1102 /* addpcis */ 1103 static void gen_addpcis(DisasContext *ctx) 1104 { 1105 target_long d = DX(ctx->opcode); 1106 1107 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], ctx->nip + (d << 16)); 1108 } 1109 1110 static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1, 1111 TCGv arg2, int sign, int compute_ov) 1112 { 1113 TCGv_i32 t0 = tcg_temp_new_i32(); 1114 TCGv_i32 t1 = tcg_temp_new_i32(); 1115 TCGv_i32 t2 = tcg_temp_new_i32(); 1116 TCGv_i32 t3 = tcg_temp_new_i32(); 1117 1118 tcg_gen_trunc_tl_i32(t0, arg1); 1119 tcg_gen_trunc_tl_i32(t1, arg2); 1120 if (sign) { 1121 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN); 1122 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1); 1123 tcg_gen_and_i32(t2, t2, t3); 1124 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0); 1125 tcg_gen_or_i32(t2, t2, t3); 1126 tcg_gen_movi_i32(t3, 0); 1127 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1); 1128 tcg_gen_div_i32(t3, t0, t1); 1129 tcg_gen_extu_i32_tl(ret, t3); 1130 } else { 1131 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t1, 0); 1132 tcg_gen_movi_i32(t3, 0); 1133 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1); 1134 tcg_gen_divu_i32(t3, t0, t1); 1135 tcg_gen_extu_i32_tl(ret, t3); 1136 } 1137 if (compute_ov) { 1138 tcg_gen_extu_i32_tl(cpu_ov, t2); 1139 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 1140 } 1141 tcg_temp_free_i32(t0); 1142 tcg_temp_free_i32(t1); 1143 tcg_temp_free_i32(t2); 1144 tcg_temp_free_i32(t3); 1145 1146 if (unlikely(Rc(ctx->opcode) != 0)) 1147 gen_set_Rc0(ctx, ret); 1148 } 1149 /* Div functions */ 1150 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \ 1151 static void glue(gen_, name)(DisasContext *ctx) \ 1152 { \ 1153 gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)], \ 1154 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 1155 sign, compute_ov); \ 1156 } 1157 /* divwu divwu. divwuo divwuo. */ 1158 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0); 1159 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1); 1160 /* divw divw. divwo divwo. */ 1161 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0); 1162 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1); 1163 1164 /* div[wd]eu[o][.] 
 */
#define GEN_DIVE(name, hlpr, compute_ov)                                      \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 t0 = tcg_const_i32(compute_ov);                                  \
    gen_helper_##hlpr(cpu_gpr[rD(ctx->opcode)], cpu_env,                      \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);\
    tcg_temp_free_i32(t0);                                                    \
    if (unlikely(Rc(ctx->opcode) != 0)) {                                     \
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);                           \
    }                                                                         \
}

GEN_DIVE(divweu, divweu, 0);
GEN_DIVE(divweuo, divweu, 1);
GEN_DIVE(divwe, divwe, 0);
GEN_DIVE(divweo, divwe, 1);

#if defined(TARGET_PPC64)
static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign, int compute_ov)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv_i64 t2 = tcg_temp_new_i64();
    TCGv_i64 t3 = tcg_temp_new_i64();

    tcg_gen_mov_i64(t0, arg1);
    tcg_gen_mov_i64(t1, arg2);
    if (sign) {
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i64(t2, t2, t3);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i64(t2, t2, t3);
        tcg_gen_movi_i64(t3, 0);
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_div_i64(ret, t0, t1);
    } else {
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t1, 0);
        tcg_gen_movi_i64(t3, 0);
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_divu_i64(ret, t0, t1);
    }
    if (compute_ov) {
        tcg_gen_mov_tl(cpu_ov, t2);
        tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
    tcg_temp_free_i64(t3);

    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, ret);
}

#define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov)                      \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      sign, compute_ov);                                      \
}
/* divdu divdu. divduo divduo. */
GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
/* divd divd. divdo divdo.
*/ 1232 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0); 1233 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1); 1234 1235 GEN_DIVE(divdeu, divdeu, 0); 1236 GEN_DIVE(divdeuo, divdeu, 1); 1237 GEN_DIVE(divde, divde, 0); 1238 GEN_DIVE(divdeo, divde, 1); 1239 #endif 1240 1241 static inline void gen_op_arith_modw(DisasContext *ctx, TCGv ret, TCGv arg1, 1242 TCGv arg2, int sign) 1243 { 1244 TCGv_i32 t0 = tcg_temp_new_i32(); 1245 TCGv_i32 t1 = tcg_temp_new_i32(); 1246 1247 tcg_gen_trunc_tl_i32(t0, arg1); 1248 tcg_gen_trunc_tl_i32(t1, arg2); 1249 if (sign) { 1250 TCGv_i32 t2 = tcg_temp_new_i32(); 1251 TCGv_i32 t3 = tcg_temp_new_i32(); 1252 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN); 1253 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1); 1254 tcg_gen_and_i32(t2, t2, t3); 1255 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0); 1256 tcg_gen_or_i32(t2, t2, t3); 1257 tcg_gen_movi_i32(t3, 0); 1258 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1); 1259 tcg_gen_rem_i32(t3, t0, t1); 1260 tcg_gen_ext_i32_tl(ret, t3); 1261 tcg_temp_free_i32(t2); 1262 tcg_temp_free_i32(t3); 1263 } else { 1264 TCGv_i32 t2 = tcg_const_i32(1); 1265 TCGv_i32 t3 = tcg_const_i32(0); 1266 tcg_gen_movcond_i32(TCG_COND_EQ, t1, t1, t3, t2, t1); 1267 tcg_gen_remu_i32(t3, t0, t1); 1268 tcg_gen_extu_i32_tl(ret, t3); 1269 tcg_temp_free_i32(t2); 1270 tcg_temp_free_i32(t3); 1271 } 1272 tcg_temp_free_i32(t0); 1273 tcg_temp_free_i32(t1); 1274 } 1275 1276 #define GEN_INT_ARITH_MODW(name, opc3, sign) \ 1277 static void glue(gen_, name)(DisasContext *ctx) \ 1278 { \ 1279 gen_op_arith_modw(ctx, cpu_gpr[rD(ctx->opcode)], \ 1280 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 1281 sign); \ 1282 } 1283 1284 GEN_INT_ARITH_MODW(moduw, 0x08, 0); 1285 GEN_INT_ARITH_MODW(modsw, 0x18, 1); 1286 1287 #if defined(TARGET_PPC64) 1288 static inline void gen_op_arith_modd(DisasContext *ctx, TCGv ret, TCGv arg1, 1289 TCGv arg2, int sign) 1290 { 1291 TCGv_i64 t0 = tcg_temp_new_i64(); 1292 TCGv_i64 t1 = tcg_temp_new_i64(); 1293 1294 tcg_gen_mov_i64(t0, arg1); 1295 tcg_gen_mov_i64(t1, arg2); 1296 if (sign) { 1297 TCGv_i64 t2 = tcg_temp_new_i64(); 1298 TCGv_i64 t3 = tcg_temp_new_i64(); 1299 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN); 1300 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1); 1301 tcg_gen_and_i64(t2, t2, t3); 1302 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0); 1303 tcg_gen_or_i64(t2, t2, t3); 1304 tcg_gen_movi_i64(t3, 0); 1305 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1); 1306 tcg_gen_rem_i64(ret, t0, t1); 1307 tcg_temp_free_i64(t2); 1308 tcg_temp_free_i64(t3); 1309 } else { 1310 TCGv_i64 t2 = tcg_const_i64(1); 1311 TCGv_i64 t3 = tcg_const_i64(0); 1312 tcg_gen_movcond_i64(TCG_COND_EQ, t1, t1, t3, t2, t1); 1313 tcg_gen_remu_i64(ret, t0, t1); 1314 tcg_temp_free_i64(t2); 1315 tcg_temp_free_i64(t3); 1316 } 1317 tcg_temp_free_i64(t0); 1318 tcg_temp_free_i64(t1); 1319 } 1320 1321 #define GEN_INT_ARITH_MODD(name, opc3, sign) \ 1322 static void glue(gen_, name)(DisasContext *ctx) \ 1323 { \ 1324 gen_op_arith_modd(ctx, cpu_gpr[rD(ctx->opcode)], \ 1325 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 1326 sign); \ 1327 } 1328 1329 GEN_INT_ARITH_MODD(modud, 0x08, 0); 1330 GEN_INT_ARITH_MODD(modsd, 0x18, 1); 1331 #endif 1332 1333 /* mulhw mulhw. 
*/ 1334 static void gen_mulhw(DisasContext *ctx) 1335 { 1336 TCGv_i32 t0 = tcg_temp_new_i32(); 1337 TCGv_i32 t1 = tcg_temp_new_i32(); 1338 1339 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); 1340 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); 1341 tcg_gen_muls2_i32(t0, t1, t0, t1); 1342 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1); 1343 tcg_temp_free_i32(t0); 1344 tcg_temp_free_i32(t1); 1345 if (unlikely(Rc(ctx->opcode) != 0)) 1346 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1347 } 1348 1349 /* mulhwu mulhwu. */ 1350 static void gen_mulhwu(DisasContext *ctx) 1351 { 1352 TCGv_i32 t0 = tcg_temp_new_i32(); 1353 TCGv_i32 t1 = tcg_temp_new_i32(); 1354 1355 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); 1356 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); 1357 tcg_gen_mulu2_i32(t0, t1, t0, t1); 1358 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1); 1359 tcg_temp_free_i32(t0); 1360 tcg_temp_free_i32(t1); 1361 if (unlikely(Rc(ctx->opcode) != 0)) 1362 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1363 } 1364 1365 /* mullw mullw. */ 1366 static void gen_mullw(DisasContext *ctx) 1367 { 1368 #if defined(TARGET_PPC64) 1369 TCGv_i64 t0, t1; 1370 t0 = tcg_temp_new_i64(); 1371 t1 = tcg_temp_new_i64(); 1372 tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]); 1373 tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]); 1374 tcg_gen_mul_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); 1375 tcg_temp_free(t0); 1376 tcg_temp_free(t1); 1377 #else 1378 tcg_gen_mul_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1379 cpu_gpr[rB(ctx->opcode)]); 1380 #endif 1381 if (unlikely(Rc(ctx->opcode) != 0)) 1382 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1383 } 1384 1385 /* mullwo mullwo. */ 1386 static void gen_mullwo(DisasContext *ctx) 1387 { 1388 TCGv_i32 t0 = tcg_temp_new_i32(); 1389 TCGv_i32 t1 = tcg_temp_new_i32(); 1390 1391 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); 1392 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); 1393 tcg_gen_muls2_i32(t0, t1, t0, t1); 1394 #if defined(TARGET_PPC64) 1395 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); 1396 #else 1397 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], t0); 1398 #endif 1399 1400 tcg_gen_sari_i32(t0, t0, 31); 1401 tcg_gen_setcond_i32(TCG_COND_NE, t0, t0, t1); 1402 tcg_gen_extu_i32_tl(cpu_ov, t0); 1403 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 1404 1405 tcg_temp_free_i32(t0); 1406 tcg_temp_free_i32(t1); 1407 if (unlikely(Rc(ctx->opcode) != 0)) 1408 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1409 } 1410 1411 /* mulli */ 1412 static void gen_mulli(DisasContext *ctx) 1413 { 1414 tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1415 SIMM(ctx->opcode)); 1416 } 1417 1418 #if defined(TARGET_PPC64) 1419 /* mulhd mulhd. */ 1420 static void gen_mulhd(DisasContext *ctx) 1421 { 1422 TCGv lo = tcg_temp_new(); 1423 tcg_gen_muls2_tl(lo, cpu_gpr[rD(ctx->opcode)], 1424 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 1425 tcg_temp_free(lo); 1426 if (unlikely(Rc(ctx->opcode) != 0)) { 1427 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1428 } 1429 } 1430 1431 /* mulhdu mulhdu. */ 1432 static void gen_mulhdu(DisasContext *ctx) 1433 { 1434 TCGv lo = tcg_temp_new(); 1435 tcg_gen_mulu2_tl(lo, cpu_gpr[rD(ctx->opcode)], 1436 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 1437 tcg_temp_free(lo); 1438 if (unlikely(Rc(ctx->opcode) != 0)) { 1439 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1440 } 1441 } 1442 1443 /* mulld mulld. 
*/ 1444 static void gen_mulld(DisasContext *ctx) 1445 { 1446 tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1447 cpu_gpr[rB(ctx->opcode)]); 1448 if (unlikely(Rc(ctx->opcode) != 0)) 1449 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1450 } 1451 1452 /* mulldo mulldo. */ 1453 static void gen_mulldo(DisasContext *ctx) 1454 { 1455 TCGv_i64 t0 = tcg_temp_new_i64(); 1456 TCGv_i64 t1 = tcg_temp_new_i64(); 1457 1458 tcg_gen_muls2_i64(t0, t1, cpu_gpr[rA(ctx->opcode)], 1459 cpu_gpr[rB(ctx->opcode)]); 1460 tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], t0); 1461 1462 tcg_gen_sari_i64(t0, t0, 63); 1463 tcg_gen_setcond_i64(TCG_COND_NE, cpu_ov, t0, t1); 1464 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 1465 1466 tcg_temp_free_i64(t0); 1467 tcg_temp_free_i64(t1); 1468 1469 if (unlikely(Rc(ctx->opcode) != 0)) { 1470 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1471 } 1472 } 1473 #endif 1474 1475 /* Common subf function */ 1476 static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1, 1477 TCGv arg2, bool add_ca, bool compute_ca, 1478 bool compute_ov, bool compute_rc0) 1479 { 1480 TCGv t0 = ret; 1481 1482 if (compute_ca || compute_ov) { 1483 t0 = tcg_temp_new(); 1484 } 1485 1486 if (compute_ca) { 1487 /* dest = ~arg1 + arg2 [+ ca]. */ 1488 if (NARROW_MODE(ctx)) { 1489 /* Caution: a non-obvious corner case of the spec is that we 1490 must produce the *entire* 64-bit addition, but produce the 1491 carry into bit 32. */ 1492 TCGv inv1 = tcg_temp_new(); 1493 TCGv t1 = tcg_temp_new(); 1494 tcg_gen_not_tl(inv1, arg1); 1495 if (add_ca) { 1496 tcg_gen_add_tl(t0, arg2, cpu_ca); 1497 } else { 1498 tcg_gen_addi_tl(t0, arg2, 1); 1499 } 1500 tcg_gen_xor_tl(t1, arg2, inv1); /* add without carry */ 1501 tcg_gen_add_tl(t0, t0, inv1); 1502 tcg_temp_free(inv1); 1503 tcg_gen_xor_tl(cpu_ca, t0, t1); /* bits changes w/ carry */ 1504 tcg_temp_free(t1); 1505 tcg_gen_shri_tl(cpu_ca, cpu_ca, 32); /* extract bit 32 */ 1506 tcg_gen_andi_tl(cpu_ca, cpu_ca, 1); 1507 } else if (add_ca) { 1508 TCGv zero, inv1 = tcg_temp_new(); 1509 tcg_gen_not_tl(inv1, arg1); 1510 zero = tcg_const_tl(0); 1511 tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero); 1512 tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero); 1513 tcg_temp_free(zero); 1514 tcg_temp_free(inv1); 1515 } else { 1516 tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1); 1517 tcg_gen_sub_tl(t0, arg2, arg1); 1518 } 1519 } else if (add_ca) { 1520 /* Since we're ignoring carry-out, we can simplify the 1521 standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1. 
         */
        tcg_gen_sub_tl(t0, arg2, arg1);
        tcg_gen_add_tl(t0, t0, cpu_ca);
        tcg_gen_subi_tl(t0, t0, 1);
    } else {
        tcg_gen_sub_tl(t0, arg2, arg1);
    }

    if (compute_ov) {
        gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
    }
    if (unlikely(compute_rc0)) {
        gen_set_Rc0(ctx, t0);
    }

    if (!TCGV_EQUAL(t0, ret)) {
        tcg_gen_mov_tl(ret, t0);
        tcg_temp_free(t0);
    }
}
/* Sub functions with two operands */
#define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov)        \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      add_ca, compute_ca, compute_ov, Rc(ctx->opcode));       \
}
/* Sub functions with one operand and one immediate */
#define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val,                       \
                                 add_ca, compute_ca, compute_ov)              \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    TCGv t0 = tcg_const_tl(const_val);                                        \
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], t0,                           \
                      add_ca, compute_ca, compute_ov, Rc(ctx->opcode));       \
    tcg_temp_free(t0);                                                        \
}
/* subf subf. subfo subfo. */
GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
/* subfc subfc. subfco subfco. */
GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
/* subfe subfe. subfeo subfeo. */
GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
/* subfme subfme. subfmeo subfmeo. */
GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
/* subfze subfze. subfzeo subfzeo. */
GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)

/* subfic */
static void gen_subfic(DisasContext *ctx)
{
    TCGv c = tcg_const_tl(SIMM(ctx->opcode));
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                      c, 0, 1, 0, 0);
    tcg_temp_free(c);
}

/* neg neg. nego nego. */
static inline void gen_op_arith_neg(DisasContext *ctx, bool compute_ov)
{
    TCGv zero = tcg_const_tl(0);
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                      zero, 0, 0, compute_ov, Rc(ctx->opcode));
    tcg_temp_free(zero);
}

static void gen_neg(DisasContext *ctx)
{
    gen_op_arith_neg(ctx, 0);
}

static void gen_nego(DisasContext *ctx)
{
    gen_op_arith_neg(ctx, 1);
}

/*** Integer logical ***/
#define GEN_LOGICAL2(name, tcg_op, opc, type)                                 \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],                \
           cpu_gpr[rB(ctx->opcode)]);                                         \
    if (unlikely(Rc(ctx->opcode) != 0))                                       \
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);                           \
}

#define GEN_LOGICAL1(name, tcg_op, opc, type)                                 \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);               \
    if (unlikely(Rc(ctx->opcode) != 0))                                       \
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);                           \
}

/* and & and. */
GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER);
/* andc & andc. */
GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER);

/* andi.
*/ 1628 static void gen_andi_(DisasContext *ctx) 1629 { 1630 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode)); 1631 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 1632 } 1633 1634 /* andis. */ 1635 static void gen_andis_(DisasContext *ctx) 1636 { 1637 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode) << 16); 1638 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 1639 } 1640 1641 /* cntlzw */ 1642 static void gen_cntlzw(DisasContext *ctx) 1643 { 1644 TCGv_i32 t = tcg_temp_new_i32(); 1645 1646 tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]); 1647 tcg_gen_clzi_i32(t, t, 32); 1648 tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t); 1649 tcg_temp_free_i32(t); 1650 1651 if (unlikely(Rc(ctx->opcode) != 0)) 1652 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 1653 } 1654 1655 /* cnttzw */ 1656 static void gen_cnttzw(DisasContext *ctx) 1657 { 1658 TCGv_i32 t = tcg_temp_new_i32(); 1659 1660 tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]); 1661 tcg_gen_ctzi_i32(t, t, 32); 1662 tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t); 1663 tcg_temp_free_i32(t); 1664 1665 if (unlikely(Rc(ctx->opcode) != 0)) { 1666 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 1667 } 1668 } 1669 1670 /* eqv & eqv. */ 1671 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER); 1672 /* extsb & extsb. */ 1673 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER); 1674 /* extsh & extsh. */ 1675 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER); 1676 /* nand & nand. */ 1677 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER); 1678 /* nor & nor. */ 1679 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER); 1680 1681 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) 1682 static void gen_pause(DisasContext *ctx) 1683 { 1684 TCGv_i32 t0 = tcg_const_i32(0); 1685 tcg_gen_st_i32(t0, cpu_env, 1686 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted)); 1687 tcg_temp_free_i32(t0); 1688 1689 /* Stop translation, this gives other CPUs a chance to run */ 1690 gen_exception_nip(ctx, EXCP_HLT, ctx->nip); 1691 } 1692 #endif /* defined(TARGET_PPC64) */ 1693 1694 /* or & or. */ 1695 static void gen_or(DisasContext *ctx) 1696 { 1697 int rs, ra, rb; 1698 1699 rs = rS(ctx->opcode); 1700 ra = rA(ctx->opcode); 1701 rb = rB(ctx->opcode); 1702 /* Optimisation for mr. 
   ri case */
    if (rs != ra || rs != rb) {
        if (rs != rb)
            tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
        else
            tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);
        if (unlikely(Rc(ctx->opcode) != 0))
            gen_set_Rc0(ctx, cpu_gpr[ra]);
    } else if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rs]);
#if defined(TARGET_PPC64)
    } else if (rs != 0) { /* 0 is nop */
        int prio = 0;

        switch (rs) {
        case 1:
            /* Set process priority to low */
            prio = 2;
            break;
        case 6:
            /* Set process priority to medium-low */
            prio = 3;
            break;
        case 2:
            /* Set process priority to normal */
            prio = 4;
            break;
#if !defined(CONFIG_USER_ONLY)
        case 31:
            if (!ctx->pr) {
                /* Set process priority to very low */
                prio = 1;
            }
            break;
        case 5:
            if (!ctx->pr) {
                /* Set process priority to medium-high */
                prio = 5;
            }
            break;
        case 3:
            if (!ctx->pr) {
                /* Set process priority to high */
                prio = 6;
            }
            break;
        case 7:
            if (ctx->hv && !ctx->pr) {
                /* Set process priority to very high */
                prio = 7;
            }
            break;
#endif
        default:
            break;
        }
        if (prio) {
            TCGv t0 = tcg_temp_new();
            gen_load_spr(t0, SPR_PPR);
            tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
            tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
            gen_store_spr(SPR_PPR, t0);
            tcg_temp_free(t0);
        }
#if !defined(CONFIG_USER_ONLY)
        /* Pause out of TCG otherwise spin loops with smt_low eat too much
         * CPU and the kernel hangs. This applies to all encodings other
         * than no-op, e.g., miso(rs=26), yield(27), mdoio(29), mdoom(30),
         * and all currently undefined.
         */
        gen_pause(ctx);
#endif
#endif
    }
}
/* orc & orc. */
GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);

/* xor & xor.
*/ 1781 static void gen_xor(DisasContext *ctx) 1782 { 1783 /* Optimisation for "set to zero" case */ 1784 if (rS(ctx->opcode) != rB(ctx->opcode)) 1785 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 1786 else 1787 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 1788 if (unlikely(Rc(ctx->opcode) != 0)) 1789 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 1790 } 1791 1792 /* ori */ 1793 static void gen_ori(DisasContext *ctx) 1794 { 1795 target_ulong uimm = UIMM(ctx->opcode); 1796 1797 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 1798 return; 1799 } 1800 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm); 1801 } 1802 1803 /* oris */ 1804 static void gen_oris(DisasContext *ctx) 1805 { 1806 target_ulong uimm = UIMM(ctx->opcode); 1807 1808 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 1809 /* NOP */ 1810 return; 1811 } 1812 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16); 1813 } 1814 1815 /* xori */ 1816 static void gen_xori(DisasContext *ctx) 1817 { 1818 target_ulong uimm = UIMM(ctx->opcode); 1819 1820 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 1821 /* NOP */ 1822 return; 1823 } 1824 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm); 1825 } 1826 1827 /* xoris */ 1828 static void gen_xoris(DisasContext *ctx) 1829 { 1830 target_ulong uimm = UIMM(ctx->opcode); 1831 1832 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 1833 /* NOP */ 1834 return; 1835 } 1836 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16); 1837 } 1838 1839 /* popcntb : PowerPC 2.03 specification */ 1840 static void gen_popcntb(DisasContext *ctx) 1841 { 1842 gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 1843 } 1844 1845 static void gen_popcntw(DisasContext *ctx) 1846 { 1847 #if defined(TARGET_PPC64) 1848 gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 1849 #else 1850 tcg_gen_ctpop_i32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 1851 #endif 1852 } 1853 1854 #if defined(TARGET_PPC64) 1855 /* popcntd: PowerPC 2.06 specification */ 1856 static void gen_popcntd(DisasContext *ctx) 1857 { 1858 tcg_gen_ctpop_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 1859 } 1860 #endif 1861 1862 /* prtyw: PowerPC 2.05 specification */ 1863 static void gen_prtyw(DisasContext *ctx) 1864 { 1865 TCGv ra = cpu_gpr[rA(ctx->opcode)]; 1866 TCGv rs = cpu_gpr[rS(ctx->opcode)]; 1867 TCGv t0 = tcg_temp_new(); 1868 tcg_gen_shri_tl(t0, rs, 16); 1869 tcg_gen_xor_tl(ra, rs, t0); 1870 tcg_gen_shri_tl(t0, ra, 8); 1871 tcg_gen_xor_tl(ra, ra, t0); 1872 tcg_gen_andi_tl(ra, ra, (target_ulong)0x100000001ULL); 1873 tcg_temp_free(t0); 1874 } 1875 1876 #if defined(TARGET_PPC64) 1877 /* prtyd: PowerPC 2.05 specification */ 1878 static void gen_prtyd(DisasContext *ctx) 1879 { 1880 TCGv ra = cpu_gpr[rA(ctx->opcode)]; 1881 TCGv rs = cpu_gpr[rS(ctx->opcode)]; 1882 TCGv t0 = tcg_temp_new(); 1883 tcg_gen_shri_tl(t0, rs, 32); 1884 tcg_gen_xor_tl(ra, rs, t0); 1885 tcg_gen_shri_tl(t0, ra, 16); 1886 tcg_gen_xor_tl(ra, ra, t0); 1887 tcg_gen_shri_tl(t0, ra, 8); 1888 tcg_gen_xor_tl(ra, ra, t0); 1889 tcg_gen_andi_tl(ra, ra, 1); 1890 tcg_temp_free(t0); 1891 } 1892 #endif 1893 1894 #if defined(TARGET_PPC64) 1895 /* bpermd */ 1896 static void gen_bpermd(DisasContext *ctx) 1897 { 1898 gen_helper_bpermd(cpu_gpr[rA(ctx->opcode)], 1899 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 1900 } 1901 #endif 1902 1903 #if defined(TARGET_PPC64) 1904 /* 
extsw & extsw. */ 1905 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B); 1906 1907 /* cntlzd */ 1908 static void gen_cntlzd(DisasContext *ctx) 1909 { 1910 tcg_gen_clzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64); 1911 if (unlikely(Rc(ctx->opcode) != 0)) 1912 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 1913 } 1914 1915 /* cnttzd */ 1916 static void gen_cnttzd(DisasContext *ctx) 1917 { 1918 tcg_gen_ctzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64); 1919 if (unlikely(Rc(ctx->opcode) != 0)) { 1920 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 1921 } 1922 } 1923 1924 /* darn */ 1925 static void gen_darn(DisasContext *ctx) 1926 { 1927 int l = L(ctx->opcode); 1928 1929 if (l == 0) { 1930 gen_helper_darn32(cpu_gpr[rD(ctx->opcode)]); 1931 } else if (l <= 2) { 1932 /* Return 64-bit random for both CRN and RRN */ 1933 gen_helper_darn64(cpu_gpr[rD(ctx->opcode)]); 1934 } else { 1935 tcg_gen_movi_i64(cpu_gpr[rD(ctx->opcode)], -1); 1936 } 1937 } 1938 #endif 1939 1940 /*** Integer rotate ***/ 1941 1942 /* rlwimi & rlwimi. */ 1943 static void gen_rlwimi(DisasContext *ctx) 1944 { 1945 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 1946 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 1947 uint32_t sh = SH(ctx->opcode); 1948 uint32_t mb = MB(ctx->opcode); 1949 uint32_t me = ME(ctx->opcode); 1950 1951 if (sh == (31-me) && mb <= me) { 1952 tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1); 1953 } else { 1954 target_ulong mask; 1955 TCGv t1; 1956 1957 #if defined(TARGET_PPC64) 1958 mb += 32; 1959 me += 32; 1960 #endif 1961 mask = MASK(mb, me); 1962 1963 t1 = tcg_temp_new(); 1964 if (mask <= 0xffffffffu) { 1965 TCGv_i32 t0 = tcg_temp_new_i32(); 1966 tcg_gen_trunc_tl_i32(t0, t_rs); 1967 tcg_gen_rotli_i32(t0, t0, sh); 1968 tcg_gen_extu_i32_tl(t1, t0); 1969 tcg_temp_free_i32(t0); 1970 } else { 1971 #if defined(TARGET_PPC64) 1972 tcg_gen_deposit_i64(t1, t_rs, t_rs, 32, 32); 1973 tcg_gen_rotli_i64(t1, t1, sh); 1974 #else 1975 g_assert_not_reached(); 1976 #endif 1977 } 1978 1979 tcg_gen_andi_tl(t1, t1, mask); 1980 tcg_gen_andi_tl(t_ra, t_ra, ~mask); 1981 tcg_gen_or_tl(t_ra, t_ra, t1); 1982 tcg_temp_free(t1); 1983 } 1984 if (unlikely(Rc(ctx->opcode) != 0)) { 1985 gen_set_Rc0(ctx, t_ra); 1986 } 1987 } 1988 1989 /* rlwinm & rlwinm. 
*/ 1990 static void gen_rlwinm(DisasContext *ctx) 1991 { 1992 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 1993 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 1994 int sh = SH(ctx->opcode); 1995 int mb = MB(ctx->opcode); 1996 int me = ME(ctx->opcode); 1997 int len = me - mb + 1; 1998 int rsh = (32 - sh) & 31; 1999 2000 if (sh != 0 && len > 0 && me == (31 - sh)) { 2001 tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len); 2002 } else if (me == 31 && rsh + len <= 32) { 2003 tcg_gen_extract_tl(t_ra, t_rs, rsh, len); 2004 } else { 2005 target_ulong mask; 2006 #if defined(TARGET_PPC64) 2007 mb += 32; 2008 me += 32; 2009 #endif 2010 mask = MASK(mb, me); 2011 if (sh == 0) { 2012 tcg_gen_andi_tl(t_ra, t_rs, mask); 2013 } else if (mask <= 0xffffffffu) { 2014 TCGv_i32 t0 = tcg_temp_new_i32(); 2015 tcg_gen_trunc_tl_i32(t0, t_rs); 2016 tcg_gen_rotli_i32(t0, t0, sh); 2017 tcg_gen_andi_i32(t0, t0, mask); 2018 tcg_gen_extu_i32_tl(t_ra, t0); 2019 tcg_temp_free_i32(t0); 2020 } else { 2021 #if defined(TARGET_PPC64) 2022 tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32); 2023 tcg_gen_rotli_i64(t_ra, t_ra, sh); 2024 tcg_gen_andi_i64(t_ra, t_ra, mask); 2025 #else 2026 g_assert_not_reached(); 2027 #endif 2028 } 2029 } 2030 if (unlikely(Rc(ctx->opcode) != 0)) { 2031 gen_set_Rc0(ctx, t_ra); 2032 } 2033 } 2034 2035 /* rlwnm & rlwnm. */ 2036 static void gen_rlwnm(DisasContext *ctx) 2037 { 2038 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2039 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2040 TCGv t_rb = cpu_gpr[rB(ctx->opcode)]; 2041 uint32_t mb = MB(ctx->opcode); 2042 uint32_t me = ME(ctx->opcode); 2043 target_ulong mask; 2044 2045 #if defined(TARGET_PPC64) 2046 mb += 32; 2047 me += 32; 2048 #endif 2049 mask = MASK(mb, me); 2050 2051 if (mask <= 0xffffffffu) { 2052 TCGv_i32 t0 = tcg_temp_new_i32(); 2053 TCGv_i32 t1 = tcg_temp_new_i32(); 2054 tcg_gen_trunc_tl_i32(t0, t_rb); 2055 tcg_gen_trunc_tl_i32(t1, t_rs); 2056 tcg_gen_andi_i32(t0, t0, 0x1f); 2057 tcg_gen_rotl_i32(t1, t1, t0); 2058 tcg_gen_extu_i32_tl(t_ra, t1); 2059 tcg_temp_free_i32(t0); 2060 tcg_temp_free_i32(t1); 2061 } else { 2062 #if defined(TARGET_PPC64) 2063 TCGv_i64 t0 = tcg_temp_new_i64(); 2064 tcg_gen_andi_i64(t0, t_rb, 0x1f); 2065 tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32); 2066 tcg_gen_rotl_i64(t_ra, t_ra, t0); 2067 tcg_temp_free_i64(t0); 2068 #else 2069 g_assert_not_reached(); 2070 #endif 2071 } 2072 2073 tcg_gen_andi_tl(t_ra, t_ra, mask); 2074 2075 if (unlikely(Rc(ctx->opcode) != 0)) { 2076 gen_set_Rc0(ctx, t_ra); 2077 } 2078 } 2079 2080 #if defined(TARGET_PPC64) 2081 #define GEN_PPC64_R2(name, opc1, opc2) \ 2082 static void glue(gen_, name##0)(DisasContext *ctx) \ 2083 { \ 2084 gen_##name(ctx, 0); \ 2085 } \ 2086 \ 2087 static void glue(gen_, name##1)(DisasContext *ctx) \ 2088 { \ 2089 gen_##name(ctx, 1); \ 2090 } 2091 #define GEN_PPC64_R4(name, opc1, opc2) \ 2092 static void glue(gen_, name##0)(DisasContext *ctx) \ 2093 { \ 2094 gen_##name(ctx, 0, 0); \ 2095 } \ 2096 \ 2097 static void glue(gen_, name##1)(DisasContext *ctx) \ 2098 { \ 2099 gen_##name(ctx, 0, 1); \ 2100 } \ 2101 \ 2102 static void glue(gen_, name##2)(DisasContext *ctx) \ 2103 { \ 2104 gen_##name(ctx, 1, 0); \ 2105 } \ 2106 \ 2107 static void glue(gen_, name##3)(DisasContext *ctx) \ 2108 { \ 2109 gen_##name(ctx, 1, 1); \ 2110 } 2111 2112 static void gen_rldinm(DisasContext *ctx, int mb, int me, int sh) 2113 { 2114 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2115 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2116 int len = me - mb + 1; 2117 int rsh = (64 - sh) & 63; 2118 2119 if (sh != 0 && len > 0 && me == (63 - sh)) { 2120 
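        /*
         * me == 63 - sh means the mask ends exactly where the rotate
         * stops bringing bits in, so the whole operation reduces to a
         * left shift: the low "len" bits of rS are placed at bit
         * position "sh" and everything else is zeroed, which is what
         * deposit_z expresses directly.
         */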
tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len); 2121 } else if (me == 63 && rsh + len <= 64) { 2122 tcg_gen_extract_tl(t_ra, t_rs, rsh, len); 2123 } else { 2124 tcg_gen_rotli_tl(t_ra, t_rs, sh); 2125 tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me)); 2126 } 2127 if (unlikely(Rc(ctx->opcode) != 0)) { 2128 gen_set_Rc0(ctx, t_ra); 2129 } 2130 } 2131 2132 /* rldicl - rldicl. */ 2133 static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn) 2134 { 2135 uint32_t sh, mb; 2136 2137 sh = SH(ctx->opcode) | (shn << 5); 2138 mb = MB(ctx->opcode) | (mbn << 5); 2139 gen_rldinm(ctx, mb, 63, sh); 2140 } 2141 GEN_PPC64_R4(rldicl, 0x1E, 0x00); 2142 2143 /* rldicr - rldicr. */ 2144 static inline void gen_rldicr(DisasContext *ctx, int men, int shn) 2145 { 2146 uint32_t sh, me; 2147 2148 sh = SH(ctx->opcode) | (shn << 5); 2149 me = MB(ctx->opcode) | (men << 5); 2150 gen_rldinm(ctx, 0, me, sh); 2151 } 2152 GEN_PPC64_R4(rldicr, 0x1E, 0x02); 2153 2154 /* rldic - rldic. */ 2155 static inline void gen_rldic(DisasContext *ctx, int mbn, int shn) 2156 { 2157 uint32_t sh, mb; 2158 2159 sh = SH(ctx->opcode) | (shn << 5); 2160 mb = MB(ctx->opcode) | (mbn << 5); 2161 gen_rldinm(ctx, mb, 63 - sh, sh); 2162 } 2163 GEN_PPC64_R4(rldic, 0x1E, 0x04); 2164 2165 static void gen_rldnm(DisasContext *ctx, int mb, int me) 2166 { 2167 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2168 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2169 TCGv t_rb = cpu_gpr[rB(ctx->opcode)]; 2170 TCGv t0; 2171 2172 t0 = tcg_temp_new(); 2173 tcg_gen_andi_tl(t0, t_rb, 0x3f); 2174 tcg_gen_rotl_tl(t_ra, t_rs, t0); 2175 tcg_temp_free(t0); 2176 2177 tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me)); 2178 if (unlikely(Rc(ctx->opcode) != 0)) { 2179 gen_set_Rc0(ctx, t_ra); 2180 } 2181 } 2182 2183 /* rldcl - rldcl. */ 2184 static inline void gen_rldcl(DisasContext *ctx, int mbn) 2185 { 2186 uint32_t mb; 2187 2188 mb = MB(ctx->opcode) | (mbn << 5); 2189 gen_rldnm(ctx, mb, 63); 2190 } 2191 GEN_PPC64_R2(rldcl, 0x1E, 0x08); 2192 2193 /* rldcr - rldcr. */ 2194 static inline void gen_rldcr(DisasContext *ctx, int men) 2195 { 2196 uint32_t me; 2197 2198 me = MB(ctx->opcode) | (men << 5); 2199 gen_rldnm(ctx, 0, me); 2200 } 2201 GEN_PPC64_R2(rldcr, 0x1E, 0x09); 2202 2203 /* rldimi - rldimi. */ 2204 static void gen_rldimi(DisasContext *ctx, int mbn, int shn) 2205 { 2206 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2207 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2208 uint32_t sh = SH(ctx->opcode) | (shn << 5); 2209 uint32_t mb = MB(ctx->opcode) | (mbn << 5); 2210 uint32_t me = 63 - sh; 2211 2212 if (mb <= me) { 2213 tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1); 2214 } else { 2215 target_ulong mask = MASK(mb, me); 2216 TCGv t1 = tcg_temp_new(); 2217 2218 tcg_gen_rotli_tl(t1, t_rs, sh); 2219 tcg_gen_andi_tl(t1, t1, mask); 2220 tcg_gen_andi_tl(t_ra, t_ra, ~mask); 2221 tcg_gen_or_tl(t_ra, t_ra, t1); 2222 tcg_temp_free(t1); 2223 } 2224 if (unlikely(Rc(ctx->opcode) != 0)) { 2225 gen_set_Rc0(ctx, t_ra); 2226 } 2227 } 2228 GEN_PPC64_R4(rldimi, 0x1E, 0x06); 2229 #endif 2230 2231 /*** Integer shift ***/ 2232 2233 /* slw & slw. 
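 *
 * The shift count is taken from the low six bits of rB, and a 32-bit
 * shift must yield zero whenever rB's 0x20 bit is set.  Rather than
 * branching, the code below turns that single bit into a mask: rB is
 * shifted left so the 0x20 bit lands in the sign position, then
 * arithmetically shifted right by (width - 1), producing all-ones when
 * the bit is set and all-zeroes otherwise; "andc" with rS then forces
 * the shift operand to zero in the out-of-range case.  For example,
 * rB = 0x23 (a shift by 35) makes the mask all-ones, so rA ends up 0
 * as the architecture requires.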
*/ 2234 static void gen_slw(DisasContext *ctx) 2235 { 2236 TCGv t0, t1; 2237 2238 t0 = tcg_temp_new(); 2239 /* AND rS with a mask that is 0 when rB >= 0x20 */ 2240 #if defined(TARGET_PPC64) 2241 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a); 2242 tcg_gen_sari_tl(t0, t0, 0x3f); 2243 #else 2244 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a); 2245 tcg_gen_sari_tl(t0, t0, 0x1f); 2246 #endif 2247 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 2248 t1 = tcg_temp_new(); 2249 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f); 2250 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 2251 tcg_temp_free(t1); 2252 tcg_temp_free(t0); 2253 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 2254 if (unlikely(Rc(ctx->opcode) != 0)) 2255 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2256 } 2257 2258 /* sraw & sraw. */ 2259 static void gen_sraw(DisasContext *ctx) 2260 { 2261 gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], cpu_env, 2262 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2263 if (unlikely(Rc(ctx->opcode) != 0)) 2264 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2265 } 2266 2267 /* srawi & srawi. */ 2268 static void gen_srawi(DisasContext *ctx) 2269 { 2270 int sh = SH(ctx->opcode); 2271 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 2272 TCGv src = cpu_gpr[rS(ctx->opcode)]; 2273 if (sh == 0) { 2274 tcg_gen_ext32s_tl(dst, src); 2275 tcg_gen_movi_tl(cpu_ca, 0); 2276 } else { 2277 TCGv t0; 2278 tcg_gen_ext32s_tl(dst, src); 2279 tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1); 2280 t0 = tcg_temp_new(); 2281 tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1); 2282 tcg_gen_and_tl(cpu_ca, cpu_ca, t0); 2283 tcg_temp_free(t0); 2284 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0); 2285 tcg_gen_sari_tl(dst, dst, sh); 2286 } 2287 if (unlikely(Rc(ctx->opcode) != 0)) { 2288 gen_set_Rc0(ctx, dst); 2289 } 2290 } 2291 2292 /* srw & srw. */ 2293 static void gen_srw(DisasContext *ctx) 2294 { 2295 TCGv t0, t1; 2296 2297 t0 = tcg_temp_new(); 2298 /* AND rS with a mask that is 0 when rB >= 0x20 */ 2299 #if defined(TARGET_PPC64) 2300 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a); 2301 tcg_gen_sari_tl(t0, t0, 0x3f); 2302 #else 2303 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a); 2304 tcg_gen_sari_tl(t0, t0, 0x1f); 2305 #endif 2306 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 2307 tcg_gen_ext32u_tl(t0, t0); 2308 t1 = tcg_temp_new(); 2309 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f); 2310 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 2311 tcg_temp_free(t1); 2312 tcg_temp_free(t0); 2313 if (unlikely(Rc(ctx->opcode) != 0)) 2314 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2315 } 2316 2317 #if defined(TARGET_PPC64) 2318 /* sld & sld. */ 2319 static void gen_sld(DisasContext *ctx) 2320 { 2321 TCGv t0, t1; 2322 2323 t0 = tcg_temp_new(); 2324 /* AND rS with a mask that is 0 when rB >= 0x40 */ 2325 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39); 2326 tcg_gen_sari_tl(t0, t0, 0x3f); 2327 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 2328 t1 = tcg_temp_new(); 2329 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f); 2330 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 2331 tcg_temp_free(t1); 2332 tcg_temp_free(t0); 2333 if (unlikely(Rc(ctx->opcode) != 0)) 2334 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2335 } 2336 2337 /* srad & srad. 
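 *
 * For the arithmetic right shifts CA must be set when the source is
 * negative and any 1-bits are shifted out; this is what lets the
 * classic "sradi rA,rS,n ; addze rA,rA" sequence implement a signed
 * division by 2^n that rounds towards zero.  srad is handled by a
 * helper, while sradi below computes CA inline.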
*/ 2338 static void gen_srad(DisasContext *ctx) 2339 { 2340 gen_helper_srad(cpu_gpr[rA(ctx->opcode)], cpu_env, 2341 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2342 if (unlikely(Rc(ctx->opcode) != 0)) 2343 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2344 } 2345 /* sradi & sradi. */ 2346 static inline void gen_sradi(DisasContext *ctx, int n) 2347 { 2348 int sh = SH(ctx->opcode) + (n << 5); 2349 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 2350 TCGv src = cpu_gpr[rS(ctx->opcode)]; 2351 if (sh == 0) { 2352 tcg_gen_mov_tl(dst, src); 2353 tcg_gen_movi_tl(cpu_ca, 0); 2354 } else { 2355 TCGv t0; 2356 tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1); 2357 t0 = tcg_temp_new(); 2358 tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1); 2359 tcg_gen_and_tl(cpu_ca, cpu_ca, t0); 2360 tcg_temp_free(t0); 2361 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0); 2362 tcg_gen_sari_tl(dst, src, sh); 2363 } 2364 if (unlikely(Rc(ctx->opcode) != 0)) { 2365 gen_set_Rc0(ctx, dst); 2366 } 2367 } 2368 2369 static void gen_sradi0(DisasContext *ctx) 2370 { 2371 gen_sradi(ctx, 0); 2372 } 2373 2374 static void gen_sradi1(DisasContext *ctx) 2375 { 2376 gen_sradi(ctx, 1); 2377 } 2378 2379 /* extswsli & extswsli. */ 2380 static inline void gen_extswsli(DisasContext *ctx, int n) 2381 { 2382 int sh = SH(ctx->opcode) + (n << 5); 2383 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 2384 TCGv src = cpu_gpr[rS(ctx->opcode)]; 2385 2386 tcg_gen_ext32s_tl(dst, src); 2387 tcg_gen_shli_tl(dst, dst, sh); 2388 if (unlikely(Rc(ctx->opcode) != 0)) { 2389 gen_set_Rc0(ctx, dst); 2390 } 2391 } 2392 2393 static void gen_extswsli0(DisasContext *ctx) 2394 { 2395 gen_extswsli(ctx, 0); 2396 } 2397 2398 static void gen_extswsli1(DisasContext *ctx) 2399 { 2400 gen_extswsli(ctx, 1); 2401 } 2402 2403 /* srd & srd. */ 2404 static void gen_srd(DisasContext *ctx) 2405 { 2406 TCGv t0, t1; 2407 2408 t0 = tcg_temp_new(); 2409 /* AND rS with a mask that is 0 when rB >= 0x40 */ 2410 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39); 2411 tcg_gen_sari_tl(t0, t0, 0x3f); 2412 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 2413 t1 = tcg_temp_new(); 2414 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f); 2415 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 2416 tcg_temp_free(t1); 2417 tcg_temp_free(t0); 2418 if (unlikely(Rc(ctx->opcode) != 0)) 2419 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2420 } 2421 #endif 2422 2423 /*** Addressing modes ***/ 2424 /* Register indirect with immediate index : EA = (rA|0) + SIMM */ 2425 static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA, 2426 target_long maskl) 2427 { 2428 target_long simm = SIMM(ctx->opcode); 2429 2430 simm &= ~maskl; 2431 if (rA(ctx->opcode) == 0) { 2432 if (NARROW_MODE(ctx)) { 2433 simm = (uint32_t)simm; 2434 } 2435 tcg_gen_movi_tl(EA, simm); 2436 } else if (likely(simm != 0)) { 2437 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm); 2438 if (NARROW_MODE(ctx)) { 2439 tcg_gen_ext32u_tl(EA, EA); 2440 } 2441 } else { 2442 if (NARROW_MODE(ctx)) { 2443 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]); 2444 } else { 2445 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]); 2446 } 2447 } 2448 } 2449 2450 static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA) 2451 { 2452 if (rA(ctx->opcode) == 0) { 2453 if (NARROW_MODE(ctx)) { 2454 tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]); 2455 } else { 2456 tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]); 2457 } 2458 } else { 2459 tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2460 if (NARROW_MODE(ctx)) { 2461 tcg_gen_ext32u_tl(EA, EA); 
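            /* In 32-bit (narrow) mode effective addresses wrap modulo
             * 2^32, hence the zero-extension of the 64-bit sum above. */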
2462 } 2463 } 2464 } 2465 2466 static inline void gen_addr_register(DisasContext *ctx, TCGv EA) 2467 { 2468 if (rA(ctx->opcode) == 0) { 2469 tcg_gen_movi_tl(EA, 0); 2470 } else if (NARROW_MODE(ctx)) { 2471 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]); 2472 } else { 2473 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]); 2474 } 2475 } 2476 2477 static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1, 2478 target_long val) 2479 { 2480 tcg_gen_addi_tl(ret, arg1, val); 2481 if (NARROW_MODE(ctx)) { 2482 tcg_gen_ext32u_tl(ret, ret); 2483 } 2484 } 2485 2486 static inline void gen_check_align(DisasContext *ctx, TCGv EA, int mask) 2487 { 2488 TCGLabel *l1 = gen_new_label(); 2489 TCGv t0 = tcg_temp_new(); 2490 TCGv_i32 t1, t2; 2491 tcg_gen_andi_tl(t0, EA, mask); 2492 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 2493 t1 = tcg_const_i32(POWERPC_EXCP_ALIGN); 2494 t2 = tcg_const_i32(ctx->opcode & 0x03FF0000); 2495 gen_update_nip(ctx, ctx->nip - 4); 2496 gen_helper_raise_exception_err(cpu_env, t1, t2); 2497 tcg_temp_free_i32(t1); 2498 tcg_temp_free_i32(t2); 2499 gen_set_label(l1); 2500 tcg_temp_free(t0); 2501 } 2502 2503 static inline void gen_align_no_le(DisasContext *ctx) 2504 { 2505 gen_exception_err(ctx, POWERPC_EXCP_ALIGN, 2506 (ctx->opcode & 0x03FF0000) | POWERPC_EXCP_ALIGN_LE); 2507 } 2508 2509 /*** Integer load ***/ 2510 #define DEF_MEMOP(op) ((op) | ctx->default_tcg_memop_mask) 2511 #define BSWAP_MEMOP(op) ((op) | (ctx->default_tcg_memop_mask ^ MO_BSWAP)) 2512 2513 #define GEN_QEMU_LOAD_TL(ldop, op) \ 2514 static void glue(gen_qemu_, ldop)(DisasContext *ctx, \ 2515 TCGv val, \ 2516 TCGv addr) \ 2517 { \ 2518 tcg_gen_qemu_ld_tl(val, addr, ctx->mem_idx, op); \ 2519 } 2520 2521 GEN_QEMU_LOAD_TL(ld8u, DEF_MEMOP(MO_UB)) 2522 GEN_QEMU_LOAD_TL(ld16u, DEF_MEMOP(MO_UW)) 2523 GEN_QEMU_LOAD_TL(ld16s, DEF_MEMOP(MO_SW)) 2524 GEN_QEMU_LOAD_TL(ld32u, DEF_MEMOP(MO_UL)) 2525 GEN_QEMU_LOAD_TL(ld32s, DEF_MEMOP(MO_SL)) 2526 2527 GEN_QEMU_LOAD_TL(ld16ur, BSWAP_MEMOP(MO_UW)) 2528 GEN_QEMU_LOAD_TL(ld32ur, BSWAP_MEMOP(MO_UL)) 2529 2530 #define GEN_QEMU_LOAD_64(ldop, op) \ 2531 static void glue(gen_qemu_, glue(ldop, _i64))(DisasContext *ctx, \ 2532 TCGv_i64 val, \ 2533 TCGv addr) \ 2534 { \ 2535 tcg_gen_qemu_ld_i64(val, addr, ctx->mem_idx, op); \ 2536 } 2537 2538 GEN_QEMU_LOAD_64(ld8u, DEF_MEMOP(MO_UB)) 2539 GEN_QEMU_LOAD_64(ld16u, DEF_MEMOP(MO_UW)) 2540 GEN_QEMU_LOAD_64(ld32u, DEF_MEMOP(MO_UL)) 2541 GEN_QEMU_LOAD_64(ld32s, DEF_MEMOP(MO_SL)) 2542 GEN_QEMU_LOAD_64(ld64, DEF_MEMOP(MO_Q)) 2543 2544 #if defined(TARGET_PPC64) 2545 GEN_QEMU_LOAD_64(ld64ur, BSWAP_MEMOP(MO_Q)) 2546 #endif 2547 2548 #define GEN_QEMU_STORE_TL(stop, op) \ 2549 static void glue(gen_qemu_, stop)(DisasContext *ctx, \ 2550 TCGv val, \ 2551 TCGv addr) \ 2552 { \ 2553 tcg_gen_qemu_st_tl(val, addr, ctx->mem_idx, op); \ 2554 } 2555 2556 GEN_QEMU_STORE_TL(st8, DEF_MEMOP(MO_UB)) 2557 GEN_QEMU_STORE_TL(st16, DEF_MEMOP(MO_UW)) 2558 GEN_QEMU_STORE_TL(st32, DEF_MEMOP(MO_UL)) 2559 2560 GEN_QEMU_STORE_TL(st16r, BSWAP_MEMOP(MO_UW)) 2561 GEN_QEMU_STORE_TL(st32r, BSWAP_MEMOP(MO_UL)) 2562 2563 #define GEN_QEMU_STORE_64(stop, op) \ 2564 static void glue(gen_qemu_, glue(stop, _i64))(DisasContext *ctx, \ 2565 TCGv_i64 val, \ 2566 TCGv addr) \ 2567 { \ 2568 tcg_gen_qemu_st_i64(val, addr, ctx->mem_idx, op); \ 2569 } 2570 2571 GEN_QEMU_STORE_64(st8, DEF_MEMOP(MO_UB)) 2572 GEN_QEMU_STORE_64(st16, DEF_MEMOP(MO_UW)) 2573 GEN_QEMU_STORE_64(st32, DEF_MEMOP(MO_UL)) 2574 GEN_QEMU_STORE_64(st64, DEF_MEMOP(MO_Q)) 2575 2576 #if defined(TARGET_PPC64) 2577 
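/* st64r is the byte-reversed 8-byte store used by stdbrx below.
 * BSWAP_MEMOP flips MO_BSWAP relative to the translation's default
 * memop mask, so the access uses the opposite of the guest's current
 * byte order. */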
GEN_QEMU_STORE_64(st64r, BSWAP_MEMOP(MO_Q)) 2578 #endif 2579 2580 #define GEN_LD(name, ldop, opc, type) \ 2581 static void glue(gen_, name)(DisasContext *ctx) \ 2582 { \ 2583 TCGv EA; \ 2584 gen_set_access_type(ctx, ACCESS_INT); \ 2585 EA = tcg_temp_new(); \ 2586 gen_addr_imm_index(ctx, EA, 0); \ 2587 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 2588 tcg_temp_free(EA); \ 2589 } 2590 2591 #define GEN_LDU(name, ldop, opc, type) \ 2592 static void glue(gen_, name##u)(DisasContext *ctx) \ 2593 { \ 2594 TCGv EA; \ 2595 if (unlikely(rA(ctx->opcode) == 0 || \ 2596 rA(ctx->opcode) == rD(ctx->opcode))) { \ 2597 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 2598 return; \ 2599 } \ 2600 gen_set_access_type(ctx, ACCESS_INT); \ 2601 EA = tcg_temp_new(); \ 2602 if (type == PPC_64B) \ 2603 gen_addr_imm_index(ctx, EA, 0x03); \ 2604 else \ 2605 gen_addr_imm_index(ctx, EA, 0); \ 2606 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 2607 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 2608 tcg_temp_free(EA); \ 2609 } 2610 2611 #define GEN_LDUX(name, ldop, opc2, opc3, type) \ 2612 static void glue(gen_, name##ux)(DisasContext *ctx) \ 2613 { \ 2614 TCGv EA; \ 2615 if (unlikely(rA(ctx->opcode) == 0 || \ 2616 rA(ctx->opcode) == rD(ctx->opcode))) { \ 2617 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 2618 return; \ 2619 } \ 2620 gen_set_access_type(ctx, ACCESS_INT); \ 2621 EA = tcg_temp_new(); \ 2622 gen_addr_reg_index(ctx, EA); \ 2623 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 2624 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 2625 tcg_temp_free(EA); \ 2626 } 2627 2628 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \ 2629 static void glue(gen_, name##x)(DisasContext *ctx) \ 2630 { \ 2631 TCGv EA; \ 2632 chk; \ 2633 gen_set_access_type(ctx, ACCESS_INT); \ 2634 EA = tcg_temp_new(); \ 2635 gen_addr_reg_index(ctx, EA); \ 2636 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 2637 tcg_temp_free(EA); \ 2638 } 2639 2640 #define GEN_LDX(name, ldop, opc2, opc3, type) \ 2641 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_NONE) 2642 2643 #define GEN_LDX_HVRM(name, ldop, opc2, opc3, type) \ 2644 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_HVRM) 2645 2646 #define GEN_LDS(name, ldop, op, type) \ 2647 GEN_LD(name, ldop, op | 0x20, type); \ 2648 GEN_LDU(name, ldop, op | 0x21, type); \ 2649 GEN_LDUX(name, ldop, 0x17, op | 0x01, type); \ 2650 GEN_LDX(name, ldop, 0x17, op | 0x00, type) 2651 2652 /* lbz lbzu lbzux lbzx */ 2653 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER); 2654 /* lha lhau lhaux lhax */ 2655 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER); 2656 /* lhz lhzu lhzux lhzx */ 2657 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER); 2658 /* lwz lwzu lwzux lwzx */ 2659 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER); 2660 #if defined(TARGET_PPC64) 2661 /* lwaux */ 2662 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B); 2663 /* lwax */ 2664 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B); 2665 /* ldux */ 2666 GEN_LDUX(ld, ld64_i64, 0x15, 0x01, PPC_64B); 2667 /* ldx */ 2668 GEN_LDX(ld, ld64_i64, 0x15, 0x00, PPC_64B); 2669 2670 /* CI load/store variants */ 2671 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST) 2672 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x15, PPC_CILDST) 2673 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST) 2674 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST) 2675 2676 static void gen_ld(DisasContext *ctx) 2677 { 2678 TCGv EA; 2679 if (Rc(ctx->opcode)) { 2680 if (unlikely(rA(ctx->opcode) == 0 || 2681 rA(ctx->opcode) == rD(ctx->opcode))) { 2682 gen_inval_exception(ctx, 
POWERPC_EXCP_INVAL_INVAL); 2683 return; 2684 } 2685 } 2686 gen_set_access_type(ctx, ACCESS_INT); 2687 EA = tcg_temp_new(); 2688 gen_addr_imm_index(ctx, EA, 0x03); 2689 if (ctx->opcode & 0x02) { 2690 /* lwa (lwau is undefined) */ 2691 gen_qemu_ld32s(ctx, cpu_gpr[rD(ctx->opcode)], EA); 2692 } else { 2693 /* ld - ldu */ 2694 gen_qemu_ld64_i64(ctx, cpu_gpr[rD(ctx->opcode)], EA); 2695 } 2696 if (Rc(ctx->opcode)) 2697 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); 2698 tcg_temp_free(EA); 2699 } 2700 2701 /* lq */ 2702 static void gen_lq(DisasContext *ctx) 2703 { 2704 int ra, rd; 2705 TCGv EA; 2706 2707 /* lq is a legal user mode instruction starting in ISA 2.07 */ 2708 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2709 bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2710 2711 if (!legal_in_user_mode && ctx->pr) { 2712 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); 2713 return; 2714 } 2715 2716 if (!le_is_supported && ctx->le_mode) { 2717 gen_align_no_le(ctx); 2718 return; 2719 } 2720 ra = rA(ctx->opcode); 2721 rd = rD(ctx->opcode); 2722 if (unlikely((rd & 1) || rd == ra)) { 2723 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2724 return; 2725 } 2726 2727 gen_set_access_type(ctx, ACCESS_INT); 2728 EA = tcg_temp_new(); 2729 gen_addr_imm_index(ctx, EA, 0x0F); 2730 2731 /* We only need to swap high and low halves. gen_qemu_ld64_i64 does 2732 necessary 64-bit byteswap already. */ 2733 if (unlikely(ctx->le_mode)) { 2734 gen_qemu_ld64_i64(ctx, cpu_gpr[rd + 1], EA); 2735 gen_addr_add(ctx, EA, EA, 8); 2736 gen_qemu_ld64_i64(ctx, cpu_gpr[rd], EA); 2737 } else { 2738 gen_qemu_ld64_i64(ctx, cpu_gpr[rd], EA); 2739 gen_addr_add(ctx, EA, EA, 8); 2740 gen_qemu_ld64_i64(ctx, cpu_gpr[rd + 1], EA); 2741 } 2742 tcg_temp_free(EA); 2743 } 2744 #endif 2745 2746 /*** Integer store ***/ 2747 #define GEN_ST(name, stop, opc, type) \ 2748 static void glue(gen_, name)(DisasContext *ctx) \ 2749 { \ 2750 TCGv EA; \ 2751 gen_set_access_type(ctx, ACCESS_INT); \ 2752 EA = tcg_temp_new(); \ 2753 gen_addr_imm_index(ctx, EA, 0); \ 2754 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2755 tcg_temp_free(EA); \ 2756 } 2757 2758 #define GEN_STU(name, stop, opc, type) \ 2759 static void glue(gen_, stop##u)(DisasContext *ctx) \ 2760 { \ 2761 TCGv EA; \ 2762 if (unlikely(rA(ctx->opcode) == 0)) { \ 2763 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 2764 return; \ 2765 } \ 2766 gen_set_access_type(ctx, ACCESS_INT); \ 2767 EA = tcg_temp_new(); \ 2768 if (type == PPC_64B) \ 2769 gen_addr_imm_index(ctx, EA, 0x03); \ 2770 else \ 2771 gen_addr_imm_index(ctx, EA, 0); \ 2772 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2773 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 2774 tcg_temp_free(EA); \ 2775 } 2776 2777 #define GEN_STUX(name, stop, opc2, opc3, type) \ 2778 static void glue(gen_, name##ux)(DisasContext *ctx) \ 2779 { \ 2780 TCGv EA; \ 2781 if (unlikely(rA(ctx->opcode) == 0)) { \ 2782 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ 2783 return; \ 2784 } \ 2785 gen_set_access_type(ctx, ACCESS_INT); \ 2786 EA = tcg_temp_new(); \ 2787 gen_addr_reg_index(ctx, EA); \ 2788 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2789 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ 2790 tcg_temp_free(EA); \ 2791 } 2792 2793 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \ 2794 static void glue(gen_, name##x)(DisasContext *ctx) \ 2795 { \ 2796 TCGv EA; \ 2797 chk; \ 2798 gen_set_access_type(ctx, ACCESS_INT); \ 2799 EA = tcg_temp_new(); \ 2800 
gen_addr_reg_index(ctx, EA); \ 2801 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ 2802 tcg_temp_free(EA); \ 2803 } 2804 #define GEN_STX(name, stop, opc2, opc3, type) \ 2805 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_NONE) 2806 2807 #define GEN_STX_HVRM(name, stop, opc2, opc3, type) \ 2808 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_HVRM) 2809 2810 #define GEN_STS(name, stop, op, type) \ 2811 GEN_ST(name, stop, op | 0x20, type); \ 2812 GEN_STU(name, stop, op | 0x21, type); \ 2813 GEN_STUX(name, stop, 0x17, op | 0x01, type); \ 2814 GEN_STX(name, stop, 0x17, op | 0x00, type) 2815 2816 /* stb stbu stbux stbx */ 2817 GEN_STS(stb, st8, 0x06, PPC_INTEGER); 2818 /* sth sthu sthux sthx */ 2819 GEN_STS(sth, st16, 0x0C, PPC_INTEGER); 2820 /* stw stwu stwux stwx */ 2821 GEN_STS(stw, st32, 0x04, PPC_INTEGER); 2822 #if defined(TARGET_PPC64) 2823 GEN_STUX(std, st64_i64, 0x15, 0x05, PPC_64B); 2824 GEN_STX(std, st64_i64, 0x15, 0x04, PPC_64B); 2825 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST) 2826 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST) 2827 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST) 2828 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST) 2829 2830 static void gen_std(DisasContext *ctx) 2831 { 2832 int rs; 2833 TCGv EA; 2834 2835 rs = rS(ctx->opcode); 2836 if ((ctx->opcode & 0x3) == 0x2) { /* stq */ 2837 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2838 bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0; 2839 2840 if (!(ctx->insns_flags & PPC_64BX)) { 2841 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2842 } 2843 2844 if (!legal_in_user_mode && ctx->pr) { 2845 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); 2846 return; 2847 } 2848 2849 if (!le_is_supported && ctx->le_mode) { 2850 gen_align_no_le(ctx); 2851 return; 2852 } 2853 2854 if (unlikely(rs & 1)) { 2855 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2856 return; 2857 } 2858 gen_set_access_type(ctx, ACCESS_INT); 2859 EA = tcg_temp_new(); 2860 gen_addr_imm_index(ctx, EA, 0x03); 2861 2862 /* We only need to swap high and low halves. gen_qemu_st64_i64 does 2863 necessary 64-bit byteswap already. 
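           Illustrative layout check: with rS:rS+1 =
           0x0102030405060708:0x090A0B0C0D0E0F10, big-endian stq writes
           bytes 01..10 in ascending address order, while little-endian
           stq writes 10 0F .. 02 01; storing rS+1 first and rS at EA+8,
           each through the byte-reversing memop, reproduces exactly
           that.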
*/ 2864 if (unlikely(ctx->le_mode)) { 2865 gen_qemu_st64_i64(ctx, cpu_gpr[rs + 1], EA); 2866 gen_addr_add(ctx, EA, EA, 8); 2867 gen_qemu_st64_i64(ctx, cpu_gpr[rs], EA); 2868 } else { 2869 gen_qemu_st64_i64(ctx, cpu_gpr[rs], EA); 2870 gen_addr_add(ctx, EA, EA, 8); 2871 gen_qemu_st64_i64(ctx, cpu_gpr[rs + 1], EA); 2872 } 2873 tcg_temp_free(EA); 2874 } else { 2875 /* std / stdu*/ 2876 if (Rc(ctx->opcode)) { 2877 if (unlikely(rA(ctx->opcode) == 0)) { 2878 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 2879 return; 2880 } 2881 } 2882 gen_set_access_type(ctx, ACCESS_INT); 2883 EA = tcg_temp_new(); 2884 gen_addr_imm_index(ctx, EA, 0x03); 2885 gen_qemu_st64_i64(ctx, cpu_gpr[rs], EA); 2886 if (Rc(ctx->opcode)) 2887 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); 2888 tcg_temp_free(EA); 2889 } 2890 } 2891 #endif 2892 /*** Integer load and store with byte reverse ***/ 2893 2894 /* lhbrx */ 2895 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER); 2896 2897 /* lwbrx */ 2898 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER); 2899 2900 #if defined(TARGET_PPC64) 2901 /* ldbrx */ 2902 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE); 2903 /* stdbrx */ 2904 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE); 2905 #endif /* TARGET_PPC64 */ 2906 2907 /* sthbrx */ 2908 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER); 2909 /* stwbrx */ 2910 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER); 2911 2912 /*** Integer load and store multiple ***/ 2913 2914 /* lmw */ 2915 static void gen_lmw(DisasContext *ctx) 2916 { 2917 TCGv t0; 2918 TCGv_i32 t1; 2919 2920 if (ctx->le_mode) { 2921 gen_align_no_le(ctx); 2922 return; 2923 } 2924 gen_set_access_type(ctx, ACCESS_INT); 2925 t0 = tcg_temp_new(); 2926 t1 = tcg_const_i32(rD(ctx->opcode)); 2927 gen_addr_imm_index(ctx, t0, 0); 2928 gen_helper_lmw(cpu_env, t0, t1); 2929 tcg_temp_free(t0); 2930 tcg_temp_free_i32(t1); 2931 } 2932 2933 /* stmw */ 2934 static void gen_stmw(DisasContext *ctx) 2935 { 2936 TCGv t0; 2937 TCGv_i32 t1; 2938 2939 if (ctx->le_mode) { 2940 gen_align_no_le(ctx); 2941 return; 2942 } 2943 gen_set_access_type(ctx, ACCESS_INT); 2944 t0 = tcg_temp_new(); 2945 t1 = tcg_const_i32(rS(ctx->opcode)); 2946 gen_addr_imm_index(ctx, t0, 0); 2947 gen_helper_stmw(cpu_env, t0, t1); 2948 tcg_temp_free(t0); 2949 tcg_temp_free_i32(t1); 2950 } 2951 2952 /*** Integer load and store strings ***/ 2953 2954 /* lswi */ 2955 /* PowerPC32 specification says we must generate an exception if 2956 * rA is in the range of registers to be loaded. 2957 * In an other hand, IBM says this is valid, but rA won't be loaded. 2958 * For now, I'll follow the spec... 
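 *
 * Worked example of the register usage (illustrative): lswi r5,r3,7
 * loads seven bytes starting at (r3); NB = 7 gives nr = ceil(7/4) = 2,
 * so r5 receives the first four bytes and r6 the remaining three in
 * its high-order byte positions with the rest zeroed.  NB = 0 means
 * 32 bytes, i.e. eight registers starting at rD and wrapping from r31
 * back to r0, which is why the rA-in-range check below has to allow
 * for wrap-around.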
2959 */ 2960 static void gen_lswi(DisasContext *ctx) 2961 { 2962 TCGv t0; 2963 TCGv_i32 t1, t2; 2964 int nb = NB(ctx->opcode); 2965 int start = rD(ctx->opcode); 2966 int ra = rA(ctx->opcode); 2967 int nr; 2968 2969 if (ctx->le_mode) { 2970 gen_align_no_le(ctx); 2971 return; 2972 } 2973 if (nb == 0) 2974 nb = 32; 2975 nr = (nb + 3) / 4; 2976 if (unlikely(lsw_reg_in_range(start, nr, ra))) { 2977 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX); 2978 return; 2979 } 2980 gen_set_access_type(ctx, ACCESS_INT); 2981 t0 = tcg_temp_new(); 2982 gen_addr_register(ctx, t0); 2983 t1 = tcg_const_i32(nb); 2984 t2 = tcg_const_i32(start); 2985 gen_helper_lsw(cpu_env, t0, t1, t2); 2986 tcg_temp_free(t0); 2987 tcg_temp_free_i32(t1); 2988 tcg_temp_free_i32(t2); 2989 } 2990 2991 /* lswx */ 2992 static void gen_lswx(DisasContext *ctx) 2993 { 2994 TCGv t0; 2995 TCGv_i32 t1, t2, t3; 2996 2997 if (ctx->le_mode) { 2998 gen_align_no_le(ctx); 2999 return; 3000 } 3001 gen_set_access_type(ctx, ACCESS_INT); 3002 t0 = tcg_temp_new(); 3003 gen_addr_reg_index(ctx, t0); 3004 t1 = tcg_const_i32(rD(ctx->opcode)); 3005 t2 = tcg_const_i32(rA(ctx->opcode)); 3006 t3 = tcg_const_i32(rB(ctx->opcode)); 3007 gen_helper_lswx(cpu_env, t0, t1, t2, t3); 3008 tcg_temp_free(t0); 3009 tcg_temp_free_i32(t1); 3010 tcg_temp_free_i32(t2); 3011 tcg_temp_free_i32(t3); 3012 } 3013 3014 /* stswi */ 3015 static void gen_stswi(DisasContext *ctx) 3016 { 3017 TCGv t0; 3018 TCGv_i32 t1, t2; 3019 int nb = NB(ctx->opcode); 3020 3021 if (ctx->le_mode) { 3022 gen_align_no_le(ctx); 3023 return; 3024 } 3025 gen_set_access_type(ctx, ACCESS_INT); 3026 t0 = tcg_temp_new(); 3027 gen_addr_register(ctx, t0); 3028 if (nb == 0) 3029 nb = 32; 3030 t1 = tcg_const_i32(nb); 3031 t2 = tcg_const_i32(rS(ctx->opcode)); 3032 gen_helper_stsw(cpu_env, t0, t1, t2); 3033 tcg_temp_free(t0); 3034 tcg_temp_free_i32(t1); 3035 tcg_temp_free_i32(t2); 3036 } 3037 3038 /* stswx */ 3039 static void gen_stswx(DisasContext *ctx) 3040 { 3041 TCGv t0; 3042 TCGv_i32 t1, t2; 3043 3044 if (ctx->le_mode) { 3045 gen_align_no_le(ctx); 3046 return; 3047 } 3048 gen_set_access_type(ctx, ACCESS_INT); 3049 t0 = tcg_temp_new(); 3050 gen_addr_reg_index(ctx, t0); 3051 t1 = tcg_temp_new_i32(); 3052 tcg_gen_trunc_tl_i32(t1, cpu_xer); 3053 tcg_gen_andi_i32(t1, t1, 0x7F); 3054 t2 = tcg_const_i32(rS(ctx->opcode)); 3055 gen_helper_stsw(cpu_env, t0, t1, t2); 3056 tcg_temp_free(t0); 3057 tcg_temp_free_i32(t1); 3058 tcg_temp_free_i32(t2); 3059 } 3060 3061 /*** Memory synchronisation ***/ 3062 /* eieio */ 3063 static void gen_eieio(DisasContext *ctx) 3064 { 3065 } 3066 3067 #if !defined(CONFIG_USER_ONLY) 3068 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) 3069 { 3070 TCGv_i32 t; 3071 TCGLabel *l; 3072 3073 if (!ctx->lazy_tlb_flush) { 3074 return; 3075 } 3076 l = gen_new_label(); 3077 t = tcg_temp_new_i32(); 3078 tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, tlb_need_flush)); 3079 tcg_gen_brcondi_i32(TCG_COND_EQ, t, 0, l); 3080 if (global) { 3081 gen_helper_check_tlb_flush_global(cpu_env); 3082 } else { 3083 gen_helper_check_tlb_flush_local(cpu_env); 3084 } 3085 gen_set_label(l); 3086 tcg_temp_free_i32(t); 3087 } 3088 #else 3089 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) { } 3090 #endif 3091 3092 /* isync */ 3093 static void gen_isync(DisasContext *ctx) 3094 { 3095 /* 3096 * We need to check for a pending TLB flush. 
This can only happen in 3097 * kernel mode however so check MSR_PR 3098 */ 3099 if (!ctx->pr) { 3100 gen_check_tlb_flush(ctx, false); 3101 } 3102 gen_stop_exception(ctx); 3103 } 3104 3105 #define MEMOP_GET_SIZE(x) (1 << ((x) & MO_SIZE)) 3106 3107 #define LARX(name, memop) \ 3108 static void gen_##name(DisasContext *ctx) \ 3109 { \ 3110 TCGv t0; \ 3111 TCGv gpr = cpu_gpr[rD(ctx->opcode)]; \ 3112 int len = MEMOP_GET_SIZE(memop); \ 3113 gen_set_access_type(ctx, ACCESS_RES); \ 3114 t0 = tcg_temp_local_new(); \ 3115 gen_addr_reg_index(ctx, t0); \ 3116 if ((len) > 1) { \ 3117 gen_check_align(ctx, t0, (len)-1); \ 3118 } \ 3119 tcg_gen_qemu_ld_tl(gpr, t0, ctx->mem_idx, memop); \ 3120 tcg_gen_mov_tl(cpu_reserve, t0); \ 3121 tcg_gen_st_tl(gpr, cpu_env, offsetof(CPUPPCState, reserve_val)); \ 3122 tcg_temp_free(t0); \ 3123 } 3124 3125 /* lwarx */ 3126 LARX(lbarx, DEF_MEMOP(MO_UB)) 3127 LARX(lharx, DEF_MEMOP(MO_UW)) 3128 LARX(lwarx, DEF_MEMOP(MO_UL)) 3129 3130 #if defined(CONFIG_USER_ONLY) 3131 static void gen_conditional_store(DisasContext *ctx, TCGv EA, 3132 int reg, int memop) 3133 { 3134 TCGv t0 = tcg_temp_new(); 3135 3136 tcg_gen_st_tl(EA, cpu_env, offsetof(CPUPPCState, reserve_ea)); 3137 tcg_gen_movi_tl(t0, (MEMOP_GET_SIZE(memop) << 5) | reg); 3138 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, reserve_info)); 3139 tcg_temp_free(t0); 3140 gen_exception_err(ctx, POWERPC_EXCP_STCX, 0); 3141 } 3142 #else 3143 static void gen_conditional_store(DisasContext *ctx, TCGv EA, 3144 int reg, int memop) 3145 { 3146 TCGLabel *l1; 3147 3148 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 3149 l1 = gen_new_label(); 3150 tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, l1); 3151 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ); 3152 tcg_gen_qemu_st_tl(cpu_gpr[reg], EA, ctx->mem_idx, memop); 3153 gen_set_label(l1); 3154 tcg_gen_movi_tl(cpu_reserve, -1); 3155 } 3156 #endif 3157 3158 #define STCX(name, memop) \ 3159 static void gen_##name(DisasContext *ctx) \ 3160 { \ 3161 TCGv t0; \ 3162 int len = MEMOP_GET_SIZE(memop); \ 3163 gen_set_access_type(ctx, ACCESS_RES); \ 3164 t0 = tcg_temp_local_new(); \ 3165 gen_addr_reg_index(ctx, t0); \ 3166 if (len > 1) { \ 3167 gen_check_align(ctx, t0, (len) - 1); \ 3168 } \ 3169 gen_conditional_store(ctx, t0, rS(ctx->opcode), memop); \ 3170 tcg_temp_free(t0); \ 3171 } 3172 3173 STCX(stbcx_, DEF_MEMOP(MO_UB)) 3174 STCX(sthcx_, DEF_MEMOP(MO_UW)) 3175 STCX(stwcx_, DEF_MEMOP(MO_UL)) 3176 3177 #if defined(TARGET_PPC64) 3178 /* ldarx */ 3179 LARX(ldarx, DEF_MEMOP(MO_Q)) 3180 /* stdcx. 
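 *
 * Typical guest sequence this supports (illustrative):
 *
 *     retry:  ldarx   r0,0,r3        # load and take the reservation
 *             add     r0,r0,r4
 *             stdcx.  r0,0,r3        # store only if still reserved
 *             bne-    retry
 *
 * The reservation is modelled by cpu_reserve (the reserved address,
 * reset to -1 afterwards); in the softmmu case gen_conditional_store
 * above performs the store and sets CR0[EQ] only when the address in
 * the stdcx. matches cpu_reserve.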
*/ 3181 STCX(stdcx_, DEF_MEMOP(MO_Q)) 3182 3183 /* lqarx */ 3184 static void gen_lqarx(DisasContext *ctx) 3185 { 3186 TCGv EA; 3187 int rd = rD(ctx->opcode); 3188 TCGv gpr1, gpr2; 3189 3190 if (unlikely((rd & 1) || (rd == rA(ctx->opcode)) || 3191 (rd == rB(ctx->opcode)))) { 3192 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3193 return; 3194 } 3195 3196 gen_set_access_type(ctx, ACCESS_RES); 3197 EA = tcg_temp_local_new(); 3198 gen_addr_reg_index(ctx, EA); 3199 gen_check_align(ctx, EA, 15); 3200 if (unlikely(ctx->le_mode)) { 3201 gpr1 = cpu_gpr[rd+1]; 3202 gpr2 = cpu_gpr[rd]; 3203 } else { 3204 gpr1 = cpu_gpr[rd]; 3205 gpr2 = cpu_gpr[rd+1]; 3206 } 3207 tcg_gen_qemu_ld_i64(gpr1, EA, ctx->mem_idx, DEF_MEMOP(MO_Q)); 3208 tcg_gen_mov_tl(cpu_reserve, EA); 3209 gen_addr_add(ctx, EA, EA, 8); 3210 tcg_gen_qemu_ld_i64(gpr2, EA, ctx->mem_idx, DEF_MEMOP(MO_Q)); 3211 3212 tcg_gen_st_tl(gpr1, cpu_env, offsetof(CPUPPCState, reserve_val)); 3213 tcg_gen_st_tl(gpr2, cpu_env, offsetof(CPUPPCState, reserve_val2)); 3214 tcg_temp_free(EA); 3215 } 3216 3217 /* stqcx. */ 3218 static void gen_stqcx_(DisasContext *ctx) 3219 { 3220 TCGv EA; 3221 int reg = rS(ctx->opcode); 3222 int len = 16; 3223 #if !defined(CONFIG_USER_ONLY) 3224 TCGLabel *l1; 3225 TCGv gpr1, gpr2; 3226 #endif 3227 3228 if (unlikely((rD(ctx->opcode) & 1))) { 3229 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3230 return; 3231 } 3232 gen_set_access_type(ctx, ACCESS_RES); 3233 EA = tcg_temp_local_new(); 3234 gen_addr_reg_index(ctx, EA); 3235 if (len > 1) { 3236 gen_check_align(ctx, EA, (len) - 1); 3237 } 3238 3239 #if defined(CONFIG_USER_ONLY) 3240 gen_conditional_store(ctx, EA, reg, 16); 3241 #else 3242 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 3243 l1 = gen_new_label(); 3244 tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, l1); 3245 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ); 3246 3247 if (unlikely(ctx->le_mode)) { 3248 gpr1 = cpu_gpr[reg + 1]; 3249 gpr2 = cpu_gpr[reg]; 3250 } else { 3251 gpr1 = cpu_gpr[reg]; 3252 gpr2 = cpu_gpr[reg + 1]; 3253 } 3254 tcg_gen_qemu_st_tl(gpr1, EA, ctx->mem_idx, DEF_MEMOP(MO_Q)); 3255 gen_addr_add(ctx, EA, EA, 8); 3256 tcg_gen_qemu_st_tl(gpr2, EA, ctx->mem_idx, DEF_MEMOP(MO_Q)); 3257 3258 gen_set_label(l1); 3259 tcg_gen_movi_tl(cpu_reserve, -1); 3260 #endif 3261 tcg_temp_free(EA); 3262 } 3263 3264 #endif /* defined(TARGET_PPC64) */ 3265 3266 /* sync */ 3267 static void gen_sync(DisasContext *ctx) 3268 { 3269 uint32_t l = (ctx->opcode >> 21) & 3; 3270 3271 /* 3272 * We may need to check for a pending TLB flush. 3273 * 3274 * We do this on ptesync (l == 2) on ppc64 and any sync pn ppc32. 3275 * 3276 * Additionally, this can only happen in kernel mode however so 3277 * check MSR_PR as well. 
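     *
     * The L field here is (opcode >> 21) & 3: L = 0 is sync/hwsync,
     * L = 1 is lwsync and L = 2 is ptesync, which is the only variant
     * that needs the global flush check on 64-bit server CPUs.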
3278 */ 3279 if (((l == 2) || !(ctx->insns_flags & PPC_64B)) && !ctx->pr) { 3280 gen_check_tlb_flush(ctx, true); 3281 } 3282 } 3283 3284 /* wait */ 3285 static void gen_wait(DisasContext *ctx) 3286 { 3287 TCGv_i32 t0 = tcg_const_i32(1); 3288 tcg_gen_st_i32(t0, cpu_env, 3289 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted)); 3290 tcg_temp_free_i32(t0); 3291 /* Stop translation, as the CPU is supposed to sleep from now */ 3292 gen_exception_nip(ctx, EXCP_HLT, ctx->nip); 3293 } 3294 3295 #if defined(TARGET_PPC64) 3296 static void gen_doze(DisasContext *ctx) 3297 { 3298 #if defined(CONFIG_USER_ONLY) 3299 GEN_PRIV; 3300 #else 3301 TCGv_i32 t; 3302 3303 CHK_HV; 3304 t = tcg_const_i32(PPC_PM_DOZE); 3305 gen_helper_pminsn(cpu_env, t); 3306 tcg_temp_free_i32(t); 3307 gen_stop_exception(ctx); 3308 #endif /* defined(CONFIG_USER_ONLY) */ 3309 } 3310 3311 static void gen_nap(DisasContext *ctx) 3312 { 3313 #if defined(CONFIG_USER_ONLY) 3314 GEN_PRIV; 3315 #else 3316 TCGv_i32 t; 3317 3318 CHK_HV; 3319 t = tcg_const_i32(PPC_PM_NAP); 3320 gen_helper_pminsn(cpu_env, t); 3321 tcg_temp_free_i32(t); 3322 gen_stop_exception(ctx); 3323 #endif /* defined(CONFIG_USER_ONLY) */ 3324 } 3325 3326 static void gen_sleep(DisasContext *ctx) 3327 { 3328 #if defined(CONFIG_USER_ONLY) 3329 GEN_PRIV; 3330 #else 3331 TCGv_i32 t; 3332 3333 CHK_HV; 3334 t = tcg_const_i32(PPC_PM_SLEEP); 3335 gen_helper_pminsn(cpu_env, t); 3336 tcg_temp_free_i32(t); 3337 gen_stop_exception(ctx); 3338 #endif /* defined(CONFIG_USER_ONLY) */ 3339 } 3340 3341 static void gen_rvwinkle(DisasContext *ctx) 3342 { 3343 #if defined(CONFIG_USER_ONLY) 3344 GEN_PRIV; 3345 #else 3346 TCGv_i32 t; 3347 3348 CHK_HV; 3349 t = tcg_const_i32(PPC_PM_RVWINKLE); 3350 gen_helper_pminsn(cpu_env, t); 3351 tcg_temp_free_i32(t); 3352 gen_stop_exception(ctx); 3353 #endif /* defined(CONFIG_USER_ONLY) */ 3354 } 3355 #endif /* #if defined(TARGET_PPC64) */ 3356 3357 static inline void gen_update_cfar(DisasContext *ctx, target_ulong nip) 3358 { 3359 #if defined(TARGET_PPC64) 3360 if (ctx->has_cfar) 3361 tcg_gen_movi_tl(cpu_cfar, nip); 3362 #endif 3363 } 3364 3365 static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest) 3366 { 3367 if (unlikely(ctx->singlestep_enabled)) { 3368 return false; 3369 } 3370 3371 #ifndef CONFIG_USER_ONLY 3372 return (ctx->tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK); 3373 #else 3374 return true; 3375 #endif 3376 } 3377 3378 /*** Branch ***/ 3379 static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest) 3380 { 3381 if (NARROW_MODE(ctx)) { 3382 dest = (uint32_t) dest; 3383 } 3384 if (use_goto_tb(ctx, dest)) { 3385 tcg_gen_goto_tb(n); 3386 tcg_gen_movi_tl(cpu_nip, dest & ~3); 3387 tcg_gen_exit_tb((uintptr_t)ctx->tb + n); 3388 } else { 3389 tcg_gen_movi_tl(cpu_nip, dest & ~3); 3390 if (unlikely(ctx->singlestep_enabled)) { 3391 if ((ctx->singlestep_enabled & 3392 (CPU_BRANCH_STEP | CPU_SINGLE_STEP)) && 3393 (ctx->exception == POWERPC_EXCP_BRANCH || 3394 ctx->exception == POWERPC_EXCP_TRACE)) { 3395 gen_exception_nip(ctx, POWERPC_EXCP_TRACE, dest); 3396 } 3397 if (ctx->singlestep_enabled & GDBSTUB_SINGLE_STEP) { 3398 gen_debug_exception(ctx); 3399 } 3400 } 3401 tcg_gen_exit_tb(0); 3402 } 3403 } 3404 3405 static inline void gen_setlr(DisasContext *ctx, target_ulong nip) 3406 { 3407 if (NARROW_MODE(ctx)) { 3408 nip = (uint32_t)nip; 3409 } 3410 tcg_gen_movi_tl(cpu_lr, nip); 3411 } 3412 3413 /* b ba bl bla */ 3414 static void gen_b(DisasContext *ctx) 3415 { 3416 target_ulong li, target; 3417 3418 ctx->exception = 
POWERPC_EXCP_BRANCH; 3419 /* sign extend LI */ 3420 li = LI(ctx->opcode); 3421 li = (li ^ 0x02000000) - 0x02000000; 3422 if (likely(AA(ctx->opcode) == 0)) { 3423 target = ctx->nip + li - 4; 3424 } else { 3425 target = li; 3426 } 3427 if (LK(ctx->opcode)) { 3428 gen_setlr(ctx, ctx->nip); 3429 } 3430 gen_update_cfar(ctx, ctx->nip - 4); 3431 gen_goto_tb(ctx, 0, target); 3432 } 3433 3434 #define BCOND_IM 0 3435 #define BCOND_LR 1 3436 #define BCOND_CTR 2 3437 #define BCOND_TAR 3 3438 3439 static inline void gen_bcond(DisasContext *ctx, int type) 3440 { 3441 uint32_t bo = BO(ctx->opcode); 3442 TCGLabel *l1; 3443 TCGv target; 3444 3445 ctx->exception = POWERPC_EXCP_BRANCH; 3446 if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) { 3447 target = tcg_temp_local_new(); 3448 if (type == BCOND_CTR) 3449 tcg_gen_mov_tl(target, cpu_ctr); 3450 else if (type == BCOND_TAR) 3451 gen_load_spr(target, SPR_TAR); 3452 else 3453 tcg_gen_mov_tl(target, cpu_lr); 3454 } else { 3455 TCGV_UNUSED(target); 3456 } 3457 if (LK(ctx->opcode)) 3458 gen_setlr(ctx, ctx->nip); 3459 l1 = gen_new_label(); 3460 if ((bo & 0x4) == 0) { 3461 /* Decrement and test CTR */ 3462 TCGv temp = tcg_temp_new(); 3463 if (unlikely(type == BCOND_CTR)) { 3464 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3465 return; 3466 } 3467 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1); 3468 if (NARROW_MODE(ctx)) { 3469 tcg_gen_ext32u_tl(temp, cpu_ctr); 3470 } else { 3471 tcg_gen_mov_tl(temp, cpu_ctr); 3472 } 3473 if (bo & 0x2) { 3474 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1); 3475 } else { 3476 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1); 3477 } 3478 tcg_temp_free(temp); 3479 } 3480 if ((bo & 0x10) == 0) { 3481 /* Test CR */ 3482 uint32_t bi = BI(ctx->opcode); 3483 uint32_t mask = 0x08 >> (bi & 0x03); 3484 TCGv_i32 temp = tcg_temp_new_i32(); 3485 3486 if (bo & 0x8) { 3487 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask); 3488 tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1); 3489 } else { 3490 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask); 3491 tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1); 3492 } 3493 tcg_temp_free_i32(temp); 3494 } 3495 gen_update_cfar(ctx, ctx->nip - 4); 3496 if (type == BCOND_IM) { 3497 target_ulong li = (target_long)((int16_t)(BD(ctx->opcode))); 3498 if (likely(AA(ctx->opcode) == 0)) { 3499 gen_goto_tb(ctx, 0, ctx->nip + li - 4); 3500 } else { 3501 gen_goto_tb(ctx, 0, li); 3502 } 3503 if ((bo & 0x14) != 0x14) { 3504 gen_set_label(l1); 3505 gen_goto_tb(ctx, 1, ctx->nip); 3506 } 3507 } else { 3508 if (NARROW_MODE(ctx)) { 3509 tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3); 3510 } else { 3511 tcg_gen_andi_tl(cpu_nip, target, ~3); 3512 } 3513 tcg_gen_exit_tb(0); 3514 if ((bo & 0x14) != 0x14) { 3515 gen_set_label(l1); 3516 gen_update_nip(ctx, ctx->nip); 3517 tcg_gen_exit_tb(0); 3518 } 3519 } 3520 if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) { 3521 tcg_temp_free(target); 3522 } 3523 } 3524 3525 static void gen_bc(DisasContext *ctx) 3526 { 3527 gen_bcond(ctx, BCOND_IM); 3528 } 3529 3530 static void gen_bcctr(DisasContext *ctx) 3531 { 3532 gen_bcond(ctx, BCOND_CTR); 3533 } 3534 3535 static void gen_bclr(DisasContext *ctx) 3536 { 3537 gen_bcond(ctx, BCOND_LR); 3538 } 3539 3540 static void gen_bctar(DisasContext *ctx) 3541 { 3542 gen_bcond(ctx, BCOND_TAR); 3543 } 3544 3545 /*** Condition register logical ***/ 3546 #define GEN_CRLOGIC(name, tcg_op, opc) \ 3547 static void glue(gen_, name)(DisasContext *ctx) \ 3548 { \ 3549 uint8_t bitmask; \ 3550 int sh; \ 3551 TCGv_i32 t0, t1; \ 3552 sh = (crbD(ctx->opcode) & 0x03) 
- (crbA(ctx->opcode) & 0x03); \ 3553 t0 = tcg_temp_new_i32(); \ 3554 if (sh > 0) \ 3555 tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh); \ 3556 else if (sh < 0) \ 3557 tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh); \ 3558 else \ 3559 tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]); \ 3560 t1 = tcg_temp_new_i32(); \ 3561 sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03); \ 3562 if (sh > 0) \ 3563 tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh); \ 3564 else if (sh < 0) \ 3565 tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh); \ 3566 else \ 3567 tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]); \ 3568 tcg_op(t0, t0, t1); \ 3569 bitmask = 0x08 >> (crbD(ctx->opcode) & 0x03); \ 3570 tcg_gen_andi_i32(t0, t0, bitmask); \ 3571 tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask); \ 3572 tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1); \ 3573 tcg_temp_free_i32(t0); \ 3574 tcg_temp_free_i32(t1); \ 3575 } 3576 3577 /* crand */ 3578 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08); 3579 /* crandc */ 3580 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04); 3581 /* creqv */ 3582 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09); 3583 /* crnand */ 3584 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07); 3585 /* crnor */ 3586 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01); 3587 /* cror */ 3588 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E); 3589 /* crorc */ 3590 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D); 3591 /* crxor */ 3592 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06); 3593 3594 /* mcrf */ 3595 static void gen_mcrf(DisasContext *ctx) 3596 { 3597 tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]); 3598 } 3599 3600 /*** System linkage ***/ 3601 3602 /* rfi (supervisor only) */ 3603 static void gen_rfi(DisasContext *ctx) 3604 { 3605 #if defined(CONFIG_USER_ONLY) 3606 GEN_PRIV; 3607 #else 3608 /* This instruction doesn't exist anymore on 64-bit server 3609 * processors compliant with arch 2.x 3610 */ 3611 if (ctx->insns_flags & PPC_SEGMENT_64B) { 3612 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3613 return; 3614 } 3615 /* Restore CPU state */ 3616 CHK_SV; 3617 gen_update_cfar(ctx, ctx->nip - 4); 3618 gen_helper_rfi(cpu_env); 3619 gen_sync_exception(ctx); 3620 #endif 3621 } 3622 3623 #if defined(TARGET_PPC64) 3624 static void gen_rfid(DisasContext *ctx) 3625 { 3626 #if defined(CONFIG_USER_ONLY) 3627 GEN_PRIV; 3628 #else 3629 /* Restore CPU state */ 3630 CHK_SV; 3631 gen_update_cfar(ctx, ctx->nip - 4); 3632 gen_helper_rfid(cpu_env); 3633 gen_sync_exception(ctx); 3634 #endif 3635 } 3636 3637 static void gen_hrfid(DisasContext *ctx) 3638 { 3639 #if defined(CONFIG_USER_ONLY) 3640 GEN_PRIV; 3641 #else 3642 /* Restore CPU state */ 3643 CHK_HV; 3644 gen_helper_hrfid(cpu_env); 3645 gen_sync_exception(ctx); 3646 #endif 3647 } 3648 #endif 3649 3650 /* sc */ 3651 #if defined(CONFIG_USER_ONLY) 3652 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER 3653 #else 3654 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL 3655 #endif 3656 static void gen_sc(DisasContext *ctx) 3657 { 3658 uint32_t lev; 3659 3660 lev = (ctx->opcode >> 5) & 0x7F; 3661 gen_exception_err(ctx, POWERPC_SYSCALL, lev); 3662 } 3663 3664 /*** Trap ***/ 3665 3666 /* Check for unconditional traps (always or never) */ 3667 static bool check_unconditional_trap(DisasContext *ctx) 3668 { 3669 /* Trap never */ 3670 if (TO(ctx->opcode) == 0) { 3671 return true; 3672 } 3673 /* Trap always */ 3674 if (TO(ctx->opcode) == 31) { 3675 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_TRAP); 3676 return 
true; 3677 } 3678 return false; 3679 } 3680 3681 /* tw */ 3682 static void gen_tw(DisasContext *ctx) 3683 { 3684 TCGv_i32 t0; 3685 3686 if (check_unconditional_trap(ctx)) { 3687 return; 3688 } 3689 t0 = tcg_const_i32(TO(ctx->opcode)); 3690 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 3691 t0); 3692 tcg_temp_free_i32(t0); 3693 } 3694 3695 /* twi */ 3696 static void gen_twi(DisasContext *ctx) 3697 { 3698 TCGv t0; 3699 TCGv_i32 t1; 3700 3701 if (check_unconditional_trap(ctx)) { 3702 return; 3703 } 3704 t0 = tcg_const_tl(SIMM(ctx->opcode)); 3705 t1 = tcg_const_i32(TO(ctx->opcode)); 3706 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1); 3707 tcg_temp_free(t0); 3708 tcg_temp_free_i32(t1); 3709 } 3710 3711 #if defined(TARGET_PPC64) 3712 /* td */ 3713 static void gen_td(DisasContext *ctx) 3714 { 3715 TCGv_i32 t0; 3716 3717 if (check_unconditional_trap(ctx)) { 3718 return; 3719 } 3720 t0 = tcg_const_i32(TO(ctx->opcode)); 3721 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 3722 t0); 3723 tcg_temp_free_i32(t0); 3724 } 3725 3726 /* tdi */ 3727 static void gen_tdi(DisasContext *ctx) 3728 { 3729 TCGv t0; 3730 TCGv_i32 t1; 3731 3732 if (check_unconditional_trap(ctx)) { 3733 return; 3734 } 3735 t0 = tcg_const_tl(SIMM(ctx->opcode)); 3736 t1 = tcg_const_i32(TO(ctx->opcode)); 3737 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1); 3738 tcg_temp_free(t0); 3739 tcg_temp_free_i32(t1); 3740 } 3741 #endif 3742 3743 /*** Processor control ***/ 3744 3745 static void gen_read_xer(TCGv dst) 3746 { 3747 TCGv t0 = tcg_temp_new(); 3748 TCGv t1 = tcg_temp_new(); 3749 TCGv t2 = tcg_temp_new(); 3750 tcg_gen_mov_tl(dst, cpu_xer); 3751 tcg_gen_shli_tl(t0, cpu_so, XER_SO); 3752 tcg_gen_shli_tl(t1, cpu_ov, XER_OV); 3753 tcg_gen_shli_tl(t2, cpu_ca, XER_CA); 3754 tcg_gen_or_tl(t0, t0, t1); 3755 tcg_gen_or_tl(dst, dst, t2); 3756 tcg_gen_or_tl(dst, dst, t0); 3757 tcg_temp_free(t0); 3758 tcg_temp_free(t1); 3759 tcg_temp_free(t2); 3760 } 3761 3762 static void gen_write_xer(TCGv src) 3763 { 3764 tcg_gen_andi_tl(cpu_xer, src, 3765 ~((1u << XER_SO) | (1u << XER_OV) | (1u << XER_CA))); 3766 tcg_gen_shri_tl(cpu_so, src, XER_SO); 3767 tcg_gen_shri_tl(cpu_ov, src, XER_OV); 3768 tcg_gen_shri_tl(cpu_ca, src, XER_CA); 3769 tcg_gen_andi_tl(cpu_so, cpu_so, 1); 3770 tcg_gen_andi_tl(cpu_ov, cpu_ov, 1); 3771 tcg_gen_andi_tl(cpu_ca, cpu_ca, 1); 3772 } 3773 3774 /* mcrxr */ 3775 static void gen_mcrxr(DisasContext *ctx) 3776 { 3777 TCGv_i32 t0 = tcg_temp_new_i32(); 3778 TCGv_i32 t1 = tcg_temp_new_i32(); 3779 TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)]; 3780 3781 tcg_gen_trunc_tl_i32(t0, cpu_so); 3782 tcg_gen_trunc_tl_i32(t1, cpu_ov); 3783 tcg_gen_trunc_tl_i32(dst, cpu_ca); 3784 tcg_gen_shli_i32(t0, t0, 3); 3785 tcg_gen_shli_i32(t1, t1, 2); 3786 tcg_gen_shli_i32(dst, dst, 1); 3787 tcg_gen_or_i32(dst, dst, t0); 3788 tcg_gen_or_i32(dst, dst, t1); 3789 tcg_temp_free_i32(t0); 3790 tcg_temp_free_i32(t1); 3791 3792 tcg_gen_movi_tl(cpu_so, 0); 3793 tcg_gen_movi_tl(cpu_ov, 0); 3794 tcg_gen_movi_tl(cpu_ca, 0); 3795 } 3796 3797 /* mfcr mfocrf */ 3798 static void gen_mfcr(DisasContext *ctx) 3799 { 3800 uint32_t crm, crn; 3801 3802 if (likely(ctx->opcode & 0x00100000)) { 3803 crm = CRM(ctx->opcode); 3804 if (likely(crm && ((crm & (crm - 1)) == 0))) { 3805 crn = ctz32 (crm); 3806 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]); 3807 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], 3808 cpu_gpr[rD(ctx->opcode)], crn * 4); 3809 } 3810 } else { 3811 TCGv_i32 t0 = tcg_temp_new_i32(); 3812 
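        /* Plain mfcr: rebuild the full 32-bit CR image by concatenating
         * the eight 4-bit CR fields, CR0 ending up in the most
         * significant nibble of the result. */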
tcg_gen_mov_i32(t0, cpu_crf[0]); 3813 tcg_gen_shli_i32(t0, t0, 4); 3814 tcg_gen_or_i32(t0, t0, cpu_crf[1]); 3815 tcg_gen_shli_i32(t0, t0, 4); 3816 tcg_gen_or_i32(t0, t0, cpu_crf[2]); 3817 tcg_gen_shli_i32(t0, t0, 4); 3818 tcg_gen_or_i32(t0, t0, cpu_crf[3]); 3819 tcg_gen_shli_i32(t0, t0, 4); 3820 tcg_gen_or_i32(t0, t0, cpu_crf[4]); 3821 tcg_gen_shli_i32(t0, t0, 4); 3822 tcg_gen_or_i32(t0, t0, cpu_crf[5]); 3823 tcg_gen_shli_i32(t0, t0, 4); 3824 tcg_gen_or_i32(t0, t0, cpu_crf[6]); 3825 tcg_gen_shli_i32(t0, t0, 4); 3826 tcg_gen_or_i32(t0, t0, cpu_crf[7]); 3827 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0); 3828 tcg_temp_free_i32(t0); 3829 } 3830 } 3831 3832 /* mfmsr */ 3833 static void gen_mfmsr(DisasContext *ctx) 3834 { 3835 CHK_SV; 3836 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr); 3837 } 3838 3839 static void spr_noaccess(DisasContext *ctx, int gprn, int sprn) 3840 { 3841 #if 0 3842 sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5); 3843 printf("ERROR: try to access SPR %d !\n", sprn); 3844 #endif 3845 } 3846 #define SPR_NOACCESS (&spr_noaccess) 3847 3848 /* mfspr */ 3849 static inline void gen_op_mfspr(DisasContext *ctx) 3850 { 3851 void (*read_cb)(DisasContext *ctx, int gprn, int sprn); 3852 uint32_t sprn = SPR(ctx->opcode); 3853 3854 #if defined(CONFIG_USER_ONLY) 3855 read_cb = ctx->spr_cb[sprn].uea_read; 3856 #else 3857 if (ctx->pr) { 3858 read_cb = ctx->spr_cb[sprn].uea_read; 3859 } else if (ctx->hv) { 3860 read_cb = ctx->spr_cb[sprn].hea_read; 3861 } else { 3862 read_cb = ctx->spr_cb[sprn].oea_read; 3863 } 3864 #endif 3865 if (likely(read_cb != NULL)) { 3866 if (likely(read_cb != SPR_NOACCESS)) { 3867 (*read_cb)(ctx, rD(ctx->opcode), sprn); 3868 } else { 3869 /* Privilege exception */ 3870 /* This is a hack to avoid warnings when running Linux: 3871 * this OS breaks the PowerPC virtualisation model, 3872 * allowing userland application to read the PVR 3873 */ 3874 if (sprn != SPR_PVR) { 3875 fprintf(stderr, "Trying to read privileged spr %d (0x%03x) at " 3876 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4); 3877 if (qemu_log_separate()) { 3878 qemu_log("Trying to read privileged spr %d (0x%03x) at " 3879 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4); 3880 } 3881 } 3882 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG); 3883 } 3884 } else { 3885 /* ISA 2.07 defines these as no-ops */ 3886 if ((ctx->insns_flags2 & PPC2_ISA207S) && 3887 (sprn >= 808 && sprn <= 811)) { 3888 /* This is a nop */ 3889 return; 3890 } 3891 /* Not defined */ 3892 fprintf(stderr, "Trying to read invalid spr %d (0x%03x) at " 3893 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4); 3894 if (qemu_log_separate()) { 3895 qemu_log("Trying to read invalid spr %d (0x%03x) at " 3896 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4); 3897 } 3898 3899 /* The behaviour depends on MSR:PR and SPR# bit 0x10, 3900 * it can generate a priv, a hv emu or a no-op 3901 */ 3902 if (sprn & 0x10) { 3903 if (ctx->pr) { 3904 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 3905 } 3906 } else { 3907 if (ctx->pr || sprn == 0 || sprn == 4 || sprn == 5 || sprn == 6) { 3908 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 3909 } 3910 } 3911 } 3912 } 3913 3914 static void gen_mfspr(DisasContext *ctx) 3915 { 3916 gen_op_mfspr(ctx); 3917 } 3918 3919 /* mftb */ 3920 static void gen_mftb(DisasContext *ctx) 3921 { 3922 gen_op_mfspr(ctx); 3923 } 3924 3925 /* mtcrf mtocrf*/ 3926 static void gen_mtcrf(DisasContext *ctx) 3927 { 3928 uint32_t crm, crn; 3929 3930 crm = CRM(ctx->opcode); 3931 if (likely((ctx->opcode & 0x00100000))) { 3932 if (crm && ((crm & 
(crm - 1)) == 0)) { 3933 TCGv_i32 temp = tcg_temp_new_i32(); 3934 crn = ctz32 (crm); 3935 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]); 3936 tcg_gen_shri_i32(temp, temp, crn * 4); 3937 tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf); 3938 tcg_temp_free_i32(temp); 3939 } 3940 } else { 3941 TCGv_i32 temp = tcg_temp_new_i32(); 3942 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]); 3943 for (crn = 0 ; crn < 8 ; crn++) { 3944 if (crm & (1 << crn)) { 3945 tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4); 3946 tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf); 3947 } 3948 } 3949 tcg_temp_free_i32(temp); 3950 } 3951 } 3952 3953 /* mtmsr */ 3954 #if defined(TARGET_PPC64) 3955 static void gen_mtmsrd(DisasContext *ctx) 3956 { 3957 CHK_SV; 3958 3959 #if !defined(CONFIG_USER_ONLY) 3960 if (ctx->opcode & 0x00010000) { 3961 /* Special form that does not need any synchronisation */ 3962 TCGv t0 = tcg_temp_new(); 3963 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE)); 3964 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE))); 3965 tcg_gen_or_tl(cpu_msr, cpu_msr, t0); 3966 tcg_temp_free(t0); 3967 } else { 3968 /* XXX: we need to update nip before the store 3969 * if we enter power saving mode, we will exit the loop 3970 * directly from ppc_store_msr 3971 */ 3972 gen_update_nip(ctx, ctx->nip); 3973 gen_helper_store_msr(cpu_env, cpu_gpr[rS(ctx->opcode)]); 3974 /* Must stop the translation as machine state (may have) changed */ 3975 /* Note that mtmsr is not always defined as context-synchronizing */ 3976 gen_stop_exception(ctx); 3977 } 3978 #endif /* !defined(CONFIG_USER_ONLY) */ 3979 } 3980 #endif /* defined(TARGET_PPC64) */ 3981 3982 static void gen_mtmsr(DisasContext *ctx) 3983 { 3984 CHK_SV; 3985 3986 #if !defined(CONFIG_USER_ONLY) 3987 if (ctx->opcode & 0x00010000) { 3988 /* Special form that does not need any synchronisation */ 3989 TCGv t0 = tcg_temp_new(); 3990 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE)); 3991 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE))); 3992 tcg_gen_or_tl(cpu_msr, cpu_msr, t0); 3993 tcg_temp_free(t0); 3994 } else { 3995 TCGv msr = tcg_temp_new(); 3996 3997 /* XXX: we need to update nip before the store 3998 * if we enter power saving mode, we will exit the loop 3999 * directly from ppc_store_msr 4000 */ 4001 gen_update_nip(ctx, ctx->nip); 4002 #if defined(TARGET_PPC64) 4003 tcg_gen_deposit_tl(msr, cpu_msr, cpu_gpr[rS(ctx->opcode)], 0, 32); 4004 #else 4005 tcg_gen_mov_tl(msr, cpu_gpr[rS(ctx->opcode)]); 4006 #endif 4007 gen_helper_store_msr(cpu_env, msr); 4008 tcg_temp_free(msr); 4009 /* Must stop the translation as machine state (may have) changed */ 4010 /* Note that mtmsr is not always defined as context-synchronizing */ 4011 gen_stop_exception(ctx); 4012 } 4013 #endif 4014 } 4015 4016 /* mtspr */ 4017 static void gen_mtspr(DisasContext *ctx) 4018 { 4019 void (*write_cb)(DisasContext *ctx, int sprn, int gprn); 4020 uint32_t sprn = SPR(ctx->opcode); 4021 4022 #if defined(CONFIG_USER_ONLY) 4023 write_cb = ctx->spr_cb[sprn].uea_write; 4024 #else 4025 if (ctx->pr) { 4026 write_cb = ctx->spr_cb[sprn].uea_write; 4027 } else if (ctx->hv) { 4028 write_cb = ctx->spr_cb[sprn].hea_write; 4029 } else { 4030 write_cb = ctx->spr_cb[sprn].oea_write; 4031 } 4032 #endif 4033 if (likely(write_cb != NULL)) { 4034 if (likely(write_cb != SPR_NOACCESS)) { 4035 (*write_cb)(ctx, sprn, rS(ctx->opcode)); 4036 } else { 4037 /* Privilege exception */ 4038 
fprintf(stderr, "Trying to write privileged spr %d (0x%03x) at " 4039 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4); 4040 if (qemu_log_separate()) { 4041 qemu_log("Trying to write privileged spr %d (0x%03x) at " 4042 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4); 4043 } 4044 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG); 4045 } 4046 } else { 4047 /* ISA 2.07 defines these as no-ops */ 4048 if ((ctx->insns_flags2 & PPC2_ISA207S) && 4049 (sprn >= 808 && sprn <= 811)) { 4050 /* This is a nop */ 4051 return; 4052 } 4053 4054 /* Not defined */ 4055 if (qemu_log_separate()) { 4056 qemu_log("Trying to write invalid spr %d (0x%03x) at " 4057 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4); 4058 } 4059 fprintf(stderr, "Trying to write invalid spr %d (0x%03x) at " 4060 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4); 4061 4062 4063 /* The behaviour depends on MSR:PR and SPR# bit 0x10, 4064 * it can generate a priv, a hv emu or a no-op 4065 */ 4066 if (sprn & 0x10) { 4067 if (ctx->pr) { 4068 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 4069 } 4070 } else { 4071 if (ctx->pr || sprn == 0) { 4072 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 4073 } 4074 } 4075 } 4076 } 4077 4078 #if defined(TARGET_PPC64) 4079 /* setb */ 4080 static void gen_setb(DisasContext *ctx) 4081 { 4082 TCGv_i32 t0 = tcg_temp_new_i32(); 4083 TCGv_i32 t8 = tcg_temp_new_i32(); 4084 TCGv_i32 tm1 = tcg_temp_new_i32(); 4085 int crf = crfS(ctx->opcode); 4086 4087 tcg_gen_setcondi_i32(TCG_COND_GEU, t0, cpu_crf[crf], 4); 4088 tcg_gen_movi_i32(t8, 8); 4089 tcg_gen_movi_i32(tm1, -1); 4090 tcg_gen_movcond_i32(TCG_COND_GEU, t0, cpu_crf[crf], t8, tm1, t0); 4091 tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0); 4092 4093 tcg_temp_free_i32(t0); 4094 tcg_temp_free_i32(t8); 4095 tcg_temp_free_i32(tm1); 4096 } 4097 #endif 4098 4099 /*** Cache management ***/ 4100 4101 /* dcbf */ 4102 static void gen_dcbf(DisasContext *ctx) 4103 { 4104 /* XXX: specification says this is treated as a load by the MMU */ 4105 TCGv t0; 4106 gen_set_access_type(ctx, ACCESS_CACHE); 4107 t0 = tcg_temp_new(); 4108 gen_addr_reg_index(ctx, t0); 4109 gen_qemu_ld8u(ctx, t0, t0); 4110 tcg_temp_free(t0); 4111 } 4112 4113 /* dcbi (Supervisor only) */ 4114 static void gen_dcbi(DisasContext *ctx) 4115 { 4116 #if defined(CONFIG_USER_ONLY) 4117 GEN_PRIV; 4118 #else 4119 TCGv EA, val; 4120 4121 CHK_SV; 4122 EA = tcg_temp_new(); 4123 gen_set_access_type(ctx, ACCESS_CACHE); 4124 gen_addr_reg_index(ctx, EA); 4125 val = tcg_temp_new(); 4126 /* XXX: specification says this should be treated as a store by the MMU */ 4127 gen_qemu_ld8u(ctx, val, EA); 4128 gen_qemu_st8(ctx, val, EA); 4129 tcg_temp_free(val); 4130 tcg_temp_free(EA); 4131 #endif /* defined(CONFIG_USER_ONLY) */ 4132 } 4133 4134 /* dcdst */ 4135 static void gen_dcbst(DisasContext *ctx) 4136 { 4137 /* XXX: specification say this is treated as a load by the MMU */ 4138 TCGv t0; 4139 gen_set_access_type(ctx, ACCESS_CACHE); 4140 t0 = tcg_temp_new(); 4141 gen_addr_reg_index(ctx, t0); 4142 gen_qemu_ld8u(ctx, t0, t0); 4143 tcg_temp_free(t0); 4144 } 4145 4146 /* dcbt */ 4147 static void gen_dcbt(DisasContext *ctx) 4148 { 4149 /* interpreted as no-op */ 4150 /* XXX: specification say this is treated as a load by the MMU 4151 * but does not generate any exception 4152 */ 4153 } 4154 4155 /* dcbtst */ 4156 static void gen_dcbtst(DisasContext *ctx) 4157 { 4158 /* interpreted as no-op */ 4159 /* XXX: specification say this is treated as a load by the MMU 4160 * but does not generate any exception 4161 */ 4162 } 4163 4164 /* dcbtls */ 
4165 static void gen_dcbtls(DisasContext *ctx) 4166 { 4167 /* Always fails locking the cache */ 4168 TCGv t0 = tcg_temp_new(); 4169 gen_load_spr(t0, SPR_Exxx_L1CSR0); 4170 tcg_gen_ori_tl(t0, t0, L1CSR0_CUL); 4171 gen_store_spr(SPR_Exxx_L1CSR0, t0); 4172 tcg_temp_free(t0); 4173 } 4174 4175 /* dcbz */ 4176 static void gen_dcbz(DisasContext *ctx) 4177 { 4178 TCGv tcgv_addr; 4179 TCGv_i32 tcgv_op; 4180 4181 gen_set_access_type(ctx, ACCESS_CACHE); 4182 tcgv_addr = tcg_temp_new(); 4183 tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000); 4184 gen_addr_reg_index(ctx, tcgv_addr); 4185 gen_helper_dcbz(cpu_env, tcgv_addr, tcgv_op); 4186 tcg_temp_free(tcgv_addr); 4187 tcg_temp_free_i32(tcgv_op); 4188 } 4189 4190 /* dst / dstt */ 4191 static void gen_dst(DisasContext *ctx) 4192 { 4193 if (rA(ctx->opcode) == 0) { 4194 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 4195 } else { 4196 /* interpreted as no-op */ 4197 } 4198 } 4199 4200 /* dstst /dststt */ 4201 static void gen_dstst(DisasContext *ctx) 4202 { 4203 if (rA(ctx->opcode) == 0) { 4204 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 4205 } else { 4206 /* interpreted as no-op */ 4207 } 4208 4209 } 4210 4211 /* dss / dssall */ 4212 static void gen_dss(DisasContext *ctx) 4213 { 4214 /* interpreted as no-op */ 4215 } 4216 4217 /* icbi */ 4218 static void gen_icbi(DisasContext *ctx) 4219 { 4220 TCGv t0; 4221 gen_set_access_type(ctx, ACCESS_CACHE); 4222 t0 = tcg_temp_new(); 4223 gen_addr_reg_index(ctx, t0); 4224 gen_helper_icbi(cpu_env, t0); 4225 tcg_temp_free(t0); 4226 } 4227 4228 /* Optional: */ 4229 /* dcba */ 4230 static void gen_dcba(DisasContext *ctx) 4231 { 4232 /* interpreted as no-op */ 4233 /* XXX: specification say this is treated as a store by the MMU 4234 * but does not generate any exception 4235 */ 4236 } 4237 4238 /*** Segment register manipulation ***/ 4239 /* Supervisor only: */ 4240 4241 /* mfsr */ 4242 static void gen_mfsr(DisasContext *ctx) 4243 { 4244 #if defined(CONFIG_USER_ONLY) 4245 GEN_PRIV; 4246 #else 4247 TCGv t0; 4248 4249 CHK_SV; 4250 t0 = tcg_const_tl(SR(ctx->opcode)); 4251 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 4252 tcg_temp_free(t0); 4253 #endif /* defined(CONFIG_USER_ONLY) */ 4254 } 4255 4256 /* mfsrin */ 4257 static void gen_mfsrin(DisasContext *ctx) 4258 { 4259 #if defined(CONFIG_USER_ONLY) 4260 GEN_PRIV; 4261 #else 4262 TCGv t0; 4263 4264 CHK_SV; 4265 t0 = tcg_temp_new(); 4266 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28); 4267 tcg_gen_andi_tl(t0, t0, 0xF); 4268 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 4269 tcg_temp_free(t0); 4270 #endif /* defined(CONFIG_USER_ONLY) */ 4271 } 4272 4273 /* mtsr */ 4274 static void gen_mtsr(DisasContext *ctx) 4275 { 4276 #if defined(CONFIG_USER_ONLY) 4277 GEN_PRIV; 4278 #else 4279 TCGv t0; 4280 4281 CHK_SV; 4282 t0 = tcg_const_tl(SR(ctx->opcode)); 4283 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 4284 tcg_temp_free(t0); 4285 #endif /* defined(CONFIG_USER_ONLY) */ 4286 } 4287 4288 /* mtsrin */ 4289 static void gen_mtsrin(DisasContext *ctx) 4290 { 4291 #if defined(CONFIG_USER_ONLY) 4292 GEN_PRIV; 4293 #else 4294 TCGv t0; 4295 CHK_SV; 4296 4297 t0 = tcg_temp_new(); 4298 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28); 4299 tcg_gen_andi_tl(t0, t0, 0xF); 4300 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rD(ctx->opcode)]); 4301 tcg_temp_free(t0); 4302 #endif /* defined(CONFIG_USER_ONLY) */ 4303 } 4304 4305 #if defined(TARGET_PPC64) 4306 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */ 4307 4308 /* mfsr */ 
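/* Descriptive note (added): at translation time the 64-bit "bridge"
 * variants below are identical to the 32-bit versions above; the
 * mapping of segment registers onto SLB entries is presumably handled
 * inside the load_sr/store_sr helpers.
 */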
4309 static void gen_mfsr_64b(DisasContext *ctx) 4310 { 4311 #if defined(CONFIG_USER_ONLY) 4312 GEN_PRIV; 4313 #else 4314 TCGv t0; 4315 4316 CHK_SV; 4317 t0 = tcg_const_tl(SR(ctx->opcode)); 4318 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 4319 tcg_temp_free(t0); 4320 #endif /* defined(CONFIG_USER_ONLY) */ 4321 } 4322 4323 /* mfsrin */ 4324 static void gen_mfsrin_64b(DisasContext *ctx) 4325 { 4326 #if defined(CONFIG_USER_ONLY) 4327 GEN_PRIV; 4328 #else 4329 TCGv t0; 4330 4331 CHK_SV; 4332 t0 = tcg_temp_new(); 4333 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28); 4334 tcg_gen_andi_tl(t0, t0, 0xF); 4335 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 4336 tcg_temp_free(t0); 4337 #endif /* defined(CONFIG_USER_ONLY) */ 4338 } 4339 4340 /* mtsr */ 4341 static void gen_mtsr_64b(DisasContext *ctx) 4342 { 4343 #if defined(CONFIG_USER_ONLY) 4344 GEN_PRIV; 4345 #else 4346 TCGv t0; 4347 4348 CHK_SV; 4349 t0 = tcg_const_tl(SR(ctx->opcode)); 4350 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 4351 tcg_temp_free(t0); 4352 #endif /* defined(CONFIG_USER_ONLY) */ 4353 } 4354 4355 /* mtsrin */ 4356 static void gen_mtsrin_64b(DisasContext *ctx) 4357 { 4358 #if defined(CONFIG_USER_ONLY) 4359 GEN_PRIV; 4360 #else 4361 TCGv t0; 4362 4363 CHK_SV; 4364 t0 = tcg_temp_new(); 4365 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28); 4366 tcg_gen_andi_tl(t0, t0, 0xF); 4367 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 4368 tcg_temp_free(t0); 4369 #endif /* defined(CONFIG_USER_ONLY) */ 4370 } 4371 4372 /* slbmte */ 4373 static void gen_slbmte(DisasContext *ctx) 4374 { 4375 #if defined(CONFIG_USER_ONLY) 4376 GEN_PRIV; 4377 #else 4378 CHK_SV; 4379 4380 gen_helper_store_slb(cpu_env, cpu_gpr[rB(ctx->opcode)], 4381 cpu_gpr[rS(ctx->opcode)]); 4382 #endif /* defined(CONFIG_USER_ONLY) */ 4383 } 4384 4385 static void gen_slbmfee(DisasContext *ctx) 4386 { 4387 #if defined(CONFIG_USER_ONLY) 4388 GEN_PRIV; 4389 #else 4390 CHK_SV; 4391 4392 gen_helper_load_slb_esid(cpu_gpr[rS(ctx->opcode)], cpu_env, 4393 cpu_gpr[rB(ctx->opcode)]); 4394 #endif /* defined(CONFIG_USER_ONLY) */ 4395 } 4396 4397 static void gen_slbmfev(DisasContext *ctx) 4398 { 4399 #if defined(CONFIG_USER_ONLY) 4400 GEN_PRIV; 4401 #else 4402 CHK_SV; 4403 4404 gen_helper_load_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env, 4405 cpu_gpr[rB(ctx->opcode)]); 4406 #endif /* defined(CONFIG_USER_ONLY) */ 4407 } 4408 4409 static void gen_slbfee_(DisasContext *ctx) 4410 { 4411 #if defined(CONFIG_USER_ONLY) 4412 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); 4413 #else 4414 TCGLabel *l1, *l2; 4415 4416 if (unlikely(ctx->pr)) { 4417 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); 4418 return; 4419 } 4420 gen_helper_find_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env, 4421 cpu_gpr[rB(ctx->opcode)]); 4422 l1 = gen_new_label(); 4423 l2 = gen_new_label(); 4424 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 4425 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rS(ctx->opcode)], -1, l1); 4426 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ); 4427 tcg_gen_br(l2); 4428 gen_set_label(l1); 4429 tcg_gen_movi_tl(cpu_gpr[rS(ctx->opcode)], 0); 4430 gen_set_label(l2); 4431 #endif 4432 } 4433 #endif /* defined(TARGET_PPC64) */ 4434 4435 /*** Lookaside buffer management ***/ 4436 /* Optional & supervisor only: */ 4437 4438 /* tlbia */ 4439 static void gen_tlbia(DisasContext *ctx) 4440 { 4441 #if defined(CONFIG_USER_ONLY) 4442 GEN_PRIV; 4443 #else 4444 CHK_HV; 4445 4446 gen_helper_tlbia(cpu_env); 4447 #endif /* defined(CONFIG_USER_ONLY) */ 4448 } 4449 4450 /* 
tlbiel */ 4451 static void gen_tlbiel(DisasContext *ctx) 4452 { 4453 #if defined(CONFIG_USER_ONLY) 4454 GEN_PRIV; 4455 #else 4456 CHK_SV; 4457 4458 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]); 4459 #endif /* defined(CONFIG_USER_ONLY) */ 4460 } 4461 4462 /* tlbie */ 4463 static void gen_tlbie(DisasContext *ctx) 4464 { 4465 #if defined(CONFIG_USER_ONLY) 4466 GEN_PRIV; 4467 #else 4468 TCGv_i32 t1; 4469 CHK_HV; 4470 4471 if (NARROW_MODE(ctx)) { 4472 TCGv t0 = tcg_temp_new(); 4473 tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]); 4474 gen_helper_tlbie(cpu_env, t0); 4475 tcg_temp_free(t0); 4476 } else { 4477 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]); 4478 } 4479 t1 = tcg_temp_new_i32(); 4480 tcg_gen_ld_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush)); 4481 tcg_gen_ori_i32(t1, t1, TLB_NEED_GLOBAL_FLUSH); 4482 tcg_gen_st_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush)); 4483 tcg_temp_free_i32(t1); 4484 #endif /* defined(CONFIG_USER_ONLY) */ 4485 } 4486 4487 /* tlbsync */ 4488 static void gen_tlbsync(DisasContext *ctx) 4489 { 4490 #if defined(CONFIG_USER_ONLY) 4491 GEN_PRIV; 4492 #else 4493 CHK_HV; 4494 4495 /* BookS does both ptesync and tlbsync make tlbsync a nop for server */ 4496 if (ctx->insns_flags & PPC_BOOKE) { 4497 gen_check_tlb_flush(ctx, true); 4498 } 4499 #endif /* defined(CONFIG_USER_ONLY) */ 4500 } 4501 4502 #if defined(TARGET_PPC64) 4503 /* slbia */ 4504 static void gen_slbia(DisasContext *ctx) 4505 { 4506 #if defined(CONFIG_USER_ONLY) 4507 GEN_PRIV; 4508 #else 4509 CHK_SV; 4510 4511 gen_helper_slbia(cpu_env); 4512 #endif /* defined(CONFIG_USER_ONLY) */ 4513 } 4514 4515 /* slbie */ 4516 static void gen_slbie(DisasContext *ctx) 4517 { 4518 #if defined(CONFIG_USER_ONLY) 4519 GEN_PRIV; 4520 #else 4521 CHK_SV; 4522 4523 gen_helper_slbie(cpu_env, cpu_gpr[rB(ctx->opcode)]); 4524 #endif /* defined(CONFIG_USER_ONLY) */ 4525 } 4526 #endif /* defined(TARGET_PPC64) */ 4527 4528 /*** External control ***/ 4529 /* Optional: */ 4530 4531 /* eciwx */ 4532 static void gen_eciwx(DisasContext *ctx) 4533 { 4534 TCGv t0; 4535 /* Should check EAR[E] ! */ 4536 gen_set_access_type(ctx, ACCESS_EXT); 4537 t0 = tcg_temp_new(); 4538 gen_addr_reg_index(ctx, t0); 4539 gen_check_align(ctx, t0, 0x03); 4540 gen_qemu_ld32u(ctx, cpu_gpr[rD(ctx->opcode)], t0); 4541 tcg_temp_free(t0); 4542 } 4543 4544 /* ecowx */ 4545 static void gen_ecowx(DisasContext *ctx) 4546 { 4547 TCGv t0; 4548 /* Should check EAR[E] ! */ 4549 gen_set_access_type(ctx, ACCESS_EXT); 4550 t0 = tcg_temp_new(); 4551 gen_addr_reg_index(ctx, t0); 4552 gen_check_align(ctx, t0, 0x03); 4553 gen_qemu_st32(ctx, cpu_gpr[rD(ctx->opcode)], t0); 4554 tcg_temp_free(t0); 4555 } 4556 4557 /* PowerPC 601 specific instructions */ 4558 4559 /* abs - abs. */ 4560 static void gen_abs(DisasContext *ctx) 4561 { 4562 TCGLabel *l1 = gen_new_label(); 4563 TCGLabel *l2 = gen_new_label(); 4564 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l1); 4565 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4566 tcg_gen_br(l2); 4567 gen_set_label(l1); 4568 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4569 gen_set_label(l2); 4570 if (unlikely(Rc(ctx->opcode) != 0)) 4571 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4572 } 4573 4574 /* abso - abso. 
*/ 4575 static void gen_abso(DisasContext *ctx) 4576 { 4577 TCGLabel *l1 = gen_new_label(); 4578 TCGLabel *l2 = gen_new_label(); 4579 TCGLabel *l3 = gen_new_label(); 4580 /* Start with XER OV disabled, the most likely case */ 4581 tcg_gen_movi_tl(cpu_ov, 0); 4582 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l2); 4583 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[rA(ctx->opcode)], 0x80000000, l1); 4584 tcg_gen_movi_tl(cpu_ov, 1); 4585 tcg_gen_movi_tl(cpu_so, 1); 4586 tcg_gen_br(l2); 4587 gen_set_label(l1); 4588 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4589 tcg_gen_br(l3); 4590 gen_set_label(l2); 4591 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4592 gen_set_label(l3); 4593 if (unlikely(Rc(ctx->opcode) != 0)) 4594 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4595 } 4596 4597 /* clcs */ 4598 static void gen_clcs(DisasContext *ctx) 4599 { 4600 TCGv_i32 t0 = tcg_const_i32(rA(ctx->opcode)); 4601 gen_helper_clcs(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 4602 tcg_temp_free_i32(t0); 4603 /* Rc=1 sets CR0 to an undefined state */ 4604 } 4605 4606 /* div - div. */ 4607 static void gen_div(DisasContext *ctx) 4608 { 4609 gen_helper_div(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 4610 cpu_gpr[rB(ctx->opcode)]); 4611 if (unlikely(Rc(ctx->opcode) != 0)) 4612 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4613 } 4614 4615 /* divo - divo. */ 4616 static void gen_divo(DisasContext *ctx) 4617 { 4618 gen_helper_divo(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 4619 cpu_gpr[rB(ctx->opcode)]); 4620 if (unlikely(Rc(ctx->opcode) != 0)) 4621 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4622 } 4623 4624 /* divs - divs. */ 4625 static void gen_divs(DisasContext *ctx) 4626 { 4627 gen_helper_divs(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 4628 cpu_gpr[rB(ctx->opcode)]); 4629 if (unlikely(Rc(ctx->opcode) != 0)) 4630 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4631 } 4632 4633 /* divso - divso. */ 4634 static void gen_divso(DisasContext *ctx) 4635 { 4636 gen_helper_divso(cpu_gpr[rD(ctx->opcode)], cpu_env, 4637 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 4638 if (unlikely(Rc(ctx->opcode) != 0)) 4639 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4640 } 4641 4642 /* doz - doz. */ 4643 static void gen_doz(DisasContext *ctx) 4644 { 4645 TCGLabel *l1 = gen_new_label(); 4646 TCGLabel *l2 = gen_new_label(); 4647 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1); 4648 tcg_gen_sub_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4649 tcg_gen_br(l2); 4650 gen_set_label(l1); 4651 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 4652 gen_set_label(l2); 4653 if (unlikely(Rc(ctx->opcode) != 0)) 4654 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4655 } 4656 4657 /* dozo - dozo. 
*/ 4658 static void gen_dozo(DisasContext *ctx) 4659 { 4660 TCGLabel *l1 = gen_new_label(); 4661 TCGLabel *l2 = gen_new_label(); 4662 TCGv t0 = tcg_temp_new(); 4663 TCGv t1 = tcg_temp_new(); 4664 TCGv t2 = tcg_temp_new(); 4665 /* Start with XER OV disabled, the most likely case */ 4666 tcg_gen_movi_tl(cpu_ov, 0); 4667 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1); 4668 tcg_gen_sub_tl(t0, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4669 tcg_gen_xor_tl(t1, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4670 tcg_gen_xor_tl(t2, cpu_gpr[rA(ctx->opcode)], t0); 4671 tcg_gen_andc_tl(t1, t1, t2); 4672 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0); 4673 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2); 4674 tcg_gen_movi_tl(cpu_ov, 1); 4675 tcg_gen_movi_tl(cpu_so, 1); 4676 tcg_gen_br(l2); 4677 gen_set_label(l1); 4678 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 4679 gen_set_label(l2); 4680 tcg_temp_free(t0); 4681 tcg_temp_free(t1); 4682 tcg_temp_free(t2); 4683 if (unlikely(Rc(ctx->opcode) != 0)) 4684 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4685 } 4686 4687 /* dozi */ 4688 static void gen_dozi(DisasContext *ctx) 4689 { 4690 target_long simm = SIMM(ctx->opcode); 4691 TCGLabel *l1 = gen_new_label(); 4692 TCGLabel *l2 = gen_new_label(); 4693 tcg_gen_brcondi_tl(TCG_COND_LT, cpu_gpr[rA(ctx->opcode)], simm, l1); 4694 tcg_gen_subfi_tl(cpu_gpr[rD(ctx->opcode)], simm, cpu_gpr[rA(ctx->opcode)]); 4695 tcg_gen_br(l2); 4696 gen_set_label(l1); 4697 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 4698 gen_set_label(l2); 4699 if (unlikely(Rc(ctx->opcode) != 0)) 4700 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4701 } 4702 4703 /* lscbx - lscbx. */ 4704 static void gen_lscbx(DisasContext *ctx) 4705 { 4706 TCGv t0 = tcg_temp_new(); 4707 TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode)); 4708 TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode)); 4709 TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode)); 4710 4711 gen_addr_reg_index(ctx, t0); 4712 gen_helper_lscbx(t0, cpu_env, t0, t1, t2, t3); 4713 tcg_temp_free_i32(t1); 4714 tcg_temp_free_i32(t2); 4715 tcg_temp_free_i32(t3); 4716 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F); 4717 tcg_gen_or_tl(cpu_xer, cpu_xer, t0); 4718 if (unlikely(Rc(ctx->opcode) != 0)) 4719 gen_set_Rc0(ctx, t0); 4720 tcg_temp_free(t0); 4721 } 4722 4723 /* maskg - maskg. */ 4724 static void gen_maskg(DisasContext *ctx) 4725 { 4726 TCGLabel *l1 = gen_new_label(); 4727 TCGv t0 = tcg_temp_new(); 4728 TCGv t1 = tcg_temp_new(); 4729 TCGv t2 = tcg_temp_new(); 4730 TCGv t3 = tcg_temp_new(); 4731 tcg_gen_movi_tl(t3, 0xFFFFFFFF); 4732 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 4733 tcg_gen_andi_tl(t1, cpu_gpr[rS(ctx->opcode)], 0x1F); 4734 tcg_gen_addi_tl(t2, t0, 1); 4735 tcg_gen_shr_tl(t2, t3, t2); 4736 tcg_gen_shr_tl(t3, t3, t1); 4737 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], t2, t3); 4738 tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1); 4739 tcg_gen_neg_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4740 gen_set_label(l1); 4741 tcg_temp_free(t0); 4742 tcg_temp_free(t1); 4743 tcg_temp_free(t2); 4744 tcg_temp_free(t3); 4745 if (unlikely(Rc(ctx->opcode) != 0)) 4746 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 4747 } 4748 4749 /* maskir - maskir. 
*/ 4750 static void gen_maskir(DisasContext *ctx) 4751 { 4752 TCGv t0 = tcg_temp_new(); 4753 TCGv t1 = tcg_temp_new(); 4754 tcg_gen_and_tl(t0, cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 4755 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 4756 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 4757 tcg_temp_free(t0); 4758 tcg_temp_free(t1); 4759 if (unlikely(Rc(ctx->opcode) != 0)) 4760 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 4761 } 4762 4763 /* mul - mul. */ 4764 static void gen_mul(DisasContext *ctx) 4765 { 4766 TCGv_i64 t0 = tcg_temp_new_i64(); 4767 TCGv_i64 t1 = tcg_temp_new_i64(); 4768 TCGv t2 = tcg_temp_new(); 4769 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]); 4770 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]); 4771 tcg_gen_mul_i64(t0, t0, t1); 4772 tcg_gen_trunc_i64_tl(t2, t0); 4773 gen_store_spr(SPR_MQ, t2); 4774 tcg_gen_shri_i64(t1, t0, 32); 4775 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1); 4776 tcg_temp_free_i64(t0); 4777 tcg_temp_free_i64(t1); 4778 tcg_temp_free(t2); 4779 if (unlikely(Rc(ctx->opcode) != 0)) 4780 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4781 } 4782 4783 /* mulo - mulo. */ 4784 static void gen_mulo(DisasContext *ctx) 4785 { 4786 TCGLabel *l1 = gen_new_label(); 4787 TCGv_i64 t0 = tcg_temp_new_i64(); 4788 TCGv_i64 t1 = tcg_temp_new_i64(); 4789 TCGv t2 = tcg_temp_new(); 4790 /* Start with XER OV disabled, the most likely case */ 4791 tcg_gen_movi_tl(cpu_ov, 0); 4792 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]); 4793 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]); 4794 tcg_gen_mul_i64(t0, t0, t1); 4795 tcg_gen_trunc_i64_tl(t2, t0); 4796 gen_store_spr(SPR_MQ, t2); 4797 tcg_gen_shri_i64(t1, t0, 32); 4798 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1); 4799 tcg_gen_ext32s_i64(t1, t0); 4800 tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1); 4801 tcg_gen_movi_tl(cpu_ov, 1); 4802 tcg_gen_movi_tl(cpu_so, 1); 4803 gen_set_label(l1); 4804 tcg_temp_free_i64(t0); 4805 tcg_temp_free_i64(t1); 4806 tcg_temp_free(t2); 4807 if (unlikely(Rc(ctx->opcode) != 0)) 4808 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4809 } 4810 4811 /* nabs - nabs. */ 4812 static void gen_nabs(DisasContext *ctx) 4813 { 4814 TCGLabel *l1 = gen_new_label(); 4815 TCGLabel *l2 = gen_new_label(); 4816 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1); 4817 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4818 tcg_gen_br(l2); 4819 gen_set_label(l1); 4820 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4821 gen_set_label(l2); 4822 if (unlikely(Rc(ctx->opcode) != 0)) 4823 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4824 } 4825 4826 /* nabso - nabso. */ 4827 static void gen_nabso(DisasContext *ctx) 4828 { 4829 TCGLabel *l1 = gen_new_label(); 4830 TCGLabel *l2 = gen_new_label(); 4831 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1); 4832 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4833 tcg_gen_br(l2); 4834 gen_set_label(l1); 4835 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 4836 gen_set_label(l2); 4837 /* nabs never overflows */ 4838 tcg_gen_movi_tl(cpu_ov, 0); 4839 if (unlikely(Rc(ctx->opcode) != 0)) 4840 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 4841 } 4842 4843 /* rlmi - rlmi. 
*/ 4844 static void gen_rlmi(DisasContext *ctx) 4845 { 4846 uint32_t mb = MB(ctx->opcode); 4847 uint32_t me = ME(ctx->opcode); 4848 TCGv t0 = tcg_temp_new(); 4849 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 4850 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 4851 tcg_gen_andi_tl(t0, t0, MASK(mb, me)); 4852 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~MASK(mb, me)); 4853 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], t0); 4854 tcg_temp_free(t0); 4855 if (unlikely(Rc(ctx->opcode) != 0)) 4856 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 4857 } 4858 4859 /* rrib - rrib. */ 4860 static void gen_rrib(DisasContext *ctx) 4861 { 4862 TCGv t0 = tcg_temp_new(); 4863 TCGv t1 = tcg_temp_new(); 4864 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 4865 tcg_gen_movi_tl(t1, 0x80000000); 4866 tcg_gen_shr_tl(t1, t1, t0); 4867 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 4868 tcg_gen_and_tl(t0, t0, t1); 4869 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], t1); 4870 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 4871 tcg_temp_free(t0); 4872 tcg_temp_free(t1); 4873 if (unlikely(Rc(ctx->opcode) != 0)) 4874 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 4875 } 4876 4877 /* sle - sle. */ 4878 static void gen_sle(DisasContext *ctx) 4879 { 4880 TCGv t0 = tcg_temp_new(); 4881 TCGv t1 = tcg_temp_new(); 4882 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 4883 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 4884 tcg_gen_subfi_tl(t1, 32, t1); 4885 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 4886 tcg_gen_or_tl(t1, t0, t1); 4887 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 4888 gen_store_spr(SPR_MQ, t1); 4889 tcg_temp_free(t0); 4890 tcg_temp_free(t1); 4891 if (unlikely(Rc(ctx->opcode) != 0)) 4892 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 4893 } 4894 4895 /* sleq - sleq. */ 4896 static void gen_sleq(DisasContext *ctx) 4897 { 4898 TCGv t0 = tcg_temp_new(); 4899 TCGv t1 = tcg_temp_new(); 4900 TCGv t2 = tcg_temp_new(); 4901 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 4902 tcg_gen_movi_tl(t2, 0xFFFFFFFF); 4903 tcg_gen_shl_tl(t2, t2, t0); 4904 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 4905 gen_load_spr(t1, SPR_MQ); 4906 gen_store_spr(SPR_MQ, t0); 4907 tcg_gen_and_tl(t0, t0, t2); 4908 tcg_gen_andc_tl(t1, t1, t2); 4909 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 4910 tcg_temp_free(t0); 4911 tcg_temp_free(t1); 4912 tcg_temp_free(t2); 4913 if (unlikely(Rc(ctx->opcode) != 0)) 4914 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 4915 } 4916 4917 /* sliq - sliq. */ 4918 static void gen_sliq(DisasContext *ctx) 4919 { 4920 int sh = SH(ctx->opcode); 4921 TCGv t0 = tcg_temp_new(); 4922 TCGv t1 = tcg_temp_new(); 4923 tcg_gen_shli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 4924 tcg_gen_shri_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 4925 tcg_gen_or_tl(t1, t0, t1); 4926 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 4927 gen_store_spr(SPR_MQ, t1); 4928 tcg_temp_free(t0); 4929 tcg_temp_free(t1); 4930 if (unlikely(Rc(ctx->opcode) != 0)) 4931 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 4932 } 4933 4934 /* slliq - slliq. 
*/ 4935 static void gen_slliq(DisasContext *ctx) 4936 { 4937 int sh = SH(ctx->opcode); 4938 TCGv t0 = tcg_temp_new(); 4939 TCGv t1 = tcg_temp_new(); 4940 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 4941 gen_load_spr(t1, SPR_MQ); 4942 gen_store_spr(SPR_MQ, t0); 4943 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU << sh)); 4944 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU << sh)); 4945 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 4946 tcg_temp_free(t0); 4947 tcg_temp_free(t1); 4948 if (unlikely(Rc(ctx->opcode) != 0)) 4949 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 4950 } 4951 4952 /* sllq - sllq. */ 4953 static void gen_sllq(DisasContext *ctx) 4954 { 4955 TCGLabel *l1 = gen_new_label(); 4956 TCGLabel *l2 = gen_new_label(); 4957 TCGv t0 = tcg_temp_local_new(); 4958 TCGv t1 = tcg_temp_local_new(); 4959 TCGv t2 = tcg_temp_local_new(); 4960 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 4961 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 4962 tcg_gen_shl_tl(t1, t1, t2); 4963 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 4964 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 4965 gen_load_spr(t0, SPR_MQ); 4966 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 4967 tcg_gen_br(l2); 4968 gen_set_label(l1); 4969 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 4970 gen_load_spr(t2, SPR_MQ); 4971 tcg_gen_andc_tl(t1, t2, t1); 4972 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 4973 gen_set_label(l2); 4974 tcg_temp_free(t0); 4975 tcg_temp_free(t1); 4976 tcg_temp_free(t2); 4977 if (unlikely(Rc(ctx->opcode) != 0)) 4978 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 4979 } 4980 4981 /* slq - slq. */ 4982 static void gen_slq(DisasContext *ctx) 4983 { 4984 TCGLabel *l1 = gen_new_label(); 4985 TCGv t0 = tcg_temp_new(); 4986 TCGv t1 = tcg_temp_new(); 4987 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 4988 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 4989 tcg_gen_subfi_tl(t1, 32, t1); 4990 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 4991 tcg_gen_or_tl(t1, t0, t1); 4992 gen_store_spr(SPR_MQ, t1); 4993 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20); 4994 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 4995 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1); 4996 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 4997 gen_set_label(l1); 4998 tcg_temp_free(t0); 4999 tcg_temp_free(t1); 5000 if (unlikely(Rc(ctx->opcode) != 0)) 5001 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5002 } 5003 5004 /* sraiq - sraiq. */ 5005 static void gen_sraiq(DisasContext *ctx) 5006 { 5007 int sh = SH(ctx->opcode); 5008 TCGLabel *l1 = gen_new_label(); 5009 TCGv t0 = tcg_temp_new(); 5010 TCGv t1 = tcg_temp_new(); 5011 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5012 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 5013 tcg_gen_or_tl(t0, t0, t1); 5014 gen_store_spr(SPR_MQ, t0); 5015 tcg_gen_movi_tl(cpu_ca, 0); 5016 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1); 5017 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1); 5018 tcg_gen_movi_tl(cpu_ca, 1); 5019 gen_set_label(l1); 5020 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh); 5021 tcg_temp_free(t0); 5022 tcg_temp_free(t1); 5023 if (unlikely(Rc(ctx->opcode) != 0)) 5024 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5025 } 5026 5027 /* sraq - sraq. 
*/ 5028 static void gen_sraq(DisasContext *ctx) 5029 { 5030 TCGLabel *l1 = gen_new_label(); 5031 TCGLabel *l2 = gen_new_label(); 5032 TCGv t0 = tcg_temp_new(); 5033 TCGv t1 = tcg_temp_local_new(); 5034 TCGv t2 = tcg_temp_local_new(); 5035 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 5036 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 5037 tcg_gen_sar_tl(t1, cpu_gpr[rS(ctx->opcode)], t2); 5038 tcg_gen_subfi_tl(t2, 32, t2); 5039 tcg_gen_shl_tl(t2, cpu_gpr[rS(ctx->opcode)], t2); 5040 tcg_gen_or_tl(t0, t0, t2); 5041 gen_store_spr(SPR_MQ, t0); 5042 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 5043 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l1); 5044 tcg_gen_mov_tl(t2, cpu_gpr[rS(ctx->opcode)]); 5045 tcg_gen_sari_tl(t1, cpu_gpr[rS(ctx->opcode)], 31); 5046 gen_set_label(l1); 5047 tcg_temp_free(t0); 5048 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t1); 5049 tcg_gen_movi_tl(cpu_ca, 0); 5050 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2); 5051 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l2); 5052 tcg_gen_movi_tl(cpu_ca, 1); 5053 gen_set_label(l2); 5054 tcg_temp_free(t1); 5055 tcg_temp_free(t2); 5056 if (unlikely(Rc(ctx->opcode) != 0)) 5057 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5058 } 5059 5060 /* sre - sre. */ 5061 static void gen_sre(DisasContext *ctx) 5062 { 5063 TCGv t0 = tcg_temp_new(); 5064 TCGv t1 = tcg_temp_new(); 5065 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5066 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5067 tcg_gen_subfi_tl(t1, 32, t1); 5068 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 5069 tcg_gen_or_tl(t1, t0, t1); 5070 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5071 gen_store_spr(SPR_MQ, t1); 5072 tcg_temp_free(t0); 5073 tcg_temp_free(t1); 5074 if (unlikely(Rc(ctx->opcode) != 0)) 5075 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5076 } 5077 5078 /* srea - srea. 
*/ 5079 static void gen_srea(DisasContext *ctx) 5080 { 5081 TCGv t0 = tcg_temp_new(); 5082 TCGv t1 = tcg_temp_new(); 5083 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5084 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5085 gen_store_spr(SPR_MQ, t0); 5086 tcg_gen_sar_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t1); 5087 tcg_temp_free(t0); 5088 tcg_temp_free(t1); 5089 if (unlikely(Rc(ctx->opcode) != 0)) 5090 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5091 } 5092 5093 /* sreq */ 5094 static void gen_sreq(DisasContext *ctx) 5095 { 5096 TCGv t0 = tcg_temp_new(); 5097 TCGv t1 = tcg_temp_new(); 5098 TCGv t2 = tcg_temp_new(); 5099 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5100 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 5101 tcg_gen_shr_tl(t1, t1, t0); 5102 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 5103 gen_load_spr(t2, SPR_MQ); 5104 gen_store_spr(SPR_MQ, t0); 5105 tcg_gen_and_tl(t0, t0, t1); 5106 tcg_gen_andc_tl(t2, t2, t1); 5107 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t2); 5108 tcg_temp_free(t0); 5109 tcg_temp_free(t1); 5110 tcg_temp_free(t2); 5111 if (unlikely(Rc(ctx->opcode) != 0)) 5112 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5113 } 5114 5115 /* sriq */ 5116 static void gen_sriq(DisasContext *ctx) 5117 { 5118 int sh = SH(ctx->opcode); 5119 TCGv t0 = tcg_temp_new(); 5120 TCGv t1 = tcg_temp_new(); 5121 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5122 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 5123 tcg_gen_or_tl(t1, t0, t1); 5124 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5125 gen_store_spr(SPR_MQ, t1); 5126 tcg_temp_free(t0); 5127 tcg_temp_free(t1); 5128 if (unlikely(Rc(ctx->opcode) != 0)) 5129 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5130 } 5131 5132 /* srliq */ 5133 static void gen_srliq(DisasContext *ctx) 5134 { 5135 int sh = SH(ctx->opcode); 5136 TCGv t0 = tcg_temp_new(); 5137 TCGv t1 = tcg_temp_new(); 5138 tcg_gen_rotri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5139 gen_load_spr(t1, SPR_MQ); 5140 gen_store_spr(SPR_MQ, t0); 5141 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU >> sh)); 5142 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU >> sh)); 5143 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5144 tcg_temp_free(t0); 5145 tcg_temp_free(t1); 5146 if (unlikely(Rc(ctx->opcode) != 0)) 5147 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5148 } 5149 5150 /* srlq */ 5151 static void gen_srlq(DisasContext *ctx) 5152 { 5153 TCGLabel *l1 = gen_new_label(); 5154 TCGLabel *l2 = gen_new_label(); 5155 TCGv t0 = tcg_temp_local_new(); 5156 TCGv t1 = tcg_temp_local_new(); 5157 TCGv t2 = tcg_temp_local_new(); 5158 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 5159 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 5160 tcg_gen_shr_tl(t2, t1, t2); 5161 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 5162 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 5163 gen_load_spr(t0, SPR_MQ); 5164 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t2); 5165 tcg_gen_br(l2); 5166 gen_set_label(l1); 5167 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 5168 tcg_gen_and_tl(t0, t0, t2); 5169 gen_load_spr(t1, SPR_MQ); 5170 tcg_gen_andc_tl(t1, t1, t2); 5171 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5172 gen_set_label(l2); 5173 tcg_temp_free(t0); 5174 tcg_temp_free(t1); 5175 tcg_temp_free(t2); 5176 if (unlikely(Rc(ctx->opcode) != 0)) 5177 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5178 } 5179 5180 /* srq */ 5181 static void gen_srq(DisasContext *ctx) 5182 { 5183 TCGLabel *l1 = gen_new_label(); 5184 TCGv t0 = tcg_temp_new(); 5185 TCGv t1 = tcg_temp_new(); 5186 tcg_gen_andi_tl(t1, 
cpu_gpr[rB(ctx->opcode)], 0x1F); 5187 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5188 tcg_gen_subfi_tl(t1, 32, t1); 5189 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 5190 tcg_gen_or_tl(t1, t0, t1); 5191 gen_store_spr(SPR_MQ, t1); 5192 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20); 5193 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5194 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 5195 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 5196 gen_set_label(l1); 5197 tcg_temp_free(t0); 5198 tcg_temp_free(t1); 5199 if (unlikely(Rc(ctx->opcode) != 0)) 5200 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5201 } 5202 5203 /* PowerPC 602 specific instructions */ 5204 5205 /* dsa */ 5206 static void gen_dsa(DisasContext *ctx) 5207 { 5208 /* XXX: TODO */ 5209 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5210 } 5211 5212 /* esa */ 5213 static void gen_esa(DisasContext *ctx) 5214 { 5215 /* XXX: TODO */ 5216 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5217 } 5218 5219 /* mfrom */ 5220 static void gen_mfrom(DisasContext *ctx) 5221 { 5222 #if defined(CONFIG_USER_ONLY) 5223 GEN_PRIV; 5224 #else 5225 CHK_SV; 5226 gen_helper_602_mfrom(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5227 #endif /* defined(CONFIG_USER_ONLY) */ 5228 } 5229 5230 /* 602 - 603 - G2 TLB management */ 5231 5232 /* tlbld */ 5233 static void gen_tlbld_6xx(DisasContext *ctx) 5234 { 5235 #if defined(CONFIG_USER_ONLY) 5236 GEN_PRIV; 5237 #else 5238 CHK_SV; 5239 gen_helper_6xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5240 #endif /* defined(CONFIG_USER_ONLY) */ 5241 } 5242 5243 /* tlbli */ 5244 static void gen_tlbli_6xx(DisasContext *ctx) 5245 { 5246 #if defined(CONFIG_USER_ONLY) 5247 GEN_PRIV; 5248 #else 5249 CHK_SV; 5250 gen_helper_6xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5251 #endif /* defined(CONFIG_USER_ONLY) */ 5252 } 5253 5254 /* 74xx TLB management */ 5255 5256 /* tlbld */ 5257 static void gen_tlbld_74xx(DisasContext *ctx) 5258 { 5259 #if defined(CONFIG_USER_ONLY) 5260 GEN_PRIV; 5261 #else 5262 CHK_SV; 5263 gen_helper_74xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5264 #endif /* defined(CONFIG_USER_ONLY) */ 5265 } 5266 5267 /* tlbli */ 5268 static void gen_tlbli_74xx(DisasContext *ctx) 5269 { 5270 #if defined(CONFIG_USER_ONLY) 5271 GEN_PRIV; 5272 #else 5273 CHK_SV; 5274 gen_helper_74xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5275 #endif /* defined(CONFIG_USER_ONLY) */ 5276 } 5277 5278 /* POWER instructions not in PowerPC 601 */ 5279 5280 /* clf */ 5281 static void gen_clf(DisasContext *ctx) 5282 { 5283 /* Cache line flush: implemented as no-op */ 5284 } 5285 5286 /* cli */ 5287 static void gen_cli(DisasContext *ctx) 5288 { 5289 #if defined(CONFIG_USER_ONLY) 5290 GEN_PRIV; 5291 #else 5292 /* Cache line invalidate: privileged and treated as no-op */ 5293 CHK_SV; 5294 #endif /* defined(CONFIG_USER_ONLY) */ 5295 } 5296 5297 /* dclst */ 5298 static void gen_dclst(DisasContext *ctx) 5299 { 5300 /* Data cache line store: treated as no-op */ 5301 } 5302 5303 static void gen_mfsri(DisasContext *ctx) 5304 { 5305 #if defined(CONFIG_USER_ONLY) 5306 GEN_PRIV; 5307 #else 5308 int ra = rA(ctx->opcode); 5309 int rd = rD(ctx->opcode); 5310 TCGv t0; 5311 5312 CHK_SV; 5313 t0 = tcg_temp_new(); 5314 gen_addr_reg_index(ctx, t0); 5315 tcg_gen_shri_tl(t0, t0, 28); 5316 tcg_gen_andi_tl(t0, t0, 0xF); 5317 gen_helper_load_sr(cpu_gpr[rd], cpu_env, t0); 5318 tcg_temp_free(t0); 5319 if (ra != 0 && ra != rd) 5320 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rd]); 5321 #endif /* defined(CONFIG_USER_ONLY) */ 5322 } 5323 5324 static void 
gen_rac(DisasContext *ctx) 5325 { 5326 #if defined(CONFIG_USER_ONLY) 5327 GEN_PRIV; 5328 #else 5329 TCGv t0; 5330 5331 CHK_SV; 5332 t0 = tcg_temp_new(); 5333 gen_addr_reg_index(ctx, t0); 5334 gen_helper_rac(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5335 tcg_temp_free(t0); 5336 #endif /* defined(CONFIG_USER_ONLY) */ 5337 } 5338 5339 static void gen_rfsvc(DisasContext *ctx) 5340 { 5341 #if defined(CONFIG_USER_ONLY) 5342 GEN_PRIV; 5343 #else 5344 CHK_SV; 5345 5346 gen_helper_rfsvc(cpu_env); 5347 gen_sync_exception(ctx); 5348 #endif /* defined(CONFIG_USER_ONLY) */ 5349 } 5350 5351 /* svc is not implemented for now */ 5352 5353 /* BookE specific instructions */ 5354 5355 /* XXX: not implemented on 440 ? */ 5356 static void gen_mfapidi(DisasContext *ctx) 5357 { 5358 /* XXX: TODO */ 5359 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5360 } 5361 5362 /* XXX: not implemented on 440 ? */ 5363 static void gen_tlbiva(DisasContext *ctx) 5364 { 5365 #if defined(CONFIG_USER_ONLY) 5366 GEN_PRIV; 5367 #else 5368 TCGv t0; 5369 5370 CHK_SV; 5371 t0 = tcg_temp_new(); 5372 gen_addr_reg_index(ctx, t0); 5373 gen_helper_tlbiva(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5374 tcg_temp_free(t0); 5375 #endif /* defined(CONFIG_USER_ONLY) */ 5376 } 5377 5378 /* All 405 MAC instructions are translated here */ 5379 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3, 5380 int ra, int rb, int rt, int Rc) 5381 { 5382 TCGv t0, t1; 5383 5384 t0 = tcg_temp_local_new(); 5385 t1 = tcg_temp_local_new(); 5386 5387 switch (opc3 & 0x0D) { 5388 case 0x05: 5389 /* macchw - macchw. - macchwo - macchwo. */ 5390 /* macchws - macchws. - macchwso - macchwso. */ 5391 /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */ 5392 /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */ 5393 /* mulchw - mulchw. */ 5394 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]); 5395 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16); 5396 tcg_gen_ext16s_tl(t1, t1); 5397 break; 5398 case 0x04: 5399 /* macchwu - macchwu. - macchwuo - macchwuo. */ 5400 /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */ 5401 /* mulchwu - mulchwu. */ 5402 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]); 5403 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16); 5404 tcg_gen_ext16u_tl(t1, t1); 5405 break; 5406 case 0x01: 5407 /* machhw - machhw. - machhwo - machhwo. */ 5408 /* machhws - machhws. - machhwso - machhwso. */ 5409 /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */ 5410 /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */ 5411 /* mulhhw - mulhhw. */ 5412 tcg_gen_sari_tl(t0, cpu_gpr[ra], 16); 5413 tcg_gen_ext16s_tl(t0, t0); 5414 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16); 5415 tcg_gen_ext16s_tl(t1, t1); 5416 break; 5417 case 0x00: 5418 /* machhwu - machhwu. - machhwuo - machhwuo. */ 5419 /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */ 5420 /* mulhhwu - mulhhwu. */ 5421 tcg_gen_shri_tl(t0, cpu_gpr[ra], 16); 5422 tcg_gen_ext16u_tl(t0, t0); 5423 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16); 5424 tcg_gen_ext16u_tl(t1, t1); 5425 break; 5426 case 0x0D: 5427 /* maclhw - maclhw. - maclhwo - maclhwo. */ 5428 /* maclhws - maclhws. - maclhwso - maclhwso. */ 5429 /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */ 5430 /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */ 5431 /* mullhw - mullhw. */ 5432 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]); 5433 tcg_gen_ext16s_tl(t1, cpu_gpr[rb]); 5434 break; 5435 case 0x0C: 5436 /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */ 5437 /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */ 5438 /* mullhwu - mullhwu. 
*/ 5439 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]); 5440 tcg_gen_ext16u_tl(t1, cpu_gpr[rb]); 5441 break; 5442 } 5443 if (opc2 & 0x04) { 5444 /* (n)multiply-and-accumulate (0x0C / 0x0E) */ 5445 tcg_gen_mul_tl(t1, t0, t1); 5446 if (opc2 & 0x02) { 5447 /* nmultiply-and-accumulate (0x0E) */ 5448 tcg_gen_sub_tl(t0, cpu_gpr[rt], t1); 5449 } else { 5450 /* multiply-and-accumulate (0x0C) */ 5451 tcg_gen_add_tl(t0, cpu_gpr[rt], t1); 5452 } 5453 5454 if (opc3 & 0x12) { 5455 /* Check overflow and/or saturate */ 5456 TCGLabel *l1 = gen_new_label(); 5457 5458 if (opc3 & 0x10) { 5459 /* Start with XER OV disabled, the most likely case */ 5460 tcg_gen_movi_tl(cpu_ov, 0); 5461 } 5462 if (opc3 & 0x01) { 5463 /* Signed */ 5464 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1); 5465 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1); 5466 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0); 5467 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1); 5468 if (opc3 & 0x02) { 5469 /* Saturate */ 5470 tcg_gen_sari_tl(t0, cpu_gpr[rt], 31); 5471 tcg_gen_xori_tl(t0, t0, 0x7fffffff); 5472 } 5473 } else { 5474 /* Unsigned */ 5475 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1); 5476 if (opc3 & 0x02) { 5477 /* Saturate */ 5478 tcg_gen_movi_tl(t0, UINT32_MAX); 5479 } 5480 } 5481 if (opc3 & 0x10) { 5482 /* Check overflow */ 5483 tcg_gen_movi_tl(cpu_ov, 1); 5484 tcg_gen_movi_tl(cpu_so, 1); 5485 } 5486 gen_set_label(l1); 5487 tcg_gen_mov_tl(cpu_gpr[rt], t0); 5488 } 5489 } else { 5490 tcg_gen_mul_tl(cpu_gpr[rt], t0, t1); 5491 } 5492 tcg_temp_free(t0); 5493 tcg_temp_free(t1); 5494 if (unlikely(Rc) != 0) { 5495 /* Update Rc0 */ 5496 gen_set_Rc0(ctx, cpu_gpr[rt]); 5497 } 5498 } 5499 5500 #define GEN_MAC_HANDLER(name, opc2, opc3) \ 5501 static void glue(gen_, name)(DisasContext *ctx) \ 5502 { \ 5503 gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode), \ 5504 rD(ctx->opcode), Rc(ctx->opcode)); \ 5505 } 5506 5507 /* macchw - macchw. */ 5508 GEN_MAC_HANDLER(macchw, 0x0C, 0x05); 5509 /* macchwo - macchwo. */ 5510 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15); 5511 /* macchws - macchws. */ 5512 GEN_MAC_HANDLER(macchws, 0x0C, 0x07); 5513 /* macchwso - macchwso. */ 5514 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17); 5515 /* macchwsu - macchwsu. */ 5516 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06); 5517 /* macchwsuo - macchwsuo. */ 5518 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16); 5519 /* macchwu - macchwu. */ 5520 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04); 5521 /* macchwuo - macchwuo. */ 5522 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14); 5523 /* machhw - machhw. */ 5524 GEN_MAC_HANDLER(machhw, 0x0C, 0x01); 5525 /* machhwo - machhwo. */ 5526 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11); 5527 /* machhws - machhws. */ 5528 GEN_MAC_HANDLER(machhws, 0x0C, 0x03); 5529 /* machhwso - machhwso. */ 5530 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13); 5531 /* machhwsu - machhwsu. */ 5532 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02); 5533 /* machhwsuo - machhwsuo. */ 5534 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12); 5535 /* machhwu - machhwu. */ 5536 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00); 5537 /* machhwuo - machhwuo. */ 5538 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10); 5539 /* maclhw - maclhw. */ 5540 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D); 5541 /* maclhwo - maclhwo. */ 5542 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D); 5543 /* maclhws - maclhws. */ 5544 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F); 5545 /* maclhwso - maclhwso. */ 5546 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F); 5547 /* maclhwu - maclhwu. */ 5548 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C); 5549 /* maclhwuo - maclhwuo. */ 5550 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C); 5551 /* maclhwsu - maclhwsu. 
*/ 5552 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E); 5553 /* maclhwsuo - maclhwsuo. */ 5554 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E); 5555 /* nmacchw - nmacchw. */ 5556 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05); 5557 /* nmacchwo - nmacchwo. */ 5558 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15); 5559 /* nmacchws - nmacchws. */ 5560 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07); 5561 /* nmacchwso - nmacchwso. */ 5562 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17); 5563 /* nmachhw - nmachhw. */ 5564 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01); 5565 /* nmachhwo - nmachhwo. */ 5566 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11); 5567 /* nmachhws - nmachhws. */ 5568 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03); 5569 /* nmachhwso - nmachhwso. */ 5570 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13); 5571 /* nmaclhw - nmaclhw. */ 5572 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D); 5573 /* nmaclhwo - nmaclhwo. */ 5574 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D); 5575 /* nmaclhws - nmaclhws. */ 5576 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F); 5577 /* nmaclhwso - nmaclhwso. */ 5578 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F); 5579 5580 /* mulchw - mulchw. */ 5581 GEN_MAC_HANDLER(mulchw, 0x08, 0x05); 5582 /* mulchwu - mulchwu. */ 5583 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04); 5584 /* mulhhw - mulhhw. */ 5585 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01); 5586 /* mulhhwu - mulhhwu. */ 5587 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00); 5588 /* mullhw - mullhw. */ 5589 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D); 5590 /* mullhwu - mullhwu. */ 5591 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C); 5592 5593 /* mfdcr */ 5594 static void gen_mfdcr(DisasContext *ctx) 5595 { 5596 #if defined(CONFIG_USER_ONLY) 5597 GEN_PRIV; 5598 #else 5599 TCGv dcrn; 5600 5601 CHK_SV; 5602 dcrn = tcg_const_tl(SPR(ctx->opcode)); 5603 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, dcrn); 5604 tcg_temp_free(dcrn); 5605 #endif /* defined(CONFIG_USER_ONLY) */ 5606 } 5607 5608 /* mtdcr */ 5609 static void gen_mtdcr(DisasContext *ctx) 5610 { 5611 #if defined(CONFIG_USER_ONLY) 5612 GEN_PRIV; 5613 #else 5614 TCGv dcrn; 5615 5616 CHK_SV; 5617 dcrn = tcg_const_tl(SPR(ctx->opcode)); 5618 gen_helper_store_dcr(cpu_env, dcrn, cpu_gpr[rS(ctx->opcode)]); 5619 tcg_temp_free(dcrn); 5620 #endif /* defined(CONFIG_USER_ONLY) */ 5621 } 5622 5623 /* mfdcrx */ 5624 /* XXX: not implemented on 440 ? */ 5625 static void gen_mfdcrx(DisasContext *ctx) 5626 { 5627 #if defined(CONFIG_USER_ONLY) 5628 GEN_PRIV; 5629 #else 5630 CHK_SV; 5631 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, 5632 cpu_gpr[rA(ctx->opcode)]); 5633 /* Note: Rc update flag set leads to undefined state of Rc0 */ 5634 #endif /* defined(CONFIG_USER_ONLY) */ 5635 } 5636 5637 /* mtdcrx */ 5638 /* XXX: not implemented on 440 ? 
*/ 5639 static void gen_mtdcrx(DisasContext *ctx) 5640 { 5641 #if defined(CONFIG_USER_ONLY) 5642 GEN_PRIV; 5643 #else 5644 CHK_SV; 5645 gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)], 5646 cpu_gpr[rS(ctx->opcode)]); 5647 /* Note: Rc update flag set leads to undefined state of Rc0 */ 5648 #endif /* defined(CONFIG_USER_ONLY) */ 5649 } 5650 5651 /* mfdcrux (PPC 460) : user-mode access to DCR */ 5652 static void gen_mfdcrux(DisasContext *ctx) 5653 { 5654 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, 5655 cpu_gpr[rA(ctx->opcode)]); 5656 /* Note: Rc update flag set leads to undefined state of Rc0 */ 5657 } 5658 5659 /* mtdcrux (PPC 460) : user-mode access to DCR */ 5660 static void gen_mtdcrux(DisasContext *ctx) 5661 { 5662 gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)], 5663 cpu_gpr[rS(ctx->opcode)]); 5664 /* Note: Rc update flag set leads to undefined state of Rc0 */ 5665 } 5666 5667 /* dccci */ 5668 static void gen_dccci(DisasContext *ctx) 5669 { 5670 CHK_SV; 5671 /* interpreted as no-op */ 5672 } 5673 5674 /* dcread */ 5675 static void gen_dcread(DisasContext *ctx) 5676 { 5677 #if defined(CONFIG_USER_ONLY) 5678 GEN_PRIV; 5679 #else 5680 TCGv EA, val; 5681 5682 CHK_SV; 5683 gen_set_access_type(ctx, ACCESS_CACHE); 5684 EA = tcg_temp_new(); 5685 gen_addr_reg_index(ctx, EA); 5686 val = tcg_temp_new(); 5687 gen_qemu_ld32u(ctx, val, EA); 5688 tcg_temp_free(val); 5689 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA); 5690 tcg_temp_free(EA); 5691 #endif /* defined(CONFIG_USER_ONLY) */ 5692 } 5693 5694 /* icbt */ 5695 static void gen_icbt_40x(DisasContext *ctx) 5696 { 5697 /* interpreted as no-op */ 5698 /* XXX: specification say this is treated as a load by the MMU 5699 * but does not generate any exception 5700 */ 5701 } 5702 5703 /* iccci */ 5704 static void gen_iccci(DisasContext *ctx) 5705 { 5706 CHK_SV; 5707 /* interpreted as no-op */ 5708 } 5709 5710 /* icread */ 5711 static void gen_icread(DisasContext *ctx) 5712 { 5713 CHK_SV; 5714 /* interpreted as no-op */ 5715 } 5716 5717 /* rfci (supervisor only) */ 5718 static void gen_rfci_40x(DisasContext *ctx) 5719 { 5720 #if defined(CONFIG_USER_ONLY) 5721 GEN_PRIV; 5722 #else 5723 CHK_SV; 5724 /* Restore CPU state */ 5725 gen_helper_40x_rfci(cpu_env); 5726 gen_sync_exception(ctx); 5727 #endif /* defined(CONFIG_USER_ONLY) */ 5728 } 5729 5730 static void gen_rfci(DisasContext *ctx) 5731 { 5732 #if defined(CONFIG_USER_ONLY) 5733 GEN_PRIV; 5734 #else 5735 CHK_SV; 5736 /* Restore CPU state */ 5737 gen_helper_rfci(cpu_env); 5738 gen_sync_exception(ctx); 5739 #endif /* defined(CONFIG_USER_ONLY) */ 5740 } 5741 5742 /* BookE specific */ 5743 5744 /* XXX: not implemented on 440 ? */ 5745 static void gen_rfdi(DisasContext *ctx) 5746 { 5747 #if defined(CONFIG_USER_ONLY) 5748 GEN_PRIV; 5749 #else 5750 CHK_SV; 5751 /* Restore CPU state */ 5752 gen_helper_rfdi(cpu_env); 5753 gen_sync_exception(ctx); 5754 #endif /* defined(CONFIG_USER_ONLY) */ 5755 } 5756 5757 /* XXX: not implemented on 440 ? 
*/ 5758 static void gen_rfmci(DisasContext *ctx) 5759 { 5760 #if defined(CONFIG_USER_ONLY) 5761 GEN_PRIV; 5762 #else 5763 CHK_SV; 5764 /* Restore CPU state */ 5765 gen_helper_rfmci(cpu_env); 5766 gen_sync_exception(ctx); 5767 #endif /* defined(CONFIG_USER_ONLY) */ 5768 } 5769 5770 /* TLB management - PowerPC 405 implementation */ 5771 5772 /* tlbre */ 5773 static void gen_tlbre_40x(DisasContext *ctx) 5774 { 5775 #if defined(CONFIG_USER_ONLY) 5776 GEN_PRIV; 5777 #else 5778 CHK_SV; 5779 switch (rB(ctx->opcode)) { 5780 case 0: 5781 gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_env, 5782 cpu_gpr[rA(ctx->opcode)]); 5783 break; 5784 case 1: 5785 gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_env, 5786 cpu_gpr[rA(ctx->opcode)]); 5787 break; 5788 default: 5789 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5790 break; 5791 } 5792 #endif /* defined(CONFIG_USER_ONLY) */ 5793 } 5794 5795 /* tlbsx - tlbsx. */ 5796 static void gen_tlbsx_40x(DisasContext *ctx) 5797 { 5798 #if defined(CONFIG_USER_ONLY) 5799 GEN_PRIV; 5800 #else 5801 TCGv t0; 5802 5803 CHK_SV; 5804 t0 = tcg_temp_new(); 5805 gen_addr_reg_index(ctx, t0); 5806 gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5807 tcg_temp_free(t0); 5808 if (Rc(ctx->opcode)) { 5809 TCGLabel *l1 = gen_new_label(); 5810 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 5811 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1); 5812 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02); 5813 gen_set_label(l1); 5814 } 5815 #endif /* defined(CONFIG_USER_ONLY) */ 5816 } 5817 5818 /* tlbwe */ 5819 static void gen_tlbwe_40x(DisasContext *ctx) 5820 { 5821 #if defined(CONFIG_USER_ONLY) 5822 GEN_PRIV; 5823 #else 5824 CHK_SV; 5825 5826 switch (rB(ctx->opcode)) { 5827 case 0: 5828 gen_helper_4xx_tlbwe_hi(cpu_env, cpu_gpr[rA(ctx->opcode)], 5829 cpu_gpr[rS(ctx->opcode)]); 5830 break; 5831 case 1: 5832 gen_helper_4xx_tlbwe_lo(cpu_env, cpu_gpr[rA(ctx->opcode)], 5833 cpu_gpr[rS(ctx->opcode)]); 5834 break; 5835 default: 5836 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5837 break; 5838 } 5839 #endif /* defined(CONFIG_USER_ONLY) */ 5840 } 5841 5842 /* TLB management - PowerPC 440 implementation */ 5843 5844 /* tlbre */ 5845 static void gen_tlbre_440(DisasContext *ctx) 5846 { 5847 #if defined(CONFIG_USER_ONLY) 5848 GEN_PRIV; 5849 #else 5850 CHK_SV; 5851 5852 switch (rB(ctx->opcode)) { 5853 case 0: 5854 case 1: 5855 case 2: 5856 { 5857 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode)); 5858 gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], cpu_env, 5859 t0, cpu_gpr[rA(ctx->opcode)]); 5860 tcg_temp_free_i32(t0); 5861 } 5862 break; 5863 default: 5864 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5865 break; 5866 } 5867 #endif /* defined(CONFIG_USER_ONLY) */ 5868 } 5869 5870 /* tlbsx - tlbsx. 
*/ 5871 static void gen_tlbsx_440(DisasContext *ctx) 5872 { 5873 #if defined(CONFIG_USER_ONLY) 5874 GEN_PRIV; 5875 #else 5876 TCGv t0; 5877 5878 CHK_SV; 5879 t0 = tcg_temp_new(); 5880 gen_addr_reg_index(ctx, t0); 5881 gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5882 tcg_temp_free(t0); 5883 if (Rc(ctx->opcode)) { 5884 TCGLabel *l1 = gen_new_label(); 5885 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 5886 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1); 5887 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02); 5888 gen_set_label(l1); 5889 } 5890 #endif /* defined(CONFIG_USER_ONLY) */ 5891 } 5892 5893 /* tlbwe */ 5894 static void gen_tlbwe_440(DisasContext *ctx) 5895 { 5896 #if defined(CONFIG_USER_ONLY) 5897 GEN_PRIV; 5898 #else 5899 CHK_SV; 5900 switch (rB(ctx->opcode)) { 5901 case 0: 5902 case 1: 5903 case 2: 5904 { 5905 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode)); 5906 gen_helper_440_tlbwe(cpu_env, t0, cpu_gpr[rA(ctx->opcode)], 5907 cpu_gpr[rS(ctx->opcode)]); 5908 tcg_temp_free_i32(t0); 5909 } 5910 break; 5911 default: 5912 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5913 break; 5914 } 5915 #endif /* defined(CONFIG_USER_ONLY) */ 5916 } 5917 5918 /* TLB management - PowerPC BookE 2.06 implementation */ 5919 5920 /* tlbre */ 5921 static void gen_tlbre_booke206(DisasContext *ctx) 5922 { 5923 #if defined(CONFIG_USER_ONLY) 5924 GEN_PRIV; 5925 #else 5926 CHK_SV; 5927 gen_helper_booke206_tlbre(cpu_env); 5928 #endif /* defined(CONFIG_USER_ONLY) */ 5929 } 5930 5931 /* tlbsx - tlbsx. */ 5932 static void gen_tlbsx_booke206(DisasContext *ctx) 5933 { 5934 #if defined(CONFIG_USER_ONLY) 5935 GEN_PRIV; 5936 #else 5937 TCGv t0; 5938 5939 CHK_SV; 5940 if (rA(ctx->opcode)) { 5941 t0 = tcg_temp_new(); 5942 tcg_gen_mov_tl(t0, cpu_gpr[rD(ctx->opcode)]); 5943 } else { 5944 t0 = tcg_const_tl(0); 5945 } 5946 5947 tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]); 5948 gen_helper_booke206_tlbsx(cpu_env, t0); 5949 tcg_temp_free(t0); 5950 #endif /* defined(CONFIG_USER_ONLY) */ 5951 } 5952 5953 /* tlbwe */ 5954 static void gen_tlbwe_booke206(DisasContext *ctx) 5955 { 5956 #if defined(CONFIG_USER_ONLY) 5957 GEN_PRIV; 5958 #else 5959 CHK_SV; 5960 gen_helper_booke206_tlbwe(cpu_env); 5961 #endif /* defined(CONFIG_USER_ONLY) */ 5962 } 5963 5964 static void gen_tlbivax_booke206(DisasContext *ctx) 5965 { 5966 #if defined(CONFIG_USER_ONLY) 5967 GEN_PRIV; 5968 #else 5969 TCGv t0; 5970 5971 CHK_SV; 5972 t0 = tcg_temp_new(); 5973 gen_addr_reg_index(ctx, t0); 5974 gen_helper_booke206_tlbivax(cpu_env, t0); 5975 tcg_temp_free(t0); 5976 #endif /* defined(CONFIG_USER_ONLY) */ 5977 } 5978 5979 static void gen_tlbilx_booke206(DisasContext *ctx) 5980 { 5981 #if defined(CONFIG_USER_ONLY) 5982 GEN_PRIV; 5983 #else 5984 TCGv t0; 5985 5986 CHK_SV; 5987 t0 = tcg_temp_new(); 5988 gen_addr_reg_index(ctx, t0); 5989 5990 switch((ctx->opcode >> 21) & 0x3) { 5991 case 0: 5992 gen_helper_booke206_tlbilx0(cpu_env, t0); 5993 break; 5994 case 1: 5995 gen_helper_booke206_tlbilx1(cpu_env, t0); 5996 break; 5997 case 3: 5998 gen_helper_booke206_tlbilx3(cpu_env, t0); 5999 break; 6000 default: 6001 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6002 break; 6003 } 6004 6005 tcg_temp_free(t0); 6006 #endif /* defined(CONFIG_USER_ONLY) */ 6007 } 6008 6009 6010 /* wrtee */ 6011 static void gen_wrtee(DisasContext *ctx) 6012 { 6013 #if defined(CONFIG_USER_ONLY) 6014 GEN_PRIV; 6015 #else 6016 TCGv t0; 6017 6018 CHK_SV; 6019 t0 = tcg_temp_new(); 6020 tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE)); 6021 
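    /* Descriptive note (added): insert the extracted EE bit into MSR —
     * clear MSR[EE] first, then OR in the bit taken from rD.
     */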
tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE)); 6022 tcg_gen_or_tl(cpu_msr, cpu_msr, t0); 6023 tcg_temp_free(t0); 6024 /* Stop translation to have a chance to raise an exception 6025 * if we just set msr_ee to 1 6026 */ 6027 gen_stop_exception(ctx); 6028 #endif /* defined(CONFIG_USER_ONLY) */ 6029 } 6030 6031 /* wrteei */ 6032 static void gen_wrteei(DisasContext *ctx) 6033 { 6034 #if defined(CONFIG_USER_ONLY) 6035 GEN_PRIV; 6036 #else 6037 CHK_SV; 6038 if (ctx->opcode & 0x00008000) { 6039 tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE)); 6040 /* Stop translation to have a chance to raise an exception */ 6041 gen_stop_exception(ctx); 6042 } else { 6043 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE)); 6044 } 6045 #endif /* defined(CONFIG_USER_ONLY) */ 6046 } 6047 6048 /* PowerPC 440 specific instructions */ 6049 6050 /* dlmzb */ 6051 static void gen_dlmzb(DisasContext *ctx) 6052 { 6053 TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode)); 6054 gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_env, 6055 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); 6056 tcg_temp_free_i32(t0); 6057 } 6058 6059 /* mbar replaces eieio on 440 */ 6060 static void gen_mbar(DisasContext *ctx) 6061 { 6062 /* interpreted as no-op */ 6063 } 6064 6065 /* msync replaces sync on 440 */ 6066 static void gen_msync_4xx(DisasContext *ctx) 6067 { 6068 /* interpreted as no-op */ 6069 } 6070 6071 /* icbt */ 6072 static void gen_icbt_440(DisasContext *ctx) 6073 { 6074 /* interpreted as no-op */ 6075 /* XXX: specification say this is treated as a load by the MMU 6076 * but does not generate any exception 6077 */ 6078 } 6079 6080 /* Embedded.Processor Control */ 6081 6082 static void gen_msgclr(DisasContext *ctx) 6083 { 6084 #if defined(CONFIG_USER_ONLY) 6085 GEN_PRIV; 6086 #else 6087 CHK_SV; 6088 gen_helper_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]); 6089 #endif /* defined(CONFIG_USER_ONLY) */ 6090 } 6091 6092 static void gen_msgsnd(DisasContext *ctx) 6093 { 6094 #if defined(CONFIG_USER_ONLY) 6095 GEN_PRIV; 6096 #else 6097 CHK_SV; 6098 gen_helper_msgsnd(cpu_gpr[rB(ctx->opcode)]); 6099 #endif /* defined(CONFIG_USER_ONLY) */ 6100 } 6101 6102 6103 #if defined(TARGET_PPC64) 6104 static void gen_maddld(DisasContext *ctx) 6105 { 6106 TCGv_i64 t1 = tcg_temp_new_i64(); 6107 6108 tcg_gen_mul_i64(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 6109 tcg_gen_add_i64(cpu_gpr[rD(ctx->opcode)], t1, cpu_gpr[rC(ctx->opcode)]); 6110 tcg_temp_free_i64(t1); 6111 } 6112 6113 /* maddhd maddhdu */ 6114 static void gen_maddhd_maddhdu(DisasContext *ctx) 6115 { 6116 TCGv_i64 lo = tcg_temp_new_i64(); 6117 TCGv_i64 hi = tcg_temp_new_i64(); 6118 TCGv_i64 t1 = tcg_temp_new_i64(); 6119 6120 if (Rc(ctx->opcode)) { 6121 tcg_gen_mulu2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)], 6122 cpu_gpr[rB(ctx->opcode)]); 6123 tcg_gen_movi_i64(t1, 0); 6124 } else { 6125 tcg_gen_muls2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)], 6126 cpu_gpr[rB(ctx->opcode)]); 6127 tcg_gen_sari_i64(t1, cpu_gpr[rC(ctx->opcode)], 63); 6128 } 6129 tcg_gen_add2_i64(t1, cpu_gpr[rD(ctx->opcode)], lo, hi, 6130 cpu_gpr[rC(ctx->opcode)], t1); 6131 tcg_temp_free_i64(lo); 6132 tcg_temp_free_i64(hi); 6133 tcg_temp_free_i64(t1); 6134 } 6135 #endif /* defined(TARGET_PPC64) */ 6136 6137 static void gen_tbegin(DisasContext *ctx) 6138 { 6139 if (unlikely(!ctx->tm_enabled)) { 6140 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); 6141 return; 6142 } 6143 gen_helper_tbegin(cpu_env); 6144 } 6145 6146 #define GEN_TM_NOOP(name) \ 6147 static inline void gen_##name(DisasContext *ctx) \ 6148 { \ 6149 if 
(unlikely(!ctx->tm_enabled)) { \ 6150 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \ 6151 return; \ 6152 } \ 6153 /* Because tbegin always fails in QEMU, these user \ 6154 * space instructions all have a simple implementation: \ 6155 * \ 6156 * CR[0] = 0b0 || MSR[TS] || 0b0 \ 6157 * = 0b0 || 0b00 || 0b0 \ 6158 */ \ 6159 tcg_gen_movi_i32(cpu_crf[0], 0); \ 6160 } 6161 6162 GEN_TM_NOOP(tend); 6163 GEN_TM_NOOP(tabort); 6164 GEN_TM_NOOP(tabortwc); 6165 GEN_TM_NOOP(tabortwci); 6166 GEN_TM_NOOP(tabortdc); 6167 GEN_TM_NOOP(tabortdci); 6168 GEN_TM_NOOP(tsr); 6169 6170 static void gen_tcheck(DisasContext *ctx) 6171 { 6172 if (unlikely(!ctx->tm_enabled)) { 6173 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); 6174 return; 6175 } 6176 /* Because tbegin always fails, the tcheck implementation 6177 * is simple: 6178 * 6179 * CR[CRF] = TDOOMED || MSR[TS] || 0b0 6180 * = 0b1 || 0b00 || 0b0 6181 */ 6182 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0x8); 6183 } 6184 6185 #if defined(CONFIG_USER_ONLY) 6186 #define GEN_TM_PRIV_NOOP(name) \ 6187 static inline void gen_##name(DisasContext *ctx) \ 6188 { \ 6189 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); \ 6190 } 6191 6192 #else 6193 6194 #define GEN_TM_PRIV_NOOP(name) \ 6195 static inline void gen_##name(DisasContext *ctx) \ 6196 { \ 6197 CHK_SV; \ 6198 if (unlikely(!ctx->tm_enabled)) { \ 6199 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \ 6200 return; \ 6201 } \ 6202 /* Because tbegin always fails, the implementation is \ 6203 * simple: \ 6204 * \ 6205 * CR[0] = 0b0 || MSR[TS] || 0b0 \ 6206 * = 0b0 || 0b00 | 0b0 \ 6207 */ \ 6208 tcg_gen_movi_i32(cpu_crf[0], 0); \ 6209 } 6210 6211 #endif 6212 6213 GEN_TM_PRIV_NOOP(treclaim); 6214 GEN_TM_PRIV_NOOP(trechkpt); 6215 6216 #include "translate/fp-impl.inc.c" 6217 6218 #include "translate/vmx-impl.inc.c" 6219 6220 #include "translate/vsx-impl.inc.c" 6221 6222 #include "translate/dfp-impl.inc.c" 6223 6224 #include "translate/spe-impl.inc.c" 6225 6226 static opcode_t opcodes[] = { 6227 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE), 6228 GEN_HANDLER(cmp, 0x1F, 0x00, 0x00, 0x00400000, PPC_INTEGER), 6229 GEN_HANDLER(cmpi, 0x0B, 0xFF, 0xFF, 0x00400000, PPC_INTEGER), 6230 GEN_HANDLER(cmpl, 0x1F, 0x00, 0x01, 0x00400001, PPC_INTEGER), 6231 GEN_HANDLER(cmpli, 0x0A, 0xFF, 0xFF, 0x00400000, PPC_INTEGER), 6232 #if defined(TARGET_PPC64) 6233 GEN_HANDLER_E(cmpeqb, 0x1F, 0x00, 0x07, 0x00600000, PPC_NONE, PPC2_ISA300), 6234 #endif 6235 GEN_HANDLER_E(cmpb, 0x1F, 0x1C, 0x0F, 0x00000001, PPC_NONE, PPC2_ISA205), 6236 GEN_HANDLER_E(cmprb, 0x1F, 0x00, 0x06, 0x00400001, PPC_NONE, PPC2_ISA300), 6237 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL), 6238 GEN_HANDLER(addi, 0x0E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6239 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6240 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6241 GEN_HANDLER(addis, 0x0F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6242 GEN_HANDLER_E(addpcis, 0x13, 0x2, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300), 6243 GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER), 6244 GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER), 6245 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER), 6246 GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER), 6247 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6248 #if defined(TARGET_PPC64) 6249 GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B), 6250 #endif 6251 GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, 
PPC_INTEGER), 6252 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER), 6253 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6254 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6255 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6256 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER), 6257 GEN_HANDLER_E(cnttzw, 0x1F, 0x1A, 0x10, 0x00000000, PPC_NONE, PPC2_ISA300), 6258 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER), 6259 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER), 6260 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6261 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6262 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6263 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6264 GEN_HANDLER(popcntb, 0x1F, 0x1A, 0x03, 0x0000F801, PPC_POPCNTB), 6265 GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD), 6266 GEN_HANDLER_E(prtyw, 0x1F, 0x1A, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA205), 6267 #if defined(TARGET_PPC64) 6268 GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD), 6269 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B), 6270 GEN_HANDLER_E(cnttzd, 0x1F, 0x1A, 0x11, 0x00000000, PPC_NONE, PPC2_ISA300), 6271 GEN_HANDLER_E(darn, 0x1F, 0x13, 0x17, 0x001CF801, PPC_NONE, PPC2_ISA300), 6272 GEN_HANDLER_E(prtyd, 0x1F, 0x1A, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA205), 6273 GEN_HANDLER_E(bpermd, 0x1F, 0x1C, 0x07, 0x00000001, PPC_NONE, PPC2_PERM_ISA206), 6274 #endif 6275 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6276 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6277 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6278 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER), 6279 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER), 6280 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER), 6281 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER), 6282 #if defined(TARGET_PPC64) 6283 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B), 6284 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B), 6285 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B), 6286 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B), 6287 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B), 6288 GEN_HANDLER2_E(extswsli0, "extswsli", 0x1F, 0x1A, 0x1B, 0x00000000, 6289 PPC_NONE, PPC2_ISA300), 6290 GEN_HANDLER2_E(extswsli1, "extswsli", 0x1F, 0x1B, 0x1B, 0x00000000, 6291 PPC_NONE, PPC2_ISA300), 6292 #endif 6293 #if defined(TARGET_PPC64) 6294 GEN_HANDLER(ld, 0x3A, 0xFF, 0xFF, 0x00000000, PPC_64B), 6295 GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX), 6296 GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B), 6297 #endif 6298 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6299 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 6300 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING), 6301 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING), 6302 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING), 6303 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING), 6304 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x03FFF801, PPC_MEM_EIEIO), 6305 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM), 6306 GEN_HANDLER_E(lbarx, 0x1F, 0x14, 0x01, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6307 GEN_HANDLER_E(lharx, 0x1F, 0x14, 0x03, 0, PPC_NONE, 
PPC2_ATOMIC_ISA206), 6308 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES), 6309 GEN_HANDLER_E(stbcx_, 0x1F, 0x16, 0x15, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6310 GEN_HANDLER_E(sthcx_, 0x1F, 0x16, 0x16, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 6311 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES), 6312 #if defined(TARGET_PPC64) 6313 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B), 6314 GEN_HANDLER_E(lqarx, 0x1F, 0x14, 0x08, 0, PPC_NONE, PPC2_LSQ_ISA207), 6315 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B), 6316 GEN_HANDLER_E(stqcx_, 0x1F, 0x16, 0x05, 0, PPC_NONE, PPC2_LSQ_ISA207), 6317 #endif 6318 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC), 6319 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT), 6320 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 6321 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 6322 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW), 6323 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW), 6324 GEN_HANDLER_E(bctar, 0x13, 0x10, 0x11, 0x0000E000, PPC_NONE, PPC2_BCTAR_ISA207), 6325 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER), 6326 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW), 6327 #if defined(TARGET_PPC64) 6328 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B), 6329 GEN_HANDLER_E(doze, 0x13, 0x12, 0x0c, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6330 GEN_HANDLER_E(nap, 0x13, 0x12, 0x0d, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6331 GEN_HANDLER_E(sleep, 0x13, 0x12, 0x0e, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6332 GEN_HANDLER_E(rvwinkle, 0x13, 0x12, 0x0f, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 6333 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H), 6334 #endif 6335 GEN_HANDLER(sc, 0x11, 0xFF, 0xFF, 0x03FFF01D, PPC_FLOW), 6336 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW), 6337 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 6338 #if defined(TARGET_PPC64) 6339 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B), 6340 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B), 6341 #endif 6342 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC), 6343 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC), 6344 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC), 6345 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC), 6346 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB), 6347 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC), 6348 #if defined(TARGET_PPC64) 6349 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B), 6350 GEN_HANDLER_E(setb, 0x1F, 0x00, 0x04, 0x0003F801, PPC_NONE, PPC2_ISA300), 6351 #endif 6352 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001EF801, PPC_MISC), 6353 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000000, PPC_MISC), 6354 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE), 6355 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE), 6356 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE), 6357 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x00000001, PPC_CACHE), 6358 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x00000001, PPC_CACHE), 6359 GEN_HANDLER_E(dcbtls, 0x1F, 0x06, 0x05, 0x02000001, PPC_BOOKE, PPC2_BOOKE206), 6360 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ), 6361 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC), 6362 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x02000001, PPC_ALTIVEC), 6363 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC), 6364 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, 
PPC_CACHE_ICBI), 6365 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA), 6366 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT), 6367 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT), 6368 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT), 6369 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT), 6370 #if defined(TARGET_PPC64) 6371 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B), 6372 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001, 6373 PPC_SEGMENT_64B), 6374 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B), 6375 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001, 6376 PPC_SEGMENT_64B), 6377 GEN_HANDLER2(slbmte, "slbmte", 0x1F, 0x12, 0x0C, 0x001F0001, PPC_SEGMENT_64B), 6378 GEN_HANDLER2(slbmfee, "slbmfee", 0x1F, 0x13, 0x1C, 0x001F0001, PPC_SEGMENT_64B), 6379 GEN_HANDLER2(slbmfev, "slbmfev", 0x1F, 0x13, 0x1A, 0x001F0001, PPC_SEGMENT_64B), 6380 GEN_HANDLER2(slbfee_, "slbfee.", 0x1F, 0x13, 0x1E, 0x001F0000, PPC_SEGMENT_64B), 6381 #endif 6382 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA), 6383 /* XXX Those instructions will need to be handled differently for 6384 * different ISA versions */ 6385 GEN_HANDLER(tlbiel, 0x1F, 0x12, 0x08, 0x001F0001, PPC_MEM_TLBIE), 6386 GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x001F0001, PPC_MEM_TLBIE), 6387 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC), 6388 #if defined(TARGET_PPC64) 6389 GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x031FFC01, PPC_SLBI), 6390 GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI), 6391 #endif 6392 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN), 6393 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN), 6394 GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR), 6395 GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR), 6396 GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR), 6397 GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR), 6398 GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR), 6399 GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR), 6400 GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR), 6401 GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR), 6402 GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR), 6403 GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR), 6404 GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR), 6405 GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR), 6406 GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR), 6407 GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR), 6408 GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR), 6409 GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR), 6410 GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR), 6411 GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR), 6412 GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR), 6413 GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR), 6414 GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR), 6415 GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR), 6416 GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR), 6417 GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR), 6418 GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR), 6419 GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, 
PPC_POWER_BR), 6420 GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR), 6421 GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR), 6422 GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR), 6423 GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR), 6424 GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR), 6425 GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR), 6426 GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR), 6427 GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR), 6428 GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC), 6429 GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC), 6430 GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC), 6431 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB), 6432 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB), 6433 GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB), 6434 GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB), 6435 GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER), 6436 GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER), 6437 GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER), 6438 GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER), 6439 GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER), 6440 GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER), 6441 GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 6442 GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 6443 GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2), 6444 GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2), 6445 GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 6446 GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 6447 GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2), 6448 GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2), 6449 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI), 6450 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA), 6451 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR), 6452 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR), 6453 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX), 6454 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX), 6455 GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX), 6456 GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX), 6457 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON), 6458 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON), 6459 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT), 6460 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON), 6461 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON), 6462 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP), 6463 GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206), 6464 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI), 6465 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI), 6466 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB), 6467 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB), 6468 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB), 6469 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE), 6470 
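/* Note: the 440/BookE and BookE 2.06 tlbre/tlbsx/tlbwe entries reuse the same
 * opc1/opc2/opc3 encodings as the 40x ones above; the PPC_BOOKE and
 * PPC2_BOOKE206 type flags decide which handler is actually registered for a
 * given CPU model.
 */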
GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE), 6471 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE), 6472 GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, 6473 PPC_NONE, PPC2_BOOKE206), 6474 GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, 6475 PPC_NONE, PPC2_BOOKE206), 6476 GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, 6477 PPC_NONE, PPC2_BOOKE206), 6478 GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001, 6479 PPC_NONE, PPC2_BOOKE206), 6480 GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001, 6481 PPC_NONE, PPC2_BOOKE206), 6482 GEN_HANDLER2_E(msgsnd, "msgsnd", 0x1F, 0x0E, 0x06, 0x03ff0001, 6483 PPC_NONE, PPC2_PRCNTL), 6484 GEN_HANDLER2_E(msgclr, "msgclr", 0x1F, 0x0E, 0x07, 0x03ff0001, 6485 PPC_NONE, PPC2_PRCNTL), 6486 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE), 6487 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE), 6488 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC), 6489 GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801, 6490 PPC_BOOKE, PPC2_BOOKE206), 6491 GEN_HANDLER(msync_4xx, 0x1F, 0x16, 0x12, 0x03FFF801, PPC_BOOKE), 6492 GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001, 6493 PPC_BOOKE, PPC2_BOOKE206), 6494 GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC), 6495 GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC), 6496 GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC), 6497 GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC), 6498 GEN_HANDLER(vmladduhm, 0x04, 0x11, 0xFF, 0x00000000, PPC_ALTIVEC), 6499 #if defined(TARGET_PPC64) 6500 GEN_HANDLER_E(maddhd_maddhdu, 0x04, 0x18, 0xFF, 0x00000000, PPC_NONE, 6501 PPC2_ISA300), 6502 GEN_HANDLER_E(maddld, 0x04, 0x19, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300), 6503 #endif 6504 6505 #undef GEN_INT_ARITH_ADD 6506 #undef GEN_INT_ARITH_ADD_CONST 6507 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \ 6508 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER), 6509 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \ 6510 add_ca, compute_ca, compute_ov) \ 6511 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER), 6512 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0) 6513 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1) 6514 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0) 6515 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1) 6516 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0) 6517 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1) 6518 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0) 6519 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1) 6520 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0) 6521 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1) 6522 6523 #undef GEN_INT_ARITH_DIVW 6524 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \ 6525 GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER) 6526 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0), 6527 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1), 6528 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0), 6529 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1), 6530 GEN_HANDLER_E(divwe, 0x1F, 0x0B, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206), 6531 GEN_HANDLER_E(divweo, 0x1F, 0x0B, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206), 6532 GEN_HANDLER_E(divweu, 0x1F, 0x0B, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206), 6533 GEN_HANDLER_E(divweuo, 0x1F, 0x0B, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206), 6534 GEN_HANDLER_E(modsw, 0x1F, 0x0B, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300), 6535 GEN_HANDLER_E(moduw, 0x1F, 0x0B, 0x08, 
0x00000001, PPC_NONE, PPC2_ISA300), 6536 6537 #if defined(TARGET_PPC64) 6538 #undef GEN_INT_ARITH_DIVD 6539 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \ 6540 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B) 6541 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0), 6542 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1), 6543 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0), 6544 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1), 6545 6546 GEN_HANDLER_E(divdeu, 0x1F, 0x09, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206), 6547 GEN_HANDLER_E(divdeuo, 0x1F, 0x09, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206), 6548 GEN_HANDLER_E(divde, 0x1F, 0x09, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206), 6549 GEN_HANDLER_E(divdeo, 0x1F, 0x09, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206), 6550 GEN_HANDLER_E(modsd, 0x1F, 0x09, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300), 6551 GEN_HANDLER_E(modud, 0x1F, 0x09, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300), 6552 6553 #undef GEN_INT_ARITH_MUL_HELPER 6554 #define GEN_INT_ARITH_MUL_HELPER(name, opc3) \ 6555 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B) 6556 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00), 6557 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02), 6558 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17), 6559 #endif 6560 6561 #undef GEN_INT_ARITH_SUBF 6562 #undef GEN_INT_ARITH_SUBF_CONST 6563 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \ 6564 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER), 6565 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \ 6566 add_ca, compute_ca, compute_ov) \ 6567 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER), 6568 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0) 6569 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1) 6570 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0) 6571 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1) 6572 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0) 6573 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1) 6574 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0) 6575 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1) 6576 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0) 6577 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1) 6578 6579 #undef GEN_LOGICAL1 6580 #undef GEN_LOGICAL2 6581 #define GEN_LOGICAL2(name, tcg_op, opc, type) \ 6582 GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type) 6583 #define GEN_LOGICAL1(name, tcg_op, opc, type) \ 6584 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type) 6585 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER), 6586 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER), 6587 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER), 6588 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER), 6589 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER), 6590 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER), 6591 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER), 6592 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER), 6593 #if defined(TARGET_PPC64) 6594 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B), 6595 #endif 6596 6597 #if defined(TARGET_PPC64) 6598 #undef GEN_PPC64_R2 6599 #undef GEN_PPC64_R4 6600 #define GEN_PPC64_R2(name, opc1, opc2) \ 6601 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\ 6602 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \ 6603 PPC_64B) 6604 #define GEN_PPC64_R4(name, opc1, opc2) \ 6605 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\ 6606 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \ 6607 PPC_64B), \ 6608 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 
0x00000000, \ 6609 PPC_64B), \ 6610 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \ 6611 PPC_64B) 6612 GEN_PPC64_R4(rldicl, 0x1E, 0x00), 6613 GEN_PPC64_R4(rldicr, 0x1E, 0x02), 6614 GEN_PPC64_R4(rldic, 0x1E, 0x04), 6615 GEN_PPC64_R2(rldcl, 0x1E, 0x08), 6616 GEN_PPC64_R2(rldcr, 0x1E, 0x09), 6617 GEN_PPC64_R4(rldimi, 0x1E, 0x06), 6618 #endif 6619 6620 #undef GEN_LD 6621 #undef GEN_LDU 6622 #undef GEN_LDUX 6623 #undef GEN_LDX_E 6624 #undef GEN_LDS 6625 #define GEN_LD(name, ldop, opc, type) \ 6626 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type), 6627 #define GEN_LDU(name, ldop, opc, type) \ 6628 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type), 6629 #define GEN_LDUX(name, ldop, opc2, opc3, type) \ 6630 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type), 6631 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \ 6632 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2), 6633 #define GEN_LDS(name, ldop, op, type) \ 6634 GEN_LD(name, ldop, op | 0x20, type) \ 6635 GEN_LDU(name, ldop, op | 0x21, type) \ 6636 GEN_LDUX(name, ldop, 0x17, op | 0x01, type) \ 6637 GEN_LDX(name, ldop, 0x17, op | 0x00, type) 6638 6639 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER) 6640 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER) 6641 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER) 6642 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER) 6643 #if defined(TARGET_PPC64) 6644 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B) 6645 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B) 6646 GEN_LDUX(ld, ld64_i64, 0x15, 0x01, PPC_64B) 6647 GEN_LDX(ld, ld64_i64, 0x15, 0x00, PPC_64B) 6648 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE) 6649 6650 /* HV/P7 and later only */ 6651 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST) 6652 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x18, PPC_CILDST) 6653 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST) 6654 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST) 6655 #endif 6656 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER) 6657 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER) 6658 6659 #undef GEN_ST 6660 #undef GEN_STU 6661 #undef GEN_STUX 6662 #undef GEN_STX_E 6663 #undef GEN_STS 6664 #define GEN_ST(name, stop, opc, type) \ 6665 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type), 6666 #define GEN_STU(name, stop, opc, type) \ 6667 GEN_HANDLER(stop##u, opc, 0xFF, 0xFF, 0x00000000, type), 6668 #define GEN_STUX(name, stop, opc2, opc3, type) \ 6669 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type), 6670 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \ 6671 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2), 6672 #define GEN_STS(name, stop, op, type) \ 6673 GEN_ST(name, stop, op | 0x20, type) \ 6674 GEN_STU(name, stop, op | 0x21, type) \ 6675 GEN_STUX(name, stop, 0x17, op | 0x01, type) \ 6676 GEN_STX(name, stop, 0x17, op | 0x00, type) 6677 6678 GEN_STS(stb, st8, 0x06, PPC_INTEGER) 6679 GEN_STS(sth, st16, 0x0C, PPC_INTEGER) 6680 GEN_STS(stw, st32, 0x04, PPC_INTEGER) 6681 #if defined(TARGET_PPC64) 6682 GEN_STUX(std, st64_i64, 0x15, 0x05, PPC_64B) 6683 GEN_STX(std, st64_i64, 0x15, 0x04, PPC_64B) 6684 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE) 6685 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST) 6686 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST) 6687 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST) 6688 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST) 6689 #endif 6690 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER) 6691 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER) 6692 6693 #undef GEN_CRLOGIC 
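/* Condition register logical ops (crand, cror, crxor, ...) all share primary
 * opcode 0x13 with opc2 0x01; only opc3 differs between them, which is what
 * the GEN_CRLOGIC template below encodes.
 */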
6694 #define GEN_CRLOGIC(name, tcg_op, opc) \ 6695 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER) 6696 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08), 6697 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04), 6698 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09), 6699 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07), 6700 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01), 6701 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E), 6702 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D), 6703 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06), 6704 6705 #undef GEN_MAC_HANDLER 6706 #define GEN_MAC_HANDLER(name, opc2, opc3) \ 6707 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC) 6708 GEN_MAC_HANDLER(macchw, 0x0C, 0x05), 6709 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15), 6710 GEN_MAC_HANDLER(macchws, 0x0C, 0x07), 6711 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17), 6712 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06), 6713 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16), 6714 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04), 6715 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14), 6716 GEN_MAC_HANDLER(machhw, 0x0C, 0x01), 6717 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11), 6718 GEN_MAC_HANDLER(machhws, 0x0C, 0x03), 6719 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13), 6720 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02), 6721 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12), 6722 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00), 6723 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10), 6724 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D), 6725 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D), 6726 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F), 6727 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F), 6728 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C), 6729 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C), 6730 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E), 6731 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E), 6732 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05), 6733 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15), 6734 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07), 6735 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17), 6736 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01), 6737 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11), 6738 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03), 6739 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13), 6740 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D), 6741 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D), 6742 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F), 6743 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F), 6744 GEN_MAC_HANDLER(mulchw, 0x08, 0x05), 6745 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04), 6746 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01), 6747 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00), 6748 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D), 6749 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C), 6750 6751 GEN_HANDLER2_E(tbegin, "tbegin", 0x1F, 0x0E, 0x14, 0x01DFF800, \ 6752 PPC_NONE, PPC2_TM), 6753 GEN_HANDLER2_E(tend, "tend", 0x1F, 0x0E, 0x15, 0x01FFF800, \ 6754 PPC_NONE, PPC2_TM), 6755 GEN_HANDLER2_E(tabort, "tabort", 0x1F, 0x0E, 0x1C, 0x03E0F800, \ 6756 PPC_NONE, PPC2_TM), 6757 GEN_HANDLER2_E(tabortwc, "tabortwc", 0x1F, 0x0E, 0x18, 0x00000000, \ 6758 PPC_NONE, PPC2_TM), 6759 GEN_HANDLER2_E(tabortwci, "tabortwci", 0x1F, 0x0E, 0x1A, 0x00000000, \ 6760 PPC_NONE, PPC2_TM), 6761 GEN_HANDLER2_E(tabortdc, "tabortdc", 0x1F, 0x0E, 0x19, 0x00000000, \ 6762 PPC_NONE, PPC2_TM), 6763 GEN_HANDLER2_E(tabortdci, "tabortdci", 0x1F, 0x0E, 0x1B, 0x00000000, \ 6764 PPC_NONE, PPC2_TM), 6765 GEN_HANDLER2_E(tsr, "tsr", 0x1F, 0x0E, 0x17, 0x03DFF800, \ 6766 PPC_NONE, PPC2_TM), 6767 GEN_HANDLER2_E(tcheck, "tcheck", 0x1F, 0x0E, 0x16, 0x007FF800, \ 6768 PPC_NONE, PPC2_TM), 6769 GEN_HANDLER2_E(treclaim, "treclaim", 0x1F, 0x0E, 0x1D, 0x03E0F800, \ 6770 PPC_NONE, PPC2_TM), 6771 GEN_HANDLER2_E(trechkpt, "trechkpt", 0x1F, 0x0E, 0x1F, 0x03FFF800, \ 6772 PPC_NONE, PPC2_TM), 6773 6774 
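/* The remaining opcode table entries (floating point, AltiVec, VSX, decimal
 * floating point and SPE) are contributed by the per-unit .inc.c files
 * included below, which expand to further initializers of this array.
 */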
#include "translate/fp-ops.inc.c" 6775 6776 #include "translate/vmx-ops.inc.c" 6777 6778 #include "translate/vsx-ops.inc.c" 6779 6780 #include "translate/dfp-ops.inc.c" 6781 6782 #include "translate/spe-ops.inc.c" 6783 }; 6784 6785 #include "helper_regs.h" 6786 #include "translate_init.c" 6787 6788 /*****************************************************************************/ 6789 /* Misc PowerPC helpers */ 6790 void ppc_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf, 6791 int flags) 6792 { 6793 #define RGPL 4 6794 #define RFPL 4 6795 6796 PowerPCCPU *cpu = POWERPC_CPU(cs); 6797 CPUPPCState *env = &cpu->env; 6798 int i; 6799 6800 cpu_fprintf(f, "NIP " TARGET_FMT_lx " LR " TARGET_FMT_lx " CTR " 6801 TARGET_FMT_lx " XER " TARGET_FMT_lx " CPU#%d\n", 6802 env->nip, env->lr, env->ctr, cpu_read_xer(env), 6803 cs->cpu_index); 6804 cpu_fprintf(f, "MSR " TARGET_FMT_lx " HID0 " TARGET_FMT_lx " HF " 6805 TARGET_FMT_lx " iidx %d didx %d\n", 6806 env->msr, env->spr[SPR_HID0], 6807 env->hflags, env->immu_idx, env->dmmu_idx); 6808 #if !defined(NO_TIMER_DUMP) 6809 cpu_fprintf(f, "TB %08" PRIu32 " %08" PRIu64 6810 #if !defined(CONFIG_USER_ONLY) 6811 " DECR %08" PRIu32 6812 #endif 6813 "\n", 6814 cpu_ppc_load_tbu(env), cpu_ppc_load_tbl(env) 6815 #if !defined(CONFIG_USER_ONLY) 6816 , cpu_ppc_load_decr(env) 6817 #endif 6818 ); 6819 #endif 6820 for (i = 0; i < 32; i++) { 6821 if ((i & (RGPL - 1)) == 0) 6822 cpu_fprintf(f, "GPR%02d", i); 6823 cpu_fprintf(f, " %016" PRIx64, ppc_dump_gpr(env, i)); 6824 if ((i & (RGPL - 1)) == (RGPL - 1)) 6825 cpu_fprintf(f, "\n"); 6826 } 6827 cpu_fprintf(f, "CR "); 6828 for (i = 0; i < 8; i++) 6829 cpu_fprintf(f, "%01x", env->crf[i]); 6830 cpu_fprintf(f, " ["); 6831 for (i = 0; i < 8; i++) { 6832 char a = '-'; 6833 if (env->crf[i] & 0x08) 6834 a = 'L'; 6835 else if (env->crf[i] & 0x04) 6836 a = 'G'; 6837 else if (env->crf[i] & 0x02) 6838 a = 'E'; 6839 cpu_fprintf(f, " %c%c", a, env->crf[i] & 0x01 ? 
'O' : ' '); 6840 } 6841 cpu_fprintf(f, " ] RES " TARGET_FMT_lx "\n", 6842 env->reserve_addr); 6843 for (i = 0; i < 32; i++) { 6844 if ((i & (RFPL - 1)) == 0) 6845 cpu_fprintf(f, "FPR%02d", i); 6846 cpu_fprintf(f, " %016" PRIx64, *((uint64_t *)&env->fpr[i])); 6847 if ((i & (RFPL - 1)) == (RFPL - 1)) 6848 cpu_fprintf(f, "\n"); 6849 } 6850 cpu_fprintf(f, "FPSCR " TARGET_FMT_lx "\n", env->fpscr); 6851 #if !defined(CONFIG_USER_ONLY) 6852 cpu_fprintf(f, " SRR0 " TARGET_FMT_lx " SRR1 " TARGET_FMT_lx 6853 " PVR " TARGET_FMT_lx " VRSAVE " TARGET_FMT_lx "\n", 6854 env->spr[SPR_SRR0], env->spr[SPR_SRR1], 6855 env->spr[SPR_PVR], env->spr[SPR_VRSAVE]); 6856 6857 cpu_fprintf(f, "SPRG0 " TARGET_FMT_lx " SPRG1 " TARGET_FMT_lx 6858 " SPRG2 " TARGET_FMT_lx " SPRG3 " TARGET_FMT_lx "\n", 6859 env->spr[SPR_SPRG0], env->spr[SPR_SPRG1], 6860 env->spr[SPR_SPRG2], env->spr[SPR_SPRG3]); 6861 6862 cpu_fprintf(f, "SPRG4 " TARGET_FMT_lx " SPRG5 " TARGET_FMT_lx 6863 " SPRG6 " TARGET_FMT_lx " SPRG7 " TARGET_FMT_lx "\n", 6864 env->spr[SPR_SPRG4], env->spr[SPR_SPRG5], 6865 env->spr[SPR_SPRG6], env->spr[SPR_SPRG7]); 6866 6867 #if defined(TARGET_PPC64) 6868 if (env->excp_model == POWERPC_EXCP_POWER7 || 6869 env->excp_model == POWERPC_EXCP_POWER8) { 6870 cpu_fprintf(f, "HSRR0 " TARGET_FMT_lx " HSRR1 " TARGET_FMT_lx "\n", 6871 env->spr[SPR_HSRR0], env->spr[SPR_HSRR1]); 6872 } 6873 #endif 6874 if (env->excp_model == POWERPC_EXCP_BOOKE) { 6875 cpu_fprintf(f, "CSRR0 " TARGET_FMT_lx " CSRR1 " TARGET_FMT_lx 6876 " MCSRR0 " TARGET_FMT_lx " MCSRR1 " TARGET_FMT_lx "\n", 6877 env->spr[SPR_BOOKE_CSRR0], env->spr[SPR_BOOKE_CSRR1], 6878 env->spr[SPR_BOOKE_MCSRR0], env->spr[SPR_BOOKE_MCSRR1]); 6879 6880 cpu_fprintf(f, " TCR " TARGET_FMT_lx " TSR " TARGET_FMT_lx 6881 " ESR " TARGET_FMT_lx " DEAR " TARGET_FMT_lx "\n", 6882 env->spr[SPR_BOOKE_TCR], env->spr[SPR_BOOKE_TSR], 6883 env->spr[SPR_BOOKE_ESR], env->spr[SPR_BOOKE_DEAR]); 6884 6885 cpu_fprintf(f, " PIR " TARGET_FMT_lx " DECAR " TARGET_FMT_lx 6886 " IVPR " TARGET_FMT_lx " EPCR " TARGET_FMT_lx "\n", 6887 env->spr[SPR_BOOKE_PIR], env->spr[SPR_BOOKE_DECAR], 6888 env->spr[SPR_BOOKE_IVPR], env->spr[SPR_BOOKE_EPCR]); 6889 6890 cpu_fprintf(f, " MCSR " TARGET_FMT_lx " SPRG8 " TARGET_FMT_lx 6891 " EPR " TARGET_FMT_lx "\n", 6892 env->spr[SPR_BOOKE_MCSR], env->spr[SPR_BOOKE_SPRG8], 6893 env->spr[SPR_BOOKE_EPR]); 6894 6895 /* FSL-specific */ 6896 cpu_fprintf(f, " MCAR " TARGET_FMT_lx " PID1 " TARGET_FMT_lx 6897 " PID2 " TARGET_FMT_lx " SVR " TARGET_FMT_lx "\n", 6898 env->spr[SPR_Exxx_MCAR], env->spr[SPR_BOOKE_PID1], 6899 env->spr[SPR_BOOKE_PID2], env->spr[SPR_E500_SVR]); 6900 6901 /* 6902 * IVORs are left out as they are large and do not change often -- 6903 * they can be read with "p $ivor0", "p $ivor1", etc. 
6904 */ 6905 } 6906 6907 #if defined(TARGET_PPC64) 6908 if (env->flags & POWERPC_FLAG_CFAR) { 6909 cpu_fprintf(f, " CFAR " TARGET_FMT_lx"\n", env->cfar); 6910 } 6911 #endif 6912 6913 switch (env->mmu_model) { 6914 case POWERPC_MMU_32B: 6915 case POWERPC_MMU_601: 6916 case POWERPC_MMU_SOFT_6xx: 6917 case POWERPC_MMU_SOFT_74xx: 6918 #if defined(TARGET_PPC64) 6919 case POWERPC_MMU_64B: 6920 case POWERPC_MMU_2_03: 6921 case POWERPC_MMU_2_06: 6922 case POWERPC_MMU_2_06a: 6923 case POWERPC_MMU_2_07: 6924 case POWERPC_MMU_2_07a: 6925 #endif 6926 cpu_fprintf(f, " SDR1 " TARGET_FMT_lx " DAR " TARGET_FMT_lx 6927 " DSISR " TARGET_FMT_lx "\n", env->spr[SPR_SDR1], 6928 env->spr[SPR_DAR], env->spr[SPR_DSISR]); 6929 break; 6930 case POWERPC_MMU_BOOKE206: 6931 cpu_fprintf(f, " MAS0 " TARGET_FMT_lx " MAS1 " TARGET_FMT_lx 6932 " MAS2 " TARGET_FMT_lx " MAS3 " TARGET_FMT_lx "\n", 6933 env->spr[SPR_BOOKE_MAS0], env->spr[SPR_BOOKE_MAS1], 6934 env->spr[SPR_BOOKE_MAS2], env->spr[SPR_BOOKE_MAS3]); 6935 6936 cpu_fprintf(f, " MAS4 " TARGET_FMT_lx " MAS6 " TARGET_FMT_lx 6937 " MAS7 " TARGET_FMT_lx " PID " TARGET_FMT_lx "\n", 6938 env->spr[SPR_BOOKE_MAS4], env->spr[SPR_BOOKE_MAS6], 6939 env->spr[SPR_BOOKE_MAS7], env->spr[SPR_BOOKE_PID]); 6940 6941 cpu_fprintf(f, "MMUCFG " TARGET_FMT_lx " TLB0CFG " TARGET_FMT_lx 6942 " TLB1CFG " TARGET_FMT_lx "\n", 6943 env->spr[SPR_MMUCFG], env->spr[SPR_BOOKE_TLB0CFG], 6944 env->spr[SPR_BOOKE_TLB1CFG]); 6945 break; 6946 default: 6947 break; 6948 } 6949 #endif 6950 6951 #undef RGPL 6952 #undef RFPL 6953 } 6954 6955 void ppc_cpu_dump_statistics(CPUState *cs, FILE*f, 6956 fprintf_function cpu_fprintf, int flags) 6957 { 6958 #if defined(DO_PPC_STATISTICS) 6959 PowerPCCPU *cpu = POWERPC_CPU(cs); 6960 opc_handler_t **t1, **t2, **t3, *handler; 6961 int op1, op2, op3; 6962 6963 t1 = cpu->env.opcodes; 6964 for (op1 = 0; op1 < 64; op1++) { 6965 handler = t1[op1]; 6966 if (is_indirect_opcode(handler)) { 6967 t2 = ind_table(handler); 6968 for (op2 = 0; op2 < 32; op2++) { 6969 handler = t2[op2]; 6970 if (is_indirect_opcode(handler)) { 6971 t3 = ind_table(handler); 6972 for (op3 = 0; op3 < 32; op3++) { 6973 handler = t3[op3]; 6974 if (handler->count == 0) 6975 continue; 6976 cpu_fprintf(f, "%02x %02x %02x (%02x %04d) %16s: " 6977 "%016" PRIx64 " %" PRId64 "\n", 6978 op1, op2, op3, op1, (op3 << 5) | op2, 6979 handler->oname, 6980 handler->count, handler->count); 6981 } 6982 } else { 6983 if (handler->count == 0) 6984 continue; 6985 cpu_fprintf(f, "%02x %02x (%02x %04d) %16s: " 6986 "%016" PRIx64 " %" PRId64 "\n", 6987 op1, op2, op1, op2, handler->oname, 6988 handler->count, handler->count); 6989 } 6990 } 6991 } else { 6992 if (handler->count == 0) 6993 continue; 6994 cpu_fprintf(f, "%02x (%02x ) %16s: %016" PRIx64 6995 " %" PRId64 "\n", 6996 op1, op1, handler->oname, 6997 handler->count, handler->count); 6998 } 6999 } 7000 #endif 7001 } 7002 7003 /*****************************************************************************/ 7004 void gen_intermediate_code(CPUPPCState *env, struct TranslationBlock *tb) 7005 { 7006 PowerPCCPU *cpu = ppc_env_get_cpu(env); 7007 CPUState *cs = CPU(cpu); 7008 DisasContext ctx, *ctxp = &ctx; 7009 opc_handler_t **table, *handler; 7010 target_ulong pc_start; 7011 int num_insns; 7012 int max_insns; 7013 7014 pc_start = tb->pc; 7015 ctx.nip = pc_start; 7016 ctx.tb = tb; 7017 ctx.exception = POWERPC_EXCP_NONE; 7018 ctx.spr_cb = env->spr_cb; 7019 ctx.pr = msr_pr; 7020 ctx.mem_idx = env->dmmu_idx; 7021 ctx.dr = msr_dr; 7022 #if !defined(CONFIG_USER_ONLY) 7023 ctx.hv = msr_hv || 
                 !env->has_hv_mode;
#endif
    ctx.insns_flags = env->insns_flags;
    ctx.insns_flags2 = env->insns_flags2;
    ctx.access_type = -1;
    ctx.need_access_type = !(env->mmu_model & POWERPC_MMU_64B);
    ctx.le_mode = !!(env->hflags & (1 << MSR_LE));
    ctx.default_tcg_memop_mask = ctx.le_mode ? MO_LE : MO_BE;
#if defined(TARGET_PPC64)
    ctx.sf_mode = msr_is_64bit(env, env->msr);
    ctx.has_cfar = !!(env->flags & POWERPC_FLAG_CFAR);
#endif
    if (env->mmu_model == POWERPC_MMU_32B ||
        env->mmu_model == POWERPC_MMU_601 ||
        (env->mmu_model & POWERPC_MMU_64B))
        ctx.lazy_tlb_flush = true;

    ctx.fpu_enabled = !!msr_fp;
    if ((env->flags & POWERPC_FLAG_SPE) && msr_spe)
        ctx.spe_enabled = !!msr_spe;
    else
        ctx.spe_enabled = false;
    if ((env->flags & POWERPC_FLAG_VRE) && msr_vr)
        ctx.altivec_enabled = !!msr_vr;
    else
        ctx.altivec_enabled = false;
    if ((env->flags & POWERPC_FLAG_VSX) && msr_vsx) {
        ctx.vsx_enabled = !!msr_vsx;
    } else {
        ctx.vsx_enabled = false;
    }
#if defined(TARGET_PPC64)
    if ((env->flags & POWERPC_FLAG_TM) && msr_tm) {
        ctx.tm_enabled = !!msr_tm;
    } else {
        ctx.tm_enabled = false;
    }
#endif
    if ((env->flags & POWERPC_FLAG_SE) && msr_se)
        ctx.singlestep_enabled = CPU_SINGLE_STEP;
    else
        ctx.singlestep_enabled = 0;
    if ((env->flags & POWERPC_FLAG_BE) && msr_be)
        ctx.singlestep_enabled |= CPU_BRANCH_STEP;
    if (unlikely(cs->singlestep_enabled)) {
        ctx.singlestep_enabled |= GDBSTUB_SINGLE_STEP;
    }
#if defined (DO_SINGLE_STEP) && 0
    /* Single step trace mode */
    msr_se = 1;
#endif
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0) {
        max_insns = CF_COUNT_MASK;
    }
    if (max_insns > TCG_MAX_INSNS) {
        max_insns = TCG_MAX_INSNS;
    }

    gen_tb_start(tb);
    tcg_clear_temp_count();
    /* Set env in case of segfault during code fetch */
    while (ctx.exception == POWERPC_EXCP_NONE && !tcg_op_buf_full()) {
        tcg_gen_insn_start(ctx.nip);
        num_insns++;

        if (unlikely(cpu_breakpoint_test(cs, ctx.nip, BP_ANY))) {
            gen_debug_exception(ctxp);
            /* The address covered by the breakpoint must be included in
               [tb->pc, tb->pc + tb->size) in order for it to be
               properly cleared -- thus we increment the PC here so that
               the logic setting tb->size below does the right thing. */
            ctx.nip += 4;
            break;
        }

        LOG_DISAS("----------------\n");
        LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
                  ctx.nip, ctx.mem_idx, (int)msr_ir);
        if (num_insns == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        if (unlikely(need_byteswap(&ctx))) {
            ctx.opcode = bswap32(cpu_ldl_code(env, ctx.nip));
        } else {
            ctx.opcode = cpu_ldl_code(env, ctx.nip);
        }
        LOG_DISAS("translate opcode %08x (%02x %02x %02x %02x) (%s)\n",
                  ctx.opcode, opc1(ctx.opcode), opc2(ctx.opcode),
                  opc3(ctx.opcode), opc4(ctx.opcode),
                  ctx.le_mode ?
"little" : "big"); 7114 ctx.nip += 4; 7115 table = env->opcodes; 7116 handler = table[opc1(ctx.opcode)]; 7117 if (is_indirect_opcode(handler)) { 7118 table = ind_table(handler); 7119 handler = table[opc2(ctx.opcode)]; 7120 if (is_indirect_opcode(handler)) { 7121 table = ind_table(handler); 7122 handler = table[opc3(ctx.opcode)]; 7123 if (is_indirect_opcode(handler)) { 7124 table = ind_table(handler); 7125 handler = table[opc4(ctx.opcode)]; 7126 } 7127 } 7128 } 7129 /* Is opcode *REALLY* valid ? */ 7130 if (unlikely(handler->handler == &gen_invalid)) { 7131 qemu_log_mask(LOG_GUEST_ERROR, "invalid/unsupported opcode: " 7132 "%02x - %02x - %02x - %02x (%08x) " 7133 TARGET_FMT_lx " %d\n", 7134 opc1(ctx.opcode), opc2(ctx.opcode), 7135 opc3(ctx.opcode), opc4(ctx.opcode), 7136 ctx.opcode, ctx.nip - 4, (int)msr_ir); 7137 } else { 7138 uint32_t inval; 7139 7140 if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE) && Rc(ctx.opcode))) { 7141 inval = handler->inval2; 7142 } else { 7143 inval = handler->inval1; 7144 } 7145 7146 if (unlikely((ctx.opcode & inval) != 0)) { 7147 qemu_log_mask(LOG_GUEST_ERROR, "invalid bits: %08x for opcode: " 7148 "%02x - %02x - %02x - %02x (%08x) " 7149 TARGET_FMT_lx "\n", ctx.opcode & inval, 7150 opc1(ctx.opcode), opc2(ctx.opcode), 7151 opc3(ctx.opcode), opc4(ctx.opcode), 7152 ctx.opcode, ctx.nip - 4); 7153 gen_inval_exception(ctxp, POWERPC_EXCP_INVAL_INVAL); 7154 break; 7155 } 7156 } 7157 (*(handler->handler))(&ctx); 7158 #if defined(DO_PPC_STATISTICS) 7159 handler->count++; 7160 #endif 7161 /* Check trace mode exceptions */ 7162 if (unlikely(ctx.singlestep_enabled & CPU_SINGLE_STEP && 7163 (ctx.nip <= 0x100 || ctx.nip > 0xF00) && 7164 ctx.exception != POWERPC_SYSCALL && 7165 ctx.exception != POWERPC_EXCP_TRAP && 7166 ctx.exception != POWERPC_EXCP_BRANCH)) { 7167 gen_exception_nip(ctxp, POWERPC_EXCP_TRACE, ctx.nip); 7168 } else if (unlikely(((ctx.nip & (TARGET_PAGE_SIZE - 1)) == 0) || 7169 (cs->singlestep_enabled) || 7170 singlestep || 7171 num_insns >= max_insns)) { 7172 /* if we reach a page boundary or are single stepping, stop 7173 * generation 7174 */ 7175 break; 7176 } 7177 if (tcg_check_temp_count()) { 7178 fprintf(stderr, "Opcode %02x %02x %02x %02x (%08x) leaked " 7179 "temporaries\n", opc1(ctx.opcode), opc2(ctx.opcode), 7180 opc3(ctx.opcode), opc4(ctx.opcode), ctx.opcode); 7181 exit(1); 7182 } 7183 } 7184 if (tb->cflags & CF_LAST_IO) 7185 gen_io_end(); 7186 if (ctx.exception == POWERPC_EXCP_NONE) { 7187 gen_goto_tb(&ctx, 0, ctx.nip); 7188 } else if (ctx.exception != POWERPC_EXCP_BRANCH) { 7189 if (unlikely(cs->singlestep_enabled)) { 7190 gen_debug_exception(ctxp); 7191 } 7192 /* Generate the return instruction */ 7193 tcg_gen_exit_tb(0); 7194 } 7195 gen_tb_end(tb, num_insns); 7196 7197 tb->size = ctx.nip - pc_start; 7198 tb->icount = num_insns; 7199 7200 #if defined(DEBUG_DISAS) 7201 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM) 7202 && qemu_log_in_addr_range(pc_start)) { 7203 int flags; 7204 flags = env->bfd_mach; 7205 flags |= ctx.le_mode << 16; 7206 qemu_log_lock(); 7207 qemu_log("IN: %s\n", lookup_symbol(pc_start)); 7208 log_target_disas(cs, pc_start, ctx.nip - pc_start, flags); 7209 qemu_log("\n"); 7210 qemu_log_unlock(); 7211 } 7212 #endif 7213 } 7214 7215 void restore_state_to_opc(CPUPPCState *env, TranslationBlock *tb, 7216 target_ulong *data) 7217 { 7218 env->nip = data[0]; 7219 } 7220