/*
 * translate-fp.c
 *
 * Standard FPU translation
 */

static inline void gen_reset_fpstatus(void)
{
    gen_helper_reset_fpstatus(cpu_env);
}

static inline void gen_compute_fprf_float64(TCGv_i64 arg)
{
    gen_helper_compute_fprf_float64(cpu_env, arg);
    gen_helper_float_check_status(cpu_env);
}

#if defined(TARGET_PPC64)
static void gen_set_cr1_from_fpscr(DisasContext *ctx)
{
    TCGv_i32 tmp = tcg_temp_new_i32();
    tcg_gen_trunc_tl_i32(tmp, cpu_fpscr);
    tcg_gen_shri_i32(cpu_crf[1], tmp, 28);
    tcg_temp_free_i32(tmp);
}
#else
static void gen_set_cr1_from_fpscr(DisasContext *ctx)
{
    tcg_gen_shri_tl(cpu_crf[1], cpu_fpscr, 28);
}
#endif

/*** Floating-Point arithmetic ***/
#define _GEN_FLOAT_ACB(name, op1, op2, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    TCGv_i64 t0; \
    TCGv_i64 t1; \
    TCGv_i64 t2; \
    TCGv_i64 t3; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    t0 = tcg_temp_new_i64(); \
    t1 = tcg_temp_new_i64(); \
    t2 = tcg_temp_new_i64(); \
    t3 = tcg_temp_new_i64(); \
    gen_reset_fpstatus(); \
    get_fpr(t0, rA(ctx->opcode)); \
    get_fpr(t1, rC(ctx->opcode)); \
    get_fpr(t2, rB(ctx->opcode)); \
    gen_helper_f##name(t3, cpu_env, t0, t1, t2); \
    set_fpr(rD(ctx->opcode), t3); \
    if (set_fprf) { \
        gen_compute_fprf_float64(t3); \
    } \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
    tcg_temp_free_i64(t0); \
    tcg_temp_free_i64(t1); \
    tcg_temp_free_i64(t2); \
    tcg_temp_free_i64(t3); \
}

#define GEN_FLOAT_ACB(name, op2, set_fprf, type) \
_GEN_FLOAT_ACB(name, 0x3F, op2, set_fprf, type); \
_GEN_FLOAT_ACB(name##s, 0x3B, op2, set_fprf, type);

#define _GEN_FLOAT_AB(name, op1, op2, inval, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    TCGv_i64 t0; \
    TCGv_i64 t1; \
    TCGv_i64 t2; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    t0 = tcg_temp_new_i64(); \
    t1 = tcg_temp_new_i64(); \
    t2 = tcg_temp_new_i64(); \
    gen_reset_fpstatus(); \
    get_fpr(t0, rA(ctx->opcode)); \
    get_fpr(t1, rB(ctx->opcode)); \
    gen_helper_f##name(t2, cpu_env, t0, t1); \
    set_fpr(rD(ctx->opcode), t2); \
    if (set_fprf) { \
        gen_compute_fprf_float64(t2); \
    } \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
    tcg_temp_free_i64(t0); \
    tcg_temp_free_i64(t1); \
    tcg_temp_free_i64(t2); \
}
#define GEN_FLOAT_AB(name, op2, inval, set_fprf, type) \
_GEN_FLOAT_AB(name, 0x3F, op2, inval, set_fprf, type); \
_GEN_FLOAT_AB(name##s, 0x3B, op2, inval, set_fprf, type);

#define _GEN_FLOAT_AC(name, op1, op2, inval, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    TCGv_i64 t0; \
    TCGv_i64 t1; \
    TCGv_i64 t2; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    t0 = tcg_temp_new_i64(); \
    t1 = tcg_temp_new_i64(); \
    t2 = tcg_temp_new_i64(); \
    gen_reset_fpstatus(); \
    get_fpr(t0, rA(ctx->opcode)); \
    get_fpr(t1, rC(ctx->opcode)); \
    gen_helper_f##name(t2, cpu_env, t0, t1); \
    set_fpr(rD(ctx->opcode), t2); \
    if (set_fprf) { \
        gen_compute_fprf_float64(t2); \
    } \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
    tcg_temp_free_i64(t0); \
    tcg_temp_free_i64(t1); \
    tcg_temp_free_i64(t2); \
}
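/*
 * Note: GEN_FLOAT_ACB, GEN_FLOAT_AB and GEN_FLOAT_AC each instantiate the
 * generator twice: once for the double-precision form (major opcode 0x3F)
 * and once for the single-precision "name##s" form (major opcode 0x3B).
 * For example, GEN_FLOAT_AB(add, ...) expands to gen_fadd() and gen_fadds().
 */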
#define GEN_FLOAT_AC(name, op2, inval, set_fprf, type) \
_GEN_FLOAT_AC(name, 0x3F, op2, inval, set_fprf, type); \
_GEN_FLOAT_AC(name##s, 0x3B, op2, inval, set_fprf, type);

#define GEN_FLOAT_B(name, op2, op3, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    TCGv_i64 t0; \
    TCGv_i64 t1; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    t0 = tcg_temp_new_i64(); \
    t1 = tcg_temp_new_i64(); \
    gen_reset_fpstatus(); \
    get_fpr(t0, rB(ctx->opcode)); \
    gen_helper_f##name(t1, cpu_env, t0); \
    set_fpr(rD(ctx->opcode), t1); \
    if (set_fprf) { \
        gen_helper_compute_fprf_float64(cpu_env, t1); \
    } \
    gen_helper_float_check_status(cpu_env); \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
    tcg_temp_free_i64(t0); \
    tcg_temp_free_i64(t1); \
}

#define GEN_FLOAT_BS(name, op1, op2, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    TCGv_i64 t0; \
    TCGv_i64 t1; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    t0 = tcg_temp_new_i64(); \
    t1 = tcg_temp_new_i64(); \
    gen_reset_fpstatus(); \
    get_fpr(t0, rB(ctx->opcode)); \
    gen_helper_f##name(t1, cpu_env, t0); \
    set_fpr(rD(ctx->opcode), t1); \
    if (set_fprf) { \
        gen_compute_fprf_float64(t1); \
    } \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
    tcg_temp_free_i64(t0); \
    tcg_temp_free_i64(t1); \
}

/* fadd - fadds */
GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT);
/* fdiv - fdivs */
GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT);
/* fmul - fmuls */
GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT);

/* fre */
GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT);

/* fres */
GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES);

/* frsqrte */
GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE);

/* frsqrtes */
static void gen_frsqrtes(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    gen_reset_fpstatus();
    get_fpr(t0, rB(ctx->opcode));
    gen_helper_frsqrtes(t1, cpu_env, t0);
    set_fpr(rD(ctx->opcode), t1);
    gen_compute_fprf_float64(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

static bool trans_FSEL(DisasContext *ctx, arg_A *a)
{
    TCGv_i64 t0, t1, t2;

    REQUIRE_INSNS_FLAGS(ctx, FLOAT_FSEL);
    REQUIRE_FPU(ctx);

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    t2 = tcg_temp_new_i64();

    get_fpr(t0, a->fra);
    get_fpr(t1, a->frb);
    get_fpr(t2, a->frc);

    gen_helper_FSEL(t0, t0, t1, t2);
    set_fpr(a->frt, t0);
    if (a->rc) {
        gen_set_cr1_from_fpscr(ctx);
    }

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);

    return true;
}

/* fsub - fsubs */
GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT);
/* Optional: */

/* fsqrt */
static void gen_fsqrt(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    gen_reset_fpstatus();
    get_fpr(t0, rB(ctx->opcode));
    gen_helper_fsqrt(t1, cpu_env, t0);
    set_fpr(rD(ctx->opcode), t1);
    gen_compute_fprf_float64(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

static void gen_fsqrts(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    gen_reset_fpstatus();
    get_fpr(t0, rB(ctx->opcode));
    gen_helper_fsqrts(t1, cpu_env, t0);
    set_fpr(rD(ctx->opcode), t1);
    gen_compute_fprf_float64(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

/*** Floating-Point multiply-and-add ***/
/* fmadd - fmadds */
GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT);
/* fmsub - fmsubs */
GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT);
/* fnmadd - fnmadds */
GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT);
/* fnmsub - fnmsubs */
GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT);

/*** Floating-Point round & convert ***/
/* fctiw */
GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT);
/* fctiwu */
GEN_FLOAT_B(ctiwu, 0x0E, 0x04, 0, PPC2_FP_CVT_ISA206);
/* fctiwz */
GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT);
/* fctiwuz */
GEN_FLOAT_B(ctiwuz, 0x0F, 0x04, 0, PPC2_FP_CVT_ISA206);
/* frsp */
GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT);
/* fcfid */
GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC2_FP_CVT_S64);
/* fcfids */
GEN_FLOAT_B(cfids, 0x0E, 0x1A, 0, PPC2_FP_CVT_ISA206);
/* fcfidu */
GEN_FLOAT_B(cfidu, 0x0E, 0x1E, 0, PPC2_FP_CVT_ISA206);
/* fcfidus */
GEN_FLOAT_B(cfidus, 0x0E, 0x1E, 0, PPC2_FP_CVT_ISA206);
/* fctid */
GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC2_FP_CVT_S64);
/* fctidu */
GEN_FLOAT_B(ctidu, 0x0E, 0x1D, 0, PPC2_FP_CVT_ISA206);
/* fctidz */
GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC2_FP_CVT_S64);
/* fctiduz */
GEN_FLOAT_B(ctiduz, 0x0F, 0x1D, 0, PPC2_FP_CVT_ISA206);

/* frin */
GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT);
/* friz */
GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT);
/* frip */
GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT);
/* frim */
GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT);

/* ftdiv */
static void gen_ftdiv(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    get_fpr(t0, rA(ctx->opcode));
    get_fpr(t1, rB(ctx->opcode));
    gen_helper_ftdiv(cpu_crf[crfD(ctx->opcode)], t0, t1);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

/* ftsqrt */
static void gen_ftsqrt(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    get_fpr(t0, rB(ctx->opcode));
    gen_helper_ftsqrt(cpu_crf[crfD(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

/*** Floating-Point compare ***/

/* fcmpo */
static void gen_fcmpo(DisasContext *ctx)
{
    TCGv_i32 crf;
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    gen_reset_fpstatus();
    crf = tcg_const_i32(crfD(ctx->opcode));
    get_fpr(t0, rA(ctx->opcode));
    get_fpr(t1, rB(ctx->opcode));
    gen_helper_fcmpo(cpu_env, t0, t1, crf);
    tcg_temp_free_i32(crf);
    gen_helper_float_check_status(cpu_env);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

/* fcmpu */
static void gen_fcmpu(DisasContext *ctx)
{
    TCGv_i32 crf;
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    gen_reset_fpstatus();
    crf = tcg_const_i32(crfD(ctx->opcode));
    get_fpr(t0, rA(ctx->opcode));
    get_fpr(t1, rB(ctx->opcode));
    gen_helper_fcmpu(cpu_env, t0, t1, crf);
    tcg_temp_free_i32(crf);
    gen_helper_float_check_status(cpu_env);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

/*** Floating-point move ***/
/* fabs */
/* XXX: beware that fabs never checks for NaNs nor updates FPSCR */
static void gen_fabs(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    get_fpr(t0, rB(ctx->opcode));
    tcg_gen_andi_i64(t1, t0, ~(1ULL << 63));
    set_fpr(rD(ctx->opcode), t1);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

/* fmr - fmr. */
/* XXX: beware that fmr never checks for NaNs nor updates FPSCR */
static void gen_fmr(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    get_fpr(t0, rB(ctx->opcode));
    set_fpr(rD(ctx->opcode), t0);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
}

/* fnabs */
/* XXX: beware that fnabs never checks for NaNs nor updates FPSCR */
static void gen_fnabs(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    get_fpr(t0, rB(ctx->opcode));
    tcg_gen_ori_i64(t1, t0, 1ULL << 63);
    set_fpr(rD(ctx->opcode), t1);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

/* fneg */
/* XXX: beware that fneg never checks for NaNs nor updates FPSCR */
static void gen_fneg(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    get_fpr(t0, rB(ctx->opcode));
    tcg_gen_xori_i64(t1, t0, 1ULL << 63);
    set_fpr(rD(ctx->opcode), t1);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

/* fcpsgn: PowerPC 2.05 specification */
/* XXX: beware that fcpsgn never checks for NaNs nor updates FPSCR */
static void gen_fcpsgn(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    TCGv_i64 t2;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    t2 = tcg_temp_new_i64();
    get_fpr(t0, rA(ctx->opcode));
    get_fpr(t1, rB(ctx->opcode));
    tcg_gen_deposit_i64(t2, t0, t1, 0, 63);
    set_fpr(rD(ctx->opcode), t2);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
}

/* fmrgew */
static void gen_fmrgew(DisasContext *ctx)
{
    TCGv_i64 b0;
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    b0 = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    get_fpr(t0, rB(ctx->opcode));
    tcg_gen_shri_i64(b0, t0, 32);
    get_fpr(t0, rA(ctx->opcode));
    tcg_gen_deposit_i64(t1, t0, b0, 0, 32);
    set_fpr(rD(ctx->opcode), t1);
    tcg_temp_free_i64(b0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

/* fmrgow */
static void gen_fmrgow(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    TCGv_i64 t2;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    t2 = tcg_temp_new_i64();
    get_fpr(t0, rB(ctx->opcode));
    get_fpr(t1, rA(ctx->opcode));
    tcg_gen_deposit_i64(t2, t0, t1, 32, 32);
    set_fpr(rD(ctx->opcode), t2);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
}

/*** Floating-Point status & ctrl register ***/

/* mcrfs */
static void gen_mcrfs(DisasContext *ctx)
{
    TCGv tmp = tcg_temp_new();
    TCGv_i32 tmask;
    TCGv_i64 tnew_fpscr = tcg_temp_new_i64();
    int bfa;
    int nibble;
    int shift;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    bfa = crfS(ctx->opcode);
    nibble = 7 - bfa;
    shift = 4 * nibble;
    tcg_gen_shri_tl(tmp, cpu_fpscr, shift);
    tcg_gen_trunc_tl_i32(cpu_crf[crfD(ctx->opcode)], tmp);
    tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)],
                     0xf);
    tcg_temp_free(tmp);
    tcg_gen_extu_tl_i64(tnew_fpscr, cpu_fpscr);
    /* Only the exception bits (including FX) should be cleared if read */
    tcg_gen_andi_i64(tnew_fpscr, tnew_fpscr,
                     ~((0xF << shift) & FP_EX_CLEAR_BITS));
    /* FEX and VX need to be updated, so don't set fpscr directly */
    tmask = tcg_const_i32(1 << nibble);
    gen_helper_store_fpscr(cpu_env, tnew_fpscr, tmask);
    tcg_temp_free_i32(tmask);
    tcg_temp_free_i64(tnew_fpscr);
}

/* mffs */
static void gen_mffs(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_reset_fpstatus();
    tcg_gen_extu_tl_i64(t0, cpu_fpscr);
    set_fpr(rD(ctx->opcode), t0);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
}

/* mffsl */
static void gen_mffsl(DisasContext *ctx)
{
    TCGv_i64 t0;

    if (unlikely(!(ctx->insns_flags2 & PPC2_ISA300))) {
        return gen_mffs(ctx);
    }

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_reset_fpstatus();
    tcg_gen_extu_tl_i64(t0, cpu_fpscr);
    /* Mask everything except mode, status, and enables. */
    tcg_gen_andi_i64(t0, t0, FP_DRN | FP_STATUS | FP_ENABLES | FP_RN);
    set_fpr(rD(ctx->opcode), t0);
    tcg_temp_free_i64(t0);
}

/* mffsce */
static void gen_mffsce(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i32 mask;

    if (unlikely(!(ctx->insns_flags2 & PPC2_ISA300))) {
        return gen_mffs(ctx);
    }

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }

    t0 = tcg_temp_new_i64();

    gen_reset_fpstatus();
    tcg_gen_extu_tl_i64(t0, cpu_fpscr);
    set_fpr(rD(ctx->opcode), t0);

    /* Clear exception enable bits in the FPSCR. */
    tcg_gen_andi_i64(t0, t0, ~FP_ENABLES);
    mask = tcg_const_i32(0x0003);
    gen_helper_store_fpscr(cpu_env, t0, mask);

    tcg_temp_free_i32(mask);
    tcg_temp_free_i64(t0);
}

static void gen_helper_mffscrn(DisasContext *ctx, TCGv_i64 t1)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i32 mask = tcg_const_i32(0x0001);

    gen_reset_fpstatus();
    tcg_gen_extu_tl_i64(t0, cpu_fpscr);
    tcg_gen_andi_i64(t0, t0, FP_DRN | FP_ENABLES | FP_RN);
    set_fpr(rD(ctx->opcode), t0);

    /* Mask FPSCR value to clear RN. */
    tcg_gen_andi_i64(t0, t0, ~FP_RN);

    /* Merge RN into FPSCR value. */
    tcg_gen_or_i64(t0, t0, t1);

    gen_helper_store_fpscr(cpu_env, t0, mask);

    tcg_temp_free_i32(mask);
    tcg_temp_free_i64(t0);
}

/* mffscrn */
static void gen_mffscrn(DisasContext *ctx)
{
    TCGv_i64 t1;

    if (unlikely(!(ctx->insns_flags2 & PPC2_ISA300))) {
        return gen_mffs(ctx);
    }

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }

    t1 = tcg_temp_new_i64();
    get_fpr(t1, rB(ctx->opcode));
    /* Mask FRB to get just RN. */
    tcg_gen_andi_i64(t1, t1, FP_RN);

    gen_helper_mffscrn(ctx, t1);

    tcg_temp_free_i64(t1);
}

/* mffscrni */
static void gen_mffscrni(DisasContext *ctx)
{
    TCGv_i64 t1;

    if (unlikely(!(ctx->insns_flags2 & PPC2_ISA300))) {
        return gen_mffs(ctx);
    }

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }

    t1 = tcg_const_i64((uint64_t)RM(ctx->opcode));

    gen_helper_mffscrn(ctx, t1);

    tcg_temp_free_i64(t1);
}

/* mtfsb0 */
static void gen_mtfsb0(DisasContext *ctx)
{
    uint8_t crb;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    crb = 31 - crbD(ctx->opcode);
    gen_reset_fpstatus();
    if (likely(crb != FPSCR_FEX && crb != FPSCR_VX)) {
        TCGv_i32 t0;
        t0 = tcg_const_i32(crb);
        gen_helper_fpscr_clrbit(cpu_env, t0);
        tcg_temp_free_i32(t0);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
        tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
    }
}

/* mtfsb1 */
static void gen_mtfsb1(DisasContext *ctx)
{
    uint8_t crb;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    crb = 31 - crbD(ctx->opcode);
    /* XXX: we pretend we can only do IEEE floating-point computations */
    if (likely(crb != FPSCR_FEX && crb != FPSCR_VX && crb != FPSCR_NI)) {
        TCGv_i32 t0;
        t0 = tcg_const_i32(crb);
        gen_helper_fpscr_setbit(cpu_env, t0);
        tcg_temp_free_i32(t0);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
        tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
    }
    /* We can raise a deferred exception */
    gen_helper_fpscr_check_status(cpu_env);
}

/* mtfsf */
static void gen_mtfsf(DisasContext *ctx)
{
    TCGv_i32 t0;
    TCGv_i64 t1;
    int flm, l, w;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    flm = FPFLM(ctx->opcode);
    l = FPL(ctx->opcode);
    w = FPW(ctx->opcode);
    if (unlikely(w & !(ctx->insns_flags2 & PPC2_ISA205))) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }
    if (l) {
        t0 = tcg_const_i32((ctx->insns_flags2 & PPC2_ISA205) ? 0xffff : 0xff);
    } else {
        t0 = tcg_const_i32(flm << (w * 8));
    }
    t1 = tcg_temp_new_i64();
    get_fpr(t1, rB(ctx->opcode));
    gen_helper_store_fpscr(cpu_env, t1, t0);
    tcg_temp_free_i32(t0);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
        tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
    }
    /* We can raise a deferred exception */
    gen_helper_fpscr_check_status(cpu_env);
    tcg_temp_free_i64(t1);
}

/* mtfsfi */
static void gen_mtfsfi(DisasContext *ctx)
{
    int bf, sh, w;
    TCGv_i64 t0;
    TCGv_i32 t1;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    w = FPW(ctx->opcode);
    bf = FPBF(ctx->opcode);
    if (unlikely(w & !(ctx->insns_flags2 & PPC2_ISA205))) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }
    sh = (8 * w) + 7 - bf;
    t0 = tcg_const_i64(((uint64_t)FPIMM(ctx->opcode)) << (4 * sh));
    t1 = tcg_const_i32(1 << sh);
    gen_helper_store_fpscr(cpu_env, t0, t1);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
        tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
    }
    /* We can raise a deferred exception */
    gen_helper_fpscr_check_status(cpu_env);
}

static void gen_qemu_ld32fs(DisasContext *ctx, TCGv_i64 dest, TCGv addr)
{
    TCGv_i32 tmp = tcg_temp_new_i32();
    tcg_gen_qemu_ld_i32(tmp, addr, ctx->mem_idx, DEF_MEMOP(MO_UL));
    gen_helper_todouble(dest, tmp);
    tcg_temp_free_i32(tmp);
}

/* lfdepx (external PID lfdx) */
static void gen_lfdepx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    CHK_SV;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    t0 = tcg_temp_new_i64();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(t0, EA, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UQ));
    set_fpr(rD(ctx->opcode), t0);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

/* lfdp */
static void gen_lfdp(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    gen_addr_imm_index(ctx, EA, 0);
    t0 = tcg_temp_new_i64();
    /*
     * We only need to swap high and low halves. gen_qemu_ld64_i64
     * does necessary 64-bit byteswap already.
     */
    if (unlikely(ctx->le_mode)) {
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode) + 1, t0);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode), t0);
    } else {
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode), t0);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode) + 1, t0);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

/* lfdpx */
static void gen_lfdpx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    t0 = tcg_temp_new_i64();
    /*
     * We only need to swap high and low halves. gen_qemu_ld64_i64
     * does necessary 64-bit byteswap already.
     */
    if (unlikely(ctx->le_mode)) {
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode) + 1, t0);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode), t0);
    } else {
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode), t0);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode) + 1, t0);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

/* lfiwax */
static void gen_lfiwax(DisasContext *ctx)
{
    TCGv EA;
    TCGv t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    t0 = tcg_temp_new();
    t1 = tcg_temp_new_i64();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld32s(ctx, t0, EA);
    tcg_gen_ext_tl_i64(t1, t0);
    set_fpr(rD(ctx->opcode), t1);
    tcg_temp_free(EA);
    tcg_temp_free(t0);
    tcg_temp_free_i64(t1);
}

/* lfiwzx */
static void gen_lfiwzx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    t0 = tcg_temp_new_i64();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld32u_i64(ctx, t0, EA);
    set_fpr(rD(ctx->opcode), t0);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

#define GEN_STXF(name, stop, opc2, opc3, type) \
static void glue(gen_, name##x)(DisasContext *ctx) \
{ \
    TCGv EA; \
    TCGv_i64 t0; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    t0 = tcg_temp_new_i64(); \
    gen_addr_reg_index(ctx, EA); \
    get_fpr(t0, rS(ctx->opcode)); \
    gen_qemu_##stop(ctx, t0, EA); \
    tcg_temp_free(EA); \
    tcg_temp_free_i64(t0); \
}

static void gen_qemu_st32fs(DisasContext *ctx, TCGv_i64 src, TCGv addr)
{
    TCGv_i32 tmp = tcg_temp_new_i32();
    gen_helper_tosingle(tmp, src);
    tcg_gen_qemu_st_i32(tmp, addr, ctx->mem_idx, DEF_MEMOP(MO_UL));
    tcg_temp_free_i32(tmp);
}

/* stfdepx (external PID stfdx) */
static void gen_stfdepx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    CHK_SV;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    t0 = tcg_temp_new_i64();
    gen_addr_reg_index(ctx, EA);
    get_fpr(t0, rD(ctx->opcode));
    tcg_gen_qemu_st_i64(t0, EA, PPC_TLB_EPID_STORE, DEF_MEMOP(MO_UQ));
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

/* stfdp */
static void gen_stfdp(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    t0 = tcg_temp_new_i64();
    gen_addr_imm_index(ctx, EA, 0);
    /*
     * We only need to swap high and low halves. gen_qemu_st64_i64
     * does necessary 64-bit byteswap already.
     */
    if (unlikely(ctx->le_mode)) {
        get_fpr(t0, rD(ctx->opcode) + 1);
        gen_qemu_st64_i64(ctx, t0, EA);
        tcg_gen_addi_tl(EA, EA, 8);
        get_fpr(t0, rD(ctx->opcode));
        gen_qemu_st64_i64(ctx, t0, EA);
    } else {
        get_fpr(t0, rD(ctx->opcode));
        gen_qemu_st64_i64(ctx, t0, EA);
        tcg_gen_addi_tl(EA, EA, 8);
        get_fpr(t0, rD(ctx->opcode) + 1);
        gen_qemu_st64_i64(ctx, t0, EA);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

/* stfdpx */
static void gen_stfdpx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    t0 = tcg_temp_new_i64();
    gen_addr_reg_index(ctx, EA);
    /*
     * We only need to swap high and low halves. gen_qemu_st64_i64
     * does necessary 64-bit byteswap already.
     */
    if (unlikely(ctx->le_mode)) {
        get_fpr(t0, rD(ctx->opcode) + 1);
        gen_qemu_st64_i64(ctx, t0, EA);
        tcg_gen_addi_tl(EA, EA, 8);
        get_fpr(t0, rD(ctx->opcode));
        gen_qemu_st64_i64(ctx, t0, EA);
    } else {
        get_fpr(t0, rD(ctx->opcode));
        gen_qemu_st64_i64(ctx, t0, EA);
        tcg_gen_addi_tl(EA, EA, 8);
        get_fpr(t0, rD(ctx->opcode) + 1);
        gen_qemu_st64_i64(ctx, t0, EA);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

/* Optional: */
static inline void gen_qemu_st32fiw(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
{
    TCGv t0 = tcg_temp_new();
    tcg_gen_trunc_i64_tl(t0, arg1);
    gen_qemu_st32(ctx, t0, arg2);
    tcg_temp_free(t0);
}
/* stfiwx */
GEN_STXF(stfiw, st32fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX);

/* Floating-point Load/Store Instructions */
static bool do_lsfpsd(DisasContext *ctx, int rt, int ra, TCGv displ,
                      bool update, bool store, bool single)
{
    TCGv ea;
    TCGv_i64 t0;
    REQUIRE_INSNS_FLAGS(ctx, FLOAT);
    REQUIRE_FPU(ctx);
    if (update && ra == 0) {
        gen_invalid(ctx);
        return true;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new_i64();
    ea = do_ea_calc(ctx, ra, displ);
    if (store) {
        get_fpr(t0, rt);
        if (single) {
            gen_qemu_st32fs(ctx, t0, ea);
        } else {
            gen_qemu_st64_i64(ctx, t0, ea);
        }
    } else {
        if (single) {
            gen_qemu_ld32fs(ctx, t0, ea);
        } else {
            gen_qemu_ld64_i64(ctx, t0, ea);
        }
        set_fpr(rt, t0);
    }
    if (update) {
        tcg_gen_mov_tl(cpu_gpr[ra], ea);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free(ea);
    return true;
}

static bool do_lsfp_D(DisasContext *ctx, arg_D *a, bool update, bool store,
                      bool single)
{
    return do_lsfpsd(ctx, a->rt, a->ra, tcg_constant_tl(a->si), update, store,
                     single);
}

static bool do_lsfp_PLS_D(DisasContext *ctx, arg_PLS_D *a, bool update,
                          bool store, bool single)
{
    arg_D d;
    if (!resolve_PLS_D(ctx, &d, a)) {
        return true;
    }
    return do_lsfp_D(ctx, &d, update, store, single);
}

static bool do_lsfp_X(DisasContext *ctx, arg_X *a, bool update,
                      bool store, bool single)
{
    return do_lsfpsd(ctx, a->rt, a->ra, cpu_gpr[a->rb], update, store, single);
}

TRANS(LFS, do_lsfp_D, false, false, true)
TRANS(LFSU, do_lsfp_D, true, false, true)
TRANS(LFSX, do_lsfp_X, false, false, true)
TRANS(LFSUX, do_lsfp_X, true, false, true)
TRANS(PLFS, do_lsfp_PLS_D, false, false, true)
TRANS(LFD, do_lsfp_D, false, false, false)
TRANS(LFDU, do_lsfp_D, true, false, false)
TRANS(LFDX, do_lsfp_X, false, false, false)
TRANS(LFDUX, do_lsfp_X, true, false, false)
TRANS(PLFD, do_lsfp_PLS_D, false, false, false)

TRANS(STFS, do_lsfp_D, false, true, true)
TRANS(STFSU, do_lsfp_D, true, true, true)
TRANS(STFSX, do_lsfp_X, false, true, true)
TRANS(STFSUX, do_lsfp_X, true, true, true)
TRANS(PSTFS, do_lsfp_PLS_D, false, true, true)

TRANS(STFD, do_lsfp_D, false, true, false)
TRANS(STFDU, do_lsfp_D, true, true, false)
TRANS(STFDX, do_lsfp_X, false, true, false)
TRANS(STFDUX, do_lsfp_X, true, true, false)
TRANS(PSTFD, do_lsfp_PLS_D, false, true, false)

#undef _GEN_FLOAT_ACB
#undef GEN_FLOAT_ACB
#undef _GEN_FLOAT_AB
#undef GEN_FLOAT_AB
#undef _GEN_FLOAT_AC
#undef GEN_FLOAT_AC
#undef GEN_FLOAT_B
#undef GEN_FLOAT_BS

#undef GEN_LDF
#undef GEN_LDUF
#undef GEN_LDUXF
#undef GEN_LDXF
#undef GEN_LDFS

#undef GEN_STF
#undef GEN_STUF
#undef GEN_STUXF
#undef GEN_STXF
#undef GEN_STFS