/*
 * translate-fp.c
 *
 * Standard FPU translation
 */

static inline void gen_reset_fpstatus(void)
{
    gen_helper_reset_fpstatus(cpu_env);
}

static inline void gen_compute_fprf_float64(TCGv_i64 arg)
{
    gen_helper_compute_fprf_float64(cpu_env, arg);
    gen_helper_float_check_status(cpu_env);
}

#if defined(TARGET_PPC64)
static void gen_set_cr1_from_fpscr(DisasContext *ctx)
{
    TCGv_i32 tmp = tcg_temp_new_i32();
    tcg_gen_trunc_tl_i32(tmp, cpu_fpscr);
    tcg_gen_shri_i32(cpu_crf[1], tmp, 28);
    tcg_temp_free_i32(tmp);
}
#else
static void gen_set_cr1_from_fpscr(DisasContext *ctx)
{
    tcg_gen_shri_tl(cpu_crf[1], cpu_fpscr, 28);
}
#endif

/*** Floating-Point arithmetic ***/
#define _GEN_FLOAT_ACB(name, op1, op2, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    TCGv_i64 t0; \
    TCGv_i64 t1; \
    TCGv_i64 t2; \
    TCGv_i64 t3; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    t0 = tcg_temp_new_i64(); \
    t1 = tcg_temp_new_i64(); \
    t2 = tcg_temp_new_i64(); \
    t3 = tcg_temp_new_i64(); \
    gen_reset_fpstatus(); \
    get_fpr(t0, rA(ctx->opcode)); \
    get_fpr(t1, rC(ctx->opcode)); \
    get_fpr(t2, rB(ctx->opcode)); \
    gen_helper_f##name(t3, cpu_env, t0, t1, t2); \
    set_fpr(rD(ctx->opcode), t3); \
    if (set_fprf) { \
        gen_compute_fprf_float64(t3); \
    } \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
    tcg_temp_free_i64(t0); \
    tcg_temp_free_i64(t1); \
    tcg_temp_free_i64(t2); \
    tcg_temp_free_i64(t3); \
}

#define GEN_FLOAT_ACB(name, op2, set_fprf, type) \
_GEN_FLOAT_ACB(name, 0x3F, op2, set_fprf, type); \
_GEN_FLOAT_ACB(name##s, 0x3B, op2, set_fprf, type);

#define _GEN_FLOAT_AB(name, op1, op2, inval, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    TCGv_i64 t0; \
    TCGv_i64 t1; \
    TCGv_i64 t2; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    t0 = tcg_temp_new_i64(); \
    t1 = tcg_temp_new_i64(); \
    t2 = tcg_temp_new_i64(); \
    gen_reset_fpstatus(); \
    get_fpr(t0, rA(ctx->opcode)); \
    get_fpr(t1, rB(ctx->opcode)); \
    gen_helper_f##name(t2, cpu_env, t0, t1); \
    set_fpr(rD(ctx->opcode), t2); \
    if (set_fprf) { \
        gen_compute_fprf_float64(t2); \
    } \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
    tcg_temp_free_i64(t0); \
    tcg_temp_free_i64(t1); \
    tcg_temp_free_i64(t2); \
}
#define GEN_FLOAT_AB(name, op2, inval, set_fprf, type) \
_GEN_FLOAT_AB(name, 0x3F, op2, inval, set_fprf, type); \
_GEN_FLOAT_AB(name##s, 0x3B, op2, inval, set_fprf, type);

#define _GEN_FLOAT_AC(name, op1, op2, inval, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    TCGv_i64 t0; \
    TCGv_i64 t1; \
    TCGv_i64 t2; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    t0 = tcg_temp_new_i64(); \
    t1 = tcg_temp_new_i64(); \
    t2 = tcg_temp_new_i64(); \
    gen_reset_fpstatus(); \
    get_fpr(t0, rA(ctx->opcode)); \
    get_fpr(t1, rC(ctx->opcode)); \
    gen_helper_f##name(t2, cpu_env, t0, t1); \
    set_fpr(rD(ctx->opcode), t2); \
    if (set_fprf) { \
        gen_compute_fprf_float64(t2); \
    } \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
    tcg_temp_free_i64(t0); \
    tcg_temp_free_i64(t1); \
    tcg_temp_free_i64(t2); \
}
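/*
 * The *_AC variants differ from the *_AB variants above only in taking the
 * second source operand from rC rather than rB (fmul encodes its multiplier
 * in the FRC field).
 */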
#define GEN_FLOAT_AC(name, op2, inval, set_fprf, type) \
_GEN_FLOAT_AC(name, 0x3F, op2, inval, set_fprf, type); \
_GEN_FLOAT_AC(name##s, 0x3B, op2, inval, set_fprf, type);

#define GEN_FLOAT_B(name, op2, op3, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    TCGv_i64 t0; \
    TCGv_i64 t1; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    t0 = tcg_temp_new_i64(); \
    t1 = tcg_temp_new_i64(); \
    gen_reset_fpstatus(); \
    get_fpr(t0, rB(ctx->opcode)); \
    gen_helper_f##name(t1, cpu_env, t0); \
    set_fpr(rD(ctx->opcode), t1); \
    if (set_fprf) { \
        gen_helper_compute_fprf_float64(cpu_env, t1); \
    } \
    gen_helper_float_check_status(cpu_env); \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
    tcg_temp_free_i64(t0); \
    tcg_temp_free_i64(t1); \
}

#define GEN_FLOAT_BS(name, op1, op2, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    TCGv_i64 t0; \
    TCGv_i64 t1; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    t0 = tcg_temp_new_i64(); \
    t1 = tcg_temp_new_i64(); \
    gen_reset_fpstatus(); \
    get_fpr(t0, rB(ctx->opcode)); \
    gen_helper_f##name(t1, cpu_env, t0); \
    set_fpr(rD(ctx->opcode), t1); \
    if (set_fprf) { \
        gen_compute_fprf_float64(t1); \
    } \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
    tcg_temp_free_i64(t0); \
    tcg_temp_free_i64(t1); \
}

/* fadd - fadds */
GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT);
/* fdiv - fdivs */
GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT);
/* fmul - fmuls */
GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT);

/* fre */
GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT);

/* fres */
GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES);

/* frsqrte */
GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE);

/* frsqrtes */
static void gen_frsqrtes(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    gen_reset_fpstatus();
    get_fpr(t0, rB(ctx->opcode));
    gen_helper_frsqrtes(t1, cpu_env, t0);
    set_fpr(rD(ctx->opcode), t1);
    gen_compute_fprf_float64(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

/* fsel */
_GEN_FLOAT_ACB(sel, 0x3F, 0x17, 0, PPC_FLOAT_FSEL);
/* fsub - fsubs */
GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT);
/* Optional: */

/* fsqrt */
static void gen_fsqrt(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    gen_reset_fpstatus();
    get_fpr(t0, rB(ctx->opcode));
    gen_helper_fsqrt(t1, cpu_env, t0);
    set_fpr(rD(ctx->opcode), t1);
    gen_compute_fprf_float64(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

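/* fsqrts */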
static void gen_fsqrts(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    gen_reset_fpstatus();
    get_fpr(t0, rB(ctx->opcode));
    gen_helper_fsqrts(t1, cpu_env, t0);
    set_fpr(rD(ctx->opcode), t1);
    gen_compute_fprf_float64(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

/*** Floating-Point multiply-and-add ***/
/* fmadd - fmadds */
GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT);
/* fmsub - fmsubs */
GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT);
/* fnmadd - fnmadds */
GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT);
/* fnmsub - fnmsubs */
GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT);

/*** Floating-Point round & convert ***/
/* fctiw */
GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT);
/* fctiwu */
GEN_FLOAT_B(ctiwu, 0x0E, 0x04, 0, PPC2_FP_CVT_ISA206);
/* fctiwz */
GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT);
/* fctiwuz */
GEN_FLOAT_B(ctiwuz, 0x0F, 0x04, 0, PPC2_FP_CVT_ISA206);
/* frsp */
GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT);
/* fcfid */
GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC2_FP_CVT_S64);
/* fcfids */
GEN_FLOAT_B(cfids, 0x0E, 0x1A, 0, PPC2_FP_CVT_ISA206);
/* fcfidu */
GEN_FLOAT_B(cfidu, 0x0E, 0x1E, 0, PPC2_FP_CVT_ISA206);
/* fcfidus */
GEN_FLOAT_B(cfidus, 0x0E, 0x1E, 0, PPC2_FP_CVT_ISA206);
/* fctid */
GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC2_FP_CVT_S64);
/* fctidu */
GEN_FLOAT_B(ctidu, 0x0E, 0x1D, 0, PPC2_FP_CVT_ISA206);
/* fctidz */
GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC2_FP_CVT_S64);
/* fctiduz */
GEN_FLOAT_B(ctiduz, 0x0F, 0x1D, 0, PPC2_FP_CVT_ISA206);

/* frin */
GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT);
/* friz */
GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT);
/* frip */
GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT);
/* frim */
GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT);

static void gen_ftdiv(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    get_fpr(t0, rA(ctx->opcode));
    get_fpr(t1, rB(ctx->opcode));
    gen_helper_ftdiv(cpu_crf[crfD(ctx->opcode)], t0, t1);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

static void gen_ftsqrt(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    get_fpr(t0, rB(ctx->opcode));
    gen_helper_ftsqrt(cpu_crf[crfD(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

/*** Floating-Point compare ***/

/* fcmpo */
static void gen_fcmpo(DisasContext *ctx)
{
    TCGv_i32 crf;
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    gen_reset_fpstatus();
    crf = tcg_const_i32(crfD(ctx->opcode));
    get_fpr(t0, rA(ctx->opcode));
    get_fpr(t1, rB(ctx->opcode));
    gen_helper_fcmpo(cpu_env, t0, t1, crf);
    tcg_temp_free_i32(crf);
    gen_helper_float_check_status(cpu_env);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

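/*
 * The translation of fcmpu below is identical to fcmpo above apart from the
 * helper called: the ordered/unordered distinction (fcmpo can raise VXVC on
 * NaN operands, fcmpu never does) is handled entirely in the helpers.
 */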
/* fcmpu */
static void gen_fcmpu(DisasContext *ctx)
{
    TCGv_i32 crf;
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    gen_reset_fpstatus();
    crf = tcg_const_i32(crfD(ctx->opcode));
    get_fpr(t0, rA(ctx->opcode));
    get_fpr(t1, rB(ctx->opcode));
    gen_helper_fcmpu(cpu_env, t0, t1, crf);
    tcg_temp_free_i32(crf);
    gen_helper_float_check_status(cpu_env);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

/*** Floating-point move ***/
/* fabs */
/* XXX: beware that fabs never checks for NaNs nor updates the FPSCR */
static void gen_fabs(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    get_fpr(t0, rB(ctx->opcode));
    tcg_gen_andi_i64(t1, t0, ~(1ULL << 63));
    set_fpr(rD(ctx->opcode), t1);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

/* fmr - fmr. */
/* XXX: beware that fmr never checks for NaNs nor updates the FPSCR */
static void gen_fmr(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    get_fpr(t0, rB(ctx->opcode));
    set_fpr(rD(ctx->opcode), t0);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
}

/* fnabs */
/* XXX: beware that fnabs never checks for NaNs nor updates the FPSCR */
static void gen_fnabs(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    get_fpr(t0, rB(ctx->opcode));
    tcg_gen_ori_i64(t1, t0, 1ULL << 63);
    set_fpr(rD(ctx->opcode), t1);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

/* fneg */
/* XXX: beware that fneg never checks for NaNs nor updates the FPSCR */
static void gen_fneg(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    get_fpr(t0, rB(ctx->opcode));
    tcg_gen_xori_i64(t1, t0, 1ULL << 63);
    set_fpr(rD(ctx->opcode), t1);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

/* fcpsgn: PowerPC 2.05 specification */
/* XXX: beware that fcpsgn never checks for NaNs nor updates the FPSCR */
static void gen_fcpsgn(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    TCGv_i64 t2;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    t2 = tcg_temp_new_i64();
    get_fpr(t0, rA(ctx->opcode));
    get_fpr(t1, rB(ctx->opcode));
    tcg_gen_deposit_i64(t2, t0, t1, 0, 63);
    set_fpr(rD(ctx->opcode), t2);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
}

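/*
 * fmrgew/fmrgow - Floating Merge Even/Odd Word (ISA 2.07): combine the
 * even-numbered (high) or odd-numbered (low) 32-bit words of FRA and FRB.
 */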
static void gen_fmrgew(DisasContext *ctx)
{
    TCGv_i64 b0;
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    b0 = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    get_fpr(t0, rB(ctx->opcode));
    tcg_gen_shri_i64(b0, t0, 32);
    get_fpr(t0, rA(ctx->opcode));
    tcg_gen_deposit_i64(t1, t0, b0, 0, 32);
    set_fpr(rD(ctx->opcode), t1);
    tcg_temp_free_i64(b0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

static void gen_fmrgow(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    TCGv_i64 t2;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    t2 = tcg_temp_new_i64();
    get_fpr(t0, rB(ctx->opcode));
    get_fpr(t1, rA(ctx->opcode));
    tcg_gen_deposit_i64(t2, t0, t1, 32, 32);
    set_fpr(rD(ctx->opcode), t2);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
}

/*** Floating-Point status & ctrl register ***/

/* mcrfs */
static void gen_mcrfs(DisasContext *ctx)
{
    TCGv tmp = tcg_temp_new();
    TCGv_i32 tmask;
    TCGv_i64 tnew_fpscr = tcg_temp_new_i64();
    int bfa;
    int nibble;
    int shift;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    bfa = crfS(ctx->opcode);
    nibble = 7 - bfa;
    shift = 4 * nibble;
    tcg_gen_shri_tl(tmp, cpu_fpscr, shift);
    tcg_gen_trunc_tl_i32(cpu_crf[crfD(ctx->opcode)], tmp);
    tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)],
                     0xf);
    tcg_temp_free(tmp);
    tcg_gen_extu_tl_i64(tnew_fpscr, cpu_fpscr);
    /* Only the exception bits (including FX) should be cleared if read */
    tcg_gen_andi_i64(tnew_fpscr, tnew_fpscr,
                     ~((0xF << shift) & FP_EX_CLEAR_BITS));
    /* FEX and VX need to be updated, so don't set fpscr directly */
    tmask = tcg_const_i32(1 << nibble);
    gen_helper_store_fpscr(cpu_env, tnew_fpscr, tmask);
    tcg_temp_free_i32(tmask);
    tcg_temp_free_i64(tnew_fpscr);
}

/* mffs */
static void gen_mffs(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_reset_fpstatus();
    tcg_gen_extu_tl_i64(t0, cpu_fpscr);
    set_fpr(rD(ctx->opcode), t0);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
}

/* mffsl */
static void gen_mffsl(DisasContext *ctx)
{
    TCGv_i64 t0;

    if (unlikely(!(ctx->insns_flags2 & PPC2_ISA300))) {
        return gen_mffs(ctx);
    }

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_reset_fpstatus();
    tcg_gen_extu_tl_i64(t0, cpu_fpscr);
    /* Mask everything except mode, status, and enables. */
    tcg_gen_andi_i64(t0, t0, FP_DRN | FP_STATUS | FP_ENABLES | FP_RN);
    set_fpr(rD(ctx->opcode), t0);
    tcg_temp_free_i64(t0);
}

/* mffsce */
static void gen_mffsce(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i32 mask;

    if (unlikely(!(ctx->insns_flags2 & PPC2_ISA300))) {
        return gen_mffs(ctx);
    }

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }

    t0 = tcg_temp_new_i64();

    gen_reset_fpstatus();
    tcg_gen_extu_tl_i64(t0, cpu_fpscr);
    set_fpr(rD(ctx->opcode), t0);

    /* Clear exception enable bits in the FPSCR. */
    tcg_gen_andi_i64(t0, t0, ~FP_ENABLES);
    mask = tcg_const_i32(0x0003);
    gen_helper_store_fpscr(cpu_env, t0, mask);

    tcg_temp_free_i32(mask);
    tcg_temp_free_i64(t0);
}

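/*
 * Common body shared by mffscrn and mffscrni below.  Despite the
 * gen_helper_ prefix, this is a local C helper, not a call into a
 * generated TCG helper.
 */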
static void gen_helper_mffscrn(DisasContext *ctx, TCGv_i64 t1)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i32 mask = tcg_const_i32(0x0001);

    gen_reset_fpstatus();
    tcg_gen_extu_tl_i64(t0, cpu_fpscr);
    tcg_gen_andi_i64(t0, t0, FP_DRN | FP_ENABLES | FP_RN);
    set_fpr(rD(ctx->opcode), t0);

    /* Mask FPSCR value to clear RN. */
    tcg_gen_andi_i64(t0, t0, ~FP_RN);

    /* Merge RN into FPSCR value. */
    tcg_gen_or_i64(t0, t0, t1);

    gen_helper_store_fpscr(cpu_env, t0, mask);

    tcg_temp_free_i32(mask);
    tcg_temp_free_i64(t0);
}

/* mffscrn */
static void gen_mffscrn(DisasContext *ctx)
{
    TCGv_i64 t1;

    if (unlikely(!(ctx->insns_flags2 & PPC2_ISA300))) {
        return gen_mffs(ctx);
    }

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }

    t1 = tcg_temp_new_i64();
    get_fpr(t1, rB(ctx->opcode));
    /* Mask FRB to get just RN. */
    tcg_gen_andi_i64(t1, t1, FP_RN);

    gen_helper_mffscrn(ctx, t1);

    tcg_temp_free_i64(t1);
}

/* mffscrni */
static void gen_mffscrni(DisasContext *ctx)
{
    TCGv_i64 t1;

    if (unlikely(!(ctx->insns_flags2 & PPC2_ISA300))) {
        return gen_mffs(ctx);
    }

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }

    t1 = tcg_const_i64((uint64_t)RM(ctx->opcode));

    gen_helper_mffscrn(ctx, t1);

    tcg_temp_free_i64(t1);
}

/* mtfsb0 */
static void gen_mtfsb0(DisasContext *ctx)
{
    uint8_t crb;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    crb = 31 - crbD(ctx->opcode);
    gen_reset_fpstatus();
    if (likely(crb != FPSCR_FEX && crb != FPSCR_VX)) {
        TCGv_i32 t0;
        t0 = tcg_const_i32(crb);
        gen_helper_fpscr_clrbit(cpu_env, t0);
        tcg_temp_free_i32(t0);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
        tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
    }
}

/* mtfsb1 */
static void gen_mtfsb1(DisasContext *ctx)
{
    uint8_t crb;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    crb = 31 - crbD(ctx->opcode);
    /* XXX: we pretend we can only do IEEE floating-point computations */
    if (likely(crb != FPSCR_FEX && crb != FPSCR_VX && crb != FPSCR_NI)) {
        TCGv_i32 t0;
        t0 = tcg_const_i32(crb);
        gen_helper_fpscr_setbit(cpu_env, t0);
        tcg_temp_free_i32(t0);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
        tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
    }
    /* We can raise a deferred exception */
    gen_helper_fpscr_check_status(cpu_env);
}

/* mtfsf */
static void gen_mtfsf(DisasContext *ctx)
{
    TCGv_i32 t0;
    TCGv_i64 t1;
    int flm, l, w;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    flm = FPFLM(ctx->opcode);
    l = FPL(ctx->opcode);
    w = FPW(ctx->opcode);
    if (unlikely(w & !(ctx->insns_flags2 & PPC2_ISA205))) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }
    if (l) {
        t0 = tcg_const_i32((ctx->insns_flags2 & PPC2_ISA205) ?
                           0xffff : 0xff);
    } else {
        t0 = tcg_const_i32(flm << (w * 8));
    }
    t1 = tcg_temp_new_i64();
    get_fpr(t1, rB(ctx->opcode));
    gen_helper_store_fpscr(cpu_env, t1, t0);
    tcg_temp_free_i32(t0);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
        tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
    }
    /* We can raise a deferred exception */
    gen_helper_fpscr_check_status(cpu_env);
    tcg_temp_free_i64(t1);
}

/* mtfsfi */
static void gen_mtfsfi(DisasContext *ctx)
{
    int bf, sh, w;
    TCGv_i64 t0;
    TCGv_i32 t1;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    w = FPW(ctx->opcode);
    bf = FPBF(ctx->opcode);
    if (unlikely(w & !(ctx->insns_flags2 & PPC2_ISA205))) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }
    sh = (8 * w) + 7 - bf;
    t0 = tcg_const_i64(((uint64_t)FPIMM(ctx->opcode)) << (4 * sh));
    t1 = tcg_const_i32(1 << sh);
    gen_helper_store_fpscr(cpu_env, t0, t1);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
        tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
    }
    /* We can raise a deferred exception */
    gen_helper_fpscr_check_status(cpu_env);
}

static void gen_qemu_ld32fs(DisasContext *ctx, TCGv_i64 dest, TCGv addr)
{
    TCGv_i32 tmp = tcg_temp_new_i32();
    tcg_gen_qemu_ld_i32(tmp, addr, ctx->mem_idx, DEF_MEMOP(MO_UL));
    gen_helper_todouble(dest, tmp);
    tcg_temp_free_i32(tmp);
}

/* lfdepx (external PID lfdx) */
static void gen_lfdepx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    CHK_SV;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    t0 = tcg_temp_new_i64();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(t0, EA, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UQ));
    set_fpr(rD(ctx->opcode), t0);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

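/*
 * lfdp/lfdpx (and stfdp/stfdpx below) are the ISA 2.05 floating-point
 * doubleword pair accesses; they transfer FRT/FRS and the following FPR.
 */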
/* lfdp */
static void gen_lfdp(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    gen_addr_imm_index(ctx, EA, 0);
    t0 = tcg_temp_new_i64();
    /*
     * We only need to swap high and low halves. gen_qemu_ld64_i64
     * does necessary 64-bit byteswap already.
     */
    if (unlikely(ctx->le_mode)) {
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode) + 1, t0);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode), t0);
    } else {
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode), t0);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode) + 1, t0);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

/* lfdpx */
static void gen_lfdpx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    t0 = tcg_temp_new_i64();
    /*
     * We only need to swap high and low halves. gen_qemu_ld64_i64
     * does necessary 64-bit byteswap already.
     */
    if (unlikely(ctx->le_mode)) {
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode) + 1, t0);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode), t0);
    } else {
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode), t0);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode) + 1, t0);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

/* lfiwax */
static void gen_lfiwax(DisasContext *ctx)
{
    TCGv EA;
    TCGv t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    t0 = tcg_temp_new();
    t1 = tcg_temp_new_i64();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld32s(ctx, t0, EA);
    tcg_gen_ext_tl_i64(t1, t0);
    set_fpr(rD(ctx->opcode), t1);
    tcg_temp_free(EA);
    tcg_temp_free(t0);
    tcg_temp_free_i64(t1);
}

/* lfiwzx */
static void gen_lfiwzx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    t0 = tcg_temp_new_i64();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld32u_i64(ctx, t0, EA);
    set_fpr(rD(ctx->opcode), t0);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

#define GEN_STXF(name, stop, opc2, opc3, type) \
static void glue(gen_, name##x)(DisasContext *ctx) \
{ \
    TCGv EA; \
    TCGv_i64 t0; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    t0 = tcg_temp_new_i64(); \
    gen_addr_reg_index(ctx, EA); \
    get_fpr(t0, rS(ctx->opcode)); \
    gen_qemu_##stop(ctx, t0, EA); \
    tcg_temp_free(EA); \
    tcg_temp_free_i64(t0); \
}

static void gen_qemu_st32fs(DisasContext *ctx, TCGv_i64 src, TCGv addr)
{
    TCGv_i32 tmp = tcg_temp_new_i32();
    gen_helper_tosingle(tmp, src);
    tcg_gen_qemu_st_i32(tmp, addr, ctx->mem_idx, DEF_MEMOP(MO_UL));
    tcg_temp_free_i32(tmp);
}

/* stfdepx (external PID stfdx) */
static void gen_stfdepx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    CHK_SV;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    t0 = tcg_temp_new_i64();
    gen_addr_reg_index(ctx, EA);
    get_fpr(t0, rD(ctx->opcode));
    tcg_gen_qemu_st_i64(t0, EA, PPC_TLB_EPID_STORE, DEF_MEMOP(MO_UQ));
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

/* stfdp */
static void gen_stfdp(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    t0 = tcg_temp_new_i64();
    gen_addr_imm_index(ctx, EA, 0);
    /*
     * We only need to swap high and low halves. gen_qemu_st64_i64
     * does necessary 64-bit byteswap already.
     */
    if (unlikely(ctx->le_mode)) {
        get_fpr(t0, rD(ctx->opcode) + 1);
        gen_qemu_st64_i64(ctx, t0, EA);
        tcg_gen_addi_tl(EA, EA, 8);
        get_fpr(t0, rD(ctx->opcode));
        gen_qemu_st64_i64(ctx, t0, EA);
    } else {
        get_fpr(t0, rD(ctx->opcode));
        gen_qemu_st64_i64(ctx, t0, EA);
        tcg_gen_addi_tl(EA, EA, 8);
        get_fpr(t0, rD(ctx->opcode) + 1);
        gen_qemu_st64_i64(ctx, t0, EA);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

/* stfdpx */
static void gen_stfdpx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    t0 = tcg_temp_new_i64();
    gen_addr_reg_index(ctx, EA);
    /*
     * We only need to swap high and low halves. gen_qemu_st64_i64
     * does necessary 64-bit byteswap already.
     */
    if (unlikely(ctx->le_mode)) {
        get_fpr(t0, rD(ctx->opcode) + 1);
        gen_qemu_st64_i64(ctx, t0, EA);
        tcg_gen_addi_tl(EA, EA, 8);
        get_fpr(t0, rD(ctx->opcode));
        gen_qemu_st64_i64(ctx, t0, EA);
    } else {
        get_fpr(t0, rD(ctx->opcode));
        gen_qemu_st64_i64(ctx, t0, EA);
        tcg_gen_addi_tl(EA, EA, 8);
        get_fpr(t0, rD(ctx->opcode) + 1);
        gen_qemu_st64_i64(ctx, t0, EA);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

/* Optional: */
static inline void gen_qemu_st32fiw(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
{
    TCGv t0 = tcg_temp_new();
    tcg_gen_trunc_i64_tl(t0, arg1);
    gen_qemu_st32(ctx, t0, arg2);
    tcg_temp_free(t0);
}
/* stfiwx */
GEN_STXF(stfiw, st32fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX);

/* POWER2 specific instructions */
/* Quad manipulation (load/store two floats at a time) */

/* lfq */
static void gen_lfq(DisasContext *ctx)
{
    int rd = rD(ctx->opcode);
    TCGv t0;
    TCGv_i64 t1;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new();
    t1 = tcg_temp_new_i64();
    gen_addr_imm_index(ctx, t0, 0);
    gen_qemu_ld64_i64(ctx, t1, t0);
    set_fpr(rd, t1);
    gen_addr_add(ctx, t0, t0, 8);
    gen_qemu_ld64_i64(ctx, t1, t0);
    set_fpr((rd + 1) % 32, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i64(t1);
}

/* lfqu */
static void gen_lfqu(DisasContext *ctx)
{
    int ra = rA(ctx->opcode);
    int rd = rD(ctx->opcode);
    TCGv t0, t1;
    TCGv_i64 t2;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    t2 = tcg_temp_new_i64();
    gen_addr_imm_index(ctx, t0, 0);
    gen_qemu_ld64_i64(ctx, t2, t0);
    set_fpr(rd, t2);
    gen_addr_add(ctx, t1, t0, 8);
    gen_qemu_ld64_i64(ctx, t2, t1);
    set_fpr((rd + 1) % 32, t2);
    if (ra != 0) {
        tcg_gen_mov_tl(cpu_gpr[ra], t0);
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free_i64(t2);
}

/* lfqux */
static void gen_lfqux(DisasContext *ctx)
{
    int ra = rA(ctx->opcode);
    int rd = rD(ctx->opcode);
    gen_set_access_type(ctx, ACCESS_FLOAT);
    TCGv t0, t1;
    TCGv_i64 t2;
    t2 = tcg_temp_new_i64();
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_qemu_ld64_i64(ctx, t2, t0);
    set_fpr(rd, t2);
    t1 = tcg_temp_new();
    gen_addr_add(ctx, t1, t0, 8);
    gen_qemu_ld64_i64(ctx, t2, t1);
    set_fpr((rd + 1) % 32, t2);
    tcg_temp_free(t1);
    if (ra != 0) {
        tcg_gen_mov_tl(cpu_gpr[ra], t0);
    }
    tcg_temp_free(t0);
    tcg_temp_free_i64(t2);
}

/* lfqx */
static void gen_lfqx(DisasContext *ctx)
{
    int rd = rD(ctx->opcode);
    TCGv t0;
    TCGv_i64 t1;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new();
    t1 = tcg_temp_new_i64();
    gen_addr_reg_index(ctx, t0);
    gen_qemu_ld64_i64(ctx, t1, t0);
    set_fpr(rd, t1);
    gen_addr_add(ctx, t0, t0, 8);
    gen_qemu_ld64_i64(ctx, t1, t0);
    set_fpr((rd + 1) % 32, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i64(t1);
}

/* stfq */
static void gen_stfq(DisasContext *ctx)
{
    int rd = rD(ctx->opcode);
    TCGv t0;
    TCGv_i64 t1;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new();
    t1 = tcg_temp_new_i64();
    gen_addr_imm_index(ctx, t0, 0);
    get_fpr(t1, rd);
    gen_qemu_st64_i64(ctx, t1, t0);
    gen_addr_add(ctx, t0, t0, 8);
    get_fpr(t1, (rd + 1) % 32);
    gen_qemu_st64_i64(ctx, t1, t0);
    tcg_temp_free(t0);
    tcg_temp_free_i64(t1);
}

/* stfqu */
static void gen_stfqu(DisasContext *ctx)
{
    int ra = rA(ctx->opcode);
    int rd = rD(ctx->opcode);
    TCGv t0, t1;
    TCGv_i64 t2;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t2 = tcg_temp_new_i64();
    t0 = tcg_temp_new();
    gen_addr_imm_index(ctx, t0, 0);
    get_fpr(t2, rd);
    gen_qemu_st64_i64(ctx, t2, t0);
    t1 = tcg_temp_new();
    gen_addr_add(ctx, t1, t0, 8);
    get_fpr(t2, (rd + 1) % 32);
    gen_qemu_st64_i64(ctx, t2, t1);
    tcg_temp_free(t1);
    if (ra != 0) {
        tcg_gen_mov_tl(cpu_gpr[ra], t0);
    }
    tcg_temp_free(t0);
    tcg_temp_free_i64(t2);
}

/* stfqux */
static void gen_stfqux(DisasContext *ctx)
{
    int ra = rA(ctx->opcode);
    int rd = rD(ctx->opcode);
    TCGv t0, t1;
    TCGv_i64 t2;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t2 = tcg_temp_new_i64();
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    get_fpr(t2, rd);
    gen_qemu_st64_i64(ctx, t2, t0);
    t1 = tcg_temp_new();
    gen_addr_add(ctx, t1, t0, 8);
    get_fpr(t2, (rd + 1) % 32);
    gen_qemu_st64_i64(ctx, t2, t1);
    tcg_temp_free(t1);
    if (ra != 0) {
        tcg_gen_mov_tl(cpu_gpr[ra], t0);
    }
    tcg_temp_free(t0);
    tcg_temp_free_i64(t2);
}

/* stfqx */
static void gen_stfqx(DisasContext *ctx)
{
    int rd = rD(ctx->opcode);
    TCGv t0;
    TCGv_i64 t1;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t1 = tcg_temp_new_i64();
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    get_fpr(t1, rd);
    gen_qemu_st64_i64(ctx, t1, t0);
    gen_addr_add(ctx, t0, t0, 8);
    get_fpr(t1, (rd + 1) % 32);
    gen_qemu_st64_i64(ctx, t1, t0);
    tcg_temp_free(t0);
    tcg_temp_free_i64(t1);
}

/* Floating-point Load/Store Instructions */
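/*
 * Common handler for the decodetree-based FP load/store patterns below:
 * "store" selects load vs. store, "single" selects 32-bit accesses with
 * single/double conversion, and "update" writes the effective address
 * back to RA (invalid when RA is 0).
 */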
static bool do_lsfpsd(DisasContext *ctx, int rt, int ra, TCGv displ,
                      bool update, bool store, bool single)
{
    TCGv ea;
    TCGv_i64 t0;
    REQUIRE_INSNS_FLAGS(ctx, FLOAT);
    REQUIRE_FPU(ctx);
    if (update && ra == 0) {
        gen_invalid(ctx);
        return true;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new_i64();
    ea = do_ea_calc(ctx, ra, displ);
    if (store) {
        get_fpr(t0, rt);
        if (single) {
            gen_qemu_st32fs(ctx, t0, ea);
        } else {
            gen_qemu_st64_i64(ctx, t0, ea);
        }
    } else {
        if (single) {
            gen_qemu_ld32fs(ctx, t0, ea);
        } else {
            gen_qemu_ld64_i64(ctx, t0, ea);
        }
        set_fpr(rt, t0);
    }
    if (update) {
        tcg_gen_mov_tl(cpu_gpr[ra], ea);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free(ea);
    return true;
}

static bool do_lsfp_D(DisasContext *ctx, arg_D *a, bool update, bool store,
                      bool single)
{
    return do_lsfpsd(ctx, a->rt, a->ra, tcg_constant_tl(a->si), update, store,
                     single);
}

static bool do_lsfp_PLS_D(DisasContext *ctx, arg_PLS_D *a, bool update,
                          bool store, bool single)
{
    arg_D d;
    if (!resolve_PLS_D(ctx, &d, a)) {
        return true;
    }
    return do_lsfp_D(ctx, &d, update, store, single);
}

static bool do_lsfp_X(DisasContext *ctx, arg_X *a, bool update,
                      bool store, bool single)
{
    return do_lsfpsd(ctx, a->rt, a->ra, cpu_gpr[a->rb], update, store, single);
}

TRANS(LFS, do_lsfp_D, false, false, true)
TRANS(LFSU, do_lsfp_D, true, false, true)
TRANS(LFSX, do_lsfp_X, false, false, true)
TRANS(LFSUX, do_lsfp_X, true, false, true)
TRANS(PLFS, do_lsfp_PLS_D, false, false, true)

TRANS(LFD, do_lsfp_D, false, false, false)
TRANS(LFDU, do_lsfp_D, true, false, false)
TRANS(LFDX, do_lsfp_X, false, false, false)
TRANS(LFDUX, do_lsfp_X, true, false, false)
TRANS(PLFD, do_lsfp_PLS_D, false, false, false)

TRANS(STFS, do_lsfp_D, false, true, true)
TRANS(STFSU, do_lsfp_D, true, true, true)
TRANS(STFSX, do_lsfp_X, false, true, true)
TRANS(STFSUX, do_lsfp_X, true, true, true)
TRANS(PSTFS, do_lsfp_PLS_D, false, true, true)

TRANS(STFD, do_lsfp_D, false, true, false)
TRANS(STFDU, do_lsfp_D, true, true, false)
TRANS(STFDX, do_lsfp_X, false, true, false)
TRANS(STFDUX, do_lsfp_X, true, true, false)
TRANS(PSTFD, do_lsfp_PLS_D, false, true, false)

#undef _GEN_FLOAT_ACB
#undef GEN_FLOAT_ACB
#undef _GEN_FLOAT_AB
#undef GEN_FLOAT_AB
#undef _GEN_FLOAT_AC
#undef GEN_FLOAT_AC
#undef GEN_FLOAT_B
#undef GEN_FLOAT_BS

#undef GEN_LDF
#undef GEN_LDUF
#undef GEN_LDUXF
#undef GEN_LDXF
#undef GEN_LDFS

#undef GEN_STF
#undef GEN_STUF
#undef GEN_STUXF
#undef GEN_STXF
#undef GEN_STFS