/*** VSX extension ***/

static inline void get_cpu_vsr(TCGv_i64 dst, int n, bool high)
{
    tcg_gen_ld_i64(dst, cpu_env, vsr64_offset(n, high));
}

static inline void set_cpu_vsr(int n, TCGv_i64 src, bool high)
{
    tcg_gen_st_i64(src, cpu_env, vsr64_offset(n, high));
}

static inline TCGv_ptr gen_vsr_ptr(int reg)
{
    TCGv_ptr r = tcg_temp_new_ptr();
    tcg_gen_addi_ptr(r, cpu_env, vsr_full_offset(reg));
    return r;
}

#define VSX_LOAD_SCALAR(name, operation)                      \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, t0, EA);                        \
    set_cpu_vsr(xT(ctx->opcode), t0, true);                   \
    /* NOTE: cpu_vsrl is undefined */                         \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}

VSX_LOAD_SCALAR(lxsdx, ld64_i64)
VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
VSX_LOAD_SCALAR(lxsibzx, ld8u_i64)
VSX_LOAD_SCALAR(lxsihzx, ld16u_i64)
VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
VSX_LOAD_SCALAR(lxsspx, ld32fs)

static void gen_lxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsr(xT(ctx->opcode), t0, true);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsr(xT(ctx->opcode), t0, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

static void gen_lxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEUQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xth, t1, t0, 32, 32);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEUQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xtl, t1, t0, 32, 32);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEUQ);
    }
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

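/*
 * VSRs 0-31 overlay the FPRs and VSRs 32-63 overlay the VRs, so
 * instructions that can address the whole 64-register file check the
 * VSX facility for targets below 32 and the AltiVec facility above.
 */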
static void gen_lxvwsx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i32 data;

    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);

    data = tcg_temp_new_i32();
    tcg_gen_qemu_ld_i32(data, EA, ctx->mem_idx, DEF_MEMOP(MO_UL));
    tcg_gen_gvec_dup_i32(MO_UL, vsr_full_offset(xT(ctx->opcode)), 16, 16, data);

    tcg_temp_free(EA);
    tcg_temp_free_i32(data);
}

static void gen_lxvdsx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 data;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);

    data = tcg_temp_new_i64();
    tcg_gen_qemu_ld_i64(data, EA, ctx->mem_idx, DEF_MEMOP(MO_UQ));
    tcg_gen_gvec_dup_i64(MO_UQ, vsr_full_offset(xT(ctx->opcode)), 16, 16, data);

    tcg_temp_free(EA);
    tcg_temp_free_i64(data);
}

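/*
 * Byte-swap helpers for a 128-bit value held as two i64 halves:
 * gen_bswap16x8 byte-reverses each of the eight halfwords with a
 * mask-and-shift, while gen_bswap32x4 byte-reverses each of the four
 * words by bswapping each doubleword and then swapping its word halves.
 */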
static void gen_bswap16x8(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 mask = tcg_const_i64(0x00FF00FF00FF00FF);
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    /* outh = ((inh & mask) << 8) | ((inh >> 8) & mask) */
    tcg_gen_and_i64(t0, inh, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inh, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outh, t0, t1);

    /* outl = ((inl & mask) << 8) | ((inl >> 8) & mask) */
    tcg_gen_and_i64(t0, inl, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inl, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outl, t0, t1);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(mask);
}

static void gen_bswap32x4(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 hi = tcg_temp_new_i64();
    TCGv_i64 lo = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(hi, inh);
    tcg_gen_bswap64_i64(lo, inl);
    tcg_gen_shri_i64(outh, hi, 32);
    tcg_gen_deposit_i64(outh, outh, hi, 32, 32);
    tcg_gen_shri_i64(outl, lo, 32);
    tcg_gen_deposit_i64(outl, outl, lo, 32, 32);

    tcg_temp_free_i64(hi);
    tcg_temp_free_i64(lo);
}

static void gen_lxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);

    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEUQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEUQ);
    if (ctx->le_mode) {
        gen_bswap16x8(xth, xtl, xth, xtl);
    }
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

static void gen_lxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEUQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEUQ);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

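/*
 * lxvl/lxvll and stxvl/stxvll transfer a variable number of bytes, so
 * they are implemented entirely in out-of-line helpers that receive the
 * raw rB value (which encodes the transfer length) along with the EA
 * and a pointer to the VSR.
 */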
#ifdef TARGET_PPC64
#define VSX_VECTOR_LOAD_STORE_LENGTH(name)                             \
static void gen_##name(DisasContext *ctx)                              \
{                                                                      \
    TCGv EA;                                                           \
    TCGv_ptr xt;                                                       \
                                                                       \
    if (xT(ctx->opcode) < 32) {                                        \
        if (unlikely(!ctx->vsx_enabled)) {                             \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                     \
            return;                                                    \
        }                                                              \
    } else {                                                           \
        if (unlikely(!ctx->altivec_enabled)) {                         \
            gen_exception(ctx, POWERPC_EXCP_VPU);                      \
            return;                                                    \
        }                                                              \
    }                                                                  \
    EA = tcg_temp_new();                                               \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                 \
    gen_set_access_type(ctx, ACCESS_INT);                              \
    gen_addr_register(ctx, EA);                                        \
    gen_helper_##name(cpu_env, EA, xt, cpu_gpr[rB(ctx->opcode)]);      \
    tcg_temp_free(EA);                                                 \
    tcg_temp_free_ptr(xt);                                             \
}

VSX_VECTOR_LOAD_STORE_LENGTH(lxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(lxvll)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvll)
#endif

#define VSX_STORE_SCALAR(name, operation)                     \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    get_cpu_vsr(t0, xS(ctx->opcode), true);                   \
    gen_qemu_##operation(ctx, t0, EA);                        \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}

VSX_STORE_SCALAR(stxsdx, st64_i64)

VSX_STORE_SCALAR(stxsibx, st8_i64)
VSX_STORE_SCALAR(stxsihx, st16_i64)
VSX_STORE_SCALAR(stxsiwx, st32_i64)
VSX_STORE_SCALAR(stxsspx, st32fs)

static void gen_stxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    get_cpu_vsr(t0, xS(ctx->opcode), true);
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_gen_addi_tl(EA, EA, 8);
    get_cpu_vsr(t0, xS(ctx->opcode), false);
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

static void gen_stxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    get_cpu_vsr(xsl, xS(ctx->opcode), false);
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shri_i64(t0, xsh, 32);
        tcg_gen_deposit_i64(t1, t0, xsh, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_shri_i64(t0, xsl, 32);
        tcg_gen_deposit_i64(t1, t0, xsl, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEUQ);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEUQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

static void gen_stxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    get_cpu_vsr(xsl, xS(ctx->opcode), false);
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 outh = tcg_temp_new_i64();
        TCGv_i64 outl = tcg_temp_new_i64();

        gen_bswap16x8(outh, outl, xsh, xsl);
        tcg_gen_qemu_st_i64(outh, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(outl, EA, ctx->mem_idx, MO_BEUQ);
        tcg_temp_free_i64(outh);
        tcg_temp_free_i64(outl);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEUQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

static void gen_stxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    get_cpu_vsr(xsl, xS(ctx->opcode), false);
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEUQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEUQ);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

static void gen_mfvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    tcg_gen_ext32u_i64(tmp, xsh);
    tcg_gen_trunc_i64_tl(cpu_gpr[rA(ctx->opcode)], tmp);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

static void gen_mtvsrwa(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32s_i64(xsh, tmp);
    set_cpu_vsr(xT(ctx->opcode), xsh, true);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

static void gen_mtvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32u_i64(xsh, tmp);
    set_cpu_vsr(xT(ctx->opcode), xsh, true);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

#if defined(TARGET_PPC64)
static void gen_mfvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsr(t0, xS(ctx->opcode), true);
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    set_cpu_vsr(xT(ctx->opcode), t0, true);
    tcg_temp_free_i64(t0);
}

static void gen_mfvsrld(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsr(t0, xS(ctx->opcode), false);
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrdd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    if (!rA(ctx->opcode)) {
        tcg_gen_movi_i64(t0, 0);
    } else {
        tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    }
    set_cpu_vsr(xT(ctx->opcode), t0, true);

    tcg_gen_mov_i64(t0, cpu_gpr[rB(ctx->opcode)]);
    set_cpu_vsr(xT(ctx->opcode), t0, false);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrws(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    tcg_gen_deposit_i64(t0, cpu_gpr[rA(ctx->opcode)],
                        cpu_gpr[rA(ctx->opcode)], 32, 32);
    set_cpu_vsr(xT(ctx->opcode), t0, false);
    set_cpu_vsr(xT(ctx->opcode), t0, true);
    tcg_temp_free_i64(t0);
}

#endif

#define OP_ABS 1
#define OP_NABS 2
#define OP_NEG 3
#define OP_CPSGN 4
#define SGN_MASK_DP 0x8000000000000000ull
#define SGN_MASK_SP 0x8000000080000000ull

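/*
 * The scalar and vector sign-manipulation ops below all reduce to bit
 * operations with a sign mask: andc clears the sign bit (abs), or sets
 * it (nabs), xor flips it (neg), and cpsgn replaces xB's sign with
 * xA's.
 */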
#define VSX_SCALAR_MOVE(name, op, sgn_mask)                       \
static void glue(gen_, name)(DisasContext *ctx)                   \
    {                                                             \
        TCGv_i64 xb, sgm;                                         \
        if (unlikely(!ctx->vsx_enabled)) {                        \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                \
            return;                                               \
        }                                                         \
        xb = tcg_temp_new_i64();                                  \
        sgm = tcg_temp_new_i64();                                 \
        get_cpu_vsr(xb, xB(ctx->opcode), true);                   \
        tcg_gen_movi_i64(sgm, sgn_mask);                          \
        switch (op) {                                             \
        case OP_ABS: {                                            \
            tcg_gen_andc_i64(xb, xb, sgm);                        \
            break;                                                \
        }                                                         \
        case OP_NABS: {                                           \
            tcg_gen_or_i64(xb, xb, sgm);                          \
            break;                                                \
        }                                                         \
        case OP_NEG: {                                            \
            tcg_gen_xor_i64(xb, xb, sgm);                         \
            break;                                                \
        }                                                         \
        case OP_CPSGN: {                                          \
            TCGv_i64 xa = tcg_temp_new_i64();                     \
            get_cpu_vsr(xa, xA(ctx->opcode), true);               \
            tcg_gen_and_i64(xa, xa, sgm);                         \
            tcg_gen_andc_i64(xb, xb, sgm);                        \
            tcg_gen_or_i64(xb, xb, xa);                           \
            tcg_temp_free_i64(xa);                                \
            break;                                                \
        }                                                         \
        }                                                         \
        set_cpu_vsr(xT(ctx->opcode), xb, true);                   \
        set_cpu_vsr(xT(ctx->opcode), tcg_constant_i64(0), false); \
        tcg_temp_free_i64(xb);                                    \
        tcg_temp_free_i64(sgm);                                   \
    }

VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)

#define VSX_SCALAR_MOVE_QP(name, op, sgn_mask)                    \
static void glue(gen_, name)(DisasContext *ctx)                   \
{                                                                 \
    int xa;                                                       \
    int xt = rD(ctx->opcode) + 32;                                \
    int xb = rB(ctx->opcode) + 32;                                \
    TCGv_i64 xah, xbh, xbl, sgm, tmp;                             \
                                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    xbh = tcg_temp_new_i64();                                     \
    xbl = tcg_temp_new_i64();                                     \
    sgm = tcg_temp_new_i64();                                     \
    tmp = tcg_temp_new_i64();                                     \
    get_cpu_vsr(xbh, xb, true);                                   \
    get_cpu_vsr(xbl, xb, false);                                  \
    tcg_gen_movi_i64(sgm, sgn_mask);                              \
    switch (op) {                                                 \
    case OP_ABS:                                                  \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        break;                                                    \
    case OP_NABS:                                                 \
        tcg_gen_or_i64(xbh, xbh, sgm);                            \
        break;                                                    \
    case OP_NEG:                                                  \
        tcg_gen_xor_i64(xbh, xbh, sgm);                           \
        break;                                                    \
    case OP_CPSGN:                                                \
        xah = tcg_temp_new_i64();                                 \
        xa = rA(ctx->opcode) + 32;                                \
        get_cpu_vsr(tmp, xa, true);                               \
        tcg_gen_and_i64(xah, tmp, sgm);                           \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        tcg_gen_or_i64(xbh, xbh, xah);                            \
        tcg_temp_free_i64(xah);                                   \
        break;                                                    \
    }                                                             \
    set_cpu_vsr(xt, xbh, true);                                   \
    set_cpu_vsr(xt, xbl, false);                                  \
    tcg_temp_free_i64(xbl);                                       \
    tcg_temp_free_i64(xbh);                                       \
    tcg_temp_free_i64(sgm);                                       \
    tcg_temp_free_i64(tmp);                                       \
}

VSX_SCALAR_MOVE_QP(xsabsqp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnabsqp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnegqp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xscpsgnqp, OP_CPSGN, SGN_MASK_DP)

#define VSX_VECTOR_MOVE(name, op, sgn_mask)                       \
static void glue(gen_, name)(DisasContext *ctx)                   \
    {                                                             \
        TCGv_i64 xbh, xbl, sgm;                                   \
        if (unlikely(!ctx->vsx_enabled)) {                        \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                \
            return;                                               \
        }                                                         \
        xbh = tcg_temp_new_i64();                                 \
        xbl = tcg_temp_new_i64();                                 \
        sgm = tcg_temp_new_i64();                                 \
        get_cpu_vsr(xbh, xB(ctx->opcode), true);                  \
        get_cpu_vsr(xbl, xB(ctx->opcode), false);                 \
        tcg_gen_movi_i64(sgm, sgn_mask);                          \
        switch (op) {                                             \
        case OP_ABS: {                                            \
            tcg_gen_andc_i64(xbh, xbh, sgm);                      \
            tcg_gen_andc_i64(xbl, xbl, sgm);                      \
            break;                                                \
        }                                                         \
        case OP_NABS: {                                           \
            tcg_gen_or_i64(xbh, xbh, sgm);                        \
            tcg_gen_or_i64(xbl, xbl, sgm);                        \
            break;                                                \
        }                                                         \
        case OP_NEG: {                                            \
            tcg_gen_xor_i64(xbh, xbh, sgm);                       \
            tcg_gen_xor_i64(xbl, xbl, sgm);                       \
            break;                                                \
        }                                                         \
        case OP_CPSGN: {                                          \
            TCGv_i64 xah = tcg_temp_new_i64();                    \
            TCGv_i64 xal = tcg_temp_new_i64();                    \
            get_cpu_vsr(xah, xA(ctx->opcode), true);              \
            get_cpu_vsr(xal, xA(ctx->opcode), false);             \
            tcg_gen_and_i64(xah, xah, sgm);                       \
            tcg_gen_and_i64(xal, xal, sgm);                       \
            tcg_gen_andc_i64(xbh, xbh, sgm);                      \
            tcg_gen_andc_i64(xbl, xbl, sgm);                      \
            tcg_gen_or_i64(xbh, xbh, xah);                        \
            tcg_gen_or_i64(xbl, xbl, xal);                        \
            tcg_temp_free_i64(xah);                               \
            tcg_temp_free_i64(xal);                               \
            break;                                                \
        }                                                         \
        }                                                         \
        set_cpu_vsr(xT(ctx->opcode), xbh, true);                  \
        set_cpu_vsr(xT(ctx->opcode), xbl, false);                 \
        tcg_temp_free_i64(xbh);                                   \
        tcg_temp_free_i64(xbl);                                   \
        tcg_temp_free_i64(sgm);                                   \
    }

VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)

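/*
 * Vector compares: bit 21 of the opcode is the Rc flag. When it is
 * set, the helper's all-true/all-false summary result is written to
 * CR6; otherwise the result is computed into a scratch and discarded.
 */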
#define VSX_CMP(name, op1, op2, inval, type)                      \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv_i32 ignored;                                             \
    TCGv_ptr xt, xa, xb;                                          \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    xt = gen_vsr_ptr(xT(ctx->opcode));                            \
    xa = gen_vsr_ptr(xA(ctx->opcode));                            \
    xb = gen_vsr_ptr(xB(ctx->opcode));                            \
    if ((ctx->opcode >> (31 - 21)) & 1) {                         \
        gen_helper_##name(cpu_crf[6], cpu_env, xt, xa, xb);       \
    } else {                                                      \
        ignored = tcg_temp_new_i32();                             \
        gen_helper_##name(ignored, cpu_env, xt, xa, xb);          \
        tcg_temp_free_i32(ignored);                               \
    }                                                             \
    gen_helper_float_check_status(cpu_env);                       \
    tcg_temp_free_ptr(xt);                                        \
    tcg_temp_free_ptr(xa);                                        \
    tcg_temp_free_ptr(xb);                                        \
}

VSX_CMP(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
VSX_CMP(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
VSX_CMP(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
VSX_CMP(xvcmpnedp, 0x0C, 0x0F, 0, PPC2_ISA300)
VSX_CMP(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
VSX_CMP(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
VSX_CMP(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
VSX_CMP(xvcmpnesp, 0x0C, 0x0B, 0, PPC2_VSX)

static bool trans_XSCVQPDP(DisasContext *ctx, arg_X_tb_rc *a)
{
    TCGv_i32 ro;
    TCGv_ptr xt, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    ro = tcg_const_i32(a->rc);

    xt = gen_avr_ptr(a->rt);
    xb = gen_avr_ptr(a->rb);
    gen_helper_XSCVQPDP(cpu_env, ro, xt, xb);
    tcg_temp_free_i32(ro);
    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xb);

    return true;
}

#define GEN_VSX_HELPER_2(name, op1, op2, inval, type)             \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv_i32 opc;                                                 \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    opc = tcg_const_i32(ctx->opcode);                             \
    gen_helper_##name(cpu_env, opc);                              \
    tcg_temp_free_i32(opc);                                       \
}

#define GEN_VSX_HELPER_X3(name, op1, op2, inval, type)            \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv_ptr xt, xa, xb;                                          \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    xt = gen_vsr_ptr(xT(ctx->opcode));                            \
    xa = gen_vsr_ptr(xA(ctx->opcode));                            \
    xb = gen_vsr_ptr(xB(ctx->opcode));                            \
    gen_helper_##name(cpu_env, xt, xa, xb);                       \
    tcg_temp_free_ptr(xt);                                        \
    tcg_temp_free_ptr(xa);                                        \
    tcg_temp_free_ptr(xb);                                        \
}

#define GEN_VSX_HELPER_X2(name, op1, op2, inval, type)            \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv_ptr xt, xb;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    xt = gen_vsr_ptr(xT(ctx->opcode));                            \
    xb = gen_vsr_ptr(xB(ctx->opcode));                            \
    gen_helper_##name(cpu_env, xt, xb);                           \
    tcg_temp_free_ptr(xt);                                        \
    tcg_temp_free_ptr(xb);                                        \
}

#define GEN_VSX_HELPER_X2_AB(name, op1, op2, inval, type)         \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv_i32 opc;                                                 \
    TCGv_ptr xa, xb;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    opc = tcg_const_i32(ctx->opcode);                             \
    xa = gen_vsr_ptr(xA(ctx->opcode));                            \
    xb = gen_vsr_ptr(xB(ctx->opcode));                            \
    gen_helper_##name(cpu_env, opc, xa, xb);                      \
    tcg_temp_free_i32(opc);                                       \
    tcg_temp_free_ptr(xa);                                        \
    tcg_temp_free_ptr(xb);                                        \
}

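/*
 * In the GEN_VSX_HELPER_* macros below, X2/X3 give the number of
 * XX-form VSR operands passed to the helper, the _AB variants drop the
 * target, and the R variants address VSRs 32-63 (rD/rA/rB + 32), as
 * used by the quad-precision instructions.
 */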
#define GEN_VSX_HELPER_X1(name, op1, op2, inval, type)            \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv_i32 opc;                                                 \
    TCGv_ptr xb;                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    opc = tcg_const_i32(ctx->opcode);                             \
    xb = gen_vsr_ptr(xB(ctx->opcode));                            \
    gen_helper_##name(cpu_env, opc, xb);                          \
    tcg_temp_free_i32(opc);                                       \
    tcg_temp_free_ptr(xb);                                        \
}

#define GEN_VSX_HELPER_R3(name, op1, op2, inval, type)            \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv_i32 opc;                                                 \
    TCGv_ptr xt, xa, xb;                                          \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    opc = tcg_const_i32(ctx->opcode);                             \
    xt = gen_vsr_ptr(rD(ctx->opcode) + 32);                       \
    xa = gen_vsr_ptr(rA(ctx->opcode) + 32);                       \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                       \
    gen_helper_##name(cpu_env, opc, xt, xa, xb);                  \
    tcg_temp_free_i32(opc);                                       \
    tcg_temp_free_ptr(xt);                                        \
    tcg_temp_free_ptr(xa);                                        \
    tcg_temp_free_ptr(xb);                                        \
}

#define GEN_VSX_HELPER_R2(name, op1, op2, inval, type)            \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv_i32 opc;                                                 \
    TCGv_ptr xt, xb;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    opc = tcg_const_i32(ctx->opcode);                             \
    xt = gen_vsr_ptr(rD(ctx->opcode) + 32);                       \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                       \
    gen_helper_##name(cpu_env, opc, xt, xb);                      \
    tcg_temp_free_i32(opc);                                       \
    tcg_temp_free_ptr(xt);                                        \
    tcg_temp_free_ptr(xb);                                        \
}

#define GEN_VSX_HELPER_R2_AB(name, op1, op2, inval, type)         \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv_i32 opc;                                                 \
    TCGv_ptr xa, xb;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    opc = tcg_const_i32(ctx->opcode);                             \
    xa = gen_vsr_ptr(rA(ctx->opcode) + 32);                       \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                       \
    gen_helper_##name(cpu_env, opc, xa, xb);                      \
    tcg_temp_free_i32(opc);                                       \
    tcg_temp_free_ptr(xa);                                        \
    tcg_temp_free_ptr(xb);                                        \
}

#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type)     \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv_i64 t0;                                                  \
    TCGv_i64 t1;                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    t0 = tcg_temp_new_i64();                                      \
    t1 = tcg_temp_new_i64();                                      \
    get_cpu_vsr(t0, xB(ctx->opcode), true);                       \
    gen_helper_##name(t1, cpu_env, t0);                           \
    set_cpu_vsr(xT(ctx->opcode), t1, true);                       \
    set_cpu_vsr(xT(ctx->opcode), tcg_constant_i64(0), false);     \
    tcg_temp_free_i64(t0);                                        \
    tcg_temp_free_i64(t1);                                        \
}

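/*
 * The op1/op2/inval/type arguments are not expanded by any of these
 * macros; they only document each instruction's opcode fields and the
 * ISA level it belongs to.
 */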
GEN_VSX_HELPER_X3(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_R3(xsaddqp, 0x04, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_R3(xsmulqp, 0x04, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_R3(xsdivqp, 0x04, 0x11, 0, PPC2_ISA300)
GEN_VSX_HELPER_X2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_X1(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xscmpexpdp, 0x0C, 0x07, 0, PPC2_ISA300)
GEN_VSX_HELPER_R2_AB(xscmpexpqp, 0x04, 0x05, 0, PPC2_ISA300)
GEN_VSX_HELPER_X2_AB(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_R2_AB(xscmpoqp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_R2_AB(xscmpuqp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdphp, 0x16, 0x15, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
GEN_VSX_HELPER_R2(xscvdpqp, 0x04, 0x1A, 0x16, PPC2_ISA300)
GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_R2(xscvqpsdz, 0x04, 0x1A, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvqpswz, 0x04, 0x1A, 0x09, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvqpudz, 0x04, 0x1A, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvqpuwz, 0x04, 0x1A, 0x01, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvhpdp, 0x16, 0x15, 0x10, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvsdqp, 0x04, 0x1A, 0x0A, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn, 0x16, 0x14, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_R2(xscvudqp, 0x04, 0x1A, 0x02, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_R2(xsrqpi, 0x05, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_R2(xsrqpxp, 0x05, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_R2(xssqrtqp, 0x04, 0x19, 0x1B, PPC2_ISA300)
GEN_VSX_HELPER_R3(xssubqp, 0x04, 0x10, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_X1(xststdcsp, 0x14, 0x12, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcdp, 0x14, 0x16, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcqp, 0x04, 0x16, 0, PPC2_ISA300)

GEN_VSX_HELPER_X3(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X1(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)

GEN_VSX_HELPER_X3(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X1(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvhpsp, 0x16, 0x1D, 0x18, PPC2_ISA300)
GEN_VSX_HELPER_X2(xvcvsphp, 0x16, 0x1D, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_X2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcsp, 0x14, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcdp, 0x14, 0x1E, 0, PPC2_VSX)

static bool trans_XXPERM(DisasContext *ctx, arg_XX3 *a)
{
    TCGv_ptr xt, xa, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    xt = gen_vsr_ptr(a->xt);
    xa = gen_vsr_ptr(a->xa);
    xb = gen_vsr_ptr(a->xb);

    gen_helper_VPERM(xt, xa, xt, xb);

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xa);
    tcg_temp_free_ptr(xb);

    return true;
}

static bool trans_XXPERMR(DisasContext *ctx, arg_XX3 *a)
{
    TCGv_ptr xt, xa, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    xt = gen_vsr_ptr(a->xt);
    xa = gen_vsr_ptr(a->xa);
    xb = gen_vsr_ptr(a->xb);

    gen_helper_VPERMR(xt, xa, xt, xb);

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xa);
    tcg_temp_free_ptr(xb);

    return true;
}

static bool trans_XXPERMDI(DisasContext *ctx, arg_XX3_dm *a)
{
    TCGv_i64 t0, t1;

    REQUIRE_INSNS_FLAGS2(ctx, VSX);
    REQUIRE_VSX(ctx);

    t0 = tcg_temp_new_i64();

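    /*
     * DM bit 0 selects which doubleword of xa goes to the target's high
     * half and DM bit 1 selects the doubleword of xb for the low half.
     * When xt aliases a source, read both doublewords before writing.
     */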
    if (unlikely(a->xt == a->xa || a->xt == a->xb)) {
        t1 = tcg_temp_new_i64();

        get_cpu_vsr(t0, a->xa, (a->dm & 2) == 0);
        get_cpu_vsr(t1, a->xb, (a->dm & 1) == 0);

        set_cpu_vsr(a->xt, t0, true);
        set_cpu_vsr(a->xt, t1, false);

        tcg_temp_free_i64(t1);
    } else {
        get_cpu_vsr(t0, a->xa, (a->dm & 2) == 0);
        set_cpu_vsr(a->xt, t0, true);

        get_cpu_vsr(t0, a->xb, (a->dm & 1) == 0);
        set_cpu_vsr(a->xt, t0, false);
    }

    tcg_temp_free_i64(t0);

    return true;
}

static bool trans_XXPERMX(DisasContext *ctx, arg_8RR_XX4_uim3 *a)
{
    TCGv_ptr xt, xa, xb, xc;

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    xt = gen_vsr_ptr(a->xt);
    xa = gen_vsr_ptr(a->xa);
    xb = gen_vsr_ptr(a->xb);
    xc = gen_vsr_ptr(a->xc);

    gen_helper_XXPERMX(xt, xa, xb, xc, tcg_constant_tl(a->uim3));

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xa);
    tcg_temp_free_ptr(xb);
    tcg_temp_free_ptr(xc);

    return true;
}

#define XXGENPCV(NAME)                                            \
static bool trans_##NAME(DisasContext *ctx, arg_X_imm5 *a)        \
{                                                                 \
    TCGv_ptr xt, vrb;                                             \
                                                                  \
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);                            \
    REQUIRE_VSX(ctx);                                             \
                                                                  \
    if (a->imm & ~0x3) {                                          \
        gen_invalid(ctx);                                         \
        return true;                                              \
    }                                                             \
                                                                  \
    xt = gen_vsr_ptr(a->xt);                                      \
    vrb = gen_avr_ptr(a->vrb);                                    \
                                                                  \
    switch (a->imm) {                                             \
    case 0b00000: /* Big-Endian expansion */                      \
        glue(gen_helper_, glue(NAME, _be_exp))(xt, vrb);          \
        break;                                                    \
    case 0b00001: /* Big-Endian compression */                    \
        glue(gen_helper_, glue(NAME, _be_comp))(xt, vrb);         \
        break;                                                    \
    case 0b00010: /* Little-Endian expansion */                   \
        glue(gen_helper_, glue(NAME, _le_exp))(xt, vrb);          \
        break;                                                    \
    case 0b00011: /* Little-Endian compression */                 \
        glue(gen_helper_, glue(NAME, _le_comp))(xt, vrb);         \
        break;                                                    \
    }                                                             \
                                                                  \
    tcg_temp_free_ptr(xt);                                        \
    tcg_temp_free_ptr(vrb);                                       \
                                                                  \
    return true;                                                  \
}

XXGENPCV(XXGENPCVBM)
XXGENPCV(XXGENPCVHM)
XXGENPCV(XXGENPCVWM)
XXGENPCV(XXGENPCVDM)
#undef XXGENPCV

static bool do_xsmadd(DisasContext *ctx, int tgt, int src1, int src2, int src3,
        void (*gen_helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
{
    TCGv_ptr t, s1, s2, s3;

    t = gen_vsr_ptr(tgt);
    s1 = gen_vsr_ptr(src1);
    s2 = gen_vsr_ptr(src2);
    s3 = gen_vsr_ptr(src3);

    gen_helper(cpu_env, t, s1, s2, s3);

    tcg_temp_free_ptr(t);
    tcg_temp_free_ptr(s1);
    tcg_temp_free_ptr(s2);
    tcg_temp_free_ptr(s3);

    return true;
}

static bool do_xsmadd_XX3(DisasContext *ctx, arg_XX3 *a, bool type_a,
        void (*gen_helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
{
    REQUIRE_VSX(ctx);

    if (type_a) {
        return do_xsmadd(ctx, a->xt, a->xa, a->xt, a->xb, gen_helper);
    }
    return do_xsmadd(ctx, a->xt, a->xa, a->xb, a->xt, gen_helper);
}

TRANS_FLAGS2(VSX, XSMADDADP, do_xsmadd_XX3, true, gen_helper_XSMADDDP)
TRANS_FLAGS2(VSX, XSMADDMDP, do_xsmadd_XX3, false, gen_helper_XSMADDDP)
TRANS_FLAGS2(VSX, XSMSUBADP, do_xsmadd_XX3, true, gen_helper_XSMSUBDP)
TRANS_FLAGS2(VSX, XSMSUBMDP, do_xsmadd_XX3, false, gen_helper_XSMSUBDP)
TRANS_FLAGS2(VSX, XSNMADDADP, do_xsmadd_XX3, true, gen_helper_XSNMADDDP)
TRANS_FLAGS2(VSX, XSNMADDMDP, do_xsmadd_XX3, false, gen_helper_XSNMADDDP)
TRANS_FLAGS2(VSX, XSNMSUBADP, do_xsmadd_XX3, true, gen_helper_XSNMSUBDP)
TRANS_FLAGS2(VSX, XSNMSUBMDP, do_xsmadd_XX3, false, gen_helper_XSNMSUBDP)
TRANS_FLAGS2(VSX207, XSMADDASP, do_xsmadd_XX3, true, gen_helper_XSMADDSP)
TRANS_FLAGS2(VSX207, XSMADDMSP, do_xsmadd_XX3, false, gen_helper_XSMADDSP)
TRANS_FLAGS2(VSX207, XSMSUBASP, do_xsmadd_XX3, true, gen_helper_XSMSUBSP)
TRANS_FLAGS2(VSX207, XSMSUBMSP, do_xsmadd_XX3, false, gen_helper_XSMSUBSP)
TRANS_FLAGS2(VSX207, XSNMADDASP, do_xsmadd_XX3, true, gen_helper_XSNMADDSP)
TRANS_FLAGS2(VSX207, XSNMADDMSP, do_xsmadd_XX3, false, gen_helper_XSNMADDSP)
TRANS_FLAGS2(VSX207, XSNMSUBASP, do_xsmadd_XX3, true, gen_helper_XSNMSUBSP)
TRANS_FLAGS2(VSX207, XSNMSUBMSP, do_xsmadd_XX3, false, gen_helper_XSNMSUBSP)

static bool do_xsmadd_X(DisasContext *ctx, arg_X_rc *a,
        void (*gen_helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr),
        void (*gen_helper_ro)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
{
    int vrt, vra, vrb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    vrt = a->rt + 32;
    vra = a->ra + 32;
    vrb = a->rb + 32;

    if (a->rc) {
        return do_xsmadd(ctx, vrt, vra, vrt, vrb, gen_helper_ro);
    }

    return do_xsmadd(ctx, vrt, vra, vrt, vrb, gen_helper);
}

TRANS(XSMADDQP, do_xsmadd_X, gen_helper_XSMADDQP, gen_helper_XSMADDQPO)
TRANS(XSMSUBQP, do_xsmadd_X, gen_helper_XSMSUBQP, gen_helper_XSMSUBQPO)
TRANS(XSNMADDQP, do_xsmadd_X, gen_helper_XSNMADDQP, gen_helper_XSNMADDQPO)
TRANS(XSNMSUBQP, do_xsmadd_X, gen_helper_XSNMSUBQP, gen_helper_XSNMSUBQPO)

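/*
 * Multiply-add: opcode bit 25 distinguishes the A-form (T = A * B + T)
 * from the M-form (T = A * T + B); the macro just reorders the
 * multiplicand/addend pointers passed to the common helper.
 */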
#define GEN_VSX_HELPER_VSX_MADD(name, op1, aop, mop, inval, type)  \
static void gen_##name(DisasContext *ctx)                          \
{                                                                  \
    TCGv_ptr xt, xa, b, c;                                         \
    if (unlikely(!ctx->vsx_enabled)) {                             \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                     \
        return;                                                    \
    }                                                              \
    xt = gen_vsr_ptr(xT(ctx->opcode));                             \
    xa = gen_vsr_ptr(xA(ctx->opcode));                             \
    if (ctx->opcode & PPC_BIT32(25)) {                             \
        /*                                                         \
         * AxT + B                                                 \
         */                                                        \
        b = gen_vsr_ptr(xT(ctx->opcode));                          \
        c = gen_vsr_ptr(xB(ctx->opcode));                          \
    } else {                                                       \
        /*                                                         \
         * AxB + T                                                 \
         */                                                        \
        b = gen_vsr_ptr(xB(ctx->opcode));                          \
        c = gen_vsr_ptr(xT(ctx->opcode));                          \
    }                                                              \
    gen_helper_##name(cpu_env, xt, xa, b, c);                      \
    tcg_temp_free_ptr(xt);                                         \
    tcg_temp_free_ptr(xa);                                         \
    tcg_temp_free_ptr(b);                                          \
    tcg_temp_free_ptr(c);                                          \
}

GEN_VSX_HELPER_VSX_MADD(xvmadddp, 0x04, 0x0C, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvmsubdp, 0x04, 0x0E, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmadddp, 0x04, 0x1C, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmsubdp, 0x04, 0x1E, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvmaddsp, 0x04, 0x08, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvmsubsp, 0x04, 0x0A, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmaddsp, 0x04, 0x18, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmsubsp, 0x04, 0x1A, 0x1B, 0, PPC2_VSX)

static void gen_xxbrd(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    tcg_gen_bswap64_i64(xth, xbh);
    tcg_gen_bswap64_i64(xtl, xbl);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xxbrh(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    gen_bswap16x8(xth, xtl, xbh, xbl);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xxbrq(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);
    t0 = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(t0, xbl);
    tcg_gen_bswap64_i64(xtl, xbh);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);
    tcg_gen_mov_i64(xth, t0);
    set_cpu_vsr(xT(ctx->opcode), xth, true);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xxbrw(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    gen_bswap32x4(xth, xtl, xbh, xbl);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

#define VSX_LOGICAL(name, vece, tcg_op)                           \
static void glue(gen_, name)(DisasContext *ctx)                   \
    {                                                             \
        if (unlikely(!ctx->vsx_enabled)) {                        \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                \
            return;                                               \
        }                                                         \
        tcg_op(vece, vsr_full_offset(xT(ctx->opcode)),            \
               vsr_full_offset(xA(ctx->opcode)),                  \
               vsr_full_offset(xB(ctx->opcode)), 16, 16);         \
    }

VSX_LOGICAL(xxland, MO_64, tcg_gen_gvec_and)
VSX_LOGICAL(xxlandc, MO_64, tcg_gen_gvec_andc)
VSX_LOGICAL(xxlor, MO_64, tcg_gen_gvec_or)
VSX_LOGICAL(xxlxor, MO_64, tcg_gen_gvec_xor)
VSX_LOGICAL(xxlnor, MO_64, tcg_gen_gvec_nor)
VSX_LOGICAL(xxleqv, MO_64, tcg_gen_gvec_eqv)
VSX_LOGICAL(xxlnand, MO_64, tcg_gen_gvec_nand)
VSX_LOGICAL(xxlorc, MO_64, tcg_gen_gvec_orc)

#define VSX_XXMRG(name, high)                                     \
static void glue(gen_, name)(DisasContext *ctx)                   \
    {                                                             \
        TCGv_i64 a0, a1, b0, b1, tmp;                             \
        if (unlikely(!ctx->vsx_enabled)) {                        \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                \
            return;                                               \
        }                                                         \
        a0 = tcg_temp_new_i64();                                  \
        a1 = tcg_temp_new_i64();                                  \
        b0 = tcg_temp_new_i64();                                  \
        b1 = tcg_temp_new_i64();                                  \
        tmp = tcg_temp_new_i64();                                 \
        get_cpu_vsr(a0, xA(ctx->opcode), high);                   \
        get_cpu_vsr(a1, xA(ctx->opcode), high);                   \
        get_cpu_vsr(b0, xB(ctx->opcode), high);                   \
        get_cpu_vsr(b1, xB(ctx->opcode), high);                   \
        tcg_gen_shri_i64(a0, a0, 32);                             \
        tcg_gen_shri_i64(b0, b0, 32);                             \
        tcg_gen_deposit_i64(tmp, b0, a0, 32, 32);                 \
        set_cpu_vsr(xT(ctx->opcode), tmp, true);                  \
        tcg_gen_deposit_i64(tmp, b1, a1, 32, 32);                 \
        set_cpu_vsr(xT(ctx->opcode), tmp, false);                 \
        tcg_temp_free_i64(a0);                                    \
        tcg_temp_free_i64(a1);                                    \
        tcg_temp_free_i64(b0);                                    \
        tcg_temp_free_i64(b1);                                    \
        tcg_temp_free_i64(tmp);                                   \
    }

VSX_XXMRG(xxmrghw, 1)
VSX_XXMRG(xxmrglw, 0)

static bool trans_XXSEL(DisasContext *ctx, arg_XX4 *a)
{
    REQUIRE_INSNS_FLAGS2(ctx, VSX);
    REQUIRE_VSX(ctx);

    tcg_gen_gvec_bitsel(MO_64, vsr_full_offset(a->xt), vsr_full_offset(a->xc),
                        vsr_full_offset(a->xb), vsr_full_offset(a->xa), 16, 16);

    return true;
}

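/*
 * XXSPLTW counts source words big-endian-first, but vsr_full_offset()
 * is a host-layout offset, so on little-endian hosts the word offset
 * must be flipped within the 16-byte register.
 */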
static bool trans_XXSPLTW(DisasContext *ctx, arg_XX2_uim2 *a)
{
    int tofs, bofs;

    REQUIRE_VSX(ctx);

    tofs = vsr_full_offset(a->xt);
    bofs = vsr_full_offset(a->xb);
    bofs += a->uim << MO_32;
#ifndef HOST_WORDS_BIG_ENDIAN
    bofs ^= 8 | 4;
#endif

    tcg_gen_gvec_dup_mem(MO_32, tofs, bofs, 16, 16);
    return true;
}

#define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))

static bool trans_XXSPLTIB(DisasContext *ctx, arg_X_imm8 *a)
{
    if (a->xt < 32) {
        REQUIRE_VSX(ctx);
    } else {
        REQUIRE_VECTOR(ctx);
    }
    tcg_gen_gvec_dup_imm(MO_8, vsr_full_offset(a->xt), 16, 16, a->imm);
    return true;
}

static bool trans_XXSPLTIW(DisasContext *ctx, arg_8RR_D *a)
{
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    tcg_gen_gvec_dup_imm(MO_32, vsr_full_offset(a->xt), 16, 16, a->si);

    return true;
}

static bool trans_XXSPLTIDP(DisasContext *ctx, arg_8RR_D *a)
{
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    tcg_gen_gvec_dup_imm(MO_64, vsr_full_offset(a->xt), 16, 16,
                         helper_todouble(a->si));
    return true;
}

static bool trans_XXSPLTI32DX(DisasContext *ctx, arg_8RR_D_IX *a)
{
    TCGv_i32 imm;

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    imm = tcg_constant_i32(a->si);

    tcg_gen_st_i32(imm, cpu_env,
                   offsetof(CPUPPCState, vsr[a->xt].VsrW(0 + a->ix)));
    tcg_gen_st_i32(imm, cpu_env,
                   offsetof(CPUPPCState, vsr[a->xt].VsrW(2 + a->ix)));

    return true;
}

static bool trans_LXVKQ(DisasContext *ctx, arg_X_uim5 *a)
{
    static const uint64_t values[32] = {
        0,                      /* Unspecified */
        0x3FFF000000000000llu,  /* QP +1.0 */
        0x4000000000000000llu,  /* QP +2.0 */
        0x4000800000000000llu,  /* QP +3.0 */
        0x4001000000000000llu,  /* QP +4.0 */
        0x4001400000000000llu,  /* QP +5.0 */
        0x4001800000000000llu,  /* QP +6.0 */
        0x4001C00000000000llu,  /* QP +7.0 */
        0x7FFF000000000000llu,  /* QP +Inf */
        0x7FFF800000000000llu,  /* QP dQNaN */
        0,                      /* Unspecified */
        0,                      /* Unspecified */
        0,                      /* Unspecified */
        0,                      /* Unspecified */
        0,                      /* Unspecified */
        0,                      /* Unspecified */
        0x8000000000000000llu,  /* QP -0.0 */
        0xBFFF000000000000llu,  /* QP -1.0 */
        0xC000000000000000llu,  /* QP -2.0 */
        0xC000800000000000llu,  /* QP -3.0 */
        0xC001000000000000llu,  /* QP -4.0 */
        0xC001400000000000llu,  /* QP -5.0 */
        0xC001800000000000llu,  /* QP -6.0 */
        0xC001C00000000000llu,  /* QP -7.0 */
        0xFFFF000000000000llu,  /* QP -Inf */
    };

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    if (values[a->uim]) {
        set_cpu_vsr(a->xt, tcg_constant_i64(0x0), false);
        set_cpu_vsr(a->xt, tcg_constant_i64(values[a->uim]), true);
    } else {
        gen_invalid(ctx);
    }

    return true;
}

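/*
 * XVTLSBB tests the least-significant bit of every byte of xB: CR[bf]
 * gets LT (1 << 3) if all the LSBs are set and EQ (1 << 1) if none
 * are, which the two setcond/shift pairs below compute directly.
 */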
static bool trans_XVTLSBB(DisasContext *ctx, arg_XX2_bf_xb *a)
{
    TCGv_i64 xb, t0, t1, all_true, all_false, mask, zero;

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    xb = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    all_true = tcg_temp_new_i64();
    all_false = tcg_temp_new_i64();
    mask = tcg_constant_i64(dup_const(MO_8, 1));
    zero = tcg_constant_i64(0);

    get_cpu_vsr(xb, a->xb, true);
    tcg_gen_and_i64(t0, mask, xb);
    get_cpu_vsr(xb, a->xb, false);
    tcg_gen_and_i64(t1, mask, xb);

    tcg_gen_or_i64(all_false, t0, t1);
    tcg_gen_and_i64(all_true, t0, t1);

    tcg_gen_setcond_i64(TCG_COND_EQ, all_false, all_false, zero);
    tcg_gen_shli_i64(all_false, all_false, 1);
    tcg_gen_setcond_i64(TCG_COND_EQ, all_true, all_true, mask);
    tcg_gen_shli_i64(all_true, all_true, 3);

    tcg_gen_or_i64(t0, all_false, all_true);
    tcg_gen_extrl_i64_i32(cpu_crf[a->bf], t0);

    tcg_temp_free_i64(xb);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(all_true);
    tcg_temp_free_i64(all_false);

    return true;
}

static void gen_xxsldwi(DisasContext *ctx)
{
    TCGv_i64 xth, xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    switch (SHW(ctx->opcode)) {
    case 0: {
        get_cpu_vsr(xth, xA(ctx->opcode), true);
        get_cpu_vsr(xtl, xA(ctx->opcode), false);
        break;
    }
    case 1: {
        TCGv_i64 t0 = tcg_temp_new_i64();
        get_cpu_vsr(xth, xA(ctx->opcode), true);
        tcg_gen_shli_i64(xth, xth, 32);
        get_cpu_vsr(t0, xA(ctx->opcode), false);
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xth, xth, t0);
        get_cpu_vsr(xtl, xA(ctx->opcode), false);
        tcg_gen_shli_i64(xtl, xtl, 32);
        get_cpu_vsr(t0, xB(ctx->opcode), true);
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xtl, xtl, t0);
        tcg_temp_free_i64(t0);
        break;
    }
    case 2: {
        get_cpu_vsr(xth, xA(ctx->opcode), false);
        get_cpu_vsr(xtl, xB(ctx->opcode), true);
        break;
    }
    case 3: {
        TCGv_i64 t0 = tcg_temp_new_i64();
        get_cpu_vsr(xth, xA(ctx->opcode), false);
        tcg_gen_shli_i64(xth, xth, 32);
        get_cpu_vsr(t0, xB(ctx->opcode), true);
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xth, xth, t0);
        get_cpu_vsr(xtl, xB(ctx->opcode), true);
        tcg_gen_shli_i64(xtl, xtl, 32);
        get_cpu_vsr(t0, xB(ctx->opcode), false);
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xtl, xtl, t0);
        tcg_temp_free_i64(t0);
        break;
    }
    }

    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

#define VSX_EXTRACT_INSERT(name)                                  \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv_ptr xt, xb;                                              \
    TCGv_i32 t0;                                                  \
    TCGv_i64 t1;                                                  \
    uint8_t uimm = UIMM4(ctx->opcode);                            \
                                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    xt = gen_vsr_ptr(xT(ctx->opcode));                            \
    xb = gen_vsr_ptr(xB(ctx->opcode));                            \
    t0 = tcg_temp_new_i32();                                      \
    t1 = tcg_temp_new_i64();                                      \
    /*                                                            \
     * uimm > 15 out of bound and for                             \
     * uimm > 12 handle as per hardware in helper                 \
     */                                                           \
    if (uimm > 15) {                                              \
        tcg_gen_movi_i64(t1, 0);                                  \
        set_cpu_vsr(xT(ctx->opcode), t1, true);                   \
        set_cpu_vsr(xT(ctx->opcode), t1, false);                  \
        return;                                                   \
    }                                                             \
    tcg_gen_movi_i32(t0, uimm);                                   \
    gen_helper_##name(cpu_env, xt, xb, t0);                       \
    tcg_temp_free_ptr(xb);                                        \
    tcg_temp_free_ptr(xt);                                        \
    tcg_temp_free_i32(t0);                                        \
    tcg_temp_free_i64(t1);                                        \
}

VSX_EXTRACT_INSERT(xxextractuw)
VSX_EXTRACT_INSERT(xxinsertw)

#ifdef TARGET_PPC64
static void gen_xsxexpdp(DisasContext *ctx)
{
    TCGv rt = cpu_gpr[rD(ctx->opcode)];
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsr(t0, xB(ctx->opcode), true);
    tcg_gen_extract_i64(rt, t0, 52, 11);
    tcg_temp_free_i64(t0);
}

static void gen_xsxexpqp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    get_cpu_vsr(xbh, rB(ctx->opcode) + 32, true);

    tcg_gen_extract_i64(xth, xbh, 48, 15);
    set_cpu_vsr(rD(ctx->opcode) + 32, xth, true);
    tcg_gen_movi_i64(xtl, 0);
    set_cpu_vsr(rD(ctx->opcode) + 32, xtl, false);

    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

static void gen_xsiexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv ra = cpu_gpr[rA(ctx->opcode)];
    TCGv rb = cpu_gpr[rB(ctx->opcode)];
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    xth = tcg_temp_new_i64();
    tcg_gen_andi_i64(xth, ra, 0x800FFFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, rb, 0x7FF);
    tcg_gen_shli_i64(t0, t0, 52);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), tcg_constant_i64(0), false);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
}

static void gen_xsiexpqp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    get_cpu_vsr(xah, rA(ctx->opcode) + 32, true);
    get_cpu_vsr(xal, rA(ctx->opcode) + 32, false);
    xbh = tcg_temp_new_i64();
    get_cpu_vsr(xbh, rB(ctx->opcode) + 32, true);
    t0 = tcg_temp_new_i64();

    tcg_gen_andi_i64(xth, xah, 0x8000FFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, xbh, 0x7FFF);
    tcg_gen_shli_i64(t0, t0, 48);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsr(rD(ctx->opcode) + 32, xth, true);
    tcg_gen_mov_i64(xtl, xal);
    set_cpu_vsr(rD(ctx->opcode) + 32, xtl, false);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
}

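/*
 * xsxsigdp/xsxsigqp extract the significand: the fraction bits are
 * copied as-is and the implicit integer bit is 1 unless the biased
 * exponent is 0 (zero/denormal) or all-ones (Inf/NaN); the two
 * movconds below zero it out for those cases.
 */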
static void gen_xsxsigdp(DisasContext *ctx)
{
    TCGv rt = cpu_gpr[rD(ctx->opcode)];
    TCGv_i64 t0, t1, zr, nan, exp;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(2047);

    get_cpu_vsr(t1, xB(ctx->opcode), true);
    tcg_gen_extract_i64(exp, t1, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    get_cpu_vsr(t1, xB(ctx->opcode), true);
    tcg_gen_deposit_i64(rt, t0, t1, 0, 52);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
}

static void gen_xsxsigqp(DisasContext *ctx)
{
    TCGv_i64 t0, zr, nan, exp;
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, rB(ctx->opcode) + 32, true);
    get_cpu_vsr(xbl, rB(ctx->opcode) + 32, false);
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(32767);

    tcg_gen_extract_i64(exp, xbh, 48, 15);
    tcg_gen_movi_i64(t0, 0x0001000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xth, t0, xbh, 0, 48);
    set_cpu_vsr(rD(ctx->opcode) + 32, xth, true);
    tcg_gen_mov_i64(xtl, xbl);
    set_cpu_vsr(rD(ctx->opcode) + 32, xtl, false);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
#endif

static void gen_xviexpsp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xah, xA(ctx->opcode), true);
    get_cpu_vsr(xal, xA(ctx->opcode), false);
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);
    t0 = tcg_temp_new_i64();

    tcg_gen_andi_i64(xth, xah, 0x807FFFFF807FFFFF);
    tcg_gen_andi_i64(t0, xbh, 0xFF000000FF);
    tcg_gen_shli_i64(t0, t0, 23);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    tcg_gen_andi_i64(xtl, xal, 0x807FFFFF807FFFFF);
    tcg_gen_andi_i64(t0, xbl, 0xFF000000FF);
    tcg_gen_shli_i64(t0, t0, 23);
    tcg_gen_or_i64(xtl, xtl, t0);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(t0);

static void gen_xviexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xah, xA(ctx->opcode), true);
    get_cpu_vsr(xal, xA(ctx->opcode), false);
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    tcg_gen_deposit_i64(xth, xah, xbh, 52, 11);
    set_cpu_vsr(xT(ctx->opcode), xth, true);

    tcg_gen_deposit_i64(xtl, xal, xbl, 52, 11);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xvxexpsp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    tcg_gen_shri_i64(xth, xbh, 23);
    tcg_gen_andi_i64(xth, xth, 0xFF000000FF);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    tcg_gen_shri_i64(xtl, xbl, 23);
    tcg_gen_andi_i64(xtl, xtl, 0xFF000000FF);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xvxexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    tcg_gen_extract_i64(xth, xbh, 52, 11);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    tcg_gen_extract_i64(xtl, xbl, 52, 11);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

GEN_VSX_HELPER_X2(xvxsigsp, 0x00, 0x04, 0, PPC2_ISA300)
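
/*
 * xvxsigdp extracts the 52-bit fraction of each doubleword and ORs in
 * the implicit leading 1 (0x0010000000000000) for normal numbers; the
 * implicit bit is forced to zero when the biased exponent is 0
 * (zero/denormal) or 2047 (infinity/NaN).
 */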

static void gen_xvxsigdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0, zr, nan, exp;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(2047);

    tcg_gen_extract_i64(exp, xbh, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xth, t0, xbh, 0, 52);
    set_cpu_vsr(xT(ctx->opcode), xth, true);

    tcg_gen_extract_i64(exp, xbl, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xtl, t0, xbl, 0, 52);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static bool do_lstxv(DisasContext *ctx, int ra, TCGv displ,
                     int rt, bool store, bool paired)
{
    TCGv ea;
    TCGv_i64 xt;
    MemOp mop;
    int rt1, rt2;

    xt = tcg_temp_new_i64();

    mop = DEF_MEMOP(MO_UQ);

    gen_set_access_type(ctx, ACCESS_INT);
    ea = do_ea_calc(ctx, ra, displ);

    /*
     * The paired forms access two consecutive registers; in little-endian
     * mode both the doubleword order within a register and the register
     * order within the pair are reversed.
     */
    if (paired && ctx->le_mode) {
        rt1 = rt + 1;
        rt2 = rt;
    } else {
        rt1 = rt;
        rt2 = rt + 1;
    }

    if (store) {
        get_cpu_vsr(xt, rt1, !ctx->le_mode);
        tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
        gen_addr_add(ctx, ea, ea, 8);
        get_cpu_vsr(xt, rt1, ctx->le_mode);
        tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
        if (paired) {
            gen_addr_add(ctx, ea, ea, 8);
            get_cpu_vsr(xt, rt2, !ctx->le_mode);
            tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
            gen_addr_add(ctx, ea, ea, 8);
            get_cpu_vsr(xt, rt2, ctx->le_mode);
            tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
        }
    } else {
        tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
        set_cpu_vsr(rt1, xt, !ctx->le_mode);
        gen_addr_add(ctx, ea, ea, 8);
        tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
        set_cpu_vsr(rt1, xt, ctx->le_mode);
        if (paired) {
            gen_addr_add(ctx, ea, ea, 8);
            tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
            set_cpu_vsr(rt2, xt, !ctx->le_mode);
            gen_addr_add(ctx, ea, ea, 8);
            tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
            set_cpu_vsr(rt2, xt, ctx->le_mode);
        }
    }

    tcg_temp_free(ea);
    tcg_temp_free_i64(xt);
    return true;
}

static bool do_lstxv_D(DisasContext *ctx, arg_D *a, bool store, bool paired)
{
    if (paired || a->rt >= 32) {
        REQUIRE_VSX(ctx);
    } else {
        REQUIRE_VECTOR(ctx);
    }

    return do_lstxv(ctx, a->ra, tcg_constant_tl(a->si), a->rt, store, paired);
}

static bool do_lstxv_PLS_D(DisasContext *ctx, arg_PLS_D *a,
                           bool store, bool paired)
{
    arg_D d;
    REQUIRE_VSX(ctx);

    if (!resolve_PLS_D(ctx, &d, a)) {
        return true;
    }

    return do_lstxv(ctx, d.ra, tcg_constant_tl(d.si), d.rt, store, paired);
}

static bool do_lstxv_X(DisasContext *ctx, arg_X *a, bool store, bool paired)
{
    if (paired || a->rt >= 32) {
        REQUIRE_VSX(ctx);
    } else {
        REQUIRE_VECTOR(ctx);
    }

    return do_lstxv(ctx, a->ra, cpu_gpr[a->rb], a->rt, store, paired);
}
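
/*
 * lxsd/stxsd (and the prefixed plxsd/pstxsd) transfer one doubleword
 * between storage and the most significant half of VSR rt + 32, i.e.
 * VR rt; a load clears the other half.
 */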

static bool do_lstxsd(DisasContext *ctx, int rt, int ra, TCGv displ, bool store)
{
    TCGv ea;
    TCGv_i64 xt;
    MemOp mop;

    if (store) {
        REQUIRE_VECTOR(ctx);
    } else {
        REQUIRE_VSX(ctx);
    }

    xt = tcg_temp_new_i64();
    mop = DEF_MEMOP(MO_UQ);

    gen_set_access_type(ctx, ACCESS_INT);
    ea = do_ea_calc(ctx, ra, displ);

    if (store) {
        get_cpu_vsr(xt, rt + 32, true);
        tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
    } else {
        tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
        set_cpu_vsr(rt + 32, xt, true);
        set_cpu_vsr(rt + 32, tcg_constant_i64(0), false);
    }

    tcg_temp_free(ea);
    tcg_temp_free_i64(xt);

    return true;
}

static bool do_lstxsd_DS(DisasContext *ctx, arg_D *a, bool store)
{
    return do_lstxsd(ctx, a->rt, a->ra, tcg_constant_tl(a->si), store);
}

static bool do_plstxsd_PLS_D(DisasContext *ctx, arg_PLS_D *a, bool store)
{
    arg_D d;

    if (!resolve_PLS_D(ctx, &d, a)) {
        return true;
    }

    return do_lstxsd(ctx, d.rt, d.ra, tcg_constant_tl(d.si), store);
}

static bool do_lstxssp(DisasContext *ctx, int rt, int ra, TCGv displ, bool store)
{
    TCGv ea;
    TCGv_i64 xt;

    REQUIRE_VECTOR(ctx);

    xt = tcg_temp_new_i64();

    gen_set_access_type(ctx, ACCESS_INT);
    ea = do_ea_calc(ctx, ra, displ);

    if (store) {
        get_cpu_vsr(xt, rt + 32, true);
        gen_qemu_st32fs(ctx, xt, ea);
    } else {
        gen_qemu_ld32fs(ctx, xt, ea);
        set_cpu_vsr(rt + 32, xt, true);
        set_cpu_vsr(rt + 32, tcg_constant_i64(0), false);
    }

    tcg_temp_free(ea);
    tcg_temp_free_i64(xt);

    return true;
}

static bool do_lstxssp_DS(DisasContext *ctx, arg_D *a, bool store)
{
    return do_lstxssp(ctx, a->rt, a->ra, tcg_constant_tl(a->si), store);
}

static bool do_plstxssp_PLS_D(DisasContext *ctx, arg_PLS_D *a, bool store)
{
    arg_D d;

    if (!resolve_PLS_D(ctx, &d, a)) {
        return true;
    }

    return do_lstxssp(ctx, d.rt, d.ra, tcg_constant_tl(d.si), store);
}

TRANS_FLAGS2(ISA300, LXSD, do_lstxsd_DS, false)
TRANS_FLAGS2(ISA300, STXSD, do_lstxsd_DS, true)
TRANS_FLAGS2(ISA300, LXSSP, do_lstxssp_DS, false)
TRANS_FLAGS2(ISA300, STXSSP, do_lstxssp_DS, true)
TRANS_FLAGS2(ISA300, STXV, do_lstxv_D, true, false)
TRANS_FLAGS2(ISA300, LXV, do_lstxv_D, false, false)
TRANS_FLAGS2(ISA310, STXVP, do_lstxv_D, true, true)
TRANS_FLAGS2(ISA310, LXVP, do_lstxv_D, false, true)
TRANS_FLAGS2(ISA300, STXVX, do_lstxv_X, true, false)
TRANS_FLAGS2(ISA300, LXVX, do_lstxv_X, false, false)
TRANS_FLAGS2(ISA310, STXVPX, do_lstxv_X, true, true)
TRANS_FLAGS2(ISA310, LXVPX, do_lstxv_X, false, true)
TRANS64_FLAGS2(ISA310, PLXSD, do_plstxsd_PLS_D, false)
TRANS64_FLAGS2(ISA310, PSTXSD, do_plstxsd_PLS_D, true)
TRANS64_FLAGS2(ISA310, PLXSSP, do_plstxssp_PLS_D, false)
TRANS64_FLAGS2(ISA310, PSTXSSP, do_plstxssp_PLS_D, true)
TRANS64_FLAGS2(ISA310, PSTXV, do_lstxv_PLS_D, true, false)
TRANS64_FLAGS2(ISA310, PLXV, do_lstxv_PLS_D, false, false)
TRANS64_FLAGS2(ISA310, PSTXVP, do_lstxv_PLS_D, true, true)
TRANS64_FLAGS2(ISA310, PLXVP, do_lstxv_PLS_D, false, true)
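
/*
 * Load/store VSX vector rightmost element: loads zero-extend the 1-, 2-,
 * 4- or 8-byte element into the least significant doubleword of the VSR
 * and clear the most significant one; stores take the element from the
 * least significant doubleword.
 */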

static bool do_lstrm(DisasContext *ctx, arg_X *a, MemOp mop, bool store)
{
    TCGv ea;
    TCGv_i64 xt;

    REQUIRE_VSX(ctx);

    xt = tcg_temp_new_i64();

    gen_set_access_type(ctx, ACCESS_INT);
    ea = do_ea_calc(ctx, a->ra, cpu_gpr[a->rb]);

    if (store) {
        get_cpu_vsr(xt, a->rt, false);
        tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
    } else {
        tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
        set_cpu_vsr(a->rt, xt, false);
        set_cpu_vsr(a->rt, tcg_constant_i64(0), true);
    }

    tcg_temp_free(ea);
    tcg_temp_free_i64(xt);
    return true;
}

TRANS_FLAGS2(ISA310, LXVRBX, do_lstrm, DEF_MEMOP(MO_UB), false)
TRANS_FLAGS2(ISA310, LXVRHX, do_lstrm, DEF_MEMOP(MO_UW), false)
TRANS_FLAGS2(ISA310, LXVRWX, do_lstrm, DEF_MEMOP(MO_UL), false)
TRANS_FLAGS2(ISA310, LXVRDX, do_lstrm, DEF_MEMOP(MO_UQ), false)
TRANS_FLAGS2(ISA310, STXVRBX, do_lstrm, DEF_MEMOP(MO_UB), true)
TRANS_FLAGS2(ISA310, STXVRHX, do_lstrm, DEF_MEMOP(MO_UW), true)
TRANS_FLAGS2(ISA310, STXVRWX, do_lstrm, DEF_MEMOP(MO_UL), true)
TRANS_FLAGS2(ISA310, STXVRDX, do_lstrm, DEF_MEMOP(MO_UQ), true)

static void gen_xxeval_i64(TCGv_i64 t, TCGv_i64 a, TCGv_i64 b, TCGv_i64 c,
                           int64_t imm)
{
    /*
     * Instead of processing imm bit-by-bit, we'll skip the computation of
     * conjunctions whose corresponding bit is unset.
     */
    int bit;
    TCGv_i64 conj, disj;

    conj = tcg_temp_new_i64();
    disj = tcg_const_i64(0);

    /* Iterate over set bits from the least to the most significant bit */
    while (imm) {
        /*
         * Get the next bit to be processed with ctz64. Invert the result of
         * ctz64 to match the indexing used by PowerISA.
         */
        bit = 7 - ctz64(imm);
        if (bit & 0x4) {
            tcg_gen_mov_i64(conj, a);
        } else {
            tcg_gen_not_i64(conj, a);
        }
        if (bit & 0x2) {
            tcg_gen_and_i64(conj, conj, b);
        } else {
            tcg_gen_andc_i64(conj, conj, b);
        }
        if (bit & 0x1) {
            tcg_gen_and_i64(conj, conj, c);
        } else {
            tcg_gen_andc_i64(conj, conj, c);
        }
        tcg_gen_or_i64(disj, disj, conj);

        /* Unset the least significant bit that is set */
        imm &= imm - 1;
    }

    tcg_gen_mov_i64(t, disj);

    tcg_temp_free_i64(conj);
    tcg_temp_free_i64(disj);
}
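
/*
 * Worked example for the scheme above: imm = 0b00000011 has PowerISA
 * bits 6 and 7 set, contributing (A & B & ~C) | (A & B & C) = A & B,
 * which matches the and(B,A) shortcut taken in trans_XXEVAL below.
 * The vector variant applies the same decomposition with host vector ops.
 */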

static void gen_xxeval_vec(unsigned vece, TCGv_vec t, TCGv_vec a, TCGv_vec b,
                           TCGv_vec c, int64_t imm)
{
    /*
     * Instead of processing imm bit-by-bit, we'll skip the computation of
     * conjunctions whose corresponding bit is unset.
     */
    int bit;
    TCGv_vec disj, conj;

    disj = tcg_const_zeros_vec_matching(t);
    conj = tcg_temp_new_vec_matching(t);

    /* Iterate over set bits from the least to the most significant bit */
    while (imm) {
        /*
         * Get the next bit to be processed with ctz64. Invert the result of
         * ctz64 to match the indexing used by PowerISA.
         */
        bit = 7 - ctz64(imm);
        if (bit & 0x4) {
            tcg_gen_mov_vec(conj, a);
        } else {
            tcg_gen_not_vec(vece, conj, a);
        }
        if (bit & 0x2) {
            tcg_gen_and_vec(vece, conj, conj, b);
        } else {
            tcg_gen_andc_vec(vece, conj, conj, b);
        }
        if (bit & 0x1) {
            tcg_gen_and_vec(vece, conj, conj, c);
        } else {
            tcg_gen_andc_vec(vece, conj, conj, c);
        }
        tcg_gen_or_vec(vece, disj, disj, conj);

        /* Unset the least significant bit that is set */
        imm &= imm - 1;
    }

    tcg_gen_mov_vec(t, disj);

    tcg_temp_free_vec(disj);
    tcg_temp_free_vec(conj);
}

static bool trans_XXEVAL(DisasContext *ctx, arg_8RR_XX4_imm *a)
{
    static const TCGOpcode vecop_list[] = {
        INDEX_op_andc_vec, 0
    };
    static const GVecGen4i op = {
        .fniv = gen_xxeval_vec,
        .fno = gen_helper_XXEVAL,
        .fni8 = gen_xxeval_i64,
        .opt_opc = vecop_list,
        .vece = MO_64
    };
    int xt = vsr_full_offset(a->xt), xa = vsr_full_offset(a->xa),
        xb = vsr_full_offset(a->xb), xc = vsr_full_offset(a->xc);

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    /* Equivalent functions that can be implemented with a single gen_gvec */
    switch (a->imm) {
    case 0b00000000: /* false */
        set_cpu_vsr(a->xt, tcg_constant_i64(0), true);
        set_cpu_vsr(a->xt, tcg_constant_i64(0), false);
        break;
    case 0b00000011: /* and(B,A) */
        tcg_gen_gvec_and(MO_64, xt, xb, xa, 16, 16);
        break;
    case 0b00000101: /* and(C,A) */
        tcg_gen_gvec_and(MO_64, xt, xc, xa, 16, 16);
        break;
    case 0b00001111: /* A */
        tcg_gen_gvec_mov(MO_64, xt, xa, 16, 16);
        break;
    case 0b00010001: /* and(C,B) */
        tcg_gen_gvec_and(MO_64, xt, xc, xb, 16, 16);
        break;
    case 0b00011011: /* C?B:A */
        tcg_gen_gvec_bitsel(MO_64, xt, xc, xb, xa, 16, 16);
        break;
    case 0b00011101: /* B?C:A */
        tcg_gen_gvec_bitsel(MO_64, xt, xb, xc, xa, 16, 16);
        break;
    case 0b00100111: /* C?A:B */
        tcg_gen_gvec_bitsel(MO_64, xt, xc, xa, xb, 16, 16);
        break;
    case 0b00110011: /* B */
        tcg_gen_gvec_mov(MO_64, xt, xb, 16, 16);
        break;
    case 0b00110101: /* A?C:B */
        tcg_gen_gvec_bitsel(MO_64, xt, xa, xc, xb, 16, 16);
        break;
    case 0b00111100: /* xor(B,A) */
        tcg_gen_gvec_xor(MO_64, xt, xb, xa, 16, 16);
        break;
    case 0b00111111: /* or(B,A) */
        tcg_gen_gvec_or(MO_64, xt, xb, xa, 16, 16);
        break;
    case 0b01000111: /* B?A:C */
        tcg_gen_gvec_bitsel(MO_64, xt, xb, xa, xc, 16, 16);
        break;
    case 0b01010011: /* A?B:C */
        tcg_gen_gvec_bitsel(MO_64, xt, xa, xb, xc, 16, 16);
        break;
    case 0b01010101: /* C */
        tcg_gen_gvec_mov(MO_64, xt, xc, 16, 16);
        break;
    case 0b01011010: /* xor(C,A) */
        tcg_gen_gvec_xor(MO_64, xt, xc, xa, 16, 16);
        break;
    case 0b01011111: /* or(C,A) */
        tcg_gen_gvec_or(MO_64, xt, xc, xa, 16, 16);
        break;
    case 0b01100110: /* xor(C,B) */
        tcg_gen_gvec_xor(MO_64, xt, xc, xb, 16, 16);
        break;
    case 0b01110111: /* or(C,B) */
        tcg_gen_gvec_or(MO_64, xt, xc, xb, 16, 16);
        break;
    case 0b10001000: /* nor(C,B) */
        tcg_gen_gvec_nor(MO_64, xt, xc, xb, 16, 16);
        break;
    case 0b10011001: /* eqv(C,B) */
        tcg_gen_gvec_eqv(MO_64, xt, xc, xb, 16, 16);
        break;
    case 0b10100000: /* nor(C,A) */
        tcg_gen_gvec_nor(MO_64, xt, xc, xa, 16, 16);
        break;
    case 0b10100101: /* eqv(C,A) */
        tcg_gen_gvec_eqv(MO_64, xt, xc, xa, 16, 16);
        break;
    case 0b10101010: /* not(C) */
        tcg_gen_gvec_not(MO_64, xt, xc, 16, 16);
        break;
    case 0b11000000: /* nor(B,A) */
        tcg_gen_gvec_nor(MO_64, xt, xb, xa, 16, 16);
        break;
    case 0b11000011: /* eqv(B,A) */
        tcg_gen_gvec_eqv(MO_64, xt, xb, xa, 16, 16);
        break;
    case 0b11001100: /* not(B) */
        tcg_gen_gvec_not(MO_64, xt, xb, 16, 16);
        break;
    case 0b11101110: /* nand(C,B) */
        tcg_gen_gvec_nand(MO_64, xt, xc, xb, 16, 16);
        break;
    case 0b11110000: /* not(A) */
        tcg_gen_gvec_not(MO_64, xt, xa, 16, 16);
        break;
    case 0b11111010: /* nand(C,A) */
        tcg_gen_gvec_nand(MO_64, xt, xc, xa, 16, 16);
        break;
    case 0b11111100: /* nand(B,A) */
        tcg_gen_gvec_nand(MO_64, xt, xb, xa, 16, 16);
        break;
    case 0b11111111: /* true */
        set_cpu_vsr(a->xt, tcg_constant_i64(-1), true);
        set_cpu_vsr(a->xt, tcg_constant_i64(-1), false);
        break;
    default:
        /* Fallback to compute all conjunctions/disjunctions */
        tcg_gen_gvec_4i(xt, xa, xb, xc, 16, 16, a->imm, &op);
    }

    return true;
}
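
/*
 * xxblendv picks each element from b or a according to the most
 * significant bit of the corresponding element of c: the arithmetic
 * right shift by (element width - 1) below widens that sign bit into
 * an all-ones/all-zeroes mask for bitsel.
 */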

static void gen_xxblendv_vec(unsigned vece, TCGv_vec t, TCGv_vec a, TCGv_vec b,
                             TCGv_vec c)
{
    TCGv_vec tmp = tcg_temp_new_vec_matching(c);
    tcg_gen_sari_vec(vece, tmp, c, (8 << vece) - 1);
    tcg_gen_bitsel_vec(vece, t, tmp, b, a);
    tcg_temp_free_vec(tmp);
}

static bool do_xxblendv(DisasContext *ctx, arg_8RR_XX4 *a, unsigned vece)
{
    static const TCGOpcode vecop_list[] = {
        INDEX_op_sari_vec, 0
    };
    static const GVecGen4 ops[4] = {
        {
            .fniv = gen_xxblendv_vec,
            .fno = gen_helper_XXBLENDVB,
            .opt_opc = vecop_list,
            .vece = MO_8
        },
        {
            .fniv = gen_xxblendv_vec,
            .fno = gen_helper_XXBLENDVH,
            .opt_opc = vecop_list,
            .vece = MO_16
        },
        {
            .fniv = gen_xxblendv_vec,
            .fno = gen_helper_XXBLENDVW,
            .opt_opc = vecop_list,
            .vece = MO_32
        },
        {
            .fniv = gen_xxblendv_vec,
            .fno = gen_helper_XXBLENDVD,
            .opt_opc = vecop_list,
            .vece = MO_64
        }
    };

    REQUIRE_VSX(ctx);

    tcg_gen_gvec_4(vsr_full_offset(a->xt), vsr_full_offset(a->xa),
                   vsr_full_offset(a->xb), vsr_full_offset(a->xc),
                   16, 16, &ops[vece]);

    return true;
}

TRANS(XXBLENDVB, do_xxblendv, MO_8)
TRANS(XXBLENDVH, do_xxblendv, MO_16)
TRANS(XXBLENDVW, do_xxblendv, MO_32)
TRANS(XXBLENDVD, do_xxblendv, MO_64)

static bool do_helper_XX3(DisasContext *ctx, arg_XX3 *a,
                          void (*helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
{
    TCGv_ptr xt, xa, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    xt = gen_vsr_ptr(a->xt);
    xa = gen_vsr_ptr(a->xa);
    xb = gen_vsr_ptr(a->xb);

    helper(cpu_env, xt, xa, xb);

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xa);
    tcg_temp_free_ptr(xb);

    return true;
}

TRANS(XSCMPEQDP, do_helper_XX3, gen_helper_XSCMPEQDP)
TRANS(XSCMPGEDP, do_helper_XX3, gen_helper_XSCMPGEDP)
TRANS(XSCMPGTDP, do_helper_XX3, gen_helper_XSCMPGTDP)
TRANS(XSMAXCDP, do_helper_XX3, gen_helper_XSMAXCDP)
TRANS(XSMINCDP, do_helper_XX3, gen_helper_XSMINCDP)
TRANS(XSMAXJDP, do_helper_XX3, gen_helper_XSMAXJDP)
TRANS(XSMINJDP, do_helper_XX3, gen_helper_XSMINJDP)

static bool do_helper_X(arg_X *a,
                        void (*helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
{
    TCGv_ptr rt, ra, rb;

    rt = gen_avr_ptr(a->rt);
    ra = gen_avr_ptr(a->ra);
    rb = gen_avr_ptr(a->rb);

    helper(cpu_env, rt, ra, rb);

    tcg_temp_free_ptr(rt);
    tcg_temp_free_ptr(ra);
    tcg_temp_free_ptr(rb);

    return true;
}
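
/*
 * The quad-precision compare/min/max operands live in VRs, which is why
 * do_helper_X above passes AVR pointers rather than VSR pointers.
 */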

static bool do_xscmpqp(DisasContext *ctx, arg_X *a,
                       void (*helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
{
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    return do_helper_X(a, helper);
}

TRANS(XSCMPEQQP, do_xscmpqp, gen_helper_XSCMPEQQP)
TRANS(XSCMPGEQP, do_xscmpqp, gen_helper_XSCMPGEQP)
TRANS(XSCMPGTQP, do_xscmpqp, gen_helper_XSCMPGTQP)
TRANS(XSMAXCQP, do_xscmpqp, gen_helper_XSMAXCQP)
TRANS(XSMINCQP, do_xscmpqp, gen_helper_XSMINCQP)

static bool trans_XVCVSPBF16(DisasContext *ctx, arg_XX2 *a)
{
    TCGv_ptr xt, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    xt = gen_vsr_ptr(a->xt);
    xb = gen_vsr_ptr(a->xb);

    gen_helper_XVCVSPBF16(cpu_env, xt, xb);

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xb);

    return true;
}

static bool trans_XVCVBF16SPN(DisasContext *ctx, arg_XX2 *a)
{
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    /*
     * bfloat16 is the high half of a single-precision float, so the
     * conversion is a 16-bit left shift of each 32-bit element.
     */
    tcg_gen_gvec_shli(MO_32, vsr_full_offset(a->xt), vsr_full_offset(a->xb),
                      16, 16, 16);

    return true;
}

#undef GEN_XX2FORM
#undef GEN_XX3FORM
#undef GEN_XX2IFORM
#undef GEN_XX3_RC_FORM
#undef GEN_XX3FORM_DM
#undef VSX_LOGICAL