/*** VSX extension ***/

static inline void get_cpu_vsr(TCGv_i64 dst, int n, bool high)
{
    tcg_gen_ld_i64(dst, cpu_env, vsr64_offset(n, high));
}

static inline void set_cpu_vsr(int n, TCGv_i64 src, bool high)
{
    tcg_gen_st_i64(src, cpu_env, vsr64_offset(n, high));
}

static inline TCGv_ptr gen_vsr_ptr(int reg)
{
    TCGv_ptr r = tcg_temp_new_ptr();
    tcg_gen_addi_ptr(r, cpu_env, vsr_full_offset(reg));
    return r;
}

#define VSX_LOAD_SCALAR(name, operation)                      \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, t0, EA);                        \
    set_cpu_vsr(xT(ctx->opcode), t0, true);                   \
    /* NOTE: cpu_vsrl is undefined */                         \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}

VSX_LOAD_SCALAR(lxsdx, ld64_i64)
VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
VSX_LOAD_SCALAR(lxsibzx, ld8u_i64)
VSX_LOAD_SCALAR(lxsihzx, ld16u_i64)
VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
VSX_LOAD_SCALAR(lxsspx, ld32fs)

static void gen_lxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsr(xT(ctx->opcode), t0, true);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsr(xT(ctx->opcode), t0, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

static void gen_lxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEUQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xth, t1, t0, 32, 32);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEUQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xtl, t1, t0, 32, 32);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEUQ);
    }
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

static void gen_lxvwsx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i32 data;

    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);

    data = tcg_temp_new_i32();
    tcg_gen_qemu_ld_i32(data, EA, ctx->mem_idx, DEF_MEMOP(MO_UL));
    tcg_gen_gvec_dup_i32(MO_UL, vsr_full_offset(xT(ctx->opcode)), 16, 16, data);

    tcg_temp_free(EA);
    tcg_temp_free_i32(data);
}
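
/*
 * lxvdsx splats a single doubleword from storage across the whole
 * target VSR; like lxvwsx above, the replication is done with a
 * 16-byte gvec dup once the element has been loaded.
 */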
static void gen_lxvdsx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 data;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);

    data = tcg_temp_new_i64();
    tcg_gen_qemu_ld_i64(data, EA, ctx->mem_idx, DEF_MEMOP(MO_UQ));
    tcg_gen_gvec_dup_i64(MO_UQ, vsr_full_offset(xT(ctx->opcode)), 16, 16, data);

    tcg_temp_free(EA);
    tcg_temp_free_i64(data);
}

static void gen_bswap16x8(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 mask = tcg_const_i64(0x00FF00FF00FF00FF);
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    /* outh = ((inh & mask) << 8) | ((inh >> 8) & mask) */
    tcg_gen_and_i64(t0, inh, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inh, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outh, t0, t1);

    /* outl = ((inl & mask) << 8) | ((inl >> 8) & mask) */
    tcg_gen_and_i64(t0, inl, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inl, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outl, t0, t1);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(mask);
}

static void gen_bswap32x4(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 hi = tcg_temp_new_i64();
    TCGv_i64 lo = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(hi, inh);
    tcg_gen_bswap64_i64(lo, inl);
    tcg_gen_shri_i64(outh, hi, 32);
    tcg_gen_deposit_i64(outh, outh, hi, 32, 32);
    tcg_gen_shri_i64(outl, lo, 32);
    tcg_gen_deposit_i64(outl, outl, lo, 32, 32);

    tcg_temp_free_i64(hi);
    tcg_temp_free_i64(lo);
}

static void gen_lxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);

    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEUQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEUQ);
    if (ctx->le_mode) {
        gen_bswap16x8(xth, xtl, xth, xtl);
    }
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

static void gen_lxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEUQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEUQ);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}
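
/*
 * lxvl/lxvll and stxvl/stxvll transfer a variable number of bytes
 * rather than a fixed 16, so the partial transfer is deferred to a
 * helper: the translator only computes the base address
 * (gen_addr_register uses rA alone, with no index register) and
 * passes the length register rB through to the helper.
 */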
#ifdef TARGET_PPC64
#define VSX_VECTOR_LOAD_STORE_LENGTH(name)                         \
static void gen_##name(DisasContext *ctx)                          \
{                                                                  \
    TCGv EA;                                                       \
    TCGv_ptr xt;                                                   \
                                                                   \
    if (xT(ctx->opcode) < 32) {                                    \
        if (unlikely(!ctx->vsx_enabled)) {                         \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                 \
            return;                                                \
        }                                                          \
    } else {                                                       \
        if (unlikely(!ctx->altivec_enabled)) {                     \
            gen_exception(ctx, POWERPC_EXCP_VPU);                  \
            return;                                                \
        }                                                          \
    }                                                              \
    EA = tcg_temp_new();                                           \
    xt = gen_vsr_ptr(xT(ctx->opcode));                             \
    gen_set_access_type(ctx, ACCESS_INT);                          \
    gen_addr_register(ctx, EA);                                    \
    gen_helper_##name(cpu_env, EA, xt, cpu_gpr[rB(ctx->opcode)]);  \
    tcg_temp_free(EA);                                             \
    tcg_temp_free_ptr(xt);                                         \
}

VSX_VECTOR_LOAD_STORE_LENGTH(lxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(lxvll)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvll)
#endif

#define VSX_STORE_SCALAR(name, operation)                     \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    get_cpu_vsr(t0, xS(ctx->opcode), true);                   \
    gen_qemu_##operation(ctx, t0, EA);                        \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}

VSX_STORE_SCALAR(stxsdx, st64_i64)

VSX_STORE_SCALAR(stxsibx, st8_i64)
VSX_STORE_SCALAR(stxsihx, st16_i64)
VSX_STORE_SCALAR(stxsiwx, st32_i64)
VSX_STORE_SCALAR(stxsspx, st32fs)

static void gen_stxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    get_cpu_vsr(t0, xS(ctx->opcode), true);
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_gen_addi_tl(EA, EA, 8);
    get_cpu_vsr(t0, xS(ctx->opcode), false);
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

static void gen_stxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    get_cpu_vsr(xsl, xS(ctx->opcode), false);
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shri_i64(t0, xsh, 32);
        tcg_gen_deposit_i64(t1, t0, xsh, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_shri_i64(t0, xsl, 32);
        tcg_gen_deposit_i64(t1, t0, xsl, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEUQ);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEUQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}
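
/*
 * As with the element loads above, the stores keep the VSR image in
 * big-endian element order: in little-endian mode the halfwords are
 * swapped with gen_bswap16x8() before the two doubleword stores.
 */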
static void gen_stxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    get_cpu_vsr(xsl, xS(ctx->opcode), false);
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 outh = tcg_temp_new_i64();
        TCGv_i64 outl = tcg_temp_new_i64();

        gen_bswap16x8(outh, outl, xsh, xsl);
        tcg_gen_qemu_st_i64(outh, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(outl, EA, ctx->mem_idx, MO_BEUQ);
        tcg_temp_free_i64(outh);
        tcg_temp_free_i64(outl);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEUQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

static void gen_stxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    get_cpu_vsr(xsl, xS(ctx->opcode), false);
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEUQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEUQ);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

static void gen_mfvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    tcg_gen_ext32u_i64(tmp, xsh);
    tcg_gen_trunc_i64_tl(cpu_gpr[rA(ctx->opcode)], tmp);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

static void gen_mtvsrwa(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32s_i64(xsh, tmp);
    set_cpu_vsr(xT(ctx->opcode), xsh, true);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

static void gen_mtvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32u_i64(xsh, tmp);
    set_cpu_vsr(xT(ctx->opcode), xsh, true);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}
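
/*
 * Doubleword moves between GPRs and VSRs exist only on 64-bit
 * implementations, hence the TARGET_PPC64 guard below.  Which
 * facility check applies depends on the half of the VSX register
 * file being touched: VSRs 0-31 overlay the FPRs, VSRs 32-63 the
 * Altivec VRs.
 */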
#if defined(TARGET_PPC64)
static void gen_mfvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsr(t0, xS(ctx->opcode), true);
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    set_cpu_vsr(xT(ctx->opcode), t0, true);
    tcg_temp_free_i64(t0);
}

static void gen_mfvsrld(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsr(t0, xS(ctx->opcode), false);
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrdd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    if (!rA(ctx->opcode)) {
        tcg_gen_movi_i64(t0, 0);
    } else {
        tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    }
    set_cpu_vsr(xT(ctx->opcode), t0, true);

    tcg_gen_mov_i64(t0, cpu_gpr[rB(ctx->opcode)]);
    set_cpu_vsr(xT(ctx->opcode), t0, false);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrws(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    tcg_gen_deposit_i64(t0, cpu_gpr[rA(ctx->opcode)],
                        cpu_gpr[rA(ctx->opcode)], 32, 32);
    set_cpu_vsr(xT(ctx->opcode), t0, false);
    set_cpu_vsr(xT(ctx->opcode), t0, true);
    tcg_temp_free_i64(t0);
}

#endif

#define OP_ABS 1
#define OP_NABS 2
#define OP_NEG 3
#define OP_CPSGN 4
#define SGN_MASK_DP 0x8000000000000000ull
#define SGN_MASK_SP 0x8000000080000000ull

#define VSX_SCALAR_MOVE(name, op, sgn_mask)                       \
static void glue(gen_, name)(DisasContext *ctx)                   \
    {                                                             \
        TCGv_i64 xb, sgm;                                         \
        if (unlikely(!ctx->vsx_enabled)) {                        \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                \
            return;                                               \
        }                                                         \
        xb = tcg_temp_new_i64();                                  \
        sgm = tcg_temp_new_i64();                                 \
        get_cpu_vsr(xb, xB(ctx->opcode), true);                   \
        tcg_gen_movi_i64(sgm, sgn_mask);                          \
        switch (op) {                                             \
        case OP_ABS: {                                            \
            tcg_gen_andc_i64(xb, xb, sgm);                        \
            break;                                                \
        }                                                         \
        case OP_NABS: {                                           \
            tcg_gen_or_i64(xb, xb, sgm);                          \
            break;                                                \
        }                                                         \
        case OP_NEG: {                                            \
            tcg_gen_xor_i64(xb, xb, sgm);                         \
            break;                                                \
        }                                                         \
        case OP_CPSGN: {                                          \
            TCGv_i64 xa = tcg_temp_new_i64();                     \
            get_cpu_vsr(xa, xA(ctx->opcode), true);               \
            tcg_gen_and_i64(xa, xa, sgm);                         \
            tcg_gen_andc_i64(xb, xb, sgm);                        \
            tcg_gen_or_i64(xb, xb, xa);                           \
            tcg_temp_free_i64(xa);                                \
            break;                                                \
        }                                                         \
        }                                                         \
        set_cpu_vsr(xT(ctx->opcode), xb, true);                   \
        set_cpu_vsr(xT(ctx->opcode), tcg_constant_i64(0), false); \
        tcg_temp_free_i64(xb);                                    \
        tcg_temp_free_i64(sgm);                                   \
    }
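
/*
 * The scalar move operations are pure sign-bit arithmetic: andc
 * clears the sign (abs), or sets it (nabs), xor flips it (neg), and
 * cpsgn copies it over from xA.  Only the high doubleword of the
 * target is significant; the low doubleword is zeroed.
 */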
VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)

#define VSX_SCALAR_MOVE_QP(name, op, sgn_mask)                    \
static void glue(gen_, name)(DisasContext *ctx)                   \
{                                                                 \
    int xa;                                                       \
    int xt = rD(ctx->opcode) + 32;                                \
    int xb = rB(ctx->opcode) + 32;                                \
    TCGv_i64 xah, xbh, xbl, sgm, tmp;                             \
                                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    xbh = tcg_temp_new_i64();                                     \
    xbl = tcg_temp_new_i64();                                     \
    sgm = tcg_temp_new_i64();                                     \
    tmp = tcg_temp_new_i64();                                     \
    get_cpu_vsr(xbh, xb, true);                                   \
    get_cpu_vsr(xbl, xb, false);                                  \
    tcg_gen_movi_i64(sgm, sgn_mask);                              \
    switch (op) {                                                 \
    case OP_ABS:                                                  \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        break;                                                    \
    case OP_NABS:                                                 \
        tcg_gen_or_i64(xbh, xbh, sgm);                            \
        break;                                                    \
    case OP_NEG:                                                  \
        tcg_gen_xor_i64(xbh, xbh, sgm);                           \
        break;                                                    \
    case OP_CPSGN:                                                \
        xah = tcg_temp_new_i64();                                 \
        xa = rA(ctx->opcode) + 32;                                \
        get_cpu_vsr(tmp, xa, true);                               \
        tcg_gen_and_i64(xah, tmp, sgm);                           \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        tcg_gen_or_i64(xbh, xbh, xah);                            \
        tcg_temp_free_i64(xah);                                   \
        break;                                                    \
    }                                                             \
    set_cpu_vsr(xt, xbh, true);                                   \
    set_cpu_vsr(xt, xbl, false);                                  \
    tcg_temp_free_i64(xbl);                                       \
    tcg_temp_free_i64(xbh);                                       \
    tcg_temp_free_i64(sgm);                                       \
    tcg_temp_free_i64(tmp);                                       \
}

VSX_SCALAR_MOVE_QP(xsabsqp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnabsqp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnegqp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xscpsgnqp, OP_CPSGN, SGN_MASK_DP)

#define VSX_VECTOR_MOVE(name, op, sgn_mask)                       \
static void glue(gen_, name)(DisasContext *ctx)                   \
    {                                                             \
        TCGv_i64 xbh, xbl, sgm;                                   \
        if (unlikely(!ctx->vsx_enabled)) {                        \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                \
            return;                                               \
        }                                                         \
        xbh = tcg_temp_new_i64();                                 \
        xbl = tcg_temp_new_i64();                                 \
        sgm = tcg_temp_new_i64();                                 \
        get_cpu_vsr(xbh, xB(ctx->opcode), true);                  \
        get_cpu_vsr(xbl, xB(ctx->opcode), false);                 \
        tcg_gen_movi_i64(sgm, sgn_mask);                          \
        switch (op) {                                             \
        case OP_ABS: {                                            \
            tcg_gen_andc_i64(xbh, xbh, sgm);                      \
            tcg_gen_andc_i64(xbl, xbl, sgm);                      \
            break;                                                \
        }                                                         \
        case OP_NABS: {                                           \
            tcg_gen_or_i64(xbh, xbh, sgm);                        \
            tcg_gen_or_i64(xbl, xbl, sgm);                        \
            break;                                                \
        }                                                         \
        case OP_NEG: {                                            \
            tcg_gen_xor_i64(xbh, xbh, sgm);                       \
            tcg_gen_xor_i64(xbl, xbl, sgm);                       \
            break;                                                \
        }                                                         \
        case OP_CPSGN: {                                          \
            TCGv_i64 xah = tcg_temp_new_i64();                    \
            TCGv_i64 xal = tcg_temp_new_i64();                    \
            get_cpu_vsr(xah, xA(ctx->opcode), true);              \
            get_cpu_vsr(xal, xA(ctx->opcode), false);             \
            tcg_gen_and_i64(xah, xah, sgm);                       \
            tcg_gen_and_i64(xal, xal, sgm);                       \
            tcg_gen_andc_i64(xbh, xbh, sgm);                      \
            tcg_gen_andc_i64(xbl, xbl, sgm);                      \
            tcg_gen_or_i64(xbh, xbh, xah);                        \
            tcg_gen_or_i64(xbl, xbl, xal);                        \
            tcg_temp_free_i64(xah);                               \
            tcg_temp_free_i64(xal);                               \
            break;                                                \
        }                                                         \
        }                                                         \
        set_cpu_vsr(xT(ctx->opcode), xbh, true);                  \
        set_cpu_vsr(xT(ctx->opcode), xbl, false);                 \
        tcg_temp_free_i64(xbh);                                   \
        tcg_temp_free_i64(xbl);                                   \
        tcg_temp_free_i64(sgm);                                   \
    }

VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)
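
/*
 * Vector float compares are done in a helper so that NaN handling
 * and FPSCR updates match the ISA.  When opcode bit 21 (the record
 * bit of the xvcmp*. forms) is set, the helper's result is written
 * to CR6; otherwise it is computed into a dummy and discarded.
 */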
#define VSX_CMP(name, op1, op2, inval, type)                      \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv_i32 ignored;                                             \
    TCGv_ptr xt, xa, xb;                                          \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    xt = gen_vsr_ptr(xT(ctx->opcode));                            \
    xa = gen_vsr_ptr(xA(ctx->opcode));                            \
    xb = gen_vsr_ptr(xB(ctx->opcode));                            \
    if ((ctx->opcode >> (31 - 21)) & 1) {                         \
        gen_helper_##name(cpu_crf[6], cpu_env, xt, xa, xb);       \
    } else {                                                      \
        ignored = tcg_temp_new_i32();                             \
        gen_helper_##name(ignored, cpu_env, xt, xa, xb);          \
        tcg_temp_free_i32(ignored);                               \
    }                                                             \
    gen_helper_float_check_status(cpu_env);                       \
    tcg_temp_free_ptr(xt);                                        \
    tcg_temp_free_ptr(xa);                                        \
    tcg_temp_free_ptr(xb);                                        \
}

VSX_CMP(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
VSX_CMP(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
VSX_CMP(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
VSX_CMP(xvcmpnedp, 0x0C, 0x0F, 0, PPC2_ISA300)
VSX_CMP(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
VSX_CMP(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
VSX_CMP(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
VSX_CMP(xvcmpnesp, 0x0C, 0x0B, 0, PPC2_VSX)

static bool trans_XSCVQPDP(DisasContext *ctx, arg_X_tb_rc *a)
{
    TCGv_i32 ro;
    TCGv_ptr xt, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    ro = tcg_const_i32(a->rc);

    xt = gen_avr_ptr(a->rt);
    xb = gen_avr_ptr(a->rb);
    gen_helper_XSCVQPDP(cpu_env, ro, xt, xb);
    tcg_temp_free_i32(ro);
    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xb);

    return true;
}

#define GEN_VSX_HELPER_2(name, op1, op2, inval, type)             \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv_i32 opc;                                                 \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    opc = tcg_const_i32(ctx->opcode);                             \
    gen_helper_##name(cpu_env, opc);                              \
    tcg_temp_free_i32(opc);                                       \
}

#define GEN_VSX_HELPER_X3(name, op1, op2, inval, type)            \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv_ptr xt, xa, xb;                                          \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    xt = gen_vsr_ptr(xT(ctx->opcode));                            \
    xa = gen_vsr_ptr(xA(ctx->opcode));                            \
    xb = gen_vsr_ptr(xB(ctx->opcode));                            \
    gen_helper_##name(cpu_env, xt, xa, xb);                       \
    tcg_temp_free_ptr(xt);                                        \
    tcg_temp_free_ptr(xa);                                        \
    tcg_temp_free_ptr(xb);                                        \
}

#define GEN_VSX_HELPER_X2(name, op1, op2, inval, type)            \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv_ptr xt, xb;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    xt = gen_vsr_ptr(xT(ctx->opcode));                            \
    xb = gen_vsr_ptr(xB(ctx->opcode));                            \
    gen_helper_##name(cpu_env, xt, xb);                           \
    tcg_temp_free_ptr(xt);                                        \
    tcg_temp_free_ptr(xb);                                        \
}

#define GEN_VSX_HELPER_X2_AB(name, op1, op2, inval, type)         \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv_i32 opc;                                                 \
    TCGv_ptr xa, xb;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    opc = tcg_const_i32(ctx->opcode);                             \
    xa = gen_vsr_ptr(xA(ctx->opcode));                            \
    xb = gen_vsr_ptr(xB(ctx->opcode));                            \
    gen_helper_##name(cpu_env, opc, xa, xb);                      \
    tcg_temp_free_i32(opc);                                       \
    tcg_temp_free_ptr(xa);                                        \
    tcg_temp_free_ptr(xb);                                        \
}

#define GEN_VSX_HELPER_X1(name, op1, op2, inval, type)            \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv_i32 opc;                                                 \
    TCGv_ptr xb;                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    opc = tcg_const_i32(ctx->opcode);                             \
    xb = gen_vsr_ptr(xB(ctx->opcode));                            \
    gen_helper_##name(cpu_env, opc, xb);                          \
    tcg_temp_free_i32(opc);                                       \
    tcg_temp_free_ptr(xb);                                        \
}

#define GEN_VSX_HELPER_R3(name, op1, op2, inval, type)            \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv_i32 opc;                                                 \
    TCGv_ptr xt, xa, xb;                                          \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    opc = tcg_const_i32(ctx->opcode);                             \
    xt = gen_vsr_ptr(rD(ctx->opcode) + 32);                       \
    xa = gen_vsr_ptr(rA(ctx->opcode) + 32);                       \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                       \
    gen_helper_##name(cpu_env, opc, xt, xa, xb);                  \
    tcg_temp_free_i32(opc);                                       \
    tcg_temp_free_ptr(xt);                                        \
    tcg_temp_free_ptr(xa);                                        \
    tcg_temp_free_ptr(xb);                                        \
}

#define GEN_VSX_HELPER_R2(name, op1, op2, inval, type)            \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv_i32 opc;                                                 \
    TCGv_ptr xt, xb;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    opc = tcg_const_i32(ctx->opcode);                             \
    xt = gen_vsr_ptr(rD(ctx->opcode) + 32);                       \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                       \
    gen_helper_##name(cpu_env, opc, xt, xb);                      \
    tcg_temp_free_i32(opc);                                       \
    tcg_temp_free_ptr(xt);                                        \
    tcg_temp_free_ptr(xb);                                        \
}

#define GEN_VSX_HELPER_R2_AB(name, op1, op2, inval, type)         \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv_i32 opc;                                                 \
    TCGv_ptr xa, xb;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    opc = tcg_const_i32(ctx->opcode);                             \
    xa = gen_vsr_ptr(rA(ctx->opcode) + 32);                       \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                       \
    gen_helper_##name(cpu_env, opc, xa, xb);                      \
    tcg_temp_free_i32(opc);                                       \
    tcg_temp_free_ptr(xa);                                        \
    tcg_temp_free_ptr(xb);                                        \
}

#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type)     \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv_i64 t0;                                                  \
    TCGv_i64 t1;                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    t0 = tcg_temp_new_i64();                                      \
    t1 = tcg_temp_new_i64();                                      \
    get_cpu_vsr(t0, xB(ctx->opcode), true);                       \
    gen_helper_##name(t1, cpu_env, t0);                           \
    set_cpu_vsr(xT(ctx->opcode), t1, true);                       \
    set_cpu_vsr(xT(ctx->opcode), tcg_constant_i64(0), false);     \
    tcg_temp_free_i64(t0);                                        \
    tcg_temp_free_i64(t1);                                        \
}
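
/*
 * Table of helper-backed instructions.  Note that the op1/op2/inval/
 * type arguments are not referenced by the macro bodies above; they
 * only document the opcode assignment of each instruction.
 */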
GEN_VSX_HELPER_X3(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_R3(xsaddqp, 0x04, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_R3(xsmulqp, 0x04, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_R3(xsdivqp, 0x04, 0x11, 0, PPC2_ISA300)
GEN_VSX_HELPER_X2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_X1(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xscmpexpdp, 0x0C, 0x07, 0, PPC2_ISA300)
GEN_VSX_HELPER_R2_AB(xscmpexpqp, 0x04, 0x05, 0, PPC2_ISA300)
GEN_VSX_HELPER_X2_AB(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_R2_AB(xscmpoqp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_R2_AB(xscmpuqp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdphp, 0x16, 0x15, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
GEN_VSX_HELPER_R2(xscvdpqp, 0x04, 0x1A, 0x16, PPC2_ISA300)
GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_R2(xscvqpsdz, 0x04, 0x1A, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvqpswz, 0x04, 0x1A, 0x09, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvqpudz, 0x04, 0x1A, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvqpuwz, 0x04, 0x1A, 0x01, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvhpdp, 0x16, 0x15, 0x10, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvsdqp, 0x04, 0x1A, 0x0A, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn, 0x16, 0x14, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_R2(xscvudqp, 0x04, 0x1A, 0x02, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_R2(xsrqpi, 0x05, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_R2(xsrqpxp, 0x05, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_R2(xssqrtqp, 0x04, 0x19, 0x1B, PPC2_ISA300)
GEN_VSX_HELPER_R3(xssubqp, 0x04, 0x10, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_X1(xststdcsp, 0x14, 0x12, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcdp, 0x14, 0x16, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcqp, 0x04, 0x16, 0, PPC2_ISA300)

GEN_VSX_HELPER_X3(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X1(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)

GEN_VSX_HELPER_X3(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X1(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvhpsp, 0x16, 0x1D, 0x18, PPC2_ISA300)
GEN_VSX_HELPER_X2(xvcvsphp, 0x16, 0x1D, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_X2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcsp, 0x14, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcdp, 0x14, 0x1E, 0, PPC2_VSX)

static bool trans_XXPERM(DisasContext *ctx, arg_XX3 *a)
{
    TCGv_ptr xt, xa, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    xt = gen_vsr_ptr(a->xt);
    xa = gen_vsr_ptr(a->xa);
    xb = gen_vsr_ptr(a->xb);

    gen_helper_VPERM(xt, xa, xt, xb);

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xa);
    tcg_temp_free_ptr(xb);

    return true;
}

static bool trans_XXPERMR(DisasContext *ctx, arg_XX3 *a)
{
    TCGv_ptr xt, xa, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    xt = gen_vsr_ptr(a->xt);
    xa = gen_vsr_ptr(a->xa);
    xb = gen_vsr_ptr(a->xb);

    gen_helper_VPERMR(xt, xa, xt, xb);

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xa);
    tcg_temp_free_ptr(xb);

    return true;
}
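
/*
 * XXPERMDI assembles the target from one doubleword of each source:
 * dm bit 1 selects the half of xA that becomes the target's high
 * doubleword, dm bit 0 the half of xB that becomes the low one.  The
 * extra temporary is only needed when the target overlaps a source.
 */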
static bool trans_XXPERMDI(DisasContext *ctx, arg_XX3_dm *a)
{
    TCGv_i64 t0, t1;

    REQUIRE_INSNS_FLAGS2(ctx, VSX);
    REQUIRE_VSX(ctx);

    t0 = tcg_temp_new_i64();

    if (unlikely(a->xt == a->xa || a->xt == a->xb)) {
        t1 = tcg_temp_new_i64();

        get_cpu_vsr(t0, a->xa, (a->dm & 2) == 0);
        get_cpu_vsr(t1, a->xb, (a->dm & 1) == 0);

        set_cpu_vsr(a->xt, t0, true);
        set_cpu_vsr(a->xt, t1, false);

        tcg_temp_free_i64(t1);
    } else {
        get_cpu_vsr(t0, a->xa, (a->dm & 2) == 0);
        set_cpu_vsr(a->xt, t0, true);

        get_cpu_vsr(t0, a->xb, (a->dm & 1) == 0);
        set_cpu_vsr(a->xt, t0, false);
    }

    tcg_temp_free_i64(t0);

    return true;
}

static bool trans_XXPERMX(DisasContext *ctx, arg_8RR_XX4_uim3 *a)
{
    TCGv_ptr xt, xa, xb, xc;

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    xt = gen_vsr_ptr(a->xt);
    xa = gen_vsr_ptr(a->xa);
    xb = gen_vsr_ptr(a->xb);
    xc = gen_vsr_ptr(a->xc);

    gen_helper_XXPERMX(xt, xa, xb, xc, tcg_constant_tl(a->uim3));

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xa);
    tcg_temp_free_ptr(xb);
    tcg_temp_free_ptr(xc);

    return true;
}

typedef void (*xxgenpcv_genfn)(TCGv_ptr, TCGv_ptr);

static bool do_xxgenpcv(DisasContext *ctx, arg_X_imm5 *a,
                        const xxgenpcv_genfn fn[4])
{
    TCGv_ptr xt, vrb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    if (a->imm & ~0x3) {
        gen_invalid(ctx);
        return true;
    }

    xt = gen_vsr_ptr(a->xt);
    vrb = gen_avr_ptr(a->vrb);

    fn[a->imm](xt, vrb);

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(vrb);

    return true;
}

#define XXGENPCV(NAME)                                            \
    static bool trans_##NAME(DisasContext *ctx, arg_X_imm5 *a)    \
    {                                                             \
        static const xxgenpcv_genfn fn[4] = {                     \
            gen_helper_##NAME##_be_exp,                           \
            gen_helper_##NAME##_be_comp,                          \
            gen_helper_##NAME##_le_exp,                           \
            gen_helper_##NAME##_le_comp,                          \
        };                                                        \
        return do_xxgenpcv(ctx, a, fn);                           \
    }

XXGENPCV(XXGENPCVBM)
XXGENPCV(XXGENPCVHM)
XXGENPCV(XXGENPCVWM)
XXGENPCV(XXGENPCVDM)
#undef XXGENPCV

static bool do_xsmadd(DisasContext *ctx, int tgt, int src1, int src2, int src3,
        void (*gen_helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
{
    TCGv_ptr t, s1, s2, s3;

    t = gen_vsr_ptr(tgt);
    s1 = gen_vsr_ptr(src1);
    s2 = gen_vsr_ptr(src2);
    s3 = gen_vsr_ptr(src3);

    gen_helper(cpu_env, t, s1, s2, s3);

    tcg_temp_free_ptr(t);
    tcg_temp_free_ptr(s1);
    tcg_temp_free_ptr(s2);
    tcg_temp_free_ptr(s3);

    return true;
}

static bool do_xsmadd_XX3(DisasContext *ctx, arg_XX3 *a, bool type_a,
        void (*gen_helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
{
    REQUIRE_VSX(ctx);

    if (type_a) {
        return do_xsmadd(ctx, a->xt, a->xa, a->xt, a->xb, gen_helper);
    }
    return do_xsmadd(ctx, a->xt, a->xa, a->xb, a->xt, gen_helper);
}
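
/*
 * Scalar fused multiply-add family: the A-form computes
 * xT = xA * xT +/- xB and the M-form xT = xA * xB +/- xT, so
 * do_xsmadd_XX3 only has to reorder the operand pointers before
 * calling the shared helper.
 */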
TRANS_FLAGS2(VSX, XSMADDADP, do_xsmadd_XX3, true, gen_helper_XSMADDDP)
TRANS_FLAGS2(VSX, XSMADDMDP, do_xsmadd_XX3, false, gen_helper_XSMADDDP)
TRANS_FLAGS2(VSX, XSMSUBADP, do_xsmadd_XX3, true, gen_helper_XSMSUBDP)
TRANS_FLAGS2(VSX, XSMSUBMDP, do_xsmadd_XX3, false, gen_helper_XSMSUBDP)
TRANS_FLAGS2(VSX, XSNMADDADP, do_xsmadd_XX3, true, gen_helper_XSNMADDDP)
TRANS_FLAGS2(VSX, XSNMADDMDP, do_xsmadd_XX3, false, gen_helper_XSNMADDDP)
TRANS_FLAGS2(VSX, XSNMSUBADP, do_xsmadd_XX3, true, gen_helper_XSNMSUBDP)
TRANS_FLAGS2(VSX, XSNMSUBMDP, do_xsmadd_XX3, false, gen_helper_XSNMSUBDP)
TRANS_FLAGS2(VSX207, XSMADDASP, do_xsmadd_XX3, true, gen_helper_XSMADDSP)
TRANS_FLAGS2(VSX207, XSMADDMSP, do_xsmadd_XX3, false, gen_helper_XSMADDSP)
TRANS_FLAGS2(VSX207, XSMSUBASP, do_xsmadd_XX3, true, gen_helper_XSMSUBSP)
TRANS_FLAGS2(VSX207, XSMSUBMSP, do_xsmadd_XX3, false, gen_helper_XSMSUBSP)
TRANS_FLAGS2(VSX207, XSNMADDASP, do_xsmadd_XX3, true, gen_helper_XSNMADDSP)
TRANS_FLAGS2(VSX207, XSNMADDMSP, do_xsmadd_XX3, false, gen_helper_XSNMADDSP)
TRANS_FLAGS2(VSX207, XSNMSUBASP, do_xsmadd_XX3, true, gen_helper_XSNMSUBSP)
TRANS_FLAGS2(VSX207, XSNMSUBMSP, do_xsmadd_XX3, false, gen_helper_XSNMSUBSP)

static bool do_xsmadd_X(DisasContext *ctx, arg_X_rc *a,
        void (*gen_helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr),
        void (*gen_helper_ro)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
{
    int vrt, vra, vrb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    vrt = a->rt + 32;
    vra = a->ra + 32;
    vrb = a->rb + 32;

    if (a->rc) {
        return do_xsmadd(ctx, vrt, vra, vrt, vrb, gen_helper_ro);
    }

    return do_xsmadd(ctx, vrt, vra, vrt, vrb, gen_helper);
}

TRANS(XSMADDQP, do_xsmadd_X, gen_helper_XSMADDQP, gen_helper_XSMADDQPO)
TRANS(XSMSUBQP, do_xsmadd_X, gen_helper_XSMSUBQP, gen_helper_XSMSUBQPO)
TRANS(XSNMADDQP, do_xsmadd_X, gen_helper_XSNMADDQP, gen_helper_XSNMADDQPO)
TRANS(XSNMSUBQP, do_xsmadd_X, gen_helper_XSNMSUBQP, gen_helper_XSNMSUBQPO)

#define GEN_VSX_HELPER_VSX_MADD(name, op1, aop, mop, inval, type) \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv_ptr xt, xa, b, c;                                        \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    xt = gen_vsr_ptr(xT(ctx->opcode));                            \
    xa = gen_vsr_ptr(xA(ctx->opcode));                            \
    if (ctx->opcode & PPC_BIT32(25)) {                            \
        /*                                                        \
         * AxT + B                                                \
         */                                                       \
        b = gen_vsr_ptr(xT(ctx->opcode));                         \
        c = gen_vsr_ptr(xB(ctx->opcode));                         \
    } else {                                                      \
        /*                                                        \
         * AxB + T                                                \
         */                                                       \
        b = gen_vsr_ptr(xB(ctx->opcode));                         \
        c = gen_vsr_ptr(xT(ctx->opcode));                         \
    }                                                             \
    gen_helper_##name(cpu_env, xt, xa, b, c);                     \
    tcg_temp_free_ptr(xt);                                        \
    tcg_temp_free_ptr(xa);                                        \
    tcg_temp_free_ptr(b);                                         \
    tcg_temp_free_ptr(c);                                         \
}

GEN_VSX_HELPER_VSX_MADD(xvmadddp, 0x04, 0x0C, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvmsubdp, 0x04, 0x0E, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmadddp, 0x04, 0x1C, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmsubdp, 0x04, 0x1E, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvmaddsp, 0x04, 0x08, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvmsubsp, 0x04, 0x0A, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmaddsp, 0x04, 0x18, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmsubsp, 0x04, 0x1A, 0x1B, 0, PPC2_VSX)
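
/*
 * xxbrh/xxbrw/xxbrd/xxbrq reverse the bytes within each element.
 * The quadword form byte-swaps each doubleword and then exchanges
 * the two halves, which amounts to a full 16-byte reversal.
 */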
static void gen_xxbrd(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    tcg_gen_bswap64_i64(xth, xbh);
    tcg_gen_bswap64_i64(xtl, xbl);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xxbrh(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    gen_bswap16x8(xth, xtl, xbh, xbl);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xxbrq(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);
    t0 = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(t0, xbl);
    tcg_gen_bswap64_i64(xtl, xbh);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);
    tcg_gen_mov_i64(xth, t0);
    set_cpu_vsr(xT(ctx->opcode), xth, true);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xxbrw(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    gen_bswap32x4(xth, xtl, xbh, xbl);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

#define VSX_LOGICAL(name, vece, tcg_op)                           \
static void glue(gen_, name)(DisasContext *ctx)                   \
    {                                                             \
        if (unlikely(!ctx->vsx_enabled)) {                        \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                \
            return;                                               \
        }                                                         \
        tcg_op(vece, vsr_full_offset(xT(ctx->opcode)),            \
               vsr_full_offset(xA(ctx->opcode)),                  \
               vsr_full_offset(xB(ctx->opcode)), 16, 16);         \
    }

VSX_LOGICAL(xxland, MO_64, tcg_gen_gvec_and)
VSX_LOGICAL(xxlandc, MO_64, tcg_gen_gvec_andc)
VSX_LOGICAL(xxlor, MO_64, tcg_gen_gvec_or)
VSX_LOGICAL(xxlxor, MO_64, tcg_gen_gvec_xor)
VSX_LOGICAL(xxlnor, MO_64, tcg_gen_gvec_nor)
VSX_LOGICAL(xxleqv, MO_64, tcg_gen_gvec_eqv)
VSX_LOGICAL(xxlnand, MO_64, tcg_gen_gvec_nand)
VSX_LOGICAL(xxlorc, MO_64, tcg_gen_gvec_orc)
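
/*
 * xxmrghw (high = 1) and xxmrglw (high = 0) read one doubleword from
 * each source and interleave its two words: deposit pairs word 0 of
 * xA with word 0 of xB in the target's high doubleword, and word 1
 * of each in the low doubleword.
 */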
#define VSX_XXMRG(name, high)                                     \
static void glue(gen_, name)(DisasContext *ctx)                   \
    {                                                             \
        TCGv_i64 a0, a1, b0, b1, tmp;                             \
        if (unlikely(!ctx->vsx_enabled)) {                        \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                \
            return;                                               \
        }                                                         \
        a0 = tcg_temp_new_i64();                                  \
        a1 = tcg_temp_new_i64();                                  \
        b0 = tcg_temp_new_i64();                                  \
        b1 = tcg_temp_new_i64();                                  \
        tmp = tcg_temp_new_i64();                                 \
        get_cpu_vsr(a0, xA(ctx->opcode), high);                   \
        get_cpu_vsr(a1, xA(ctx->opcode), high);                   \
        get_cpu_vsr(b0, xB(ctx->opcode), high);                   \
        get_cpu_vsr(b1, xB(ctx->opcode), high);                   \
        tcg_gen_shri_i64(a0, a0, 32);                             \
        tcg_gen_shri_i64(b0, b0, 32);                             \
        tcg_gen_deposit_i64(tmp, b0, a0, 32, 32);                 \
        set_cpu_vsr(xT(ctx->opcode), tmp, true);                  \
        tcg_gen_deposit_i64(tmp, b1, a1, 32, 32);                 \
        set_cpu_vsr(xT(ctx->opcode), tmp, false);                 \
        tcg_temp_free_i64(a0);                                    \
        tcg_temp_free_i64(a1);                                    \
        tcg_temp_free_i64(b0);                                    \
        tcg_temp_free_i64(b1);                                    \
        tcg_temp_free_i64(tmp);                                   \
    }

VSX_XXMRG(xxmrghw, 1)
VSX_XXMRG(xxmrglw, 0)

static bool trans_XXSEL(DisasContext *ctx, arg_XX4 *a)
{
    REQUIRE_INSNS_FLAGS2(ctx, VSX);
    REQUIRE_VSX(ctx);

    tcg_gen_gvec_bitsel(MO_64, vsr_full_offset(a->xt), vsr_full_offset(a->xc),
                        vsr_full_offset(a->xb), vsr_full_offset(a->xa), 16, 16);

    return true;
}

static bool trans_XXSPLTW(DisasContext *ctx, arg_XX2_uim2 *a)
{
    int tofs, bofs;

    REQUIRE_VSX(ctx);

    tofs = vsr_full_offset(a->xt);
    bofs = vsr_full_offset(a->xb);
    bofs += a->uim << MO_32;
#ifndef HOST_WORDS_BIG_ENDIAN
    bofs ^= 8 | 4;
#endif

    tcg_gen_gvec_dup_mem(MO_32, tofs, bofs, 16, 16);
    return true;
}

#define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))

static bool trans_XXSPLTIB(DisasContext *ctx, arg_X_imm8 *a)
{
    if (a->xt < 32) {
        REQUIRE_VSX(ctx);
    } else {
        REQUIRE_VECTOR(ctx);
    }
    tcg_gen_gvec_dup_imm(MO_8, vsr_full_offset(a->xt), 16, 16, a->imm);
    return true;
}

static bool trans_XXSPLTIW(DisasContext *ctx, arg_8RR_D *a)
{
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    tcg_gen_gvec_dup_imm(MO_32, vsr_full_offset(a->xt), 16, 16, a->si);

    return true;
}

static bool trans_XXSPLTIDP(DisasContext *ctx, arg_8RR_D *a)
{
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    tcg_gen_gvec_dup_imm(MO_64, vsr_full_offset(a->xt), 16, 16,
                         helper_todouble(a->si));
    return true;
}

static bool trans_XXSPLTI32DX(DisasContext *ctx, arg_8RR_D_IX *a)
{
    TCGv_i32 imm;

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    imm = tcg_constant_i32(a->si);

    tcg_gen_st_i32(imm, cpu_env,
                   offsetof(CPUPPCState, vsr[a->xt].VsrW(0 + a->ix)));
    tcg_gen_st_i32(imm, cpu_env,
                   offsetof(CPUPPCState, vsr[a->xt].VsrW(2 + a->ix)));

    return true;
}
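
/*
 * LXVKQ materialises one of a fixed set of quad-precision values.
 * The table holds the high doubleword of every encodable constant
 * (the low doubleword is always zero); all-zero entries mark UIM
 * encodings that the ISA leaves unspecified and that are rejected
 * here as invalid forms.
 */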
static bool trans_LXVKQ(DisasContext *ctx, arg_X_uim5 *a)
{
    static const uint64_t values[32] = {
        0,                       /* Unspecified */
        0x3FFF000000000000llu,   /* QP +1.0 */
        0x4000000000000000llu,   /* QP +2.0 */
        0x4000800000000000llu,   /* QP +3.0 */
        0x4001000000000000llu,   /* QP +4.0 */
        0x4001400000000000llu,   /* QP +5.0 */
        0x4001800000000000llu,   /* QP +6.0 */
        0x4001C00000000000llu,   /* QP +7.0 */
        0x7FFF000000000000llu,   /* QP +Inf */
        0x7FFF800000000000llu,   /* QP dQNaN */
        0,                       /* Unspecified */
        0,                       /* Unspecified */
        0,                       /* Unspecified */
        0,                       /* Unspecified */
        0,                       /* Unspecified */
        0,                       /* Unspecified */
        0x8000000000000000llu,   /* QP -0.0 */
        0xBFFF000000000000llu,   /* QP -1.0 */
        0xC000000000000000llu,   /* QP -2.0 */
        0xC000800000000000llu,   /* QP -3.0 */
        0xC001000000000000llu,   /* QP -4.0 */
        0xC001400000000000llu,   /* QP -5.0 */
        0xC001800000000000llu,   /* QP -6.0 */
        0xC001C00000000000llu,   /* QP -7.0 */
        0xFFFF000000000000llu,   /* QP -Inf */
    };

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    if (values[a->uim]) {
        set_cpu_vsr(a->xt, tcg_constant_i64(0x0), false);
        set_cpu_vsr(a->xt, tcg_constant_i64(values[a->uim]), true);
    } else {
        gen_invalid(ctx);
    }

    return true;
}

static bool trans_XVTLSBB(DisasContext *ctx, arg_XX2_bf_xb *a)
{
    TCGv_i64 xb, t0, t1, all_true, all_false, mask, zero;

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    xb = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    all_true = tcg_temp_new_i64();
    all_false = tcg_temp_new_i64();
    mask = tcg_constant_i64(dup_const(MO_8, 1));
    zero = tcg_constant_i64(0);

    get_cpu_vsr(xb, a->xb, true);
    tcg_gen_and_i64(t0, mask, xb);
    get_cpu_vsr(xb, a->xb, false);
    tcg_gen_and_i64(t1, mask, xb);

    tcg_gen_or_i64(all_false, t0, t1);
    tcg_gen_and_i64(all_true, t0, t1);

    tcg_gen_setcond_i64(TCG_COND_EQ, all_false, all_false, zero);
    tcg_gen_shli_i64(all_false, all_false, 1);
    tcg_gen_setcond_i64(TCG_COND_EQ, all_true, all_true, mask);
    tcg_gen_shli_i64(all_true, all_true, 3);

    tcg_gen_or_i64(t0, all_false, all_true);
    tcg_gen_extrl_i64_i32(cpu_crf[a->bf], t0);

    tcg_temp_free_i64(xb);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(all_true);
    tcg_temp_free_i64(all_false);

    return true;
}

static void gen_xxsldwi(DisasContext *ctx)
{
    TCGv_i64 xth, xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    switch (SHW(ctx->opcode)) {
    case 0: {
        get_cpu_vsr(xth, xA(ctx->opcode), true);
        get_cpu_vsr(xtl, xA(ctx->opcode), false);
        break;
    }
    case 1: {
        TCGv_i64 t0 = tcg_temp_new_i64();
        get_cpu_vsr(xth, xA(ctx->opcode), true);
        tcg_gen_shli_i64(xth, xth, 32);
        get_cpu_vsr(t0, xA(ctx->opcode), false);
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xth, xth, t0);
        get_cpu_vsr(xtl, xA(ctx->opcode), false);
        tcg_gen_shli_i64(xtl, xtl, 32);
        get_cpu_vsr(t0, xB(ctx->opcode), true);
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xtl, xtl, t0);
        tcg_temp_free_i64(t0);
        break;
    }
    case 2: {
        get_cpu_vsr(xth, xA(ctx->opcode), false);
        get_cpu_vsr(xtl, xB(ctx->opcode), true);
        break;
    }
    case 3: {
        TCGv_i64 t0 = tcg_temp_new_i64();
        get_cpu_vsr(xth, xA(ctx->opcode), false);
        tcg_gen_shli_i64(xth, xth, 32);
        get_cpu_vsr(t0, xB(ctx->opcode), true);
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xth, xth, t0);
        get_cpu_vsr(xtl, xB(ctx->opcode), true);
        tcg_gen_shli_i64(xtl, xtl, 32);
        get_cpu_vsr(t0, xB(ctx->opcode), false);
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xtl, xtl, t0);
        tcg_temp_free_i64(t0);
        break;
    }
    }

    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}
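
/*
 * xxextractuw and xxinsertw share this shape: the 4-bit immediate
 * byte index is range-checked inline and the actual unaligned word
 * extract/insert is done by a per-instruction helper.
 */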
#define VSX_EXTRACT_INSERT(name)                                  \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv_ptr xt, xb;                                              \
    TCGv_i32 t0;                                                  \
    TCGv_i64 t1;                                                  \
    uint8_t uimm = UIMM4(ctx->opcode);                            \
                                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    xt = gen_vsr_ptr(xT(ctx->opcode));                            \
    xb = gen_vsr_ptr(xB(ctx->opcode));                            \
    t0 = tcg_temp_new_i32();                                      \
    t1 = tcg_temp_new_i64();                                      \
    /*                                                            \
     * uimm > 15 is out of bounds and yields zero; uimm values    \
     * 13..15 are passed to the helper, which handles them the    \
     * way hardware does.                                         \
     */                                                           \
    if (uimm > 15) {                                              \
        tcg_gen_movi_i64(t1, 0);                                  \
        set_cpu_vsr(xT(ctx->opcode), t1, true);                   \
        set_cpu_vsr(xT(ctx->opcode), t1, false);                  \
        return;                                                   \
    }                                                             \
    tcg_gen_movi_i32(t0, uimm);                                   \
    gen_helper_##name(cpu_env, xt, xb, t0);                       \
    tcg_temp_free_ptr(xb);                                        \
    tcg_temp_free_ptr(xt);                                        \
    tcg_temp_free_i32(t0);                                        \
    tcg_temp_free_i64(t1);                                        \
}

VSX_EXTRACT_INSERT(xxextractuw)
VSX_EXTRACT_INSERT(xxinsertw)

#ifdef TARGET_PPC64
static void gen_xsxexpdp(DisasContext *ctx)
{
    TCGv rt = cpu_gpr[rD(ctx->opcode)];
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsr(t0, xB(ctx->opcode), true);
    tcg_gen_extract_i64(rt, t0, 52, 11);
    tcg_temp_free_i64(t0);
}

static void gen_xsxexpqp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    get_cpu_vsr(xbh, rB(ctx->opcode) + 32, true);

    tcg_gen_extract_i64(xth, xbh, 48, 15);
    set_cpu_vsr(rD(ctx->opcode) + 32, xth, true);
    tcg_gen_movi_i64(xtl, 0);
    set_cpu_vsr(rD(ctx->opcode) + 32, xtl, false);

    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

static void gen_xsiexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv ra = cpu_gpr[rA(ctx->opcode)];
    TCGv rb = cpu_gpr[rB(ctx->opcode)];
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    xth = tcg_temp_new_i64();
    tcg_gen_andi_i64(xth, ra, 0x800FFFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, rb, 0x7FF);
    tcg_gen_shli_i64(t0, t0, 52);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), tcg_constant_i64(0), false);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
}
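
/*
 * Quad-precision insert-exponent: the 15-bit exponent taken from the
 * high doubleword of VRB replaces bits 48-62 of VRA's high
 * doubleword, while VRA's low doubleword passes through unchanged.
 */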
static void gen_xsiexpqp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    get_cpu_vsr(xah, rA(ctx->opcode) + 32, true);
    get_cpu_vsr(xal, rA(ctx->opcode) + 32, false);
    xbh = tcg_temp_new_i64();
    get_cpu_vsr(xbh, rB(ctx->opcode) + 32, true);
    t0 = tcg_temp_new_i64();

    tcg_gen_andi_i64(xth, xah, 0x8000FFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, xbh, 0x7FFF);
    tcg_gen_shli_i64(t0, t0, 48);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsr(rD(ctx->opcode) + 32, xth, true);
    tcg_gen_mov_i64(xtl, xal);
    set_cpu_vsr(rD(ctx->opcode) + 32, xtl, false);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
}

static void gen_xsxsigdp(DisasContext *ctx)
{
    TCGv rt = cpu_gpr[rD(ctx->opcode)];
    TCGv_i64 t0, t1, zr, nan, exp;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(2047);

    get_cpu_vsr(t1, xB(ctx->opcode), true);
    tcg_gen_extract_i64(exp, t1, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    get_cpu_vsr(t1, xB(ctx->opcode), true);
    tcg_gen_deposit_i64(rt, t0, t1, 0, 52);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
}

static void gen_xsxsigqp(DisasContext *ctx)
{
    TCGv_i64 t0, zr, nan, exp;
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, rB(ctx->opcode) + 32, true);
    get_cpu_vsr(xbl, rB(ctx->opcode) + 32, false);
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(32767);

    tcg_gen_extract_i64(exp, xbh, 48, 15);
    tcg_gen_movi_i64(t0, 0x0001000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xth, t0, xbh, 0, 48);
    set_cpu_vsr(rD(ctx->opcode) + 32, xth, true);
    tcg_gen_mov_i64(xtl, xbl);
    set_cpu_vsr(rD(ctx->opcode) + 32, xtl, false);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
#endif
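
/*
 * The vector insert/extract-exponent pairs below work on both
 * elements of a doubleword at once by masking and shifting: single
 * precision uses an 8-bit exponent field at bit 23 of each word
 * (hence the 0xFF000000FF mask after shifting), double precision an
 * 11-bit field at bit 52.
 */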
static void gen_xviexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xah, xA(ctx->opcode), true);
    get_cpu_vsr(xal, xA(ctx->opcode), false);
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    tcg_gen_deposit_i64(xth, xah, xbh, 52, 11);
    set_cpu_vsr(xT(ctx->opcode), xth, true);

    tcg_gen_deposit_i64(xtl, xal, xbl, 52, 11);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xvxexpsp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    tcg_gen_shri_i64(xth, xbh, 23);
    tcg_gen_andi_i64(xth, xth, 0xFF000000FF);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    tcg_gen_shri_i64(xtl, xbl, 23);
    tcg_gen_andi_i64(xtl, xtl, 0xFF000000FF);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xvxexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    tcg_gen_extract_i64(xth, xbh, 52, 11);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    tcg_gen_extract_i64(xtl, xbl, 52, 11);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

GEN_VSX_HELPER_X2(xvxsigsp, 0x00, 0x04, 0, PPC2_ISA300)

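/*
 * xvxsigdp: the significand extraction done by xsxsigdp above, applied
 * to both doublewords of VSR[XB].
 */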
static void gen_xvxsigdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0, zr, nan, exp;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(2047);

    tcg_gen_extract_i64(exp, xbh, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xth, t0, xbh, 0, 52);
    set_cpu_vsr(xT(ctx->opcode), xth, true);

    tcg_gen_extract_i64(exp, xbl, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xtl, t0, xbl, 0, 52);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static bool do_lstxv(DisasContext *ctx, int ra, TCGv displ,
                     int rt, bool store, bool paired)
{
    TCGv ea;
    TCGv_i64 xt;
    MemOp mop;
    int rt1, rt2;

    xt = tcg_temp_new_i64();

    mop = DEF_MEMOP(MO_UQ);

    gen_set_access_type(ctx, ACCESS_INT);
    ea = do_ea_calc(ctx, ra, displ);

    /*
     * In little-endian mode, paired accesses transfer the two registers
     * of the pair in reverse order, as required by LXVP/STXVP.
     */
    if (paired && ctx->le_mode) {
        rt1 = rt + 1;
        rt2 = rt;
    } else {
        rt1 = rt;
        rt2 = rt + 1;
    }

    if (store) {
        get_cpu_vsr(xt, rt1, !ctx->le_mode);
        tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
        gen_addr_add(ctx, ea, ea, 8);
        get_cpu_vsr(xt, rt1, ctx->le_mode);
        tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
        if (paired) {
            gen_addr_add(ctx, ea, ea, 8);
            get_cpu_vsr(xt, rt2, !ctx->le_mode);
            tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
            gen_addr_add(ctx, ea, ea, 8);
            get_cpu_vsr(xt, rt2, ctx->le_mode);
            tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
        }
    } else {
        tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
        set_cpu_vsr(rt1, xt, !ctx->le_mode);
        gen_addr_add(ctx, ea, ea, 8);
        tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
        set_cpu_vsr(rt1, xt, ctx->le_mode);
        if (paired) {
            gen_addr_add(ctx, ea, ea, 8);
            tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
            set_cpu_vsr(rt2, xt, !ctx->le_mode);
            gen_addr_add(ctx, ea, ea, 8);
            tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
            set_cpu_vsr(rt2, xt, ctx->le_mode);
        }
    }

    tcg_temp_free(ea);
    tcg_temp_free_i64(xt);
    return true;
}

static bool do_lstxv_D(DisasContext *ctx, arg_D *a, bool store, bool paired)
{
    if (paired || a->rt < 32) {
        REQUIRE_VSX(ctx);
    } else {
        REQUIRE_VECTOR(ctx);
    }

    return do_lstxv(ctx, a->ra, tcg_constant_tl(a->si), a->rt, store, paired);
}

static bool do_lstxv_PLS_D(DisasContext *ctx, arg_PLS_D *a,
                           bool store, bool paired)
{
    arg_D d;
    REQUIRE_VSX(ctx);

    if (!resolve_PLS_D(ctx, &d, a)) {
        return true;
    }

    return do_lstxv(ctx, d.ra, tcg_constant_tl(d.si), d.rt, store, paired);
}

static bool do_lstxv_X(DisasContext *ctx, arg_X *a, bool store, bool paired)
{
    if (paired || a->rt < 32) {
        REQUIRE_VSX(ctx);
    } else {
        REQUIRE_VECTOR(ctx);
    }

    return do_lstxv(ctx, a->ra, cpu_gpr[a->rb], a->rt, store, paired);
}

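/*
 * LXSD/STXSD and their prefixed forms transfer a doubleword between
 * memory and the most significant half of VSR[rt + 32] (a VMX
 * register); on load, the least significant half is cleared.
 */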
static bool do_lstxsd(DisasContext *ctx, int rt, int ra, TCGv displ, bool store)
{
    TCGv ea;
    TCGv_i64 xt;
    MemOp mop;

    REQUIRE_VECTOR(ctx);

    xt = tcg_temp_new_i64();
    mop = DEF_MEMOP(MO_UQ);

    gen_set_access_type(ctx, ACCESS_INT);
    ea = do_ea_calc(ctx, ra, displ);

    if (store) {
        get_cpu_vsr(xt, rt + 32, true);
        tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
    } else {
        tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
        set_cpu_vsr(rt + 32, xt, true);
        set_cpu_vsr(rt + 32, tcg_constant_i64(0), false);
    }

    tcg_temp_free(ea);
    tcg_temp_free_i64(xt);

    return true;
}

static bool do_lstxsd_DS(DisasContext *ctx, arg_D *a, bool store)
{
    return do_lstxsd(ctx, a->rt, a->ra, tcg_constant_tl(a->si), store);
}

static bool do_plstxsd_PLS_D(DisasContext *ctx, arg_PLS_D *a, bool store)
{
    arg_D d;

    if (!resolve_PLS_D(ctx, &d, a)) {
        return true;
    }

    return do_lstxsd(ctx, d.rt, d.ra, tcg_constant_tl(d.si), store);
}

static bool do_lstxssp(DisasContext *ctx, int rt, int ra, TCGv displ, bool store)
{
    TCGv ea;
    TCGv_i64 xt;

    REQUIRE_VECTOR(ctx);

    xt = tcg_temp_new_i64();

    gen_set_access_type(ctx, ACCESS_INT);
    ea = do_ea_calc(ctx, ra, displ);

    if (store) {
        get_cpu_vsr(xt, rt + 32, true);
        gen_qemu_st32fs(ctx, xt, ea);
    } else {
        gen_qemu_ld32fs(ctx, xt, ea);
        set_cpu_vsr(rt + 32, xt, true);
        set_cpu_vsr(rt + 32, tcg_constant_i64(0), false);
    }

    tcg_temp_free(ea);
    tcg_temp_free_i64(xt);

    return true;
}

static bool do_lstxssp_DS(DisasContext *ctx, arg_D *a, bool store)
{
    return do_lstxssp(ctx, a->rt, a->ra, tcg_constant_tl(a->si), store);
}

static bool do_plstxssp_PLS_D(DisasContext *ctx, arg_PLS_D *a, bool store)
{
    arg_D d;

    if (!resolve_PLS_D(ctx, &d, a)) {
        return true;
    }

    return do_lstxssp(ctx, d.rt, d.ra, tcg_constant_tl(d.si), store);
}

TRANS_FLAGS2(ISA300, LXSD, do_lstxsd_DS, false)
TRANS_FLAGS2(ISA300, STXSD, do_lstxsd_DS, true)
TRANS_FLAGS2(ISA300, LXSSP, do_lstxssp_DS, false)
TRANS_FLAGS2(ISA300, STXSSP, do_lstxssp_DS, true)
TRANS_FLAGS2(ISA300, STXV, do_lstxv_D, true, false)
TRANS_FLAGS2(ISA300, LXV, do_lstxv_D, false, false)
TRANS_FLAGS2(ISA310, STXVP, do_lstxv_D, true, true)
TRANS_FLAGS2(ISA310, LXVP, do_lstxv_D, false, true)
TRANS_FLAGS2(ISA300, STXVX, do_lstxv_X, true, false)
TRANS_FLAGS2(ISA300, LXVX, do_lstxv_X, false, false)
TRANS_FLAGS2(ISA310, STXVPX, do_lstxv_X, true, true)
TRANS_FLAGS2(ISA310, LXVPX, do_lstxv_X, false, true)
TRANS64_FLAGS2(ISA310, PLXSD, do_plstxsd_PLS_D, false)
TRANS64_FLAGS2(ISA310, PSTXSD, do_plstxsd_PLS_D, true)
TRANS64_FLAGS2(ISA310, PLXSSP, do_plstxssp_PLS_D, false)
TRANS64_FLAGS2(ISA310, PSTXSSP, do_plstxssp_PLS_D, true)
TRANS64_FLAGS2(ISA310, PSTXV, do_lstxv_PLS_D, true, false)
TRANS64_FLAGS2(ISA310, PLXV, do_lstxv_PLS_D, false, false)
TRANS64_FLAGS2(ISA310, PSTXVP, do_lstxv_PLS_D, true, true)
TRANS64_FLAGS2(ISA310, PLXVP, do_lstxv_PLS_D, false, true)

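/*
 * Load/Store VSX Vector Rightmost element: the element is transferred
 * via the least significant doubleword of VSR[XT]; on load, the most
 * significant doubleword is cleared.
 */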
static bool do_lstrm(DisasContext *ctx, arg_X *a, MemOp mop, bool store)
{
    TCGv ea;
    TCGv_i64 xt;

    REQUIRE_VSX(ctx);

    xt = tcg_temp_new_i64();

    gen_set_access_type(ctx, ACCESS_INT);
    ea = do_ea_calc(ctx, a->ra, cpu_gpr[a->rb]);

    if (store) {
        get_cpu_vsr(xt, a->rt, false);
        tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
    } else {
        tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
        set_cpu_vsr(a->rt, xt, false);
        set_cpu_vsr(a->rt, tcg_constant_i64(0), true);
    }

    tcg_temp_free(ea);
    tcg_temp_free_i64(xt);
    return true;
}

TRANS_FLAGS2(ISA310, LXVRBX, do_lstrm, DEF_MEMOP(MO_UB), false)
TRANS_FLAGS2(ISA310, LXVRHX, do_lstrm, DEF_MEMOP(MO_UW), false)
TRANS_FLAGS2(ISA310, LXVRWX, do_lstrm, DEF_MEMOP(MO_UL), false)
TRANS_FLAGS2(ISA310, LXVRDX, do_lstrm, DEF_MEMOP(MO_UQ), false)
TRANS_FLAGS2(ISA310, STXVRBX, do_lstrm, DEF_MEMOP(MO_UB), true)
TRANS_FLAGS2(ISA310, STXVRHX, do_lstrm, DEF_MEMOP(MO_UW), true)
TRANS_FLAGS2(ISA310, STXVRWX, do_lstrm, DEF_MEMOP(MO_UL), true)
TRANS_FLAGS2(ISA310, STXVRDX, do_lstrm, DEF_MEMOP(MO_UQ), true)

static void gen_xxeval_i64(TCGv_i64 t, TCGv_i64 a, TCGv_i64 b, TCGv_i64 c,
                           int64_t imm)
{
    /*
     * imm is an 8-bit truth table over (A, B, C): PowerISA bit i of imm
     * holds the result for A = i>>2, B = (i>>1)&1, C = i&1. Instead of
     * processing imm bit-by-bit, we'll skip the computation of
     * conjunctions whose corresponding bit is unset.
     */
    int bit;
    TCGv_i64 conj, disj;

    conj = tcg_temp_new_i64();
    disj = tcg_const_i64(0);

    /* Iterate over set bits from the least to the most significant bit */
    while (imm) {
        /*
         * Get the next bit to be processed with ctz64. Invert the result of
         * ctz64 to match the indexing used by PowerISA.
         */
        bit = 7 - ctz64(imm);
        if (bit & 0x4) {
            tcg_gen_mov_i64(conj, a);
        } else {
            tcg_gen_not_i64(conj, a);
        }
        if (bit & 0x2) {
            tcg_gen_and_i64(conj, conj, b);
        } else {
            tcg_gen_andc_i64(conj, conj, b);
        }
        if (bit & 0x1) {
            tcg_gen_and_i64(conj, conj, c);
        } else {
            tcg_gen_andc_i64(conj, conj, c);
        }
        tcg_gen_or_i64(disj, disj, conj);

        /* Unset the least significant bit that is set */
        imm &= imm - 1;
    }

    tcg_gen_mov_i64(t, disj);

    tcg_temp_free_i64(conj);
    tcg_temp_free_i64(disj);
}

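/*
 * Same algorithm as gen_xxeval_i64, emitted with TCG vector ops so the
 * backend can use host SIMD instructions where available.
 */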
static void gen_xxeval_vec(unsigned vece, TCGv_vec t, TCGv_vec a, TCGv_vec b,
                           TCGv_vec c, int64_t imm)
{
    /*
     * Instead of processing imm bit-by-bit, we'll skip the computation of
     * conjunctions whose corresponding bit is unset.
     */
    int bit;
    TCGv_vec disj, conj;

    disj = tcg_const_zeros_vec_matching(t);
    conj = tcg_temp_new_vec_matching(t);

    /* Iterate over set bits from the least to the most significant bit */
    while (imm) {
        /*
         * Get the next bit to be processed with ctz64. Invert the result of
         * ctz64 to match the indexing used by PowerISA.
         */
        bit = 7 - ctz64(imm);
        if (bit & 0x4) {
            tcg_gen_mov_vec(conj, a);
        } else {
            tcg_gen_not_vec(vece, conj, a);
        }
        if (bit & 0x2) {
            tcg_gen_and_vec(vece, conj, conj, b);
        } else {
            tcg_gen_andc_vec(vece, conj, conj, b);
        }
        if (bit & 0x1) {
            tcg_gen_and_vec(vece, conj, conj, c);
        } else {
            tcg_gen_andc_vec(vece, conj, conj, c);
        }
        tcg_gen_or_vec(vece, disj, disj, conj);

        /* Unset the least significant bit that is set */
        imm &= imm - 1;
    }

    tcg_gen_mov_vec(t, disj);

    tcg_temp_free_vec(disj);
    tcg_temp_free_vec(conj);
}

static bool trans_XXEVAL(DisasContext *ctx, arg_8RR_XX4_imm *a)
{
    static const TCGOpcode vecop_list[] = {
        INDEX_op_andc_vec, 0
    };
    static const GVecGen4i op = {
        .fniv = gen_xxeval_vec,
        .fno = gen_helper_XXEVAL,
        .fni8 = gen_xxeval_i64,
        .opt_opc = vecop_list,
        .vece = MO_64
    };
    int xt = vsr_full_offset(a->xt), xa = vsr_full_offset(a->xa),
        xb = vsr_full_offset(a->xb), xc = vsr_full_offset(a->xc);

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    /* Equivalent functions that can be implemented with a single gen_gvec */
    switch (a->imm) {
    case 0b00000000: /* false */
        set_cpu_vsr(a->xt, tcg_constant_i64(0), true);
        set_cpu_vsr(a->xt, tcg_constant_i64(0), false);
        break;
    case 0b00000011: /* and(B,A) */
        tcg_gen_gvec_and(MO_64, xt, xb, xa, 16, 16);
        break;
    case 0b00000101: /* and(C,A) */
        tcg_gen_gvec_and(MO_64, xt, xc, xa, 16, 16);
        break;
    case 0b00001111: /* A */
        tcg_gen_gvec_mov(MO_64, xt, xa, 16, 16);
        break;
    case 0b00010001: /* and(C,B) */
        tcg_gen_gvec_and(MO_64, xt, xc, xb, 16, 16);
        break;
    case 0b00011011: /* C?B:A */
        tcg_gen_gvec_bitsel(MO_64, xt, xc, xb, xa, 16, 16);
        break;
    case 0b00011101: /* B?C:A */
        tcg_gen_gvec_bitsel(MO_64, xt, xb, xc, xa, 16, 16);
        break;
    case 0b00100111: /* C?A:B */
        tcg_gen_gvec_bitsel(MO_64, xt, xc, xa, xb, 16, 16);
        break;
    case 0b00110011: /* B */
        tcg_gen_gvec_mov(MO_64, xt, xb, 16, 16);
        break;
    case 0b00110101: /* A?C:B */
        tcg_gen_gvec_bitsel(MO_64, xt, xa, xc, xb, 16, 16);
        break;
    case 0b00111100: /* xor(B,A) */
        tcg_gen_gvec_xor(MO_64, xt, xb, xa, 16, 16);
        break;
    case 0b00111111: /* or(B,A) */
        tcg_gen_gvec_or(MO_64, xt, xb, xa, 16, 16);
        break;
    case 0b01000111: /* B?A:C */
        tcg_gen_gvec_bitsel(MO_64, xt, xb, xa, xc, 16, 16);
        break;
    case 0b01010011: /* A?B:C */
        tcg_gen_gvec_bitsel(MO_64, xt, xa, xb, xc, 16, 16);
        break;
    case 0b01010101: /* C */
        tcg_gen_gvec_mov(MO_64, xt, xc, 16, 16);
        break;
    case 0b01011010: /* xor(C,A) */
        tcg_gen_gvec_xor(MO_64, xt, xc, xa, 16, 16);
        break;
    case 0b01011111: /* or(C,A) */
        tcg_gen_gvec_or(MO_64, xt, xc, xa, 16, 16);
        break;
    case 0b01100110: /* xor(C,B) */
        tcg_gen_gvec_xor(MO_64, xt, xc, xb, 16, 16);
        break;
    case 0b01110111: /* or(C,B) */
        tcg_gen_gvec_or(MO_64, xt, xc, xb, 16, 16);
        break;
    case 0b10001000: /* nor(C,B) */
        tcg_gen_gvec_nor(MO_64, xt, xc, xb, 16, 16);
        break;
    case 0b10011001: /* eqv(C,B) */
        tcg_gen_gvec_eqv(MO_64, xt, xc, xb, 16, 16);
        break;
    case 0b10100000: /* nor(C,A) */
        tcg_gen_gvec_nor(MO_64, xt, xc, xa, 16, 16);
        break;
    case 0b10100101: /* eqv(C,A) */
        tcg_gen_gvec_eqv(MO_64, xt, xc, xa, 16, 16);
        break;
    case 0b10101010: /* not(C) */
        tcg_gen_gvec_not(MO_64, xt, xc, 16, 16);
        break;
    case 0b11000000: /* nor(B,A) */
        tcg_gen_gvec_nor(MO_64, xt, xb, xa, 16, 16);
        break;
    case 0b11000011: /* eqv(B,A) */
        tcg_gen_gvec_eqv(MO_64, xt, xb, xa, 16, 16);
        break;
    case 0b11001100: /* not(B) */
        tcg_gen_gvec_not(MO_64, xt, xb, 16, 16);
        break;
    case 0b11101110: /* nand(C,B) */
        tcg_gen_gvec_nand(MO_64, xt, xc, xb, 16, 16);
        break;
    case 0b11110000: /* not(A) */
        tcg_gen_gvec_not(MO_64, xt, xa, 16, 16);
        break;
    case 0b11111010: /* nand(C,A) */
        tcg_gen_gvec_nand(MO_64, xt, xc, xa, 16, 16);
        break;
    case 0b11111100: /* nand(B,A) */
        tcg_gen_gvec_nand(MO_64, xt, xb, xa, 16, 16);
        break;
    case 0b11111111: /* true */
        set_cpu_vsr(a->xt, tcg_constant_i64(-1), true);
        set_cpu_vsr(a->xt, tcg_constant_i64(-1), false);
        break;
    default:
        /* Fallback to compute all conjunctions/disjunctions */
        tcg_gen_gvec_4i(xt, xa, xb, xc, 16, 16, a->imm, &op);
    }

    return true;
}

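/*
 * XXBLENDV*: for each element, the sign bit of VSR[XC] selects between
 * VSR[XA] (sign clear) and VSR[XB] (sign set); sari replicates the sign
 * bit across the element to build the bitsel mask.
 */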
static void gen_xxblendv_vec(unsigned vece, TCGv_vec t, TCGv_vec a, TCGv_vec b,
                             TCGv_vec c)
{
    TCGv_vec tmp = tcg_temp_new_vec_matching(c);
    tcg_gen_sari_vec(vece, tmp, c, (8 << vece) - 1);
    tcg_gen_bitsel_vec(vece, t, tmp, b, a);
    tcg_temp_free_vec(tmp);
}

static bool do_xxblendv(DisasContext *ctx, arg_8RR_XX4 *a, unsigned vece)
{
    static const TCGOpcode vecop_list[] = {
        INDEX_op_sari_vec, 0
    };
    static const GVecGen4 ops[4] = {
        {
            .fniv = gen_xxblendv_vec,
            .fno = gen_helper_XXBLENDVB,
            .opt_opc = vecop_list,
            .vece = MO_8
        },
        {
            .fniv = gen_xxblendv_vec,
            .fno = gen_helper_XXBLENDVH,
            .opt_opc = vecop_list,
            .vece = MO_16
        },
        {
            .fniv = gen_xxblendv_vec,
            .fno = gen_helper_XXBLENDVW,
            .opt_opc = vecop_list,
            .vece = MO_32
        },
        {
            .fniv = gen_xxblendv_vec,
            .fno = gen_helper_XXBLENDVD,
            .opt_opc = vecop_list,
            .vece = MO_64
        }
    };

    REQUIRE_VSX(ctx);

    tcg_gen_gvec_4(vsr_full_offset(a->xt), vsr_full_offset(a->xa),
                   vsr_full_offset(a->xb), vsr_full_offset(a->xc),
                   16, 16, &ops[vece]);

    return true;
}

TRANS(XXBLENDVB, do_xxblendv, MO_8)
TRANS(XXBLENDVH, do_xxblendv, MO_16)
TRANS(XXBLENDVW, do_xxblendv, MO_32)
TRANS(XXBLENDVD, do_xxblendv, MO_64)

static bool do_helper_XX3(DisasContext *ctx, arg_XX3 *a,
                          void (*helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
{
    TCGv_ptr xt, xa, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    xt = gen_vsr_ptr(a->xt);
    xa = gen_vsr_ptr(a->xa);
    xb = gen_vsr_ptr(a->xb);

    helper(cpu_env, xt, xa, xb);

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xa);
    tcg_temp_free_ptr(xb);

    return true;
}

TRANS(XSCMPEQDP, do_helper_XX3, gen_helper_XSCMPEQDP)
TRANS(XSCMPGEDP, do_helper_XX3, gen_helper_XSCMPGEDP)
TRANS(XSCMPGTDP, do_helper_XX3, gen_helper_XSCMPGTDP)
TRANS(XSMAXCDP, do_helper_XX3, gen_helper_XSMAXCDP)
TRANS(XSMINCDP, do_helper_XX3, gen_helper_XSMINCDP)
TRANS(XSMAXJDP, do_helper_XX3, gen_helper_XSMAXJDP)
TRANS(XSMINJDP, do_helper_XX3, gen_helper_XSMINJDP)

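/*
 * The quad-precision compare/min/max helpers below operate on full
 * AltiVec registers, so their operands are built with gen_avr_ptr
 * rather than gen_vsr_ptr.
 */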
static bool do_helper_X(arg_X *a,
                        void (*helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
{
    TCGv_ptr rt, ra, rb;

    rt = gen_avr_ptr(a->rt);
    ra = gen_avr_ptr(a->ra);
    rb = gen_avr_ptr(a->rb);

    helper(cpu_env, rt, ra, rb);

    tcg_temp_free_ptr(rt);
    tcg_temp_free_ptr(ra);
    tcg_temp_free_ptr(rb);

    return true;
}

static bool do_xscmpqp(DisasContext *ctx, arg_X *a,
                       void (*helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
{
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    return do_helper_X(a, helper);
}

TRANS(XSCMPEQQP, do_xscmpqp, gen_helper_XSCMPEQQP)
TRANS(XSCMPGEQP, do_xscmpqp, gen_helper_XSCMPGEQP)
TRANS(XSCMPGTQP, do_xscmpqp, gen_helper_XSCMPGTQP)
TRANS(XSMAXCQP, do_xscmpqp, gen_helper_XSMAXCQP)
TRANS(XSMINCQP, do_xscmpqp, gen_helper_XSMINCQP)

static bool trans_XVCVSPBF16(DisasContext *ctx, arg_XX2 *a)
{
    TCGv_ptr xt, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    xt = gen_vsr_ptr(a->xt);
    xb = gen_vsr_ptr(a->xb);

    gen_helper_XVCVSPBF16(cpu_env, xt, xb);

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xb);

    return true;
}

static bool trans_XVCVBF16SPN(DisasContext *ctx, arg_XX2 *a)
{
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    /*
     * A bfloat16 value is the high half of an IEEE single, so each word
     * element is widened with a plain 16-bit left shift.
     */
    tcg_gen_gvec_shli(MO_32, vsr_full_offset(a->xt), vsr_full_offset(a->xb),
                      16, 16, 16);

    return true;
}

#undef GEN_XX2FORM
#undef GEN_XX3FORM
#undef GEN_XX2IFORM
#undef GEN_XX3_RC_FORM
#undef GEN_XX3FORM_DM
#undef VSX_LOGICAL