1/*** VSX extension ***/ 2 3static inline void get_cpu_vsr(TCGv_i64 dst, int n, bool high) 4{ 5 tcg_gen_ld_i64(dst, cpu_env, vsr64_offset(n, high)); 6} 7 8static inline void set_cpu_vsr(int n, TCGv_i64 src, bool high) 9{ 10 tcg_gen_st_i64(src, cpu_env, vsr64_offset(n, high)); 11} 12 13static inline TCGv_ptr gen_vsr_ptr(int reg) 14{ 15 TCGv_ptr r = tcg_temp_new_ptr(); 16 tcg_gen_addi_ptr(r, cpu_env, vsr_full_offset(reg)); 17 return r; 18} 19 20#define VSX_LOAD_SCALAR(name, operation) \ 21static void gen_##name(DisasContext *ctx) \ 22{ \ 23 TCGv EA; \ 24 TCGv_i64 t0; \ 25 if (unlikely(!ctx->vsx_enabled)) { \ 26 gen_exception(ctx, POWERPC_EXCP_VSXU); \ 27 return; \ 28 } \ 29 t0 = tcg_temp_new_i64(); \ 30 gen_set_access_type(ctx, ACCESS_INT); \ 31 EA = tcg_temp_new(); \ 32 gen_addr_reg_index(ctx, EA); \ 33 gen_qemu_##operation(ctx, t0, EA); \ 34 set_cpu_vsr(xT(ctx->opcode), t0, true); \ 35 /* NOTE: cpu_vsrl is undefined */ \ 36 tcg_temp_free(EA); \ 37 tcg_temp_free_i64(t0); \ 38} 39 40VSX_LOAD_SCALAR(lxsdx, ld64_i64) 41VSX_LOAD_SCALAR(lxsiwax, ld32s_i64) 42VSX_LOAD_SCALAR(lxsibzx, ld8u_i64) 43VSX_LOAD_SCALAR(lxsihzx, ld16u_i64) 44VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64) 45VSX_LOAD_SCALAR(lxsspx, ld32fs) 46 47static void gen_lxvd2x(DisasContext *ctx) 48{ 49 TCGv EA; 50 TCGv_i64 t0; 51 if (unlikely(!ctx->vsx_enabled)) { 52 gen_exception(ctx, POWERPC_EXCP_VSXU); 53 return; 54 } 55 t0 = tcg_temp_new_i64(); 56 gen_set_access_type(ctx, ACCESS_INT); 57 EA = tcg_temp_new(); 58 gen_addr_reg_index(ctx, EA); 59 gen_qemu_ld64_i64(ctx, t0, EA); 60 set_cpu_vsr(xT(ctx->opcode), t0, true); 61 tcg_gen_addi_tl(EA, EA, 8); 62 gen_qemu_ld64_i64(ctx, t0, EA); 63 set_cpu_vsr(xT(ctx->opcode), t0, false); 64 tcg_temp_free(EA); 65 tcg_temp_free_i64(t0); 66} 67 68static void gen_lxvw4x(DisasContext *ctx) 69{ 70 TCGv EA; 71 TCGv_i64 xth; 72 TCGv_i64 xtl; 73 if (unlikely(!ctx->vsx_enabled)) { 74 gen_exception(ctx, POWERPC_EXCP_VSXU); 75 return; 76 } 77 xth = tcg_temp_new_i64(); 78 xtl = tcg_temp_new_i64(); 79 80 gen_set_access_type(ctx, ACCESS_INT); 81 EA = tcg_temp_new(); 82 83 gen_addr_reg_index(ctx, EA); 84 if (ctx->le_mode) { 85 TCGv_i64 t0 = tcg_temp_new_i64(); 86 TCGv_i64 t1 = tcg_temp_new_i64(); 87 88 tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ); 89 tcg_gen_shri_i64(t1, t0, 32); 90 tcg_gen_deposit_i64(xth, t1, t0, 32, 32); 91 tcg_gen_addi_tl(EA, EA, 8); 92 tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ); 93 tcg_gen_shri_i64(t1, t0, 32); 94 tcg_gen_deposit_i64(xtl, t1, t0, 32, 32); 95 tcg_temp_free_i64(t0); 96 tcg_temp_free_i64(t1); 97 } else { 98 tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ); 99 tcg_gen_addi_tl(EA, EA, 8); 100 tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ); 101 } 102 set_cpu_vsr(xT(ctx->opcode), xth, true); 103 set_cpu_vsr(xT(ctx->opcode), xtl, false); 104 tcg_temp_free(EA); 105 tcg_temp_free_i64(xth); 106 tcg_temp_free_i64(xtl); 107} 108 109static void gen_lxvwsx(DisasContext *ctx) 110{ 111 TCGv EA; 112 TCGv_i32 data; 113 114 if (xT(ctx->opcode) < 32) { 115 if (unlikely(!ctx->vsx_enabled)) { 116 gen_exception(ctx, POWERPC_EXCP_VSXU); 117 return; 118 } 119 } else { 120 if (unlikely(!ctx->altivec_enabled)) { 121 gen_exception(ctx, POWERPC_EXCP_VPU); 122 return; 123 } 124 } 125 126 gen_set_access_type(ctx, ACCESS_INT); 127 EA = tcg_temp_new(); 128 129 gen_addr_reg_index(ctx, EA); 130 131 data = tcg_temp_new_i32(); 132 tcg_gen_qemu_ld_i32(data, EA, ctx->mem_idx, DEF_MEMOP(MO_UL)); 133 tcg_gen_gvec_dup_i32(MO_UL, vsr_full_offset(xT(ctx->opcode)), 16, 16, data); 134 135 tcg_temp_free(EA); 136 tcg_temp_free_i32(data); 137} 138 139static void gen_lxvdsx(DisasContext *ctx) 140{ 141 TCGv EA; 142 TCGv_i64 data; 143 144 if (unlikely(!ctx->vsx_enabled)) { 145 gen_exception(ctx, POWERPC_EXCP_VSXU); 146 return; 147 } 148 149 gen_set_access_type(ctx, ACCESS_INT); 150 EA = tcg_temp_new(); 151 152 gen_addr_reg_index(ctx, EA); 153 154 data = tcg_temp_new_i64(); 155 tcg_gen_qemu_ld_i64(data, EA, ctx->mem_idx, DEF_MEMOP(MO_Q)); 156 tcg_gen_gvec_dup_i64(MO_Q, vsr_full_offset(xT(ctx->opcode)), 16, 16, data); 157 158 tcg_temp_free(EA); 159 tcg_temp_free_i64(data); 160} 161 162static void gen_bswap16x8(TCGv_i64 outh, TCGv_i64 outl, 163 TCGv_i64 inh, TCGv_i64 inl) 164{ 165 TCGv_i64 mask = tcg_const_i64(0x00FF00FF00FF00FF); 166 TCGv_i64 t0 = tcg_temp_new_i64(); 167 TCGv_i64 t1 = tcg_temp_new_i64(); 168 169 /* outh = ((inh & mask) << 8) | ((inh >> 8) & mask) */ 170 tcg_gen_and_i64(t0, inh, mask); 171 tcg_gen_shli_i64(t0, t0, 8); 172 tcg_gen_shri_i64(t1, inh, 8); 173 tcg_gen_and_i64(t1, t1, mask); 174 tcg_gen_or_i64(outh, t0, t1); 175 176 /* outl = ((inl & mask) << 8) | ((inl >> 8) & mask) */ 177 tcg_gen_and_i64(t0, inl, mask); 178 tcg_gen_shli_i64(t0, t0, 8); 179 tcg_gen_shri_i64(t1, inl, 8); 180 tcg_gen_and_i64(t1, t1, mask); 181 tcg_gen_or_i64(outl, t0, t1); 182 183 tcg_temp_free_i64(t0); 184 tcg_temp_free_i64(t1); 185 tcg_temp_free_i64(mask); 186} 187 188static void gen_bswap32x4(TCGv_i64 outh, TCGv_i64 outl, 189 TCGv_i64 inh, TCGv_i64 inl) 190{ 191 TCGv_i64 hi = tcg_temp_new_i64(); 192 TCGv_i64 lo = tcg_temp_new_i64(); 193 194 tcg_gen_bswap64_i64(hi, inh); 195 tcg_gen_bswap64_i64(lo, inl); 196 tcg_gen_shri_i64(outh, hi, 32); 197 tcg_gen_deposit_i64(outh, outh, hi, 32, 32); 198 tcg_gen_shri_i64(outl, lo, 32); 199 tcg_gen_deposit_i64(outl, outl, lo, 32, 32); 200 201 tcg_temp_free_i64(hi); 202 tcg_temp_free_i64(lo); 203} 204static void gen_lxvh8x(DisasContext *ctx) 205{ 206 TCGv EA; 207 TCGv_i64 xth; 208 TCGv_i64 xtl; 209 210 if (unlikely(!ctx->vsx_enabled)) { 211 gen_exception(ctx, POWERPC_EXCP_VSXU); 212 return; 213 } 214 xth = tcg_temp_new_i64(); 215 xtl = tcg_temp_new_i64(); 216 gen_set_access_type(ctx, ACCESS_INT); 217 218 EA = tcg_temp_new(); 219 gen_addr_reg_index(ctx, EA); 220 tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ); 221 tcg_gen_addi_tl(EA, EA, 8); 222 tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ); 223 if (ctx->le_mode) { 224 gen_bswap16x8(xth, xtl, xth, xtl); 225 } 226 set_cpu_vsr(xT(ctx->opcode), xth, true); 227 set_cpu_vsr(xT(ctx->opcode), xtl, false); 228 tcg_temp_free(EA); 229 tcg_temp_free_i64(xth); 230 tcg_temp_free_i64(xtl); 231} 232 233static void gen_lxvb16x(DisasContext *ctx) 234{ 235 TCGv EA; 236 TCGv_i64 xth; 237 TCGv_i64 xtl; 238 239 if (unlikely(!ctx->vsx_enabled)) { 240 gen_exception(ctx, POWERPC_EXCP_VSXU); 241 return; 242 } 243 xth = tcg_temp_new_i64(); 244 xtl = tcg_temp_new_i64(); 245 gen_set_access_type(ctx, ACCESS_INT); 246 EA = tcg_temp_new(); 247 gen_addr_reg_index(ctx, EA); 248 tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ); 249 tcg_gen_addi_tl(EA, EA, 8); 250 tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ); 251 set_cpu_vsr(xT(ctx->opcode), xth, true); 252 set_cpu_vsr(xT(ctx->opcode), xtl, false); 253 tcg_temp_free(EA); 254 tcg_temp_free_i64(xth); 255 tcg_temp_free_i64(xtl); 256} 257 258#ifdef TARGET_PPC64 259#define VSX_VECTOR_LOAD_STORE_LENGTH(name) \ 260static void gen_##name(DisasContext *ctx) \ 261{ \ 262 TCGv EA; \ 263 TCGv_ptr xt; \ 264 \ 265 if (xT(ctx->opcode) < 32) { \ 266 if (unlikely(!ctx->vsx_enabled)) { \ 267 gen_exception(ctx, POWERPC_EXCP_VSXU); \ 268 return; \ 269 } \ 270 } else { \ 271 if (unlikely(!ctx->altivec_enabled)) { \ 272 gen_exception(ctx, POWERPC_EXCP_VPU); \ 273 return; \ 274 } \ 275 } \ 276 EA = tcg_temp_new(); \ 277 xt = gen_vsr_ptr(xT(ctx->opcode)); \ 278 gen_set_access_type(ctx, ACCESS_INT); \ 279 gen_addr_register(ctx, EA); \ 280 gen_helper_##name(cpu_env, EA, xt, cpu_gpr[rB(ctx->opcode)]); \ 281 tcg_temp_free(EA); \ 282 tcg_temp_free_ptr(xt); \ 283} 284 285VSX_VECTOR_LOAD_STORE_LENGTH(lxvl) 286VSX_VECTOR_LOAD_STORE_LENGTH(lxvll) 287VSX_VECTOR_LOAD_STORE_LENGTH(stxvl) 288VSX_VECTOR_LOAD_STORE_LENGTH(stxvll) 289#endif 290 291#define VSX_LOAD_SCALAR_DS(name, operation) \ 292static void gen_##name(DisasContext *ctx) \ 293{ \ 294 TCGv EA; \ 295 TCGv_i64 xth; \ 296 \ 297 if (unlikely(!ctx->altivec_enabled)) { \ 298 gen_exception(ctx, POWERPC_EXCP_VPU); \ 299 return; \ 300 } \ 301 xth = tcg_temp_new_i64(); \ 302 gen_set_access_type(ctx, ACCESS_INT); \ 303 EA = tcg_temp_new(); \ 304 gen_addr_imm_index(ctx, EA, 0x03); \ 305 gen_qemu_##operation(ctx, xth, EA); \ 306 set_cpu_vsr(rD(ctx->opcode) + 32, xth, true); \ 307 /* NOTE: cpu_vsrl is undefined */ \ 308 tcg_temp_free(EA); \ 309 tcg_temp_free_i64(xth); \ 310} 311 312VSX_LOAD_SCALAR_DS(lxsd, ld64_i64) 313VSX_LOAD_SCALAR_DS(lxssp, ld32fs) 314 315#define VSX_STORE_SCALAR(name, operation) \ 316static void gen_##name(DisasContext *ctx) \ 317{ \ 318 TCGv EA; \ 319 TCGv_i64 t0; \ 320 if (unlikely(!ctx->vsx_enabled)) { \ 321 gen_exception(ctx, POWERPC_EXCP_VSXU); \ 322 return; \ 323 } \ 324 t0 = tcg_temp_new_i64(); \ 325 gen_set_access_type(ctx, ACCESS_INT); \ 326 EA = tcg_temp_new(); \ 327 gen_addr_reg_index(ctx, EA); \ 328 get_cpu_vsr(t0, xS(ctx->opcode), true); \ 329 gen_qemu_##operation(ctx, t0, EA); \ 330 tcg_temp_free(EA); \ 331 tcg_temp_free_i64(t0); \ 332} 333 334VSX_STORE_SCALAR(stxsdx, st64_i64) 335 336VSX_STORE_SCALAR(stxsibx, st8_i64) 337VSX_STORE_SCALAR(stxsihx, st16_i64) 338VSX_STORE_SCALAR(stxsiwx, st32_i64) 339VSX_STORE_SCALAR(stxsspx, st32fs) 340 341static void gen_stxvd2x(DisasContext *ctx) 342{ 343 TCGv EA; 344 TCGv_i64 t0; 345 if (unlikely(!ctx->vsx_enabled)) { 346 gen_exception(ctx, POWERPC_EXCP_VSXU); 347 return; 348 } 349 t0 = tcg_temp_new_i64(); 350 gen_set_access_type(ctx, ACCESS_INT); 351 EA = tcg_temp_new(); 352 gen_addr_reg_index(ctx, EA); 353 get_cpu_vsr(t0, xS(ctx->opcode), true); 354 gen_qemu_st64_i64(ctx, t0, EA); 355 tcg_gen_addi_tl(EA, EA, 8); 356 get_cpu_vsr(t0, xS(ctx->opcode), false); 357 gen_qemu_st64_i64(ctx, t0, EA); 358 tcg_temp_free(EA); 359 tcg_temp_free_i64(t0); 360} 361 362static void gen_stxvw4x(DisasContext *ctx) 363{ 364 TCGv EA; 365 TCGv_i64 xsh; 366 TCGv_i64 xsl; 367 368 if (unlikely(!ctx->vsx_enabled)) { 369 gen_exception(ctx, POWERPC_EXCP_VSXU); 370 return; 371 } 372 xsh = tcg_temp_new_i64(); 373 xsl = tcg_temp_new_i64(); 374 get_cpu_vsr(xsh, xS(ctx->opcode), true); 375 get_cpu_vsr(xsl, xS(ctx->opcode), false); 376 gen_set_access_type(ctx, ACCESS_INT); 377 EA = tcg_temp_new(); 378 gen_addr_reg_index(ctx, EA); 379 if (ctx->le_mode) { 380 TCGv_i64 t0 = tcg_temp_new_i64(); 381 TCGv_i64 t1 = tcg_temp_new_i64(); 382 383 tcg_gen_shri_i64(t0, xsh, 32); 384 tcg_gen_deposit_i64(t1, t0, xsh, 32, 32); 385 tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ); 386 tcg_gen_addi_tl(EA, EA, 8); 387 tcg_gen_shri_i64(t0, xsl, 32); 388 tcg_gen_deposit_i64(t1, t0, xsl, 32, 32); 389 tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ); 390 tcg_temp_free_i64(t0); 391 tcg_temp_free_i64(t1); 392 } else { 393 tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ); 394 tcg_gen_addi_tl(EA, EA, 8); 395 tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ); 396 } 397 tcg_temp_free(EA); 398 tcg_temp_free_i64(xsh); 399 tcg_temp_free_i64(xsl); 400} 401 402static void gen_stxvh8x(DisasContext *ctx) 403{ 404 TCGv EA; 405 TCGv_i64 xsh; 406 TCGv_i64 xsl; 407 408 if (unlikely(!ctx->vsx_enabled)) { 409 gen_exception(ctx, POWERPC_EXCP_VSXU); 410 return; 411 } 412 xsh = tcg_temp_new_i64(); 413 xsl = tcg_temp_new_i64(); 414 get_cpu_vsr(xsh, xS(ctx->opcode), true); 415 get_cpu_vsr(xsl, xS(ctx->opcode), false); 416 gen_set_access_type(ctx, ACCESS_INT); 417 EA = tcg_temp_new(); 418 gen_addr_reg_index(ctx, EA); 419 if (ctx->le_mode) { 420 TCGv_i64 outh = tcg_temp_new_i64(); 421 TCGv_i64 outl = tcg_temp_new_i64(); 422 423 gen_bswap16x8(outh, outl, xsh, xsl); 424 tcg_gen_qemu_st_i64(outh, EA, ctx->mem_idx, MO_BEQ); 425 tcg_gen_addi_tl(EA, EA, 8); 426 tcg_gen_qemu_st_i64(outl, EA, ctx->mem_idx, MO_BEQ); 427 tcg_temp_free_i64(outh); 428 tcg_temp_free_i64(outl); 429 } else { 430 tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ); 431 tcg_gen_addi_tl(EA, EA, 8); 432 tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ); 433 } 434 tcg_temp_free(EA); 435 tcg_temp_free_i64(xsh); 436 tcg_temp_free_i64(xsl); 437} 438 439static void gen_stxvb16x(DisasContext *ctx) 440{ 441 TCGv EA; 442 TCGv_i64 xsh; 443 TCGv_i64 xsl; 444 445 if (unlikely(!ctx->vsx_enabled)) { 446 gen_exception(ctx, POWERPC_EXCP_VSXU); 447 return; 448 } 449 xsh = tcg_temp_new_i64(); 450 xsl = tcg_temp_new_i64(); 451 get_cpu_vsr(xsh, xS(ctx->opcode), true); 452 get_cpu_vsr(xsl, xS(ctx->opcode), false); 453 gen_set_access_type(ctx, ACCESS_INT); 454 EA = tcg_temp_new(); 455 gen_addr_reg_index(ctx, EA); 456 tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ); 457 tcg_gen_addi_tl(EA, EA, 8); 458 tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ); 459 tcg_temp_free(EA); 460 tcg_temp_free_i64(xsh); 461 tcg_temp_free_i64(xsl); 462} 463 464#define VSX_STORE_SCALAR_DS(name, operation) \ 465static void gen_##name(DisasContext *ctx) \ 466{ \ 467 TCGv EA; \ 468 TCGv_i64 xth; \ 469 \ 470 if (unlikely(!ctx->altivec_enabled)) { \ 471 gen_exception(ctx, POWERPC_EXCP_VPU); \ 472 return; \ 473 } \ 474 xth = tcg_temp_new_i64(); \ 475 get_cpu_vsr(xth, rD(ctx->opcode) + 32, true); \ 476 gen_set_access_type(ctx, ACCESS_INT); \ 477 EA = tcg_temp_new(); \ 478 gen_addr_imm_index(ctx, EA, 0x03); \ 479 gen_qemu_##operation(ctx, xth, EA); \ 480 /* NOTE: cpu_vsrl is undefined */ \ 481 tcg_temp_free(EA); \ 482 tcg_temp_free_i64(xth); \ 483} 484 485VSX_STORE_SCALAR_DS(stxsd, st64_i64) 486VSX_STORE_SCALAR_DS(stxssp, st32fs) 487 488static void gen_mfvsrwz(DisasContext *ctx) 489{ 490 if (xS(ctx->opcode) < 32) { 491 if (unlikely(!ctx->fpu_enabled)) { 492 gen_exception(ctx, POWERPC_EXCP_FPU); 493 return; 494 } 495 } else { 496 if (unlikely(!ctx->altivec_enabled)) { 497 gen_exception(ctx, POWERPC_EXCP_VPU); 498 return; 499 } 500 } 501 TCGv_i64 tmp = tcg_temp_new_i64(); 502 TCGv_i64 xsh = tcg_temp_new_i64(); 503 get_cpu_vsr(xsh, xS(ctx->opcode), true); 504 tcg_gen_ext32u_i64(tmp, xsh); 505 tcg_gen_trunc_i64_tl(cpu_gpr[rA(ctx->opcode)], tmp); 506 tcg_temp_free_i64(tmp); 507 tcg_temp_free_i64(xsh); 508} 509 510static void gen_mtvsrwa(DisasContext *ctx) 511{ 512 if (xS(ctx->opcode) < 32) { 513 if (unlikely(!ctx->fpu_enabled)) { 514 gen_exception(ctx, POWERPC_EXCP_FPU); 515 return; 516 } 517 } else { 518 if (unlikely(!ctx->altivec_enabled)) { 519 gen_exception(ctx, POWERPC_EXCP_VPU); 520 return; 521 } 522 } 523 TCGv_i64 tmp = tcg_temp_new_i64(); 524 TCGv_i64 xsh = tcg_temp_new_i64(); 525 tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]); 526 tcg_gen_ext32s_i64(xsh, tmp); 527 set_cpu_vsr(xT(ctx->opcode), xsh, true); 528 tcg_temp_free_i64(tmp); 529 tcg_temp_free_i64(xsh); 530} 531 532static void gen_mtvsrwz(DisasContext *ctx) 533{ 534 if (xS(ctx->opcode) < 32) { 535 if (unlikely(!ctx->fpu_enabled)) { 536 gen_exception(ctx, POWERPC_EXCP_FPU); 537 return; 538 } 539 } else { 540 if (unlikely(!ctx->altivec_enabled)) { 541 gen_exception(ctx, POWERPC_EXCP_VPU); 542 return; 543 } 544 } 545 TCGv_i64 tmp = tcg_temp_new_i64(); 546 TCGv_i64 xsh = tcg_temp_new_i64(); 547 tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]); 548 tcg_gen_ext32u_i64(xsh, tmp); 549 set_cpu_vsr(xT(ctx->opcode), xsh, true); 550 tcg_temp_free_i64(tmp); 551 tcg_temp_free_i64(xsh); 552} 553 554#if defined(TARGET_PPC64) 555static void gen_mfvsrd(DisasContext *ctx) 556{ 557 TCGv_i64 t0; 558 if (xS(ctx->opcode) < 32) { 559 if (unlikely(!ctx->fpu_enabled)) { 560 gen_exception(ctx, POWERPC_EXCP_FPU); 561 return; 562 } 563 } else { 564 if (unlikely(!ctx->altivec_enabled)) { 565 gen_exception(ctx, POWERPC_EXCP_VPU); 566 return; 567 } 568 } 569 t0 = tcg_temp_new_i64(); 570 get_cpu_vsr(t0, xS(ctx->opcode), true); 571 tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0); 572 tcg_temp_free_i64(t0); 573} 574 575static void gen_mtvsrd(DisasContext *ctx) 576{ 577 TCGv_i64 t0; 578 if (xS(ctx->opcode) < 32) { 579 if (unlikely(!ctx->fpu_enabled)) { 580 gen_exception(ctx, POWERPC_EXCP_FPU); 581 return; 582 } 583 } else { 584 if (unlikely(!ctx->altivec_enabled)) { 585 gen_exception(ctx, POWERPC_EXCP_VPU); 586 return; 587 } 588 } 589 t0 = tcg_temp_new_i64(); 590 tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]); 591 set_cpu_vsr(xT(ctx->opcode), t0, true); 592 tcg_temp_free_i64(t0); 593} 594 595static void gen_mfvsrld(DisasContext *ctx) 596{ 597 TCGv_i64 t0; 598 if (xS(ctx->opcode) < 32) { 599 if (unlikely(!ctx->vsx_enabled)) { 600 gen_exception(ctx, POWERPC_EXCP_VSXU); 601 return; 602 } 603 } else { 604 if (unlikely(!ctx->altivec_enabled)) { 605 gen_exception(ctx, POWERPC_EXCP_VPU); 606 return; 607 } 608 } 609 t0 = tcg_temp_new_i64(); 610 get_cpu_vsr(t0, xS(ctx->opcode), false); 611 tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0); 612 tcg_temp_free_i64(t0); 613} 614 615static void gen_mtvsrdd(DisasContext *ctx) 616{ 617 TCGv_i64 t0; 618 if (xT(ctx->opcode) < 32) { 619 if (unlikely(!ctx->vsx_enabled)) { 620 gen_exception(ctx, POWERPC_EXCP_VSXU); 621 return; 622 } 623 } else { 624 if (unlikely(!ctx->altivec_enabled)) { 625 gen_exception(ctx, POWERPC_EXCP_VPU); 626 return; 627 } 628 } 629 630 t0 = tcg_temp_new_i64(); 631 if (!rA(ctx->opcode)) { 632 tcg_gen_movi_i64(t0, 0); 633 } else { 634 tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]); 635 } 636 set_cpu_vsr(xT(ctx->opcode), t0, true); 637 638 tcg_gen_mov_i64(t0, cpu_gpr[rB(ctx->opcode)]); 639 set_cpu_vsr(xT(ctx->opcode), t0, false); 640 tcg_temp_free_i64(t0); 641} 642 643static void gen_mtvsrws(DisasContext *ctx) 644{ 645 TCGv_i64 t0; 646 if (xT(ctx->opcode) < 32) { 647 if (unlikely(!ctx->vsx_enabled)) { 648 gen_exception(ctx, POWERPC_EXCP_VSXU); 649 return; 650 } 651 } else { 652 if (unlikely(!ctx->altivec_enabled)) { 653 gen_exception(ctx, POWERPC_EXCP_VPU); 654 return; 655 } 656 } 657 658 t0 = tcg_temp_new_i64(); 659 tcg_gen_deposit_i64(t0, cpu_gpr[rA(ctx->opcode)], 660 cpu_gpr[rA(ctx->opcode)], 32, 32); 661 set_cpu_vsr(xT(ctx->opcode), t0, false); 662 set_cpu_vsr(xT(ctx->opcode), t0, true); 663 tcg_temp_free_i64(t0); 664} 665 666#endif 667 668static void gen_xxpermdi(DisasContext *ctx) 669{ 670 TCGv_i64 xh, xl; 671 672 if (unlikely(!ctx->vsx_enabled)) { 673 gen_exception(ctx, POWERPC_EXCP_VSXU); 674 return; 675 } 676 677 xh = tcg_temp_new_i64(); 678 xl = tcg_temp_new_i64(); 679 680 if (unlikely((xT(ctx->opcode) == xA(ctx->opcode)) || 681 (xT(ctx->opcode) == xB(ctx->opcode)))) { 682 get_cpu_vsr(xh, xA(ctx->opcode), (DM(ctx->opcode) & 2) == 0); 683 get_cpu_vsr(xl, xB(ctx->opcode), (DM(ctx->opcode) & 1) == 0); 684 685 set_cpu_vsr(xT(ctx->opcode), xh, true); 686 set_cpu_vsr(xT(ctx->opcode), xl, false); 687 } else { 688 if ((DM(ctx->opcode) & 2) == 0) { 689 get_cpu_vsr(xh, xA(ctx->opcode), true); 690 set_cpu_vsr(xT(ctx->opcode), xh, true); 691 } else { 692 get_cpu_vsr(xh, xA(ctx->opcode), false); 693 set_cpu_vsr(xT(ctx->opcode), xh, true); 694 } 695 if ((DM(ctx->opcode) & 1) == 0) { 696 get_cpu_vsr(xl, xB(ctx->opcode), true); 697 set_cpu_vsr(xT(ctx->opcode), xl, false); 698 } else { 699 get_cpu_vsr(xl, xB(ctx->opcode), false); 700 set_cpu_vsr(xT(ctx->opcode), xl, false); 701 } 702 } 703 tcg_temp_free_i64(xh); 704 tcg_temp_free_i64(xl); 705} 706 707#define OP_ABS 1 708#define OP_NABS 2 709#define OP_NEG 3 710#define OP_CPSGN 4 711#define SGN_MASK_DP 0x8000000000000000ull 712#define SGN_MASK_SP 0x8000000080000000ull 713 714#define VSX_SCALAR_MOVE(name, op, sgn_mask) \ 715static void glue(gen_, name)(DisasContext *ctx) \ 716 { \ 717 TCGv_i64 xb, sgm; \ 718 if (unlikely(!ctx->vsx_enabled)) { \ 719 gen_exception(ctx, POWERPC_EXCP_VSXU); \ 720 return; \ 721 } \ 722 xb = tcg_temp_new_i64(); \ 723 sgm = tcg_temp_new_i64(); \ 724 get_cpu_vsr(xb, xB(ctx->opcode), true); \ 725 tcg_gen_movi_i64(sgm, sgn_mask); \ 726 switch (op) { \ 727 case OP_ABS: { \ 728 tcg_gen_andc_i64(xb, xb, sgm); \ 729 break; \ 730 } \ 731 case OP_NABS: { \ 732 tcg_gen_or_i64(xb, xb, sgm); \ 733 break; \ 734 } \ 735 case OP_NEG: { \ 736 tcg_gen_xor_i64(xb, xb, sgm); \ 737 break; \ 738 } \ 739 case OP_CPSGN: { \ 740 TCGv_i64 xa = tcg_temp_new_i64(); \ 741 get_cpu_vsr(xa, xA(ctx->opcode), true); \ 742 tcg_gen_and_i64(xa, xa, sgm); \ 743 tcg_gen_andc_i64(xb, xb, sgm); \ 744 tcg_gen_or_i64(xb, xb, xa); \ 745 tcg_temp_free_i64(xa); \ 746 break; \ 747 } \ 748 } \ 749 set_cpu_vsr(xT(ctx->opcode), xb, true); \ 750 tcg_temp_free_i64(xb); \ 751 tcg_temp_free_i64(sgm); \ 752 } 753 754VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP) 755VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP) 756VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP) 757VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP) 758 759#define VSX_SCALAR_MOVE_QP(name, op, sgn_mask) \ 760static void glue(gen_, name)(DisasContext *ctx) \ 761{ \ 762 int xa; \ 763 int xt = rD(ctx->opcode) + 32; \ 764 int xb = rB(ctx->opcode) + 32; \ 765 TCGv_i64 xah, xbh, xbl, sgm, tmp; \ 766 \ 767 if (unlikely(!ctx->vsx_enabled)) { \ 768 gen_exception(ctx, POWERPC_EXCP_VSXU); \ 769 return; \ 770 } \ 771 xbh = tcg_temp_new_i64(); \ 772 xbl = tcg_temp_new_i64(); \ 773 sgm = tcg_temp_new_i64(); \ 774 tmp = tcg_temp_new_i64(); \ 775 get_cpu_vsr(xbh, xb, true); \ 776 get_cpu_vsr(xbl, xb, false); \ 777 tcg_gen_movi_i64(sgm, sgn_mask); \ 778 switch (op) { \ 779 case OP_ABS: \ 780 tcg_gen_andc_i64(xbh, xbh, sgm); \ 781 break; \ 782 case OP_NABS: \ 783 tcg_gen_or_i64(xbh, xbh, sgm); \ 784 break; \ 785 case OP_NEG: \ 786 tcg_gen_xor_i64(xbh, xbh, sgm); \ 787 break; \ 788 case OP_CPSGN: \ 789 xah = tcg_temp_new_i64(); \ 790 xa = rA(ctx->opcode) + 32; \ 791 get_cpu_vsr(tmp, xa, true); \ 792 tcg_gen_and_i64(xah, tmp, sgm); \ 793 tcg_gen_andc_i64(xbh, xbh, sgm); \ 794 tcg_gen_or_i64(xbh, xbh, xah); \ 795 tcg_temp_free_i64(xah); \ 796 break; \ 797 } \ 798 set_cpu_vsr(xt, xbh, true); \ 799 set_cpu_vsr(xt, xbl, false); \ 800 tcg_temp_free_i64(xbl); \ 801 tcg_temp_free_i64(xbh); \ 802 tcg_temp_free_i64(sgm); \ 803 tcg_temp_free_i64(tmp); \ 804} 805 806VSX_SCALAR_MOVE_QP(xsabsqp, OP_ABS, SGN_MASK_DP) 807VSX_SCALAR_MOVE_QP(xsnabsqp, OP_NABS, SGN_MASK_DP) 808VSX_SCALAR_MOVE_QP(xsnegqp, OP_NEG, SGN_MASK_DP) 809VSX_SCALAR_MOVE_QP(xscpsgnqp, OP_CPSGN, SGN_MASK_DP) 810 811#define VSX_VECTOR_MOVE(name, op, sgn_mask) \ 812static void glue(gen_, name)(DisasContext *ctx) \ 813 { \ 814 TCGv_i64 xbh, xbl, sgm; \ 815 if (unlikely(!ctx->vsx_enabled)) { \ 816 gen_exception(ctx, POWERPC_EXCP_VSXU); \ 817 return; \ 818 } \ 819 xbh = tcg_temp_new_i64(); \ 820 xbl = tcg_temp_new_i64(); \ 821 sgm = tcg_temp_new_i64(); \ 822 get_cpu_vsr(xbh, xB(ctx->opcode), true); \ 823 get_cpu_vsr(xbl, xB(ctx->opcode), false); \ 824 tcg_gen_movi_i64(sgm, sgn_mask); \ 825 switch (op) { \ 826 case OP_ABS: { \ 827 tcg_gen_andc_i64(xbh, xbh, sgm); \ 828 tcg_gen_andc_i64(xbl, xbl, sgm); \ 829 break; \ 830 } \ 831 case OP_NABS: { \ 832 tcg_gen_or_i64(xbh, xbh, sgm); \ 833 tcg_gen_or_i64(xbl, xbl, sgm); \ 834 break; \ 835 } \ 836 case OP_NEG: { \ 837 tcg_gen_xor_i64(xbh, xbh, sgm); \ 838 tcg_gen_xor_i64(xbl, xbl, sgm); \ 839 break; \ 840 } \ 841 case OP_CPSGN: { \ 842 TCGv_i64 xah = tcg_temp_new_i64(); \ 843 TCGv_i64 xal = tcg_temp_new_i64(); \ 844 get_cpu_vsr(xah, xA(ctx->opcode), true); \ 845 get_cpu_vsr(xal, xA(ctx->opcode), false); \ 846 tcg_gen_and_i64(xah, xah, sgm); \ 847 tcg_gen_and_i64(xal, xal, sgm); \ 848 tcg_gen_andc_i64(xbh, xbh, sgm); \ 849 tcg_gen_andc_i64(xbl, xbl, sgm); \ 850 tcg_gen_or_i64(xbh, xbh, xah); \ 851 tcg_gen_or_i64(xbl, xbl, xal); \ 852 tcg_temp_free_i64(xah); \ 853 tcg_temp_free_i64(xal); \ 854 break; \ 855 } \ 856 } \ 857 set_cpu_vsr(xT(ctx->opcode), xbh, true); \ 858 set_cpu_vsr(xT(ctx->opcode), xbl, false); \ 859 tcg_temp_free_i64(xbh); \ 860 tcg_temp_free_i64(xbl); \ 861 tcg_temp_free_i64(sgm); \ 862 } 863 864VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP) 865VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP) 866VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP) 867VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP) 868VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP) 869VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP) 870VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP) 871VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP) 872 873#define VSX_CMP(name, op1, op2, inval, type) \ 874static void gen_##name(DisasContext *ctx) \ 875{ \ 876 TCGv_i32 ignored; \ 877 TCGv_ptr xt, xa, xb; \ 878 if (unlikely(!ctx->vsx_enabled)) { \ 879 gen_exception(ctx, POWERPC_EXCP_VSXU); \ 880 return; \ 881 } \ 882 xt = gen_vsr_ptr(xT(ctx->opcode)); \ 883 xa = gen_vsr_ptr(xA(ctx->opcode)); \ 884 xb = gen_vsr_ptr(xB(ctx->opcode)); \ 885 if ((ctx->opcode >> (31 - 21)) & 1) { \ 886 gen_helper_##name(cpu_crf[6], cpu_env, xt, xa, xb); \ 887 } else { \ 888 ignored = tcg_temp_new_i32(); \ 889 gen_helper_##name(ignored, cpu_env, xt, xa, xb); \ 890 tcg_temp_free_i32(ignored); \ 891 } \ 892 gen_helper_float_check_status(cpu_env); \ 893 tcg_temp_free_ptr(xt); \ 894 tcg_temp_free_ptr(xa); \ 895 tcg_temp_free_ptr(xb); \ 896} 897 898VSX_CMP(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX) 899VSX_CMP(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX) 900VSX_CMP(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX) 901VSX_CMP(xvcmpnedp, 0x0C, 0x0F, 0, PPC2_ISA300) 902VSX_CMP(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX) 903VSX_CMP(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX) 904VSX_CMP(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX) 905VSX_CMP(xvcmpnesp, 0x0C, 0x0B, 0, PPC2_VSX) 906 907static void gen_xscvqpdp(DisasContext *ctx) 908{ 909 TCGv_i32 opc; 910 TCGv_ptr xt, xb; 911 if (unlikely(!ctx->vsx_enabled)) { 912 gen_exception(ctx, POWERPC_EXCP_VSXU); 913 return; 914 } 915 opc = tcg_const_i32(ctx->opcode); 916 xt = gen_vsr_ptr(xT(ctx->opcode)); 917 xb = gen_vsr_ptr(xB(ctx->opcode)); 918 gen_helper_xscvqpdp(cpu_env, opc, xt, xb); 919 tcg_temp_free_i32(opc); 920 tcg_temp_free_ptr(xt); 921 tcg_temp_free_ptr(xb); 922} 923 924#define GEN_VSX_HELPER_2(name, op1, op2, inval, type) \ 925static void gen_##name(DisasContext *ctx) \ 926{ \ 927 TCGv_i32 opc; \ 928 if (unlikely(!ctx->vsx_enabled)) { \ 929 gen_exception(ctx, POWERPC_EXCP_VSXU); \ 930 return; \ 931 } \ 932 opc = tcg_const_i32(ctx->opcode); \ 933 gen_helper_##name(cpu_env, opc); \ 934 tcg_temp_free_i32(opc); \ 935} 936 937#define GEN_VSX_HELPER_X3(name, op1, op2, inval, type) \ 938static void gen_##name(DisasContext *ctx) \ 939{ \ 940 TCGv_ptr xt, xa, xb; \ 941 if (unlikely(!ctx->vsx_enabled)) { \ 942 gen_exception(ctx, POWERPC_EXCP_VSXU); \ 943 return; \ 944 } \ 945 xt = gen_vsr_ptr(xT(ctx->opcode)); \ 946 xa = gen_vsr_ptr(xA(ctx->opcode)); \ 947 xb = gen_vsr_ptr(xB(ctx->opcode)); \ 948 gen_helper_##name(cpu_env, xt, xa, xb); \ 949 tcg_temp_free_ptr(xt); \ 950 tcg_temp_free_ptr(xa); \ 951 tcg_temp_free_ptr(xb); \ 952} 953 954#define GEN_VSX_HELPER_X2(name, op1, op2, inval, type) \ 955static void gen_##name(DisasContext *ctx) \ 956{ \ 957 TCGv_ptr xt, xb; \ 958 if (unlikely(!ctx->vsx_enabled)) { \ 959 gen_exception(ctx, POWERPC_EXCP_VSXU); \ 960 return; \ 961 } \ 962 xt = gen_vsr_ptr(xT(ctx->opcode)); \ 963 xb = gen_vsr_ptr(xB(ctx->opcode)); \ 964 gen_helper_##name(cpu_env, xt, xb); \ 965 tcg_temp_free_ptr(xt); \ 966 tcg_temp_free_ptr(xb); \ 967} 968 969#define GEN_VSX_HELPER_X2_AB(name, op1, op2, inval, type) \ 970static void gen_##name(DisasContext *ctx) \ 971{ \ 972 TCGv_i32 opc; \ 973 TCGv_ptr xa, xb; \ 974 if (unlikely(!ctx->vsx_enabled)) { \ 975 gen_exception(ctx, POWERPC_EXCP_VSXU); \ 976 return; \ 977 } \ 978 opc = tcg_const_i32(ctx->opcode); \ 979 xa = gen_vsr_ptr(xA(ctx->opcode)); \ 980 xb = gen_vsr_ptr(xB(ctx->opcode)); \ 981 gen_helper_##name(cpu_env, opc, xa, xb); \ 982 tcg_temp_free_i32(opc); \ 983 tcg_temp_free_ptr(xa); \ 984 tcg_temp_free_ptr(xb); \ 985} 986 987#define GEN_VSX_HELPER_X1(name, op1, op2, inval, type) \ 988static void gen_##name(DisasContext *ctx) \ 989{ \ 990 TCGv_i32 opc; \ 991 TCGv_ptr xb; \ 992 if (unlikely(!ctx->vsx_enabled)) { \ 993 gen_exception(ctx, POWERPC_EXCP_VSXU); \ 994 return; \ 995 } \ 996 opc = tcg_const_i32(ctx->opcode); \ 997 xb = gen_vsr_ptr(xB(ctx->opcode)); \ 998 gen_helper_##name(cpu_env, opc, xb); \ 999 tcg_temp_free_i32(opc); \ 1000 tcg_temp_free_ptr(xb); \ 1001} 1002 1003#define GEN_VSX_HELPER_R3(name, op1, op2, inval, type) \ 1004static void gen_##name(DisasContext *ctx) \ 1005{ \ 1006 TCGv_i32 opc; \ 1007 TCGv_ptr xt, xa, xb; \ 1008 if (unlikely(!ctx->vsx_enabled)) { \ 1009 gen_exception(ctx, POWERPC_EXCP_VSXU); \ 1010 return; \ 1011 } \ 1012 opc = tcg_const_i32(ctx->opcode); \ 1013 xt = gen_vsr_ptr(rD(ctx->opcode) + 32); \ 1014 xa = gen_vsr_ptr(rA(ctx->opcode) + 32); \ 1015 xb = gen_vsr_ptr(rB(ctx->opcode) + 32); \ 1016 gen_helper_##name(cpu_env, opc, xt, xa, xb); \ 1017 tcg_temp_free_i32(opc); \ 1018 tcg_temp_free_ptr(xt); \ 1019 tcg_temp_free_ptr(xa); \ 1020 tcg_temp_free_ptr(xb); \ 1021} 1022 1023#define GEN_VSX_HELPER_R2(name, op1, op2, inval, type) \ 1024static void gen_##name(DisasContext *ctx) \ 1025{ \ 1026 TCGv_i32 opc; \ 1027 TCGv_ptr xt, xb; \ 1028 if (unlikely(!ctx->vsx_enabled)) { \ 1029 gen_exception(ctx, POWERPC_EXCP_VSXU); \ 1030 return; \ 1031 } \ 1032 opc = tcg_const_i32(ctx->opcode); \ 1033 xt = gen_vsr_ptr(rD(ctx->opcode) + 32); \ 1034 xb = gen_vsr_ptr(rB(ctx->opcode) + 32); \ 1035 gen_helper_##name(cpu_env, opc, xt, xb); \ 1036 tcg_temp_free_i32(opc); \ 1037 tcg_temp_free_ptr(xt); \ 1038 tcg_temp_free_ptr(xb); \ 1039} 1040 1041#define GEN_VSX_HELPER_R2_AB(name, op1, op2, inval, type) \ 1042static void gen_##name(DisasContext *ctx) \ 1043{ \ 1044 TCGv_i32 opc; \ 1045 TCGv_ptr xa, xb; \ 1046 if (unlikely(!ctx->vsx_enabled)) { \ 1047 gen_exception(ctx, POWERPC_EXCP_VSXU); \ 1048 return; \ 1049 } \ 1050 opc = tcg_const_i32(ctx->opcode); \ 1051 xa = gen_vsr_ptr(rA(ctx->opcode) + 32); \ 1052 xb = gen_vsr_ptr(rB(ctx->opcode) + 32); \ 1053 gen_helper_##name(cpu_env, opc, xa, xb); \ 1054 tcg_temp_free_i32(opc); \ 1055 tcg_temp_free_ptr(xa); \ 1056 tcg_temp_free_ptr(xb); \ 1057} 1058 1059#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type) \ 1060static void gen_##name(DisasContext *ctx) \ 1061{ \ 1062 TCGv_i64 t0; \ 1063 TCGv_i64 t1; \ 1064 if (unlikely(!ctx->vsx_enabled)) { \ 1065 gen_exception(ctx, POWERPC_EXCP_VSXU); \ 1066 return; \ 1067 } \ 1068 t0 = tcg_temp_new_i64(); \ 1069 t1 = tcg_temp_new_i64(); \ 1070 get_cpu_vsr(t0, xB(ctx->opcode), true); \ 1071 gen_helper_##name(t1, cpu_env, t0); \ 1072 set_cpu_vsr(xT(ctx->opcode), t1, true); \ 1073 tcg_temp_free_i64(t0); \ 1074 tcg_temp_free_i64(t1); \ 1075} 1076 1077GEN_VSX_HELPER_X3(xsadddp, 0x00, 0x04, 0, PPC2_VSX) 1078GEN_VSX_HELPER_R3(xsaddqp, 0x04, 0x00, 0, PPC2_ISA300) 1079GEN_VSX_HELPER_X3(xssubdp, 0x00, 0x05, 0, PPC2_VSX) 1080GEN_VSX_HELPER_X3(xsmuldp, 0x00, 0x06, 0, PPC2_VSX) 1081GEN_VSX_HELPER_R3(xsmulqp, 0x04, 0x01, 0, PPC2_ISA300) 1082GEN_VSX_HELPER_X3(xsdivdp, 0x00, 0x07, 0, PPC2_VSX) 1083GEN_VSX_HELPER_R3(xsdivqp, 0x04, 0x11, 0, PPC2_ISA300) 1084GEN_VSX_HELPER_X2(xsredp, 0x14, 0x05, 0, PPC2_VSX) 1085GEN_VSX_HELPER_X2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX) 1086GEN_VSX_HELPER_X2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX) 1087GEN_VSX_HELPER_X2_AB(xstdivdp, 0x14, 0x07, 0, PPC2_VSX) 1088GEN_VSX_HELPER_X1(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX) 1089GEN_VSX_HELPER_X3(xscmpeqdp, 0x0C, 0x00, 0, PPC2_ISA300) 1090GEN_VSX_HELPER_X3(xscmpgtdp, 0x0C, 0x01, 0, PPC2_ISA300) 1091GEN_VSX_HELPER_X3(xscmpgedp, 0x0C, 0x02, 0, PPC2_ISA300) 1092GEN_VSX_HELPER_X3(xscmpnedp, 0x0C, 0x03, 0, PPC2_ISA300) 1093GEN_VSX_HELPER_X2_AB(xscmpexpdp, 0x0C, 0x07, 0, PPC2_ISA300) 1094GEN_VSX_HELPER_R2_AB(xscmpexpqp, 0x04, 0x05, 0, PPC2_ISA300) 1095GEN_VSX_HELPER_X2_AB(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX) 1096GEN_VSX_HELPER_X2_AB(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX) 1097GEN_VSX_HELPER_R2_AB(xscmpoqp, 0x04, 0x04, 0, PPC2_VSX) 1098GEN_VSX_HELPER_R2_AB(xscmpuqp, 0x04, 0x14, 0, PPC2_VSX) 1099GEN_VSX_HELPER_X3(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX) 1100GEN_VSX_HELPER_X3(xsmindp, 0x00, 0x15, 0, PPC2_VSX) 1101GEN_VSX_HELPER_R3(xsmaxcdp, 0x00, 0x10, 0, PPC2_ISA300) 1102GEN_VSX_HELPER_R3(xsmincdp, 0x00, 0x11, 0, PPC2_ISA300) 1103GEN_VSX_HELPER_R3(xsmaxjdp, 0x00, 0x12, 0, PPC2_ISA300) 1104GEN_VSX_HELPER_R3(xsminjdp, 0x00, 0x12, 0, PPC2_ISA300) 1105GEN_VSX_HELPER_X2(xscvdphp, 0x16, 0x15, 0x11, PPC2_ISA300) 1106GEN_VSX_HELPER_X2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX) 1107GEN_VSX_HELPER_R2(xscvdpqp, 0x04, 0x1A, 0x16, PPC2_ISA300) 1108GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207) 1109GEN_VSX_HELPER_R2(xscvqpsdz, 0x04, 0x1A, 0x19, PPC2_ISA300) 1110GEN_VSX_HELPER_R2(xscvqpswz, 0x04, 0x1A, 0x09, PPC2_ISA300) 1111GEN_VSX_HELPER_R2(xscvqpudz, 0x04, 0x1A, 0x11, PPC2_ISA300) 1112GEN_VSX_HELPER_R2(xscvqpuwz, 0x04, 0x1A, 0x01, PPC2_ISA300) 1113GEN_VSX_HELPER_X2(xscvhpdp, 0x16, 0x15, 0x10, PPC2_ISA300) 1114GEN_VSX_HELPER_R2(xscvsdqp, 0x04, 0x1A, 0x0A, PPC2_ISA300) 1115GEN_VSX_HELPER_X2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX) 1116GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn, 0x16, 0x14, 0, PPC2_VSX207) 1117GEN_VSX_HELPER_X2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX) 1118GEN_VSX_HELPER_X2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX) 1119GEN_VSX_HELPER_X2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX) 1120GEN_VSX_HELPER_X2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX) 1121GEN_VSX_HELPER_X2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX) 1122GEN_VSX_HELPER_R2(xscvudqp, 0x04, 0x1A, 0x02, PPC2_ISA300) 1123GEN_VSX_HELPER_X2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX) 1124GEN_VSX_HELPER_X2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX) 1125GEN_VSX_HELPER_X2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX) 1126GEN_VSX_HELPER_X2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX) 1127GEN_VSX_HELPER_X2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX) 1128GEN_VSX_HELPER_X2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX) 1129GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207) 1130GEN_VSX_HELPER_R2(xsrqpi, 0x05, 0x00, 0, PPC2_ISA300) 1131GEN_VSX_HELPER_R2(xsrqpxp, 0x05, 0x01, 0, PPC2_ISA300) 1132GEN_VSX_HELPER_R2(xssqrtqp, 0x04, 0x19, 0x1B, PPC2_ISA300) 1133GEN_VSX_HELPER_R3(xssubqp, 0x04, 0x10, 0, PPC2_ISA300) 1134GEN_VSX_HELPER_X3(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207) 1135GEN_VSX_HELPER_X3(xssubsp, 0x00, 0x01, 0, PPC2_VSX207) 1136GEN_VSX_HELPER_X3(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207) 1137GEN_VSX_HELPER_X3(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207) 1138GEN_VSX_HELPER_X2(xsresp, 0x14, 0x01, 0, PPC2_VSX207) 1139GEN_VSX_HELPER_X2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207) 1140GEN_VSX_HELPER_X2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207) 1141GEN_VSX_HELPER_X2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207) 1142GEN_VSX_HELPER_X2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207) 1143GEN_VSX_HELPER_X1(xststdcsp, 0x14, 0x12, 0, PPC2_ISA300) 1144GEN_VSX_HELPER_2(xststdcdp, 0x14, 0x16, 0, PPC2_ISA300) 1145GEN_VSX_HELPER_2(xststdcqp, 0x04, 0x16, 0, PPC2_ISA300) 1146 1147GEN_VSX_HELPER_X3(xvadddp, 0x00, 0x0C, 0, PPC2_VSX) 1148GEN_VSX_HELPER_X3(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX) 1149GEN_VSX_HELPER_X3(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX) 1150GEN_VSX_HELPER_X3(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX) 1151GEN_VSX_HELPER_X2(xvredp, 0x14, 0x0D, 0, PPC2_VSX) 1152GEN_VSX_HELPER_X2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX) 1153GEN_VSX_HELPER_X2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX) 1154GEN_VSX_HELPER_X2_AB(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX) 1155GEN_VSX_HELPER_X1(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX) 1156GEN_VSX_HELPER_X3(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX) 1157GEN_VSX_HELPER_X3(xvmindp, 0x00, 0x1D, 0, PPC2_VSX) 1158GEN_VSX_HELPER_X2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX) 1159GEN_VSX_HELPER_X2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX) 1160GEN_VSX_HELPER_X2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX) 1161GEN_VSX_HELPER_X2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX) 1162GEN_VSX_HELPER_X2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX) 1163GEN_VSX_HELPER_X2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX) 1164GEN_VSX_HELPER_X2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX) 1165GEN_VSX_HELPER_X2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX) 1166GEN_VSX_HELPER_X2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX) 1167GEN_VSX_HELPER_X2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX) 1168GEN_VSX_HELPER_X2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX) 1169GEN_VSX_HELPER_X2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX) 1170GEN_VSX_HELPER_X2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX) 1171GEN_VSX_HELPER_X2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX) 1172 1173GEN_VSX_HELPER_X3(xvaddsp, 0x00, 0x08, 0, PPC2_VSX) 1174GEN_VSX_HELPER_X3(xvsubsp, 0x00, 0x09, 0, PPC2_VSX) 1175GEN_VSX_HELPER_X3(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX) 1176GEN_VSX_HELPER_X3(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX) 1177GEN_VSX_HELPER_X2(xvresp, 0x14, 0x09, 0, PPC2_VSX) 1178GEN_VSX_HELPER_X2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX) 1179GEN_VSX_HELPER_X2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX) 1180GEN_VSX_HELPER_X2_AB(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX) 1181GEN_VSX_HELPER_X1(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX) 1182GEN_VSX_HELPER_X3(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX) 1183GEN_VSX_HELPER_X3(xvminsp, 0x00, 0x19, 0, PPC2_VSX) 1184GEN_VSX_HELPER_X2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX) 1185GEN_VSX_HELPER_X2(xvcvhpsp, 0x16, 0x1D, 0x18, PPC2_ISA300) 1186GEN_VSX_HELPER_X2(xvcvsphp, 0x16, 0x1D, 0x19, PPC2_ISA300) 1187GEN_VSX_HELPER_X2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX) 1188GEN_VSX_HELPER_X2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX) 1189GEN_VSX_HELPER_X2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX) 1190GEN_VSX_HELPER_X2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX) 1191GEN_VSX_HELPER_X2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX) 1192GEN_VSX_HELPER_X2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX) 1193GEN_VSX_HELPER_X2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX) 1194GEN_VSX_HELPER_X2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX) 1195GEN_VSX_HELPER_X2(xvrspi, 0x12, 0x08, 0, PPC2_VSX) 1196GEN_VSX_HELPER_X2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX) 1197GEN_VSX_HELPER_X2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX) 1198GEN_VSX_HELPER_X2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX) 1199GEN_VSX_HELPER_X2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX) 1200GEN_VSX_HELPER_2(xvtstdcsp, 0x14, 0x1A, 0, PPC2_VSX) 1201GEN_VSX_HELPER_2(xvtstdcdp, 0x14, 0x1E, 0, PPC2_VSX) 1202GEN_VSX_HELPER_X3(xxperm, 0x08, 0x03, 0, PPC2_ISA300) 1203GEN_VSX_HELPER_X3(xxpermr, 0x08, 0x07, 0, PPC2_ISA300) 1204 1205#define GEN_VSX_HELPER_VSX_MADD(name, op1, aop, mop, inval, type) \ 1206static void gen_##name(DisasContext *ctx) \ 1207{ \ 1208 TCGv_ptr xt, xa, b, c; \ 1209 if (unlikely(!ctx->vsx_enabled)) { \ 1210 gen_exception(ctx, POWERPC_EXCP_VSXU); \ 1211 return; \ 1212 } \ 1213 xt = gen_vsr_ptr(xT(ctx->opcode)); \ 1214 xa = gen_vsr_ptr(xA(ctx->opcode)); \ 1215 if (ctx->opcode & PPC_BIT32(25)) { \ 1216 /* \ 1217 * AxT + B \ 1218 */ \ 1219 b = gen_vsr_ptr(xT(ctx->opcode)); \ 1220 c = gen_vsr_ptr(xB(ctx->opcode)); \ 1221 } else { \ 1222 /* \ 1223 * AxB + T \ 1224 */ \ 1225 b = gen_vsr_ptr(xB(ctx->opcode)); \ 1226 c = gen_vsr_ptr(xT(ctx->opcode)); \ 1227 } \ 1228 gen_helper_##name(cpu_env, xt, xa, b, c); \ 1229 tcg_temp_free_ptr(xt); \ 1230 tcg_temp_free_ptr(xa); \ 1231 tcg_temp_free_ptr(b); \ 1232 tcg_temp_free_ptr(c); \ 1233} 1234 1235GEN_VSX_HELPER_VSX_MADD(xsmadddp, 0x04, 0x04, 0x05, 0, PPC2_VSX) 1236GEN_VSX_HELPER_VSX_MADD(xsmsubdp, 0x04, 0x06, 0x07, 0, PPC2_VSX) 1237GEN_VSX_HELPER_VSX_MADD(xsnmadddp, 0x04, 0x14, 0x15, 0, PPC2_VSX) 1238GEN_VSX_HELPER_VSX_MADD(xsnmsubdp, 0x04, 0x16, 0x17, 0, PPC2_VSX) 1239GEN_VSX_HELPER_VSX_MADD(xsmaddsp, 0x04, 0x00, 0x01, 0, PPC2_VSX207) 1240GEN_VSX_HELPER_VSX_MADD(xsmsubsp, 0x04, 0x02, 0x03, 0, PPC2_VSX207) 1241GEN_VSX_HELPER_VSX_MADD(xsnmaddsp, 0x04, 0x10, 0x11, 0, PPC2_VSX207) 1242GEN_VSX_HELPER_VSX_MADD(xsnmsubsp, 0x04, 0x12, 0x13, 0, PPC2_VSX207) 1243GEN_VSX_HELPER_VSX_MADD(xvmadddp, 0x04, 0x0C, 0x0D, 0, PPC2_VSX) 1244GEN_VSX_HELPER_VSX_MADD(xvmsubdp, 0x04, 0x0E, 0x0F, 0, PPC2_VSX) 1245GEN_VSX_HELPER_VSX_MADD(xvnmadddp, 0x04, 0x1C, 0x1D, 0, PPC2_VSX) 1246GEN_VSX_HELPER_VSX_MADD(xvnmsubdp, 0x04, 0x1E, 0x1F, 0, PPC2_VSX) 1247GEN_VSX_HELPER_VSX_MADD(xvmaddsp, 0x04, 0x08, 0x09, 0, PPC2_VSX) 1248GEN_VSX_HELPER_VSX_MADD(xvmsubsp, 0x04, 0x0A, 0x0B, 0, PPC2_VSX) 1249GEN_VSX_HELPER_VSX_MADD(xvnmaddsp, 0x04, 0x18, 0x19, 0, PPC2_VSX) 1250GEN_VSX_HELPER_VSX_MADD(xvnmsubsp, 0x04, 0x1A, 0x1B, 0, PPC2_VSX) 1251 1252static void gen_xxbrd(DisasContext *ctx) 1253{ 1254 TCGv_i64 xth; 1255 TCGv_i64 xtl; 1256 TCGv_i64 xbh; 1257 TCGv_i64 xbl; 1258 1259 if (unlikely(!ctx->vsx_enabled)) { 1260 gen_exception(ctx, POWERPC_EXCP_VSXU); 1261 return; 1262 } 1263 xth = tcg_temp_new_i64(); 1264 xtl = tcg_temp_new_i64(); 1265 xbh = tcg_temp_new_i64(); 1266 xbl = tcg_temp_new_i64(); 1267 get_cpu_vsr(xbh, xB(ctx->opcode), true); 1268 get_cpu_vsr(xbl, xB(ctx->opcode), false); 1269 1270 tcg_gen_bswap64_i64(xth, xbh); 1271 tcg_gen_bswap64_i64(xtl, xbl); 1272 set_cpu_vsr(xT(ctx->opcode), xth, true); 1273 set_cpu_vsr(xT(ctx->opcode), xtl, false); 1274 1275 tcg_temp_free_i64(xth); 1276 tcg_temp_free_i64(xtl); 1277 tcg_temp_free_i64(xbh); 1278 tcg_temp_free_i64(xbl); 1279} 1280 1281static void gen_xxbrh(DisasContext *ctx) 1282{ 1283 TCGv_i64 xth; 1284 TCGv_i64 xtl; 1285 TCGv_i64 xbh; 1286 TCGv_i64 xbl; 1287 1288 if (unlikely(!ctx->vsx_enabled)) { 1289 gen_exception(ctx, POWERPC_EXCP_VSXU); 1290 return; 1291 } 1292 xth = tcg_temp_new_i64(); 1293 xtl = tcg_temp_new_i64(); 1294 xbh = tcg_temp_new_i64(); 1295 xbl = tcg_temp_new_i64(); 1296 get_cpu_vsr(xbh, xB(ctx->opcode), true); 1297 get_cpu_vsr(xbl, xB(ctx->opcode), false); 1298 1299 gen_bswap16x8(xth, xtl, xbh, xbl); 1300 set_cpu_vsr(xT(ctx->opcode), xth, true); 1301 set_cpu_vsr(xT(ctx->opcode), xtl, false); 1302 1303 tcg_temp_free_i64(xth); 1304 tcg_temp_free_i64(xtl); 1305 tcg_temp_free_i64(xbh); 1306 tcg_temp_free_i64(xbl); 1307} 1308 1309static void gen_xxbrq(DisasContext *ctx) 1310{ 1311 TCGv_i64 xth; 1312 TCGv_i64 xtl; 1313 TCGv_i64 xbh; 1314 TCGv_i64 xbl; 1315 TCGv_i64 t0; 1316 1317 if (unlikely(!ctx->vsx_enabled)) { 1318 gen_exception(ctx, POWERPC_EXCP_VSXU); 1319 return; 1320 } 1321 xth = tcg_temp_new_i64(); 1322 xtl = tcg_temp_new_i64(); 1323 xbh = tcg_temp_new_i64(); 1324 xbl = tcg_temp_new_i64(); 1325 get_cpu_vsr(xbh, xB(ctx->opcode), true); 1326 get_cpu_vsr(xbl, xB(ctx->opcode), false); 1327 t0 = tcg_temp_new_i64(); 1328 1329 tcg_gen_bswap64_i64(t0, xbl); 1330 tcg_gen_bswap64_i64(xtl, xbh); 1331 set_cpu_vsr(xT(ctx->opcode), xtl, false); 1332 tcg_gen_mov_i64(xth, t0); 1333 set_cpu_vsr(xT(ctx->opcode), xth, true); 1334 1335 tcg_temp_free_i64(t0); 1336 tcg_temp_free_i64(xth); 1337 tcg_temp_free_i64(xtl); 1338 tcg_temp_free_i64(xbh); 1339 tcg_temp_free_i64(xbl); 1340} 1341 1342static void gen_xxbrw(DisasContext *ctx) 1343{ 1344 TCGv_i64 xth; 1345 TCGv_i64 xtl; 1346 TCGv_i64 xbh; 1347 TCGv_i64 xbl; 1348 1349 if (unlikely(!ctx->vsx_enabled)) { 1350 gen_exception(ctx, POWERPC_EXCP_VSXU); 1351 return; 1352 } 1353 xth = tcg_temp_new_i64(); 1354 xtl = tcg_temp_new_i64(); 1355 xbh = tcg_temp_new_i64(); 1356 xbl = tcg_temp_new_i64(); 1357 get_cpu_vsr(xbh, xB(ctx->opcode), true); 1358 get_cpu_vsr(xbl, xB(ctx->opcode), false); 1359 1360 gen_bswap32x4(xth, xtl, xbh, xbl); 1361 set_cpu_vsr(xT(ctx->opcode), xth, true); 1362 set_cpu_vsr(xT(ctx->opcode), xtl, false); 1363 1364 tcg_temp_free_i64(xth); 1365 tcg_temp_free_i64(xtl); 1366 tcg_temp_free_i64(xbh); 1367 tcg_temp_free_i64(xbl); 1368} 1369 1370#define VSX_LOGICAL(name, vece, tcg_op) \ 1371static void glue(gen_, name)(DisasContext *ctx) \ 1372 { \ 1373 if (unlikely(!ctx->vsx_enabled)) { \ 1374 gen_exception(ctx, POWERPC_EXCP_VSXU); \ 1375 return; \ 1376 } \ 1377 tcg_op(vece, vsr_full_offset(xT(ctx->opcode)), \ 1378 vsr_full_offset(xA(ctx->opcode)), \ 1379 vsr_full_offset(xB(ctx->opcode)), 16, 16); \ 1380 } 1381 1382VSX_LOGICAL(xxland, MO_64, tcg_gen_gvec_and) 1383VSX_LOGICAL(xxlandc, MO_64, tcg_gen_gvec_andc) 1384VSX_LOGICAL(xxlor, MO_64, tcg_gen_gvec_or) 1385VSX_LOGICAL(xxlxor, MO_64, tcg_gen_gvec_xor) 1386VSX_LOGICAL(xxlnor, MO_64, tcg_gen_gvec_nor) 1387VSX_LOGICAL(xxleqv, MO_64, tcg_gen_gvec_eqv) 1388VSX_LOGICAL(xxlnand, MO_64, tcg_gen_gvec_nand) 1389VSX_LOGICAL(xxlorc, MO_64, tcg_gen_gvec_orc) 1390 1391#define VSX_XXMRG(name, high) \ 1392static void glue(gen_, name)(DisasContext *ctx) \ 1393 { \ 1394 TCGv_i64 a0, a1, b0, b1, tmp; \ 1395 if (unlikely(!ctx->vsx_enabled)) { \ 1396 gen_exception(ctx, POWERPC_EXCP_VSXU); \ 1397 return; \ 1398 } \ 1399 a0 = tcg_temp_new_i64(); \ 1400 a1 = tcg_temp_new_i64(); \ 1401 b0 = tcg_temp_new_i64(); \ 1402 b1 = tcg_temp_new_i64(); \ 1403 tmp = tcg_temp_new_i64(); \ 1404 get_cpu_vsr(a0, xA(ctx->opcode), high); \ 1405 get_cpu_vsr(a1, xA(ctx->opcode), high); \ 1406 get_cpu_vsr(b0, xB(ctx->opcode), high); \ 1407 get_cpu_vsr(b1, xB(ctx->opcode), high); \ 1408 tcg_gen_shri_i64(a0, a0, 32); \ 1409 tcg_gen_shri_i64(b0, b0, 32); \ 1410 tcg_gen_deposit_i64(tmp, b0, a0, 32, 32); \ 1411 set_cpu_vsr(xT(ctx->opcode), tmp, true); \ 1412 tcg_gen_deposit_i64(tmp, b1, a1, 32, 32); \ 1413 set_cpu_vsr(xT(ctx->opcode), tmp, false); \ 1414 tcg_temp_free_i64(a0); \ 1415 tcg_temp_free_i64(a1); \ 1416 tcg_temp_free_i64(b0); \ 1417 tcg_temp_free_i64(b1); \ 1418 tcg_temp_free_i64(tmp); \ 1419 } 1420 1421VSX_XXMRG(xxmrghw, 1) 1422VSX_XXMRG(xxmrglw, 0) 1423 1424static void gen_xxsel(DisasContext *ctx) 1425{ 1426 int rt = xT(ctx->opcode); 1427 int ra = xA(ctx->opcode); 1428 int rb = xB(ctx->opcode); 1429 int rc = xC(ctx->opcode); 1430 1431 if (unlikely(!ctx->vsx_enabled)) { 1432 gen_exception(ctx, POWERPC_EXCP_VSXU); 1433 return; 1434 } 1435 tcg_gen_gvec_bitsel(MO_64, vsr_full_offset(rt), vsr_full_offset(rc), 1436 vsr_full_offset(rb), vsr_full_offset(ra), 16, 16); 1437} 1438 1439static bool trans_XXSPLTW(DisasContext *ctx, arg_XX2 *a) 1440{ 1441 int tofs, bofs; 1442 1443 REQUIRE_VSX(ctx); 1444 1445 tofs = vsr_full_offset(a->xt); 1446 bofs = vsr_full_offset(a->xb); 1447 bofs += a->uim << MO_32; 1448#ifndef HOST_WORDS_BIG_ENDIAN 1449 bofs ^= 8 | 4; 1450#endif 1451 1452 tcg_gen_gvec_dup_mem(MO_32, tofs, bofs, 16, 16); 1453 return true; 1454} 1455 1456#define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff)) 1457 1458static bool trans_XXSPLTIB(DisasContext *ctx, arg_X_imm8 *a) 1459{ 1460 if (a->xt < 32) { 1461 REQUIRE_VSX(ctx); 1462 } else { 1463 REQUIRE_VECTOR(ctx); 1464 } 1465 tcg_gen_gvec_dup_imm(MO_8, vsr_full_offset(a->xt), 16, 16, a->imm); 1466 return true; 1467} 1468 1469static bool trans_XXSPLTIW(DisasContext *ctx, arg_8RR_D *a) 1470{ 1471 REQUIRE_INSNS_FLAGS2(ctx, ISA310); 1472 REQUIRE_VSX(ctx); 1473 1474 tcg_gen_gvec_dup_imm(MO_32, vsr_full_offset(a->xt), 16, 16, a->si); 1475 1476 return true; 1477} 1478 1479static bool trans_XXSPLTIDP(DisasContext *ctx, arg_8RR_D *a) 1480{ 1481 REQUIRE_INSNS_FLAGS2(ctx, ISA310); 1482 REQUIRE_VSX(ctx); 1483 1484 tcg_gen_gvec_dup_imm(MO_64, vsr_full_offset(a->xt), 16, 16, 1485 helper_todouble(a->si)); 1486 return true; 1487} 1488 1489static bool trans_XXSPLTI32DX(DisasContext *ctx, arg_8RR_D_IX *a) 1490{ 1491 TCGv_i32 imm; 1492 1493 REQUIRE_INSNS_FLAGS2(ctx, ISA310); 1494 REQUIRE_VSX(ctx); 1495 1496 imm = tcg_constant_i32(a->si); 1497 1498 tcg_gen_st_i32(imm, cpu_env, 1499 offsetof(CPUPPCState, vsr[a->xt].VsrW(0 + a->ix))); 1500 tcg_gen_st_i32(imm, cpu_env, 1501 offsetof(CPUPPCState, vsr[a->xt].VsrW(2 + a->ix))); 1502 1503 return true; 1504} 1505 1506static bool trans_LXVKQ(DisasContext *ctx, arg_X_uim5 *a) 1507{ 1508 static const uint64_t values[32] = { 1509 0, /* Unspecified */ 1510 0x3FFF000000000000llu, /* QP +1.0 */ 1511 0x4000000000000000llu, /* QP +2.0 */ 1512 0x4000800000000000llu, /* QP +3.0 */ 1513 0x4001000000000000llu, /* QP +4.0 */ 1514 0x4001400000000000llu, /* QP +5.0 */ 1515 0x4001800000000000llu, /* QP +6.0 */ 1516 0x4001C00000000000llu, /* QP +7.0 */ 1517 0x7FFF000000000000llu, /* QP +Inf */ 1518 0x7FFF800000000000llu, /* QP dQNaN */ 1519 0, /* Unspecified */ 1520 0, /* Unspecified */ 1521 0, /* Unspecified */ 1522 0, /* Unspecified */ 1523 0, /* Unspecified */ 1524 0, /* Unspecified */ 1525 0x8000000000000000llu, /* QP -0.0 */ 1526 0xBFFF000000000000llu, /* QP -1.0 */ 1527 0xC000000000000000llu, /* QP -2.0 */ 1528 0xC000800000000000llu, /* QP -3.0 */ 1529 0xC001000000000000llu, /* QP -4.0 */ 1530 0xC001400000000000llu, /* QP -5.0 */ 1531 0xC001800000000000llu, /* QP -6.0 */ 1532 0xC001C00000000000llu, /* QP -7.0 */ 1533 0xFFFF000000000000llu, /* QP -Inf */ 1534 }; 1535 1536 REQUIRE_INSNS_FLAGS2(ctx, ISA310); 1537 REQUIRE_VSX(ctx); 1538 1539 if (values[a->uim]) { 1540 set_cpu_vsr(a->xt, tcg_constant_i64(0x0), false); 1541 set_cpu_vsr(a->xt, tcg_constant_i64(values[a->uim]), true); 1542 } else { 1543 gen_invalid(ctx); 1544 } 1545 1546 return true; 1547} 1548 1549static void gen_xxsldwi(DisasContext *ctx) 1550{ 1551 TCGv_i64 xth, xtl; 1552 if (unlikely(!ctx->vsx_enabled)) { 1553 gen_exception(ctx, POWERPC_EXCP_VSXU); 1554 return; 1555 } 1556 xth = tcg_temp_new_i64(); 1557 xtl = tcg_temp_new_i64(); 1558 1559 switch (SHW(ctx->opcode)) { 1560 case 0: { 1561 get_cpu_vsr(xth, xA(ctx->opcode), true); 1562 get_cpu_vsr(xtl, xA(ctx->opcode), false); 1563 break; 1564 } 1565 case 1: { 1566 TCGv_i64 t0 = tcg_temp_new_i64(); 1567 get_cpu_vsr(xth, xA(ctx->opcode), true); 1568 tcg_gen_shli_i64(xth, xth, 32); 1569 get_cpu_vsr(t0, xA(ctx->opcode), false); 1570 tcg_gen_shri_i64(t0, t0, 32); 1571 tcg_gen_or_i64(xth, xth, t0); 1572 get_cpu_vsr(xtl, xA(ctx->opcode), false); 1573 tcg_gen_shli_i64(xtl, xtl, 32); 1574 get_cpu_vsr(t0, xB(ctx->opcode), true); 1575 tcg_gen_shri_i64(t0, t0, 32); 1576 tcg_gen_or_i64(xtl, xtl, t0); 1577 tcg_temp_free_i64(t0); 1578 break; 1579 } 1580 case 2: { 1581 get_cpu_vsr(xth, xA(ctx->opcode), false); 1582 get_cpu_vsr(xtl, xB(ctx->opcode), true); 1583 break; 1584 } 1585 case 3: { 1586 TCGv_i64 t0 = tcg_temp_new_i64(); 1587 get_cpu_vsr(xth, xA(ctx->opcode), false); 1588 tcg_gen_shli_i64(xth, xth, 32); 1589 get_cpu_vsr(t0, xB(ctx->opcode), true); 1590 tcg_gen_shri_i64(t0, t0, 32); 1591 tcg_gen_or_i64(xth, xth, t0); 1592 get_cpu_vsr(xtl, xB(ctx->opcode), true); 1593 tcg_gen_shli_i64(xtl, xtl, 32); 1594 get_cpu_vsr(t0, xB(ctx->opcode), false); 1595 tcg_gen_shri_i64(t0, t0, 32); 1596 tcg_gen_or_i64(xtl, xtl, t0); 1597 tcg_temp_free_i64(t0); 1598 break; 1599 } 1600 } 1601 1602 set_cpu_vsr(xT(ctx->opcode), xth, true); 1603 set_cpu_vsr(xT(ctx->opcode), xtl, false); 1604 1605 tcg_temp_free_i64(xth); 1606 tcg_temp_free_i64(xtl); 1607} 1608 1609#define VSX_EXTRACT_INSERT(name) \ 1610static void gen_##name(DisasContext *ctx) \ 1611{ \ 1612 TCGv_ptr xt, xb; \ 1613 TCGv_i32 t0; \ 1614 TCGv_i64 t1; \ 1615 uint8_t uimm = UIMM4(ctx->opcode); \ 1616 \ 1617 if (unlikely(!ctx->vsx_enabled)) { \ 1618 gen_exception(ctx, POWERPC_EXCP_VSXU); \ 1619 return; \ 1620 } \ 1621 xt = gen_vsr_ptr(xT(ctx->opcode)); \ 1622 xb = gen_vsr_ptr(xB(ctx->opcode)); \ 1623 t0 = tcg_temp_new_i32(); \ 1624 t1 = tcg_temp_new_i64(); \ 1625 /* \ 1626 * uimm > 15 out of bound and for \ 1627 * uimm > 12 handle as per hardware in helper \ 1628 */ \ 1629 if (uimm > 15) { \ 1630 tcg_gen_movi_i64(t1, 0); \ 1631 set_cpu_vsr(xT(ctx->opcode), t1, true); \ 1632 set_cpu_vsr(xT(ctx->opcode), t1, false); \ 1633 return; \ 1634 } \ 1635 tcg_gen_movi_i32(t0, uimm); \ 1636 gen_helper_##name(cpu_env, xt, xb, t0); \ 1637 tcg_temp_free_ptr(xb); \ 1638 tcg_temp_free_ptr(xt); \ 1639 tcg_temp_free_i32(t0); \ 1640 tcg_temp_free_i64(t1); \ 1641} 1642 1643VSX_EXTRACT_INSERT(xxextractuw) 1644VSX_EXTRACT_INSERT(xxinsertw) 1645 1646#ifdef TARGET_PPC64 1647static void gen_xsxexpdp(DisasContext *ctx) 1648{ 1649 TCGv rt = cpu_gpr[rD(ctx->opcode)]; 1650 TCGv_i64 t0; 1651 if (unlikely(!ctx->vsx_enabled)) { 1652 gen_exception(ctx, POWERPC_EXCP_VSXU); 1653 return; 1654 } 1655 t0 = tcg_temp_new_i64(); 1656 get_cpu_vsr(t0, xB(ctx->opcode), true); 1657 tcg_gen_extract_i64(rt, t0, 52, 11); 1658 tcg_temp_free_i64(t0); 1659} 1660 1661static void gen_xsxexpqp(DisasContext *ctx) 1662{ 1663 TCGv_i64 xth; 1664 TCGv_i64 xtl; 1665 TCGv_i64 xbh; 1666 1667 if (unlikely(!ctx->vsx_enabled)) { 1668 gen_exception(ctx, POWERPC_EXCP_VSXU); 1669 return; 1670 } 1671 xth = tcg_temp_new_i64(); 1672 xtl = tcg_temp_new_i64(); 1673 xbh = tcg_temp_new_i64(); 1674 get_cpu_vsr(xbh, rB(ctx->opcode) + 32, true); 1675 1676 tcg_gen_extract_i64(xth, xbh, 48, 15); 1677 set_cpu_vsr(rD(ctx->opcode) + 32, xth, true); 1678 tcg_gen_movi_i64(xtl, 0); 1679 set_cpu_vsr(rD(ctx->opcode) + 32, xtl, false); 1680 1681 tcg_temp_free_i64(xbh); 1682 tcg_temp_free_i64(xth); 1683 tcg_temp_free_i64(xtl); 1684} 1685 1686static void gen_xsiexpdp(DisasContext *ctx) 1687{ 1688 TCGv_i64 xth; 1689 TCGv ra = cpu_gpr[rA(ctx->opcode)]; 1690 TCGv rb = cpu_gpr[rB(ctx->opcode)]; 1691 TCGv_i64 t0; 1692 1693 if (unlikely(!ctx->vsx_enabled)) { 1694 gen_exception(ctx, POWERPC_EXCP_VSXU); 1695 return; 1696 } 1697 t0 = tcg_temp_new_i64(); 1698 xth = tcg_temp_new_i64(); 1699 tcg_gen_andi_i64(xth, ra, 0x800FFFFFFFFFFFFF); 1700 tcg_gen_andi_i64(t0, rb, 0x7FF); 1701 tcg_gen_shli_i64(t0, t0, 52); 1702 tcg_gen_or_i64(xth, xth, t0); 1703 set_cpu_vsr(xT(ctx->opcode), xth, true); 1704 /* dword[1] is undefined */ 1705 tcg_temp_free_i64(t0); 1706 tcg_temp_free_i64(xth); 1707} 1708 1709static void gen_xsiexpqp(DisasContext *ctx) 1710{ 1711 TCGv_i64 xth; 1712 TCGv_i64 xtl; 1713 TCGv_i64 xah; 1714 TCGv_i64 xal; 1715 TCGv_i64 xbh; 1716 TCGv_i64 t0; 1717 1718 if (unlikely(!ctx->vsx_enabled)) { 1719 gen_exception(ctx, POWERPC_EXCP_VSXU); 1720 return; 1721 } 1722 xth = tcg_temp_new_i64(); 1723 xtl = tcg_temp_new_i64(); 1724 xah = tcg_temp_new_i64(); 1725 xal = tcg_temp_new_i64(); 1726 get_cpu_vsr(xah, rA(ctx->opcode) + 32, true); 1727 get_cpu_vsr(xal, rA(ctx->opcode) + 32, false); 1728 xbh = tcg_temp_new_i64(); 1729 get_cpu_vsr(xbh, rB(ctx->opcode) + 32, true); 1730 t0 = tcg_temp_new_i64(); 1731 1732 tcg_gen_andi_i64(xth, xah, 0x8000FFFFFFFFFFFF); 1733 tcg_gen_andi_i64(t0, xbh, 0x7FFF); 1734 tcg_gen_shli_i64(t0, t0, 48); 1735 tcg_gen_or_i64(xth, xth, t0); 1736 set_cpu_vsr(rD(ctx->opcode) + 32, xth, true); 1737 tcg_gen_mov_i64(xtl, xal); 1738 set_cpu_vsr(rD(ctx->opcode) + 32, xtl, false); 1739 1740 tcg_temp_free_i64(t0); 1741 tcg_temp_free_i64(xth); 1742 tcg_temp_free_i64(xtl); 1743 tcg_temp_free_i64(xah); 1744 tcg_temp_free_i64(xal); 1745 tcg_temp_free_i64(xbh); 1746} 1747 1748static void gen_xsxsigdp(DisasContext *ctx) 1749{ 1750 TCGv rt = cpu_gpr[rD(ctx->opcode)]; 1751 TCGv_i64 t0, t1, zr, nan, exp; 1752 1753 if (unlikely(!ctx->vsx_enabled)) { 1754 gen_exception(ctx, POWERPC_EXCP_VSXU); 1755 return; 1756 } 1757 exp = tcg_temp_new_i64(); 1758 t0 = tcg_temp_new_i64(); 1759 t1 = tcg_temp_new_i64(); 1760 zr = tcg_const_i64(0); 1761 nan = tcg_const_i64(2047); 1762 1763 get_cpu_vsr(t1, xB(ctx->opcode), true); 1764 tcg_gen_extract_i64(exp, t1, 52, 11); 1765 tcg_gen_movi_i64(t0, 0x0010000000000000); 1766 tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0); 1767 tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0); 1768 get_cpu_vsr(t1, xB(ctx->opcode), true); 1769 tcg_gen_deposit_i64(rt, t0, t1, 0, 52); 1770 1771 tcg_temp_free_i64(t0); 1772 tcg_temp_free_i64(t1); 1773 tcg_temp_free_i64(exp); 1774 tcg_temp_free_i64(zr); 1775 tcg_temp_free_i64(nan); 1776} 1777 1778static void gen_xsxsigqp(DisasContext *ctx) 1779{ 1780 TCGv_i64 t0, zr, nan, exp; 1781 TCGv_i64 xth; 1782 TCGv_i64 xtl; 1783 TCGv_i64 xbh; 1784 TCGv_i64 xbl; 1785 1786 if (unlikely(!ctx->vsx_enabled)) { 1787 gen_exception(ctx, POWERPC_EXCP_VSXU); 1788 return; 1789 } 1790 xth = tcg_temp_new_i64(); 1791 xtl = tcg_temp_new_i64(); 1792 xbh = tcg_temp_new_i64(); 1793 xbl = tcg_temp_new_i64(); 1794 get_cpu_vsr(xbh, rB(ctx->opcode) + 32, true); 1795 get_cpu_vsr(xbl, rB(ctx->opcode) + 32, false); 1796 exp = tcg_temp_new_i64(); 1797 t0 = tcg_temp_new_i64(); 1798 zr = tcg_const_i64(0); 1799 nan = tcg_const_i64(32767); 1800 1801 tcg_gen_extract_i64(exp, xbh, 48, 15); 1802 tcg_gen_movi_i64(t0, 0x0001000000000000); 1803 tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0); 1804 tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0); 1805 tcg_gen_deposit_i64(xth, t0, xbh, 0, 48); 1806 set_cpu_vsr(rD(ctx->opcode) + 32, xth, true); 1807 tcg_gen_mov_i64(xtl, xbl); 1808 set_cpu_vsr(rD(ctx->opcode) + 32, xtl, false); 1809 1810 tcg_temp_free_i64(t0); 1811 tcg_temp_free_i64(exp); 1812 tcg_temp_free_i64(zr); 1813 tcg_temp_free_i64(nan); 1814 tcg_temp_free_i64(xth); 1815 tcg_temp_free_i64(xtl); 1816 tcg_temp_free_i64(xbh); 1817 tcg_temp_free_i64(xbl); 1818} 1819#endif 1820 1821static void gen_xviexpsp(DisasContext *ctx) 1822{ 1823 TCGv_i64 xth; 1824 TCGv_i64 xtl; 1825 TCGv_i64 xah; 1826 TCGv_i64 xal; 1827 TCGv_i64 xbh; 1828 TCGv_i64 xbl; 1829 TCGv_i64 t0; 1830 1831 if (unlikely(!ctx->vsx_enabled)) { 1832 gen_exception(ctx, POWERPC_EXCP_VSXU); 1833 return; 1834 } 1835 xth = tcg_temp_new_i64(); 1836 xtl = tcg_temp_new_i64(); 1837 xah = tcg_temp_new_i64(); 1838 xal = tcg_temp_new_i64(); 1839 xbh = tcg_temp_new_i64(); 1840 xbl = tcg_temp_new_i64(); 1841 get_cpu_vsr(xah, xA(ctx->opcode), true); 1842 get_cpu_vsr(xal, xA(ctx->opcode), false); 1843 get_cpu_vsr(xbh, xB(ctx->opcode), true); 1844 get_cpu_vsr(xbl, xB(ctx->opcode), false); 1845 t0 = tcg_temp_new_i64(); 1846 1847 tcg_gen_andi_i64(xth, xah, 0x807FFFFF807FFFFF); 1848 tcg_gen_andi_i64(t0, xbh, 0xFF000000FF); 1849 tcg_gen_shli_i64(t0, t0, 23); 1850 tcg_gen_or_i64(xth, xth, t0); 1851 set_cpu_vsr(xT(ctx->opcode), xth, true); 1852 tcg_gen_andi_i64(xtl, xal, 0x807FFFFF807FFFFF); 1853 tcg_gen_andi_i64(t0, xbl, 0xFF000000FF); 1854 tcg_gen_shli_i64(t0, t0, 23); 1855 tcg_gen_or_i64(xtl, xtl, t0); 1856 set_cpu_vsr(xT(ctx->opcode), xtl, false); 1857 1858 tcg_temp_free_i64(t0); 1859 tcg_temp_free_i64(xth); 1860 tcg_temp_free_i64(xtl); 1861 tcg_temp_free_i64(xah); 1862 tcg_temp_free_i64(xal); 1863 tcg_temp_free_i64(xbh); 1864 tcg_temp_free_i64(xbl); 1865} 1866 1867static void gen_xviexpdp(DisasContext *ctx) 1868{ 1869 TCGv_i64 xth; 1870 TCGv_i64 xtl; 1871 TCGv_i64 xah; 1872 TCGv_i64 xal; 1873 TCGv_i64 xbh; 1874 TCGv_i64 xbl; 1875 1876 if (unlikely(!ctx->vsx_enabled)) { 1877 gen_exception(ctx, POWERPC_EXCP_VSXU); 1878 return; 1879 } 1880 xth = tcg_temp_new_i64(); 1881 xtl = tcg_temp_new_i64(); 1882 xah = tcg_temp_new_i64(); 1883 xal = tcg_temp_new_i64(); 1884 xbh = tcg_temp_new_i64(); 1885 xbl = tcg_temp_new_i64(); 1886 get_cpu_vsr(xah, xA(ctx->opcode), true); 1887 get_cpu_vsr(xal, xA(ctx->opcode), false); 1888 get_cpu_vsr(xbh, xB(ctx->opcode), true); 1889 get_cpu_vsr(xbl, xB(ctx->opcode), false); 1890 1891 tcg_gen_deposit_i64(xth, xah, xbh, 52, 11); 1892 set_cpu_vsr(xT(ctx->opcode), xth, true); 1893 1894 tcg_gen_deposit_i64(xtl, xal, xbl, 52, 11); 1895 set_cpu_vsr(xT(ctx->opcode), xtl, false); 1896 1897 tcg_temp_free_i64(xth); 1898 tcg_temp_free_i64(xtl); 1899 tcg_temp_free_i64(xah); 1900 tcg_temp_free_i64(xal); 1901 tcg_temp_free_i64(xbh); 1902 tcg_temp_free_i64(xbl); 1903} 1904 1905static void gen_xvxexpsp(DisasContext *ctx) 1906{ 1907 TCGv_i64 xth; 1908 TCGv_i64 xtl; 1909 TCGv_i64 xbh; 1910 TCGv_i64 xbl; 1911 1912 if (unlikely(!ctx->vsx_enabled)) { 1913 gen_exception(ctx, POWERPC_EXCP_VSXU); 1914 return; 1915 } 1916 xth = tcg_temp_new_i64(); 1917 xtl = tcg_temp_new_i64(); 1918 xbh = tcg_temp_new_i64(); 1919 xbl = tcg_temp_new_i64(); 1920 get_cpu_vsr(xbh, xB(ctx->opcode), true); 1921 get_cpu_vsr(xbl, xB(ctx->opcode), false); 1922 1923 tcg_gen_shri_i64(xth, xbh, 23); 1924 tcg_gen_andi_i64(xth, xth, 0xFF000000FF); 1925 set_cpu_vsr(xT(ctx->opcode), xth, true); 1926 tcg_gen_shri_i64(xtl, xbl, 23); 1927 tcg_gen_andi_i64(xtl, xtl, 0xFF000000FF); 1928 set_cpu_vsr(xT(ctx->opcode), xtl, false); 1929 1930 tcg_temp_free_i64(xth); 1931 tcg_temp_free_i64(xtl); 1932 tcg_temp_free_i64(xbh); 1933 tcg_temp_free_i64(xbl); 1934} 1935 1936static void gen_xvxexpdp(DisasContext *ctx) 1937{ 1938 TCGv_i64 xth; 1939 TCGv_i64 xtl; 1940 TCGv_i64 xbh; 1941 TCGv_i64 xbl; 1942 1943 if (unlikely(!ctx->vsx_enabled)) { 1944 gen_exception(ctx, POWERPC_EXCP_VSXU); 1945 return; 1946 } 1947 xth = tcg_temp_new_i64(); 1948 xtl = tcg_temp_new_i64(); 1949 xbh = tcg_temp_new_i64(); 1950 xbl = tcg_temp_new_i64(); 1951 get_cpu_vsr(xbh, xB(ctx->opcode), true); 1952 get_cpu_vsr(xbl, xB(ctx->opcode), false); 1953 1954 tcg_gen_extract_i64(xth, xbh, 52, 11); 1955 set_cpu_vsr(xT(ctx->opcode), xth, true); 1956 tcg_gen_extract_i64(xtl, xbl, 52, 11); 1957 set_cpu_vsr(xT(ctx->opcode), xtl, false); 1958 1959 tcg_temp_free_i64(xth); 1960 tcg_temp_free_i64(xtl); 1961 tcg_temp_free_i64(xbh); 1962 tcg_temp_free_i64(xbl); 1963} 1964 1965GEN_VSX_HELPER_X2(xvxsigsp, 0x00, 0x04, 0, PPC2_ISA300) 1966 1967static void gen_xvxsigdp(DisasContext *ctx) 1968{ 1969 TCGv_i64 xth; 1970 TCGv_i64 xtl; 1971 TCGv_i64 xbh; 1972 TCGv_i64 xbl; 1973 TCGv_i64 t0, zr, nan, exp; 1974 1975 if (unlikely(!ctx->vsx_enabled)) { 1976 gen_exception(ctx, POWERPC_EXCP_VSXU); 1977 return; 1978 } 1979 xth = tcg_temp_new_i64(); 1980 xtl = tcg_temp_new_i64(); 1981 xbh = tcg_temp_new_i64(); 1982 xbl = tcg_temp_new_i64(); 1983 get_cpu_vsr(xbh, xB(ctx->opcode), true); 1984 get_cpu_vsr(xbl, xB(ctx->opcode), false); 1985 exp = tcg_temp_new_i64(); 1986 t0 = tcg_temp_new_i64(); 1987 zr = tcg_const_i64(0); 1988 nan = tcg_const_i64(2047); 1989 1990 tcg_gen_extract_i64(exp, xbh, 52, 11); 1991 tcg_gen_movi_i64(t0, 0x0010000000000000); 1992 tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0); 1993 tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0); 1994 tcg_gen_deposit_i64(xth, t0, xbh, 0, 52); 1995 set_cpu_vsr(xT(ctx->opcode), xth, true); 1996 1997 tcg_gen_extract_i64(exp, xbl, 52, 11); 1998 tcg_gen_movi_i64(t0, 0x0010000000000000); 1999 tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0); 2000 tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0); 2001 tcg_gen_deposit_i64(xtl, t0, xbl, 0, 52); 2002 set_cpu_vsr(xT(ctx->opcode), xtl, false); 2003 2004 tcg_temp_free_i64(t0); 2005 tcg_temp_free_i64(exp); 2006 tcg_temp_free_i64(zr); 2007 tcg_temp_free_i64(nan); 2008 tcg_temp_free_i64(xth); 2009 tcg_temp_free_i64(xtl); 2010 tcg_temp_free_i64(xbh); 2011 tcg_temp_free_i64(xbl); 2012} 2013 2014static bool do_lstxv(DisasContext *ctx, int ra, TCGv displ, 2015 int rt, bool store, bool paired) 2016{ 2017 TCGv ea; 2018 TCGv_i64 xt; 2019 MemOp mop; 2020 int rt1, rt2; 2021 2022 xt = tcg_temp_new_i64(); 2023 2024 mop = DEF_MEMOP(MO_Q); 2025 2026 gen_set_access_type(ctx, ACCESS_INT); 2027 ea = do_ea_calc(ctx, ra, displ); 2028 2029 if (paired && ctx->le_mode) { 2030 rt1 = rt + 1; 2031 rt2 = rt; 2032 } else { 2033 rt1 = rt; 2034 rt2 = rt + 1; 2035 } 2036 2037 if (store) { 2038 get_cpu_vsr(xt, rt1, !ctx->le_mode); 2039 tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop); 2040 gen_addr_add(ctx, ea, ea, 8); 2041 get_cpu_vsr(xt, rt1, ctx->le_mode); 2042 tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop); 2043 if (paired) { 2044 gen_addr_add(ctx, ea, ea, 8); 2045 get_cpu_vsr(xt, rt2, !ctx->le_mode); 2046 tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop); 2047 gen_addr_add(ctx, ea, ea, 8); 2048 get_cpu_vsr(xt, rt2, ctx->le_mode); 2049 tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop); 2050 } 2051 } else { 2052 tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop); 2053 set_cpu_vsr(rt1, xt, !ctx->le_mode); 2054 gen_addr_add(ctx, ea, ea, 8); 2055 tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop); 2056 set_cpu_vsr(rt1, xt, ctx->le_mode); 2057 if (paired) { 2058 gen_addr_add(ctx, ea, ea, 8); 2059 tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop); 2060 set_cpu_vsr(rt2, xt, !ctx->le_mode); 2061 gen_addr_add(ctx, ea, ea, 8); 2062 tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop); 2063 set_cpu_vsr(rt2, xt, ctx->le_mode); 2064 } 2065 } 2066 2067 tcg_temp_free(ea); 2068 tcg_temp_free_i64(xt); 2069 return true; 2070} 2071 2072static bool do_lstxv_D(DisasContext *ctx, arg_D *a, bool store, bool paired) 2073{ 2074 if (paired) { 2075 REQUIRE_INSNS_FLAGS2(ctx, ISA310); 2076 } else { 2077 REQUIRE_INSNS_FLAGS2(ctx, ISA300); 2078 } 2079 2080 if (paired || a->rt >= 32) { 2081 REQUIRE_VSX(ctx); 2082 } else { 2083 REQUIRE_VECTOR(ctx); 2084 } 2085 2086 return do_lstxv(ctx, a->ra, tcg_constant_tl(a->si), a->rt, store, paired); 2087} 2088 2089static bool do_lstxv_PLS_D(DisasContext *ctx, arg_PLS_D *a, 2090 bool store, bool paired) 2091{ 2092 arg_D d; 2093 REQUIRE_INSNS_FLAGS2(ctx, ISA310); 2094 REQUIRE_VSX(ctx); 2095 2096 if (!resolve_PLS_D(ctx, &d, a)) { 2097 return true; 2098 } 2099 2100 return do_lstxv(ctx, d.ra, tcg_constant_tl(d.si), d.rt, store, paired); 2101} 2102 2103static bool do_lstxv_X(DisasContext *ctx, arg_X *a, bool store, bool paired) 2104{ 2105 if (paired) { 2106 REQUIRE_INSNS_FLAGS2(ctx, ISA310); 2107 } else { 2108 REQUIRE_INSNS_FLAGS2(ctx, ISA300); 2109 } 2110 2111 if (paired || a->rt >= 32) { 2112 REQUIRE_VSX(ctx); 2113 } else { 2114 REQUIRE_VECTOR(ctx); 2115 } 2116 2117 return do_lstxv(ctx, a->ra, cpu_gpr[a->rb], a->rt, store, paired); 2118} 2119 2120TRANS(STXV, do_lstxv_D, true, false) 2121TRANS(LXV, do_lstxv_D, false, false) 2122TRANS(STXVP, do_lstxv_D, true, true) 2123TRANS(LXVP, do_lstxv_D, false, true) 2124TRANS(STXVX, do_lstxv_X, true, false) 2125TRANS(LXVX, do_lstxv_X, false, false) 2126TRANS(STXVPX, do_lstxv_X, true, true) 2127TRANS(LXVPX, do_lstxv_X, false, true) 2128TRANS64(PSTXV, do_lstxv_PLS_D, true, false) 2129TRANS64(PLXV, do_lstxv_PLS_D, false, false) 2130TRANS64(PSTXVP, do_lstxv_PLS_D, true, true) 2131TRANS64(PLXVP, do_lstxv_PLS_D, false, true) 2132 2133static void gen_xxblendv_vec(unsigned vece, TCGv_vec t, TCGv_vec a, TCGv_vec b, 2134 TCGv_vec c) 2135{ 2136 TCGv_vec tmp = tcg_temp_new_vec_matching(c); 2137 tcg_gen_sari_vec(vece, tmp, c, (8 << vece) - 1); 2138 tcg_gen_bitsel_vec(vece, t, tmp, b, a); 2139 tcg_temp_free_vec(tmp); 2140} 2141 2142static bool do_xxblendv(DisasContext *ctx, arg_XX4 *a, unsigned vece) 2143{ 2144 static const TCGOpcode vecop_list[] = { 2145 INDEX_op_sari_vec, 0 2146 }; 2147 static const GVecGen4 ops[4] = { 2148 { 2149 .fniv = gen_xxblendv_vec, 2150 .fno = gen_helper_XXBLENDVB, 2151 .opt_opc = vecop_list, 2152 .vece = MO_8 2153 }, 2154 { 2155 .fniv = gen_xxblendv_vec, 2156 .fno = gen_helper_XXBLENDVH, 2157 .opt_opc = vecop_list, 2158 .vece = MO_16 2159 }, 2160 { 2161 .fniv = gen_xxblendv_vec, 2162 .fno = gen_helper_XXBLENDVW, 2163 .opt_opc = vecop_list, 2164 .vece = MO_32 2165 }, 2166 { 2167 .fniv = gen_xxblendv_vec, 2168 .fno = gen_helper_XXBLENDVD, 2169 .opt_opc = vecop_list, 2170 .vece = MO_64 2171 } 2172 }; 2173 2174 REQUIRE_VSX(ctx); 2175 2176 tcg_gen_gvec_4(vsr_full_offset(a->xt), vsr_full_offset(a->xa), 2177 vsr_full_offset(a->xb), vsr_full_offset(a->xc), 2178 16, 16, &ops[vece]); 2179 2180 return true; 2181} 2182 2183TRANS(XXBLENDVB, do_xxblendv, MO_8) 2184TRANS(XXBLENDVH, do_xxblendv, MO_16) 2185TRANS(XXBLENDVW, do_xxblendv, MO_32) 2186TRANS(XXBLENDVD, do_xxblendv, MO_64) 2187 2188#undef GEN_XX2FORM 2189#undef GEN_XX3FORM 2190#undef GEN_XX2IFORM 2191#undef GEN_XX3_RC_FORM 2192#undef GEN_XX3FORM_DM 2193#undef VSX_LOGICAL 2194