Lines Matching +full:0 +full:b00000000
150 TCGv_i64 mask = tcg_constant_i64(0x00FF00FF00FF00FF);
544 tcg_gen_movi_i64(t0, 0);
582 #define SGN_MASK_DP 0x8000000000000000ull
583 #define SGN_MASK_SP 0x8000000080000000ull
584 #define EXP_MASK_DP 0x7FF0000000000000ull
585 #define EXP_MASK_SP 0x7F8000007F800000ull
624 set_cpu_vsr(xT(ctx->opcode), tcg_constant_i64(0), false); \
720 0
769 0
971 set_cpu_vsr(xT(ctx->opcode), tcg_constant_i64(0), false); \
974 GEN_VSX_HELPER_R3(xsaddqp, 0x04, 0x00, 0, PPC2_ISA300)
975 GEN_VSX_HELPER_R3(xsmulqp, 0x04, 0x01, 0, PPC2_ISA300)
976 GEN_VSX_HELPER_R3(xsdivqp, 0x04, 0x11, 0, PPC2_ISA300)
977 GEN_VSX_HELPER_X2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
978 GEN_VSX_HELPER_X2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
979 GEN_VSX_HELPER_X2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
980 GEN_VSX_HELPER_X2_AB(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
981 GEN_VSX_HELPER_X1(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
982 GEN_VSX_HELPER_X2_AB(xscmpexpdp, 0x0C, 0x07, 0, PPC2_ISA300)
983 GEN_VSX_HELPER_R2_AB(xscmpexpqp, 0x04, 0x05, 0, PPC2_ISA300)
984 GEN_VSX_HELPER_X2_AB(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
985 GEN_VSX_HELPER_X2_AB(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
986 GEN_VSX_HELPER_R2_AB(xscmpoqp, 0x04, 0x04, 0, PPC2_VSX)
987 GEN_VSX_HELPER_R2_AB(xscmpuqp, 0x04, 0x14, 0, PPC2_VSX)
988 GEN_VSX_HELPER_X2(xscvdphp, 0x16, 0x15, 0x11, PPC2_ISA300)
989 GEN_VSX_HELPER_X2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
990 GEN_VSX_HELPER_R2(xscvdpqp, 0x04, 0x1A, 0x16, PPC2_ISA300)
991 GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
992 GEN_VSX_HELPER_R2(xscvqpsdz, 0x04, 0x1A, 0x19, PPC2_ISA300)
993 GEN_VSX_HELPER_R2(xscvqpswz, 0x04, 0x1A, 0x09, PPC2_ISA300)
994 GEN_VSX_HELPER_R2(xscvqpudz, 0x04, 0x1A, 0x11, PPC2_ISA300)
995 GEN_VSX_HELPER_R2(xscvqpuwz, 0x04, 0x1A, 0x01, PPC2_ISA300)
996 GEN_VSX_HELPER_X2(xscvhpdp, 0x16, 0x15, 0x10, PPC2_ISA300)
997 GEN_VSX_HELPER_R2(xscvsdqp, 0x04, 0x1A, 0x0A, PPC2_ISA300)
998 GEN_VSX_HELPER_X2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
1027 /* test if +0 */
1031 tcg_constant_vec_matching(t, vece, 0));
1034 /* test if -0 */
1042 /* test if +0 or -0 */
1048 tcg_constant_vec_matching(t, vece, 0));
1059 tcg_constant_vec_matching(t, vece, 0));
1086 tcg_constant_vec_matching(t, vece, 0));
1103 INDEX_op_cmp_vec, 0
1115 case 0:
1116 set_cpu_vsr(a->xt, tcg_constant_i64(0), true);
1117 set_cpu_vsr(a->xt, tcg_constant_i64(0), false);
1119 case ((1 << 0) | (1 << 1)):
1123 case (1 << 0):
1132 /* test if +0 or -0 */
1136 /* test if -0 */
1140 /* test if +0 */
1197 set_cpu_vsr(a->xt, tcg_constant_i64(0), false);
1201 GEN_VSX_HELPER_X2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
1202 GEN_VSX_HELPER_X2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
1203 GEN_VSX_HELPER_X2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
1204 GEN_VSX_HELPER_X2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
1205 GEN_VSX_HELPER_X2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
1206 GEN_VSX_HELPER_R2(xscvudqp, 0x04, 0x1A, 0x02, PPC2_ISA300)
1207 GEN_VSX_HELPER_X2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
1208 GEN_VSX_HELPER_X2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
1209 GEN_VSX_HELPER_X2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
1210 GEN_VSX_HELPER_X2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
1211 GEN_VSX_HELPER_X2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
1212 GEN_VSX_HELPER_X2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
1213 GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)
1214 GEN_VSX_HELPER_R2(xsrqpi, 0x05, 0x00, 0, PPC2_ISA300)
1215 GEN_VSX_HELPER_R2(xsrqpxp, 0x05, 0x01, 0, PPC2_ISA300)
1216 GEN_VSX_HELPER_R2(xssqrtqp, 0x04, 0x19, 0x1B, PPC2_ISA300)
1217 GEN_VSX_HELPER_R3(xssubqp, 0x04, 0x10, 0, PPC2_ISA300)
1218 GEN_VSX_HELPER_X2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
1219 GEN_VSX_HELPER_X2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
1220 GEN_VSX_HELPER_X2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
1221 GEN_VSX_HELPER_X2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
1222 GEN_VSX_HELPER_X2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)
1224 GEN_VSX_HELPER_X2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
1225 GEN_VSX_HELPER_X2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
1226 GEN_VSX_HELPER_X2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
1227 GEN_VSX_HELPER_X2_AB(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
1228 GEN_VSX_HELPER_X1(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
1229 GEN_VSX_HELPER_X2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
1230 GEN_VSX_HELPER_X2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
1231 GEN_VSX_HELPER_X2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
1232 GEN_VSX_HELPER_X2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
1233 GEN_VSX_HELPER_X2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
1234 GEN_VSX_HELPER_X2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
1235 GEN_VSX_HELPER_X2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
1236 GEN_VSX_HELPER_X2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
1237 GEN_VSX_HELPER_X2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
1238 GEN_VSX_HELPER_X2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
1239 GEN_VSX_HELPER_X2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
1240 GEN_VSX_HELPER_X2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
1241 GEN_VSX_HELPER_X2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
1242 GEN_VSX_HELPER_X2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)
1244 GEN_VSX_HELPER_X2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
1245 GEN_VSX_HELPER_X2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
1246 GEN_VSX_HELPER_X2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
1247 GEN_VSX_HELPER_X2_AB(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
1248 GEN_VSX_HELPER_X1(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
1249 GEN_VSX_HELPER_X2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
1250 GEN_VSX_HELPER_X2(xvcvhpsp, 0x16, 0x1D, 0x18, PPC2_ISA300)
1251 GEN_VSX_HELPER_X2(xvcvsphp, 0x16, 0x1D, 0x19, PPC2_ISA300)
1252 GEN_VSX_HELPER_X2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
1253 GEN_VSX_HELPER_X2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
1254 GEN_VSX_HELPER_X2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
1255 GEN_VSX_HELPER_X2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
1256 GEN_VSX_HELPER_X2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
1257 GEN_VSX_HELPER_X2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
1258 GEN_VSX_HELPER_X2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
1259 GEN_VSX_HELPER_X2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
1260 GEN_VSX_HELPER_X2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
1261 GEN_VSX_HELPER_X2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
1262 GEN_VSX_HELPER_X2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
1263 GEN_VSX_HELPER_X2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
1264 GEN_VSX_HELPER_X2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)
1308 get_cpu_vsr(t0, a->xa, (a->dm & 2) == 0);
1309 get_cpu_vsr(t1, a->xb, (a->dm & 1) == 0);
1314 get_cpu_vsr(t0, a->xa, (a->dm & 2) == 0);
1317 get_cpu_vsr(t0, a->xb, (a->dm & 1) == 0);
1349 if (a->imm & ~0x3) {
1472 GEN_VSX_HELPER_VSX_MADD(xvmadddp, 0x04, 0x0C, 0x0D, 0, PPC2_VSX)
1473 GEN_VSX_HELPER_VSX_MADD(xvmsubdp, 0x04, 0x0E, 0x0F, 0, PPC2_VSX)
1474 GEN_VSX_HELPER_VSX_MADD(xvnmadddp, 0x04, 0x1C, 0x1D, 0, PPC2_VSX)
1475 GEN_VSX_HELPER_VSX_MADD(xvnmsubdp, 0x04, 0x1E, 0x1F, 0, PPC2_VSX)
1476 GEN_VSX_HELPER_VSX_MADD(xvmaddsp, 0x04, 0x08, 0x09, 0, PPC2_VSX)
1477 GEN_VSX_HELPER_VSX_MADD(xvmsubsp, 0x04, 0x0A, 0x0B, 0, PPC2_VSX)
1478 GEN_VSX_HELPER_VSX_MADD(xvnmaddsp, 0x04, 0x18, 0x19, 0, PPC2_VSX)
1479 GEN_VSX_HELPER_VSX_MADD(xvnmsubsp, 0x04, 0x1A, 0x1B, 0, PPC2_VSX)
1623 VSX_XXMRG(xxmrglw, 0)
1653 #define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))
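An aside on the pattern() macro matched above: since ~(uint64_t)0 / 0xff equals 0x0101010101010101, multiplying by the low byte of x replicates that byte into every byte lane of a 64-bit value. A minimal stand-alone check (illustrative only, not part of the QEMU source):

#include <assert.h>
#include <stdint.h>

#define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))

int main(void)
{
    /* 0xA5 replicated into all eight byte lanes */
    assert(pattern(0xA5) == 0xA5A5A5A5A5A5A5A5ULL);
    return 0;
}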
1696 offsetof(CPUPPCState, vsr[a->xt].VsrW(0 + a->ix)));
1706 0, /* Unspecified */
1707 0x3FFF000000000000llu, /* QP +1.0 */
1708 0x4000000000000000llu, /* QP +2.0 */
1709 0x4000800000000000llu, /* QP +3.0 */
1710 0x4001000000000000llu, /* QP +4.0 */
1711 0x4001400000000000llu, /* QP +5.0 */
1712 0x4001800000000000llu, /* QP +6.0 */
1713 0x4001C00000000000llu, /* QP +7.0 */
1714 0x7FFF000000000000llu, /* QP +Inf */
1715 0x7FFF800000000000llu, /* QP dQNaN */
1716 0, /* Unspecified */
1717 0, /* Unspecified */
1718 0, /* Unspecified */
1719 0, /* Unspecified */
1720 0, /* Unspecified */
1721 0, /* Unspecified */
1722 0x8000000000000000llu, /* QP -0.0 */
1723 0xBFFF000000000000llu, /* QP -1.0 */
1724 0xC000000000000000llu, /* QP -2.0 */
1725 0xC000800000000000llu, /* QP -3.0 */
1726 0xC001000000000000llu, /* QP -4.0 */
1727 0xC001400000000000llu, /* QP -5.0 */
1728 0xC001800000000000llu, /* QP -6.0 */
1729 0xC001C00000000000llu, /* QP -7.0 */
1730 0xFFFF000000000000llu, /* QP -Inf */
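As a sanity check on the quad-precision constants listed above (assuming the IEEE 754 binary128 layout: 1 sign bit, a 15-bit exponent with bias 16383, 112 fraction bits, and a zero low doubleword for these table entries), the high doubleword can be decoded with the following sketch; it handles normal numbers only and is not QEMU code:

#include <math.h>
#include <stdint.h>
#include <stdio.h>

/* Decode the high doubleword of a binary128 value whose low doubleword
 * is zero; normal numbers only (no zero/Inf/NaN handling). */
static double decode_qp_high(uint64_t hi)
{
    int sign = hi >> 63;
    int exp  = (hi >> 48) & 0x7FFF;             /* biased exponent */
    uint64_t frac = hi & 0x0000FFFFFFFFFFFFULL; /* top 48 fraction bits */
    double mant = 1.0 + (double)frac / (double)(1ULL << 48);
    return (sign ? -mant : mant) * exp2(exp - 16383);
}

int main(void)
{
    printf("%g\n", decode_qp_high(0x3FFF000000000000ULL)); /* +1 */
    printf("%g\n", decode_qp_high(0x4001C00000000000ULL)); /* +7 */
    printf("%g\n", decode_qp_high(0xC001400000000000ULL)); /* -5 */
    return 0;
}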
1737 set_cpu_vsr(a->xt, tcg_constant_i64(0x0), false);
1759 zero = tcg_constant_i64(0);
1790 case 0: {
1837 TCGv_i64 zero = tcg_constant_i64(0);
1892 tcg_gen_movi_i64(xtl, 0);
1909 tcg_gen_andi_i64(xth, ra, 0x800FFFFFFFFFFFFF);
1910 tcg_gen_andi_i64(t0, rb, 0x7FF);
1914 set_cpu_vsr(xT(ctx->opcode), tcg_constant_i64(0), false);
1940 tcg_gen_andi_i64(xth, xah, 0x8000FFFFFFFFFFFF);
1941 tcg_gen_andi_i64(t0, xbh, 0x7FFF);
1961 zr = tcg_constant_i64(0);
1966 tcg_gen_movi_i64(t0, 0x0010000000000000);
1970 tcg_gen_deposit_i64(rt, t0, t1, 0, 52);
1993 zr = tcg_constant_i64(0);
1997 tcg_gen_movi_i64(t0, 0x0001000000000000);
2000 tcg_gen_deposit_i64(xth, t0, xbh, 0, 48);
2033 tcg_gen_andi_i64(xth, xah, 0x807FFFFF807FFFFF);
2034 tcg_gen_andi_i64(t0, xbh, 0xFF000000FF);
2038 tcg_gen_andi_i64(xtl, xal, 0x807FFFFF807FFFFF);
2039 tcg_gen_andi_i64(t0, xbl, 0xFF000000FF);
2095 tcg_gen_andi_i64(xth, xth, 0xFF000000FF);
2098 tcg_gen_andi_i64(xtl, xtl, 0xFF000000FF);
2160 zr = tcg_constant_i64(0);
2164 tcg_gen_movi_i64(t0, 0x0010000000000000);
2167 tcg_gen_deposit_i64(xth, t0, xbh, 0, 52);
2171 tcg_gen_movi_i64(t0, 0x0010000000000000);
2174 tcg_gen_deposit_i64(xtl, t0, xbl, 0, 52);
2280 set_cpu_vsr(rt + 32, tcg_constant_i64(0), false);
2319 set_cpu_vsr(rt + 32, tcg_constant_i64(0), false);
2379 set_cpu_vsr(a->rt, tcg_constant_i64(0), true);
2405 tcg_gen_movi_i64(disj, 0);
2414 if (bit & 0x4) {
2419 if (bit & 0x2) {
2424 if (bit & 0x1) {
2450 tcg_gen_dupi_vec(vece, disj, 0);
2459 if (bit & 0x4) {
2464 if (bit & 0x2) {
2469 if (bit & 0x1) {
2486 INDEX_op_andc_vec, 0
2503 case 0b00000000: /* false */
2504 set_cpu_vsr(a->xt, tcg_constant_i64(0), true);
2505 set_cpu_vsr(a->xt, tcg_constant_i64(0), false);
2507 case 0b00000011: /* and(B,A) */
2510 case 0b00000101: /* and(C,A) */
2513 case 0b00001111: /* A */
2516 case 0b00010001: /* and(C,B) */
2519 case 0b00011011: /* C?B:A */
2522 case 0b00011101: /* B?C:A */
2525 case 0b00100111: /* C?A:B */
2528 case 0b00110011: /* B */
2531 case 0b00110101: /* A?C:B */
2534 case 0b00111100: /* xor(B,A) */
2537 case 0b00111111: /* or(B,A) */
2540 case 0b01000111: /* B?A:C */
2543 case 0b01010011: /* A?B:C */
2546 case 0b01010101: /* C */
2549 case 0b01011010: /* xor(C,A) */
2552 case 0b01011111: /* or(C,A) */
2555 case 0b01100110: /* xor(C,B) */
2558 case 0b01110111: /* or(C,B) */
2561 case 0b10001000: /* nor(C,B) */
2564 case 0b10011001: /* eqv(C,B) */
2567 case 0b10100000: /* nor(C,A) */
2570 case 0b10100101: /* eqv(C,A) */
2573 case 0b10101010: /* not(C) */
2576 case 0b11000000: /* nor(B,A) */
2579 case 0b11000011: /* eqv(B,A) */
2582 case 0b11001100: /* not(B) */
2585 case 0b11101110: /* nand(C,B) */
2588 case 0b11110000: /* not(A) */
2591 case 0b11111010: /* nand(C,A) */
2594 case 0b11111100: /* nand(B,A) */
2597 case 0b11111111: /* true */
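The 8-bit case labels above are truth-table immediates: from the listed special cases (0b00001111 = A, 0b00110011 = B, 0b01010101 = C, 0b00000011 = and(B,A)), the result bit for operand bits (a, b, c) appears to be bit (7 - (4a + 2b + c)) of the immediate. A hypothetical helper expressing that inferred mapping (illustrative only, not part of the translator):

/* Evaluate one bit of an xxeval-style 8-bit truth table, assuming the
 * convention derived from the cases above: inputs (a, b, c) select bit
 * (7 - (a*4 + b*2 + c)) of imm8. E.g. imm8 = 0b00000011 gives a & b. */
static inline int xxeval_bit(int imm8, int a, int b, int c)
{
    return (imm8 >> (7 - ((a << 2) | (b << 1) | c))) & 1;
}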
2620 INDEX_op_sari_vec, 0
2792 tcg_gen_gvec_dup_imm(MO_64, acc_full_offset(a->ra), 64, 64, 0);