Lines Matching refs:int64_t

151 GEN_VEXT_LD_ELEM(lde_d, int64_t, H8, ldq)
164 GEN_VEXT_ST_ELEM(ste_d, int64_t, H8, stq)
232 GEN_VEXT_LD_STRIDE(vlse64_v, int64_t, lde_d)
247 GEN_VEXT_ST_STRIDE(vsse64_v, int64_t, ste_d)
302 GEN_VEXT_LD_US(vle64_v, int64_t, lde_d)
323 GEN_VEXT_ST_US(vse64_v, int64_t, ste_d)
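The generators above are evidently QEMU's RISC-V vector (RVV) helpers; every name here matches target/riscv/vector_helper.c. As an orientation aid, the following self-contained C sketch models what a GEN_VEXT_LD_STRIDE expansion such as vlse64_v does once QEMU's CPURISCVState, masking, and trap plumbing are stripped away; lde_d is the per-element loader that GEN_VEXT_LD_ELEM(lde_d, int64_t, H8, ldq) generates. All names ending in _model are hypothetical.

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    /* In the role of lde_d: copy one int64_t element from memory
     * into slot idx of the destination vector register. */
    static void lde_d_model(const uint8_t *addr, uint32_t idx, int64_t *vd)
    {
        memcpy(&vd[idx], addr, sizeof(int64_t));
    }

    /* In the role of vlse64_v: element i comes from base + i * stride. */
    static void vlse64_v_model(int64_t *vd, const uint8_t *base,
                               ptrdiff_t stride, uint32_t vl)
    {
        for (uint32_t i = 0; i < vl; i++) {
            lde_d_model(base + i * stride, i, vd);
        }
    }

The unit-stride forms (vle64_v/vse64_v) are the special case stride == sizeof(element); the store generators mirror the loads, with ste_d writing an element instead of reading one.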
410 GEN_VEXT_LD_INDEX(vlxei8_64_v, int64_t, idx_b, lde_d)
414 GEN_VEXT_LD_INDEX(vlxei16_64_v, int64_t, idx_h, lde_d)
418 GEN_VEXT_LD_INDEX(vlxei32_64_v, int64_t, idx_w, lde_d)
422 GEN_VEXT_LD_INDEX(vlxei64_64_v, int64_t, idx_d, lde_d)
436 GEN_VEXT_ST_INDEX(vsxei8_64_v, int64_t, idx_b, ste_d)
440 GEN_VEXT_ST_INDEX(vsxei16_64_v, int64_t, idx_h, ste_d)
444 GEN_VEXT_ST_INDEX(vsxei32_64_v, int64_t, idx_w, ste_d)
448 GEN_VEXT_ST_INDEX(vsxei64_64_v, int64_t, idx_d, ste_d)
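The indexed (gather/scatter) variants differ from the strided ones only in where the address offset comes from: vs2 supplies one unsigned byte offset per element, and the idx_b/idx_h/idx_w/idx_d argument selects how wide that index element is, while the data element stays 64-bit. A hedged sketch of the 16-bit-index load (the _model suffix again marks it as hypothetical):

    #include <stdint.h>
    #include <string.h>

    /* In the role of vlxei16_64_v: 64-bit data elements, 16-bit
     * byte-offset indexes read from vs2. */
    static void vlxei16_64_v_model(int64_t *vd, const uint8_t *base,
                                   const uint16_t *vs2, uint32_t vl)
    {
        for (uint32_t i = 0; i < vl; i++) {
            memcpy(&vd[i], base + vs2[i], sizeof(int64_t));
        }
    }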
540 GEN_VEXT_LDFF(vle64ff_v, int64_t, lde_d)
599 GEN_VEXT_LD_WHOLE(vl1re64_v, int64_t, lde_d)
603 GEN_VEXT_LD_WHOLE(vl2re64_v, int64_t, lde_d)
607 GEN_VEXT_LD_WHOLE(vl4re64_v, int64_t, lde_d)
611 GEN_VEXT_LD_WHOLE(vl8re64_v, int64_t, lde_d)
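vle64ff_v is the fault-only-first load: it behaves like vle64_v except that a fault on any element after the first truncates vl instead of trapping, which is why it needs its own generator. The whole-register loads (vl1re64_v .. vl8re64_v) ignore vl entirely and move 1, 2, 4, or 8 full vector registers. A minimal sketch, with VLENB (the register width in bytes) as an assumed build-time constant:

    #include <stdint.h>
    #include <string.h>

    #define VLENB 16   /* assumption for this sketch; runtime-configured in QEMU */

    /* In the role of vl2re64_v: copy exactly two whole vector
     * registers, independent of vl. */
    static void vl2re64_v_model(void *vd, const uint8_t *base)
    {
        memcpy(vd, base, 2 * VLENB);
    }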
634 #define OP_SSS_D int64_t, int64_t, int64_t, int64_t, int64_t
638 #define OP_SUS_D int64_t, uint64_t, int64_t, uint64_t, int64_t
641 #define WOP_SSS_W int64_t, int32_t, int32_t, int64_t, int64_t
644 #define WOP_SUS_W int64_t, uint32_t, int32_t, uint64_t, int64_t
647 #define WOP_SSU_W int64_t, int32_t, uint32_t, int64_t, uint64_t
650 #define NOP_SSS_W int32_t, int32_t, int64_t, int32_t, int64_t
749 #define WOP_SSS_W int64_t, int32_t, int32_t, int64_t, int64_t
755 #define WOP_WSSS_W int64_t, int32_t, int64_t, int64_t, int64_t
1098 GEN_VEXT_SHIFT_VV(vsra_vv_d, uint64_t, int64_t, H8, H8, DO_SRL, 0x3f)
1135 GEN_VEXT_SHIFT_VX(vsll_vx_d, uint64_t, int64_t, H8, H8, DO_SLL, 0x3f)
1145 GEN_VEXT_SHIFT_VX(vsra_vx_d, int64_t, int64_t, H8, H8, DO_SRL, 0x3f)
1153 GEN_VEXT_SHIFT_VV(vnsra_wv_w, uint32_t, int64_t, H4, H8, DO_SRL, 0x3f)
1159 GEN_VEXT_SHIFT_VX(vnsra_wx_w, int32_t, int64_t, H4, H8, DO_SRL, 0x3f)
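A detail worth noting in the shift generators: vsra reuses DO_SRL, a plain C ">>". It still produces an arithmetic shift because the type of the shifted operand (the second type argument, int64_t) is signed, while vsll/vsrl pass unsigned types. The 0x3f mask clamps the shift amount to 0..63, and the narrowing shifts (vnsra_wv_w, H4/H8) shift a 64-bit source and store a 32-bit result. A two-line illustration (QEMU, like this sketch, relies on the in-practice-universal arithmetic behaviour of ">>" on signed values):

    #include <stdint.h>

    static int64_t vsra_elem(int64_t s2, uint64_t s1)
    {
        return s2 >> (s1 & 0x3f);   /* signed type => arithmetic shift */
    }

    static uint64_t vsrl_elem(uint64_t s2, uint64_t s1)
    {
        return s2 >> (s1 & 0x3f);   /* unsigned type => logical shift */
    }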
1221 GEN_VEXT_CMP_VV(vmslt_vv_d, int64_t, H8, DO_MSLT)
1231 GEN_VEXT_CMP_VV(vmsle_vv_d, int64_t, H8, DO_MSLE)
1286 GEN_VEXT_CMP_VX(vmslt_vx_d, int64_t, H8, DO_MSLT)
1296 GEN_VEXT_CMP_VX(vmsle_vx_d, int64_t, H8, DO_MSLE)
1306 GEN_VEXT_CMP_VX(vmsgt_vx_d, int64_t, H8, DO_MSGT)
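The compare generators produce mask results: each element comparison yields a single bit in the destination, not a full-width element, and DO_MSLT/DO_MSLE/DO_MSGT are the obvious predicates. A bit-packed stand-in for QEMU's mask helpers (the layout here is an assumption for illustration):

    #include <stdint.h>

    static void set_mask_bit(uint8_t *vd, uint32_t i, int val)
    {
        uint8_t bit = (uint8_t)(1u << (i % 8));
        vd[i / 8] = val ? (vd[i / 8] | bit) : (vd[i / 8] & (uint8_t)~bit);
    }

    /* In the role of vmslt_vv_d: one result bit per int64_t pair. */
    static void vmslt_vv_d_model(uint8_t *vd, const int64_t *vs1,
                                 const int64_t *vs2, uint32_t vl)
    {
        for (uint32_t i = 0; i < vl; i++) {
            set_mask_bit(vd, i, vs2[i] < vs1[i]);   /* DO_MSLT(s2, s1) */
        }
    }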
1398 return (int64_t)s2 * (int64_t)s1 >> 32; in do_mulh_w()
1401 static int64_t do_mulh_d(int64_t s2, int64_t s1) in do_mulh_d()
1444 return (int64_t)s2 * (uint64_t)s1 >> 32; in do_mulhsu_w()
1466 static int64_t do_mulhsu_d(int64_t s2, uint64_t s1) in do_mulhsu_d()
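do_mulh_w (line 1398) shows the easy high-half multiply: widen both 32-bit operands to 64 bits, multiply, keep bits 63..32; do_mulhsu_w (line 1444) sign-extends one side and zero-extends the other. The 64-bit versions need a 128-bit product, which QEMU composes portably from 64-bit halves; where the GCC/Clang __int128 extension is acceptable, the same functions collapse to a few lines:

    #include <stdint.h>

    /* Sketches of do_mulh_d / do_mulhsu_d using __int128 (a compiler
     * extension; QEMU's own helpers build the product from 64-bit parts). */
    static int64_t do_mulh_d_sketch(int64_t s2, int64_t s1)
    {
        return (int64_t)(((__int128)s2 * s1) >> 64);
    }

    static int64_t do_mulhsu_d_sketch(int64_t s2, uint64_t s1)
    {
        /* Sign-extend s2, zero-extend s1; the low 128 bits of the
         * product are the same in signed and unsigned arithmetic. */
        unsigned __int128 p = (unsigned __int128)(__int128)s2 * s1;
        return (int64_t)(uint64_t)(p >> 64);
    }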
1803 GEN_VEXT_VMV_VV(vmv_v_v_d, int64_t, H8)
1826 GEN_VEXT_VMV_VX(vmv_v_x_d, int64_t, H8)
1850 GEN_VEXT_VMERGE_VV(vmerge_vvm_d, int64_t, H8)
1876 GEN_VEXT_VMERGE_VX(vmerge_vxm_d, int64_t, H8)
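vmv_v_v/vmv_v_x are straight element copies and scalar broadcasts; vmerge selects between the two sources under the v0 mask, and the float variant vfmerge_vfm_d (line 4224) has the same shape with a scalar FP operand. A hypothetical model of the masked select:

    #include <stdint.h>

    static int get_mask_bit(const uint8_t *v0, uint32_t i)
    {
        return (v0[i / 8] >> (i % 8)) & 1;
    }

    /* In the role of vmerge_vvm_d: vd[i] = v0[i] ? vs1[i] : vs2[i]. */
    static void vmerge_vvm_d_model(int64_t *vd, const uint8_t *v0,
                                   const int64_t *vs1, const int64_t *vs2,
                                   uint32_t vl)
    {
        for (uint32_t i = 0; i < vl; i++) {
            vd[i] = get_mask_bit(v0, i) ? vs1[i] : vs2[i];
        }
    }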
2128 static inline int64_t sadd64(CPURISCVState *env, int vxrm, int64_t a, in sadd64()
2129 int64_t b) in sadd64()
2131 int64_t res = a + b; in sadd64()
2251 static inline int64_t ssub64(CPURISCVState *env, int vxrm, int64_t a, in ssub64()
2252 int64_t b) in ssub64()
2254 int64_t res = a - b; in ssub64()
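Lines 2128-2131 and 2251-2254 are the 64-bit saturating add/subtract used by vsadd/vssub. The overflow test is the classic sign idiom: a signed add overflowed iff the result's sign differs from both operands' signs. A self-contained sketch of sadd64 with env->vxsat replaced by an out-parameter (the wrapping add goes through uint64_t to stay clear of C's signed-overflow UB):

    #include <stdbool.h>
    #include <stdint.h>

    static int64_t sadd64_sketch(int64_t a, int64_t b, bool *sat)
    {
        int64_t res = (int64_t)((uint64_t)a + (uint64_t)b);

        if ((res ^ a) & (res ^ b) & INT64_MIN) {   /* signed overflow */
            res = a < 0 ? INT64_MIN : INT64_MAX;   /* clamp toward a's sign */
            *sat = true;                           /* sticky vxsat flag */
        }
        return res;
    }

ssub64 is identical except the overflow test becomes (res ^ a) & (a ^ b), since subtraction can only overflow when the operands' signs differ.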
2311 int64_t res = (int64_t)a + b; in aadd32()
2317 static inline int64_t aadd64(CPURISCVState *env, int vxrm, int64_t a, in aadd64()
2318 int64_t b) in aadd64()
2320 int64_t res = a + b; in aadd64()
2322 int64_t over = (res ^ a) & (res ^ b) & INT64_MIN; in aadd64()
2386 int64_t res = (int64_t)a - b;
2392 static inline int64_t asub64(CPURISCVState *env, int vxrm, int64_t a, in asub64()
2393 int64_t b) in asub64()
2395 int64_t res = (int64_t)a - b; in asub64()
2397 int64_t over = (res ^ a) & (a ^ b) & INT64_MIN; in asub64()
2424 int64_t res = (int64_t)a - b;
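aadd/asub are the averaging ops: (a +/- b) >> 1 with a rounding increment chosen by the vxrm CSR, and no saturation, because halving brings the value back into range. The subtlety is the `over` term on lines 2322/2397: if the add overflowed, the true bit 64 of the sum is the inverse of result bit 63, so XOR-ing `over` into the shifted value repairs the top bit. A self-contained sketch, with a reduced model of QEMU's get_round specialized to a one-bit shift:

    #include <stdint.h>

    /* Rounding increment for discarding the low bit of v, for the four
     * RVV fixed-point rounding modes. */
    static uint8_t round1(int vxrm, uint64_t v)
    {
        uint64_t d  = (v >> 1) & 1;   /* LSB of the shifted result */
        uint64_t d1 = v & 1;          /* the bit being discarded */

        switch (vxrm) {
        case 0:  return (uint8_t)d1;         /* rnu: nearest, ties up */
        case 1:  return (uint8_t)(d1 & d);   /* rne: nearest, ties to even */
        case 2:  return 0;                   /* rdn: truncate */
        default: return (uint8_t)(!d & d1);  /* rod: round to odd ("jam") */
        }
    }

    /* In the role of aadd64 (lines 2317-2322). */
    static int64_t aadd64_sketch(int vxrm, int64_t a, int64_t b)
    {
        int64_t res = (int64_t)((uint64_t)a + (uint64_t)b);  /* wrapping add */
        uint8_t round = round1(vxrm, (uint64_t)res);
        int64_t over = (res ^ a) & (res ^ b) & INT64_MIN;

        /* with signed overflow, true bit 64 = ~(result bit 63) */
        return ((res >> 1) ^ over) + round;
    }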
2502 int64_t res; in vsmul32()
2504 res = (int64_t)a * (int64_t)b; in vsmul32()
2519 static int64_t vsmul64(CPURISCVState *env, int vxrm, int64_t a, int64_t b) in vsmul64()
2523 int64_t res; in vsmul64()
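vsmul is the RVV fixed-point multiply: the full product is shifted down by SEW-1 fractional bits with vxrm rounding, and the single case that can overflow, a == b == the type's minimum, saturates and sets vxsat. For 32-bit elements the product fits in the int64_t visible on line 2504; vsmul64 (line 2519) is where a 128-bit intermediate is needed again. A sketch of the 32-bit case, with rounding fixed to round-to-nearest-up for brevity (QEMU consults get_round(vxrm, res, 31)):

    #include <stdbool.h>
    #include <stdint.h>

    static int32_t vsmul32_sketch(int32_t a, int32_t b, bool *sat)
    {
        int64_t res = (int64_t)a * (int64_t)b;   /* exact 32x32 product */
        int64_t round = (res >> 30) & 1;         /* top discarded bit */

        res = (res >> 31) + round;               /* drop 31 fraction bits */
        if (res > INT32_MAX) {                   /* only INT32_MIN * INT32_MIN */
            *sat = true;
            return INT32_MAX;
        }
        return (int32_t)res;
    }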
2642 static inline int64_t
2643 vssra64(CPURISCVState *env, int vxrm, int64_t a, int64_t b) in vssra64()
2709 vnclip32(CPURISCVState *env, int vxrm, int64_t a, int32_t b) in vnclip32()
2712 int64_t res; in vnclip32()
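vssra (line 2643) is a scaling shift: arithmetic right shift by b & 0x3f with vxrm rounding, and no saturation, since shifting right within the same width cannot overflow. vnclip (line 2709) does the same rounded shift but narrows, here from an int64_t source to int32_t, clamping and flagging vxsat when the value does not fit. A sketch of vnclip32, again with round-to-nearest-up standing in for the full rounding-mode switch:

    #include <stdbool.h>
    #include <stdint.h>

    static int32_t vnclip32_sketch(int64_t a, int32_t b, bool *sat)
    {
        int shift = b & 0x3f;
        int64_t round = shift ? (a >> (shift - 1)) & 1 : 0;
        int64_t res = (a >> shift) + round;

        if (res > INT32_MAX) {
            *sat = true;
            return INT32_MAX;
        }
        if (res < INT32_MIN) {
            *sat = true;
            return INT32_MIN;
        }
        return (int32_t)res;
    }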
4224 GEN_VFMERGE_VF(vfmerge_vfm_d, int64_t, H8)
4395 GEN_VEXT_RED(vredsum_vs_d, int64_t, int64_t, H8, H8, DO_ADD)
4407 GEN_VEXT_RED(vredmax_vs_d, int64_t, int64_t, H8, H8, DO_MAX)
4419 GEN_VEXT_RED(vredmin_vs_d, int64_t, int64_t, H8, H8, DO_MIN)
4425 GEN_VEXT_RED(vredand_vs_d, int64_t, int64_t, H8, H8, DO_AND)
4431 GEN_VEXT_RED(vredor_vs_d, int64_t, int64_t, H8, H8, DO_OR)
4437 GEN_VEXT_RED(vredxor_vs_d, int64_t, int64_t, H8, H8, DO_XOR)
4443 GEN_VEXT_RED(vwredsum_vs_w, int64_t, int32_t, H8, H4, DO_ADD)
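The reduction generators fold a whole vector into element 0 of the destination: the accumulator starts from vs1[0], every element of vs2 is combined into it with the given op, and only vd[0] is written. vwredsum_vs_w (line 4443) is the widening form, which is why its type pair is int64_t/int32_t and its index pair H8/H4. A hypothetical model of the plain sum:

    #include <stdint.h>

    /* In the role of vredsum_vs_d. */
    static void vredsum_vs_d_model(int64_t *vd, const int64_t *vs1,
                                   const int64_t *vs2, uint32_t vl)
    {
        int64_t s1 = vs1[0];
        for (uint32_t i = 0; i < vl; i++) {
            s1 += vs2[i];           /* DO_ADD; DO_MAX etc. swap in here */
        }
        vd[0] = s1;
    }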
5108 GEN_VEXT_INT_EXT(vsext_vf2_d, int64_t, int32_t, H8, H4)
5110 GEN_VEXT_INT_EXT(vsext_vf4_d, int64_t, int16_t, H8, H2)
5111 GEN_VEXT_INT_EXT(vsext_vf8_d, int64_t, int8_t, H8, H1)
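Finally, the integer extension generators: GEN_VEXT_INT_EXT(vsext_vf2_d, int64_t, int32_t, H8, H4) reads int32_t elements and stores them as int64_t, with the vf4/vf8 variants starting from int16_t and int8_t. Sign extension needs no explicit code in C; it falls out of the assignment between signed types, which is essentially the whole body of the generated loop:

    #include <stdint.h>

    /* In the role of vsext_vf2_d: each int32_t source element is
     * sign-extended to int64_t by the implicit conversion. */
    static void vsext_vf2_d_model(int64_t *vd, const int32_t *vs2, uint32_t vl)
    {
        for (uint32_t i = 0; i < vl; i++) {
            vd[i] = vs2[i];
        }
    }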