/*
 * vxeh2_vcvt: vector-enhancements facility 2 vector convert
 */
#include <stdint.h>
#include "vx.h"

/* m4 field flags: S (single-element control) and XxC (IEEE-inexact-exception control) */
#define M_S 8
#define M4_XxC 4
#define M4_def M4_XxC

/* VECTOR FP CONVERT FROM FIXED: signed integer elements to BFP */
static inline void vcfps(S390Vector *v1, S390Vector *v2,
                         const uint8_t m3, const uint8_t m4, const uint8_t m5)
{
    asm volatile("vcfps %[v1], %[v2], %[m3], %[m4], %[m5]\n"
                : [v1] "=v" (v1->v)
                : [v2]  "v" (v2->v)
                , [m3]  "i" (m3)
                , [m4]  "i" (m4)
                , [m5]  "i" (m5));
}

/* VECTOR FP CONVERT FROM LOGICAL: unsigned integer elements to BFP */
static inline void vcfpl(S390Vector *v1, S390Vector *v2,
                         const uint8_t m3, const uint8_t m4, const uint8_t m5)
{
    asm volatile("vcfpl %[v1], %[v2], %[m3], %[m4], %[m5]\n"
                : [v1] "=v" (v1->v)
                : [v2]  "v" (v2->v)
                , [m3]  "i" (m3)
                , [m4]  "i" (m4)
                , [m5]  "i" (m5));
}

/* VECTOR FP CONVERT TO FIXED: BFP elements to signed integers */
static inline void vcsfp(S390Vector *v1, S390Vector *v2,
                         const uint8_t m3, const uint8_t m4, const uint8_t m5)
{
    asm volatile("vcsfp %[v1], %[v2], %[m3], %[m4], %[m5]\n"
                : [v1] "=v" (v1->v)
                : [v2]  "v" (v2->v)
                , [m3]  "i" (m3)
                , [m4]  "i" (m4)
                , [m5]  "i" (m5));
}

/* VECTOR FP CONVERT TO LOGICAL: BFP elements to unsigned integers */
static inline void vclfp(S390Vector *v1, S390Vector *v2,
                         const uint8_t m3, const uint8_t m4, const uint8_t m5)
{
    asm volatile("vclfp %[v1], %[v2], %[m3], %[m4], %[m5]\n"
                : [v1] "=v" (v1->v)
                : [v2]  "v" (v2->v)
                , [m3]  "i" (m3)
                , [m4]  "i" (m4)
                , [m5]  "i" (m5));
}

int main(int argc, char *argv[])
{
    S390Vector vd;
    S390Vector vs_i32 = { .w[0] = 1, .w[1] = 64, .w[2] = 1024, .w[3] = -10 };
    S390Vector vs_u32 = { .w[0] = 2, .w[1] = 32, .w[2] = 4096, .w[3] = 8888 };
    S390Vector vs_f32 = { .f[0] = 3.987, .f[1] = 5.123,
                          .f[2] = 4.499, .f[3] = 0.512 };

    /* signed 32-bit integers to short BFP (m3 = 2 selects 32-bit elements) */
    vd.d[0] = vd.d[1] = 0;
    vcfps(&vd, &vs_i32, 2, M4_def, 0);
    if (1 != vd.f[0] || 1024 != vd.f[2] || 64 != vd.f[1] || -10 != vd.f[3]) {
        return 1;
    }

    /* unsigned 32-bit integers to short BFP */
    vd.d[0] = vd.d[1] = 0;
    vcfpl(&vd, &vs_u32, 2, M4_def, 0);
    if (2 != vd.f[0] || 4096 != vd.f[2] || 32 != vd.f[1] || 8888 != vd.f[3]) {
        return 1;
    }

    /* short BFP to signed 32-bit integers */
    vd.d[0] = vd.d[1] = 0;
    vcsfp(&vd, &vs_f32, 2, M4_def, 0);
    if (4 != vd.w[0] || 4 != vd.w[2] || 5 != vd.w[1] || 1 != vd.w[3]) {
        return 1;
    }

    /* short BFP to unsigned 32-bit integers */
    vd.d[0] = vd.d[1] = 0;
    vclfp(&vd, &vs_f32, 2, M4_def, 0);
    if (4 != vd.w[0] || 4 != vd.w[2] || 5 != vd.w[1] || 1 != vd.w[3]) {
        return 1;
    }

    return 0;
}