Lines matching refs: vsm (the Linux kernel's VFP single-precision emulation, arch/arm/vfp/vfpsingle.c)
216 struct vfp_single *vsm, u32 fpscr) in vfp_propagate_nan() argument
223 if (vsm) in vfp_propagate_nan()
224 tm = vfp_single_type(vsm); in vfp_propagate_nan()
240 nan = vsm; in vfp_propagate_nan()
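
The vfp_propagate_nan() hits above pick which operand's NaN survives a
two-operand operation. Below is a minimal sketch of the idea, using plain
IEEE 754 single-precision fields rather than the kernel's left-aligned
internal significand; struct sp, nan_type(), and propagate() are
illustrative names, not the kernel API, and the selection priority is
simplified.

    #include <stdint.h>
    #include <stdio.h>

    enum { NONE, QNAN, SNAN };

    struct sp { uint32_t sign, exponent, fraction; }; /* 1/8/23-bit fields */

    /* Exponent all-ones plus a nonzero fraction is a NaN; the top fraction
     * bit distinguishes quiet (set) from signaling (clear). */
    static int nan_type(const struct sp *v)
    {
        if (v->exponent != 255 || v->fraction == 0)
            return NONE;
        return (v->fraction & 0x400000) ? QNAN : SNAN;
    }

    /* Callers guarantee at least one operand is a NaN.  A signaling NaN
     * takes priority and flags invalid-operation; the survivor is quieted. */
    static int propagate(struct sp *d, const struct sp *n, const struct sp *m)
    {
        int tn = nan_type(n), tm = m ? nan_type(m) : NONE;
        const struct sp *nan = n;

        if (tn == NONE || (tn != SNAN && tm == SNAN))
            nan = m;

        *d = *nan;
        d->fraction |= 0x400000;         /* set the quiet bit */
        return tn == SNAN || tm == SNAN;
    }

    int main(void)
    {
        struct sp snan = { 0, 255, 0x000001 }, one = { 0, 127, 0 }, out;

        printf("invalid=%d quiet=%d\n",
               propagate(&out, &snan, &one),
               !!(out.fraction & 0x400000)); /* invalid=1 quiet=1 */
        return 0;
    }
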
318 struct vfp_single vsm, vsd; in vfp_single_fsqrt() local
321 vfp_single_unpack(&vsm, m); in vfp_single_fsqrt()
322 tm = vfp_single_type(&vsm); in vfp_single_fsqrt()
327 ret = vfp_propagate_nan(vsp, &vsm, NULL, fpscr); in vfp_single_fsqrt()
328 else if (vsm.sign == 0) { in vfp_single_fsqrt()
330 vsp = &vsm; in vfp_single_fsqrt()
351 vfp_single_normalise_denormal(&vsm); in vfp_single_fsqrt()
356 if (vsm.sign) in vfp_single_fsqrt()
359 vfp_single_dump("sqrt", &vsm); in vfp_single_fsqrt()
365 vsd.exponent = ((vsm.exponent - 127) >> 1) + 127; in vfp_single_fsqrt()
366 vsd.significand = vfp_estimate_sqrt_significand(vsm.exponent, vsm.significand) + 2; in vfp_single_fsqrt()
379 vsm.significand <<= !(vsm.exponent & 1); in vfp_single_fsqrt()
381 rem = ((u64)vsm.significand << 32) - term; in vfp_single_fsqrt()
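
Lines 365 and 379 above carry the core of the square-root setup: halve the
unbiased exponent, and when it is odd fold the spare factor of two into the
significand. A standalone sketch of just that arithmetic, under the
listing's convention of a significand left-aligned at bit 30 (values are
made up for illustration):

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        uint32_t exponent = 130;            /* biased; unbiased = +3 (odd) */
        uint32_t significand = 0x40000000;  /* 1.0, implicit bit at bit 30 */

        /* Odd unbiased exponent (even biased value, since the bias 127 is
         * odd): double the significand, because
         * sqrt(m * 2^(2k+1)) = sqrt(2m) * 2^k. */
        significand <<= !(exponent & 1);

        /* Halve the unbiased exponent (arithmetic shift floors toward
         * minus infinity) and re-bias. */
        uint32_t sqrt_exponent = (((int32_t)exponent - 127) >> 1) + 127;

        printf("exp %u sig %#x\n", sqrt_exponent, significand);
        /* exp 128 sig 0x80000000, i.e. sqrt(8) = sqrt(2) * 2^1 */
        return 0;
    }
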
484 struct vfp_single vsm; in vfp_single_fcvtd() local
489 vfp_single_unpack(&vsm, m); in vfp_single_fcvtd()
491 tm = vfp_single_type(&vsm); in vfp_single_fcvtd()
500 vfp_single_normalise_denormal(&vsm); in vfp_single_fcvtd()
502 vdd.sign = vsm.sign; in vfp_single_fcvtd()
503 vdd.significand = (u64)vsm.significand << 32; in vfp_single_fcvtd()
516 vdd.exponent = vsm.exponent + (1023 - 127); in vfp_single_fcvtd()
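
The fcvtd lines show that widening a single to a double only needs the
fields re-carried: the 32-bit significand moves to the top of a 64-bit
field and the exponent is re-biased from 127 to 1023. A minimal sketch
with an illustrative value:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        uint32_t s_exponent = 130;           /* single: 1.5 * 2^3 = 12.0 */
        uint32_t s_significand = 0x60000000; /* 1.5, implicit bit at bit 30 */

        /* Same value in double-precision fields: widen and re-bias. */
        uint64_t d_significand = (uint64_t)s_significand << 32;
        uint32_t d_exponent = s_exponent + (1023 - 127);

        printf("exp %u sig %#llx\n", d_exponent,
               (unsigned long long)d_significand);
        /* exp 1026 sig 0x6000000000000000 */
        return 0;
    }
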
549 struct vfp_single vsm; in vfp_single_ftoui() local
554 vfp_single_unpack(&vsm, m); in vfp_single_ftoui()
555 vfp_single_dump("VSM", &vsm); in vfp_single_ftoui()
560 tm = vfp_single_type(&vsm); in vfp_single_ftoui()
565 vsm.sign = 0; in vfp_single_ftoui()
567 if (vsm.exponent >= 127 + 32) { in vfp_single_ftoui()
568 d = vsm.sign ? 0 : 0xffffffff; in vfp_single_ftoui()
570 } else if (vsm.exponent >= 127 - 1) { in vfp_single_ftoui()
571 int shift = 127 + 31 - vsm.exponent; in vfp_single_ftoui()
577 d = (vsm.significand << 1) >> shift; in vfp_single_ftoui()
578 rem = vsm.significand << (33 - shift); in vfp_single_ftoui()
586 } else if ((rmode == FPSCR_ROUND_PLUSINF) ^ (vsm.sign != 0)) { in vfp_single_ftoui()
597 if (d && vsm.sign) { in vfp_single_ftoui()
604 if (vsm.exponent | vsm.significand) { in vfp_single_ftoui()
606 if (rmode == FPSCR_ROUND_PLUSINF && vsm.sign == 0) in vfp_single_ftoui()
608 else if (rmode == FPSCR_ROUND_MINUSINF && vsm.sign) { in vfp_single_ftoui()
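
The mid-range ftoui path (lines 570-578 above) shifts the significand down
to integer weight, keeps the shifted-out bits as a 32-bit remainder, and
rounds from that remainder. A sketch of the round-to-nearest case, reusing
the listing's shift arithmetic; the carry test on rem + incr mirrors how
the kernel detects a round-up:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        uint32_t exponent = 129;            /* value = 1.375 * 2^2 = 5.5 */
        uint32_t significand = 0x58000000;  /* 1.375, implicit bit at bit 30 */

        int shift = 127 + 31 - exponent;            /* 29 */
        uint32_t d = (significand << 1) >> shift;   /* integer part: 5 */
        uint32_t rem = significand << (33 - shift); /* fraction, top-aligned */

        /* Nearest: round up on at least half an ULP, but for an even
         * result only on strictly more than half (ties-to-even). */
        uint32_t incr = 0x80000000;
        if ((d & 1) == 0)
            incr -= 1;
        if (rem + incr < rem)               /* unsigned carry = round up */
            d += 1;

        printf("%u\n", d);                  /* 6 */
        return 0;
    }
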
629 struct vfp_single vsm; in vfp_single_ftosi() local
634 vfp_single_unpack(&vsm, m); in vfp_single_ftosi()
635 vfp_single_dump("VSM", &vsm); in vfp_single_ftosi()
640 tm = vfp_single_type(&vsm); in vfp_single_ftosi()
641 if (vfp_single_type(&vsm) & VFP_DENORMAL) in vfp_single_ftosi()
647 } else if (vsm.exponent >= 127 + 32) { in vfp_single_ftosi()
652 if (vsm.sign) in vfp_single_ftosi()
655 } else if (vsm.exponent >= 127 - 1) { in vfp_single_ftosi()
656 int shift = 127 + 31 - vsm.exponent; in vfp_single_ftosi()
660 d = (vsm.significand << 1) >> shift; in vfp_single_ftosi()
661 rem = vsm.significand << (33 - shift); in vfp_single_ftosi()
669 } else if ((rmode == FPSCR_ROUND_PLUSINF) ^ (vsm.sign != 0)) { in vfp_single_ftosi()
675 if (d > 0x7fffffff + (vsm.sign != 0)) { in vfp_single_ftosi()
676 d = 0x7fffffff + (vsm.sign != 0); in vfp_single_ftosi()
681 if (vsm.sign) in vfp_single_ftosi()
685 if (vsm.exponent | vsm.significand) { in vfp_single_ftosi()
687 if (rmode == FPSCR_ROUND_PLUSINF && vsm.sign == 0) in vfp_single_ftosi()
689 else if (rmode == FPSCR_ROUND_MINUSINF && vsm.sign) in vfp_single_ftosi()
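
ftosi differs from ftoui mainly in its clamp: line 675 exploits the
asymmetric s32 range, allowing a magnitude of 2^31-1 when positive but
2^31 when negative. A sketch of that saturation step; saturate_signed()
and the invalid flag are illustrative stand-ins for the kernel's inline
code and FPSCR_IOC:

    #include <stdint.h>
    #include <stdio.h>

    /* Clamp an unsigned magnitude into the signed 32-bit range, then apply
     * the sign in two's complement, as the kernel's inline code does. */
    static uint32_t saturate_signed(uint32_t d, int sign, int *invalid)
    {
        uint32_t limit = 0x7fffffffu + (sign != 0);

        if (d > limit) {
            d = limit;
            *invalid = 1;      /* the kernel raises FPSCR_IOC here */
        }
        if (sign)
            d = -d;            /* well-defined unsigned negate */
        return d;              /* caller reinterprets the bits as s32 */
    }

    int main(void)
    {
        int invalid = 0;
        uint32_t d = saturate_signed(0x90000000u, 1, &invalid);

        printf("%d invalid=%d\n", (int32_t)d, invalid);
        /* -2147483648 invalid=1 */
        return 0;
    }
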
730 struct vfp_single *vsm, u32 fpscr) in vfp_single_fadd_nonnumber() argument
737 tm = vfp_single_type(vsm); in vfp_single_fadd_nonnumber()
743 if (vsn->sign ^ vsm->sign) { in vfp_single_fadd_nonnumber()
764 return vfp_propagate_nan(vsd, vsn, vsm, fpscr); in vfp_single_fadd_nonnumber()
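
The fadd_nonnumber hits handle the operands the arithmetic path cannot:
line 743's sign test is the inf + (-inf) case, which has no meaningful sum
and must signal invalid, while NaN operands fall through to
vfp_propagate_nan() at line 764. A toy sketch of the infinity branch;
struct sf and INVALID_OP are illustrative only:

    #include <stdio.h>

    #define INVALID_OP 1  /* stand-in for the kernel's FPSCR_IOC flag */

    struct sf { int sign; int is_inf; };

    /* Adding infinities of opposite sign is undefined: produce a default
     * quiet NaN and flag invalid.  Same-signed infinities pass through. */
    static int add_infinities(struct sf *d, const struct sf *n,
                              const struct sf *m)
    {
        if (n->sign ^ m->sign) {
            d->sign = 0;
            d->is_inf = 0;     /* result becomes the default quiet NaN */
            return INVALID_OP;
        }
        *d = *n;
        return 0;
    }

    int main(void)
    {
        struct sf pinf = { 0, 1 }, ninf = { 1, 1 }, out;

        printf("exceptions=%d\n", add_infinities(&out, &pinf, &ninf)); /* 1 */
        return 0;
    }
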
772 struct vfp_single *vsm, u32 fpscr) in vfp_single_add() argument
777 vsm->significand & 0x80000000) { in vfp_single_add()
780 vfp_single_dump("VSM", vsm); in vfp_single_add()
788 if (vsn->exponent < vsm->exponent) { in vfp_single_add()
790 vsn = vsm; in vfp_single_add()
791 vsm = t; in vfp_single_add()
799 return vfp_single_fadd_nonnumber(vsd, vsn, vsm, fpscr); in vfp_single_add()
811 exp_diff = vsn->exponent - vsm->exponent; in vfp_single_add()
812 m_sig = vfp_shiftright32jamming(vsm->significand, exp_diff); in vfp_single_add()
817 if (vsn->sign ^ vsm->sign) { in vfp_single_add()
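
The add path first orders the operands by exponent (lines 788-791), then
aligns the smaller one at line 812 with a "jamming" shift: bits that fall
off the bottom are ORed back into bit 0 as a sticky marker, so later
rounding still sees that the result is inexact. A sketch of that helper,
modelled on the kernel's vfp_shiftright32jamming():

    #include <stdint.h>
    #include <stdio.h>

    /* Shift right, keeping any shifted-out bits alive as a sticky low bit. */
    static uint32_t shiftright32jamming(uint32_t val, int shift)
    {
        if (shift) {
            if (shift < 32)
                val = (val >> shift) | ((val << (32 - shift)) != 0);
            else
                val = val != 0; /* everything shifted out: sticky only */
        }
        return val;
    }

    int main(void)
    {
        /* A plain >> 4 would silently drop the low 1 bit. */
        printf("%#x\n", shiftright32jamming(0x80000001u, 4)); /* 0x8000001 */
        return 0;
    }
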
835 vfp_single_multiply(struct vfp_single *vsd, struct vfp_single *vsn, struct vfp_single *vsm, u32 fpscr) in vfp_single_multiply() argument

838 vfp_single_dump("VSM", vsm); in vfp_single_multiply()
845 if (vsn->exponent < vsm->exponent) { in vfp_single_multiply()
847 vsn = vsm; in vfp_single_multiply()
848 vsm = t; in vfp_single_multiply()
852 vsd->sign = vsn->sign ^ vsm->sign; in vfp_single_multiply()
858 if (vsn->significand || (vsm->exponent == 255 && vsm->significand)) in vfp_single_multiply()
859 return vfp_propagate_nan(vsd, vsn, vsm, fpscr); in vfp_single_multiply()
860 if ((vsm->exponent | vsm->significand) == 0) { in vfp_single_multiply()
873 if ((vsm->exponent | vsm->significand) == 0) { in vfp_single_multiply()
884 vsd->exponent = vsn->exponent + vsm->exponent - 127 + 2; in vfp_single_multiply()
885 vsd->significand = vfp_hi64to32jamming((u64)vsn->significand * vsm->significand); in vfp_single_multiply()
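
The multiply lines show both halves of the product: line 884 sums the
unbiased exponents (the "+ 2" restores the fixed-point alignment lost by
keeping only the high 32 bits of a product of two bit-30-aligned
significands), and line 885 folds the product's low word into a sticky
bit. A sketch of the pair; hi64to32jamming() mirrors the kernel's
vfp_hi64to32jamming() helper:

    #include <stdint.h>
    #include <stdio.h>

    /* High word of a 64-bit product, low word jammed into a sticky bit. */
    static uint32_t hi64to32jamming(uint64_t v)
    {
        return (uint32_t)(v >> 32) | ((uint32_t)v != 0);
    }

    int main(void)
    {
        uint32_t n_sig = 0x40000000, m_sig = 0x60000000; /* 1.0 and 1.5 */
        uint32_t n_exp = 127, m_exp = 127;               /* both 2^0 */

        uint32_t d_exp = n_exp + m_exp - 127 + 2;
        uint32_t d_sig = hi64to32jamming((uint64_t)n_sig * m_sig);

        /* The later normalise/round step shifts d_sig back up to bit 30,
         * giving 1.5 * 2^0 as expected. */
        printf("exp %u sig %#x\n", d_exp, d_sig); /* exp 129 sig 0x18000000 */
        return 0;
    }
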
897 struct vfp_single vsd, vsp, vsn, vsm; in vfp_single_multiply_accumulate() local
907 vfp_single_unpack(&vsm, m); in vfp_single_multiply_accumulate()
908 if (vsm.exponent == 0 && vsm.significand) in vfp_single_multiply_accumulate()
909 vfp_single_normalise_denormal(&vsm); in vfp_single_multiply_accumulate()
911 exceptions = vfp_single_multiply(&vsp, &vsn, &vsm, fpscr); in vfp_single_multiply_accumulate()
969 struct vfp_single vsd, vsn, vsm; in vfp_single_fmul() local
979 vfp_single_unpack(&vsm, m); in vfp_single_fmul()
980 if (vsm.exponent == 0 && vsm.significand) in vfp_single_fmul()
981 vfp_single_normalise_denormal(&vsm); in vfp_single_fmul()
983 exceptions = vfp_single_multiply(&vsd, &vsn, &vsm, fpscr); in vfp_single_fmul()
992 struct vfp_single vsd, vsn, vsm; in vfp_single_fnmul() local
1002 vfp_single_unpack(&vsm, m); in vfp_single_fnmul()
1003 if (vsm.exponent == 0 && vsm.significand) in vfp_single_fnmul()
1004 vfp_single_normalise_denormal(&vsm); in vfp_single_fnmul()
1006 exceptions = vfp_single_multiply(&vsd, &vsn, &vsm, fpscr); in vfp_single_fnmul()
1016 struct vfp_single vsd, vsn, vsm; in vfp_single_fadd() local
1029 vfp_single_unpack(&vsm, m); in vfp_single_fadd()
1030 if (vsm.exponent == 0 && vsm.significand) in vfp_single_fadd()
1031 vfp_single_normalise_denormal(&vsm); in vfp_single_fadd()
1033 exceptions = vfp_single_add(&vsd, &vsn, &vsm, fpscr); in vfp_single_fadd()
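
The multiply-accumulate, fmul, fnmul, and fadd entries above all share the
same prologue: an operand that unpacks with a zero exponent but a nonzero
significand is a denormal and is normalised before the arithmetic proper.
A sketch of what that normalisation amounts to; struct vs is a simplified
stand-in for the kernel's struct vfp_single:

    #include <stdint.h>
    #include <stdio.h>

    struct vs { uint32_t sign; int32_t exponent; uint32_t significand; };

    /* Shift the significand up until its leading 1 sits in the implicit-bit
     * position (bit 30), decrementing the exponent once per shift. */
    static void normalise_denormal(struct vs *v)
    {
        while (!(v->significand & 0x40000000)) {
            v->significand <<= 1;
            v->exponent--;
        }
    }

    int main(void)
    {
        struct vs v = { 0, 0, 0x00100000 }; /* as unpacked from a denormal */

        if (v.exponent == 0 && v.significand) /* the shared prologue test */
            normalise_denormal(&v);

        printf("exp %d sig %#x\n", v.exponent, v.significand);
        /* exp -10 sig 0x40000000 */
        return 0;
    }
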
1054 struct vfp_single vsd, vsn, vsm; in vfp_single_fdiv() local
1062 vfp_single_unpack(&vsm, m); in vfp_single_fdiv()
1064 vsd.sign = vsn.sign ^ vsm.sign; in vfp_single_fdiv()
1067 tm = vfp_single_type(&vsm); in vfp_single_fdiv()
1109 vfp_single_normalise_denormal(&vsm); in vfp_single_fdiv()
1114 vsd.exponent = vsn.exponent - vsm.exponent + 127 - 1; in vfp_single_fdiv()
1115 vsm.significand <<= 1; in vfp_single_fdiv()
1116 if (vsm.significand <= (2 * vsn.significand)) { in vfp_single_fdiv()
1122 do_div(significand, vsm.significand); in vfp_single_fdiv()
1126 vsd.significand |= ((u64)vsm.significand * vsd.significand != (u64)vsn.significand << 32); in vfp_single_fdiv()
1131 exceptions = vfp_propagate_nan(&vsd, &vsn, &vsm, fpscr); in vfp_single_fdiv()
1137 exceptions = vfp_propagate_nan(&vsd, &vsm, &vsn, fpscr); in vfp_single_fdiv()
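
The fdiv lines assemble the quotient in three moves: line 1114 re-biases
the exponent difference, lines 1115-1116 pre-scale so the quotient falls
in [1, 2), and line 1126 sets a sticky bit whenever multiplying the
quotient back fails to reproduce the dividend, i.e. the division was
inexact. A sketch of that sequence, with plain C division standing in for
do_div():

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        uint32_t n_sig = 0x60000000, m_sig = 0x40000000; /* 1.5 / 1.0 */
        int32_t n_exp = 127, m_exp = 127;

        int32_t d_exp = n_exp - m_exp + 127 - 1;
        m_sig <<= 1;
        if (m_sig <= 2 * n_sig) {   /* quotient would reach 2: rescale */
            n_sig >>= 1;
            d_exp++;
        }

        uint64_t q = ((uint64_t)n_sig << 32) / m_sig;
        uint32_t d_sig = (uint32_t)q;

        /* Sticky bit: exact only if quotient * divisor == dividend. */
        if ((d_sig & 0x3f) == 0)
            d_sig |= ((uint64_t)m_sig * d_sig != (uint64_t)n_sig << 32);

        printf("exp %d sig %#x\n", d_exp, d_sig); /* exp 127 sig 0x60000000 */
        return 0;
    }
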