/* SPDX-License-Identifier: GPL-2.0 */
/*---------------------------------------------------------------------------+
 |  reg_norm.S                                                               |
 |                                                                           |
 | Copyright (C) 1992,1993,1994,1995,1997                                    |
 |                       W. Metzenthen, 22 Parker St, Ormond, Vic 3163,      |
 |                       Australia.  E-mail billm@suburbia.net               |
 |                                                                           |
 | Normalize the value in a FPU_REG.                                         |
 |                                                                           |
 | Call from C as:                                                           |
 |    int FPU_normalize(FPU_REG *n)                                          |
 |                                                                           |
 |    int FPU_normalize_nuo(FPU_REG *n)                                      |
 |                                                                           |
 |    Return value is the tag of the answer, or-ed with FPU_Exception if     |
 |    one was raised, or -1 on internal error.                               |
 |                                                                           |
 +---------------------------------------------------------------------------*/
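
/*
 * For orientation, a rough C-style sketch of the logic implemented below.
 * It is illustrative only, not the implementation; the FPU_REG field names
 * (sigl, sigh, exp) and the TAG_*, EXP_* and EXTENDED_Ebias constants are
 * assumed to match their definitions in fpu_emu.h:
 *
 *	int FPU_normalize_sketch(FPU_REG *n)
 *	{
 *		u64 sig = ((u64)n->sigh << 32) | n->sigl;
 *
 *		if (sig == 0) {
 *			n->exp = 0;
 *			return TAG_Zero;
 *		}
 *		while (!(sig & (1ULL << 63))) {
 *			sig <<= 1;		// bring the MSB up to bit 63
 *			n->exp--;
 *		}
 *		n->sigh = sig >> 32;
 *		n->sigl = (u32)sig;
 *		if (n->exp >= EXP_OVER) {
 *			n->exp += EXTENDED_Ebias;
 *			return arith_overflow(n);
 *		}
 *		if (n->exp <= EXP_UNDER) {
 *			n->exp += EXTENDED_Ebias;
 *			return arith_underflow(n);
 *		}
 *		n->exp = (n->exp + EXTENDED_Ebias) & 0x7fff;
 *		return TAG_Valid;
 *	}
 */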

#include "fpu_emu.h"


.text
SYM_FUNC_START(FPU_normalize)
	pushl	%ebp
	movl	%esp,%ebp
	pushl	%ebx

	movl	PARAM1,%ebx

	movl	SIGH(%ebx),%edx
	movl	SIGL(%ebx),%eax

	orl	%edx,%edx	/* ms bits */
	js	L_done		/* Already normalized */
	jnz	L_shift_1	/* Shift left 1 - 31 bits */

	orl	%eax,%eax
	jz	L_zero		/* The contents are zero */

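/*
 * The high word is zero but the significand is not: promote the low word
 * to the high word and absorb the implicit 32-bit shift into the exponent.
 */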
	movl	%eax,%edx
	xorl	%eax,%eax
	subw	$32,EXP(%ebx)	/* This can cause an underflow */

/* We need to shift left by 1 - 31 bits */
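/*
 * bsrl yields the bit index (0..31) of the most significant set bit of
 * %edx; 31 minus that index is the left-shift count needed to bring it
 * up to bit 31.  shld/shl then shift the 64-bit %edx:%eax pair by that
 * count, and the exponent is reduced by the same amount.
 */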
L_shift_1:
	bsrl	%edx,%ecx	/* get the required shift in %ecx */
	subl	$31,%ecx
	negl	%ecx
	shld	%cl,%eax,%edx
	shl	%cl,%eax
	subw	%cx,EXP(%ebx)	/* This can cause an underflow */

	movl	%edx,SIGH(%ebx)
	movl	%eax,SIGL(%ebx)

L_done:
	cmpw	EXP_OVER,EXP(%ebx)
	jge	L_overflow

	cmpw	EXP_UNDER,EXP(%ebx)
	jle	L_underflow

L_exit_valid:
	movl	TAG_Valid,%eax

	/* Convert the exponent to 80x87 form. */
	addw	EXTENDED_Ebias,EXP(%ebx)
	andw	$0x7fff,EXP(%ebx)

L_exit:
	popl	%ebx
	leave
	ret


L_zero:
	movw	$0,EXP(%ebx)
	movl	TAG_Zero,%eax
	jmp	L_exit

L_underflow:
	/* Convert the exponent to 80x87 form. */
	addw	EXTENDED_Ebias,EXP(%ebx)
	push	%ebx
	call	arith_underflow
	pop	%ebx
	jmp	L_exit

L_overflow:
	/* Convert the exponent to 80x87 form. */
	addw	EXTENDED_Ebias,EXP(%ebx)
	push	%ebx
	call	arith_overflow
	pop	%ebx
	jmp	L_exit
SYM_FUNC_END(FPU_normalize)



/* Normalise without reporting underflow or overflow */
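/*
 * Same normalization as FPU_normalize above, but the exponent is left in
 * the internal (unbiased) form and no EXP_OVER/EXP_UNDER checks are made,
 * so arith_overflow/arith_underflow are never called from here.
 */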
SYM_FUNC_START(FPU_normalize_nuo)
	pushl	%ebp
	movl	%esp,%ebp
	pushl	%ebx

	movl	PARAM1,%ebx

	movl	SIGH(%ebx),%edx
	movl	SIGL(%ebx),%eax

	orl	%edx,%edx	/* ms bits */
	js	L_exit_nuo_valid	/* Already normalized */
	jnz	L_nuo_shift_1	/* Shift left 1 - 31 bits */

	orl	%eax,%eax
	jz	L_exit_nuo_zero		/* The contents are zero */

	movl	%eax,%edx
	xorl	%eax,%eax
	subw	$32,EXP(%ebx)	/* This can cause an underflow */

/* We need to shift left by 1 - 31 bits */
L_nuo_shift_1:
	bsrl	%edx,%ecx	/* get the required shift in %ecx */
	subl	$31,%ecx
	negl	%ecx
	shld	%cl,%eax,%edx
	shl	%cl,%eax
	subw	%cx,EXP(%ebx)	/* This can cause an underflow */

	movl	%edx,SIGH(%ebx)
	movl	%eax,SIGL(%ebx)

L_exit_nuo_valid:
	movl	TAG_Valid,%eax

	popl	%ebx
	leave
	ret

L_exit_nuo_zero:
	movl	TAG_Zero,%eax
	movw	EXP_UNDER,EXP(%ebx)

	popl	%ebx
	leave
	ret
SYM_FUNC_END(FPU_normalize_nuo)