xref: /openbmc/linux/arch/mips/include/asm/asmmacro.h (revision a2fb4d78)
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003 Ralf Baechle
 */
#ifndef _ASM_ASMMACRO_H
#define _ASM_ASMMACRO_H

#include <asm/hazards.h>
#include <asm/asm-offsets.h>

#ifdef CONFIG_32BIT
#include <asm/asmmacro-32.h>
#endif
#ifdef CONFIG_64BIT
#include <asm/asmmacro-64.h>
#endif
#ifdef CONFIG_MIPS_MT_SMTC
#include <asm/mipsmtregs.h>
#endif

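/*
 * local_irq_enable / local_irq_disable: assembler-level counterparts of the
 * C local IRQ helpers.  Three variants are provided:
 *
 *  - SMTC kernels mask/unmask interrupts per thread context through the
 *    TCStatus.IXMT bit (ori then xori clears the bit branch-free; ori alone
 *    sets it).
 *  - MIPS R2 kernels use the architected ei/di instructions; the \reg
 *    argument is accepted only for interface compatibility.
 *  - All other kernels do a read-modify-write of the Status.IE bit.  With
 *    CONFIG_PREEMPT the fallback local_irq_disable also bumps the preempt
 *    count in thread_info (reached through $28/gp) around the Status update
 *    so the read-modify-write cannot itself be preempted.
 *
 * A minimal usage sketch (hypothetical caller, not part of this file):
 *
 *	local_irq_disable t1		# t1 is clobbered as scratch
 *	...				# critical section
 *	local_irq_enable t1
 */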
#ifdef CONFIG_MIPS_MT_SMTC
	.macro	local_irq_enable reg=t0
	mfc0	\reg, CP0_TCSTATUS
	ori	\reg, \reg, TCSTATUS_IXMT
	xori	\reg, \reg, TCSTATUS_IXMT
	mtc0	\reg, CP0_TCSTATUS
	_ehb
	.endm

	.macro	local_irq_disable reg=t0
	mfc0	\reg, CP0_TCSTATUS
	ori	\reg, \reg, TCSTATUS_IXMT
	mtc0	\reg, CP0_TCSTATUS
	_ehb
	.endm
#elif defined(CONFIG_CPU_MIPSR2)
	.macro	local_irq_enable reg=t0
	ei
	irq_enable_hazard
	.endm

	.macro	local_irq_disable reg=t0
	di
	irq_disable_hazard
	.endm
#else
	.macro	local_irq_enable reg=t0
	mfc0	\reg, CP0_STATUS
	ori	\reg, \reg, 1
	mtc0	\reg, CP0_STATUS
	irq_enable_hazard
	.endm

	.macro	local_irq_disable reg=t0
#ifdef CONFIG_PREEMPT
	lw      \reg, TI_PRE_COUNT($28)
	addi    \reg, \reg, 1
	sw      \reg, TI_PRE_COUNT($28)
#endif
	mfc0	\reg, CP0_STATUS
	ori	\reg, \reg, 1
	xori	\reg, \reg, 1
	mtc0	\reg, CP0_STATUS
	irq_disable_hazard
#ifdef CONFIG_PREEMPT
	lw      \reg, TI_PRE_COUNT($28)
	addi    \reg, \reg, -1
	sw      \reg, TI_PRE_COUNT($28)
#endif
	.endm
#endif /* CONFIG_MIPS_MT_SMTC */

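/*
 * FPU context save/restore.  The Status.FR bit selects the register model:
 * with FR=0 only the 16 even-numbered registers exist as 64-bit FPRs (the
 * odd-numbered ones alias their upper halves), with FR=1 all 32 registers
 * are independent 64-bit FPRs and the odd-numbered half must be saved and
 * restored as well.  FCSR (fcr31) is handled alongside the data registers.
 */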
	.macro	fpu_save_16even thread tmp=t0
	cfc1	\tmp, fcr31
	sdc1	$f0,  THREAD_FPR0(\thread)
	sdc1	$f2,  THREAD_FPR2(\thread)
	sdc1	$f4,  THREAD_FPR4(\thread)
	sdc1	$f6,  THREAD_FPR6(\thread)
	sdc1	$f8,  THREAD_FPR8(\thread)
	sdc1	$f10, THREAD_FPR10(\thread)
	sdc1	$f12, THREAD_FPR12(\thread)
	sdc1	$f14, THREAD_FPR14(\thread)
	sdc1	$f16, THREAD_FPR16(\thread)
	sdc1	$f18, THREAD_FPR18(\thread)
	sdc1	$f20, THREAD_FPR20(\thread)
	sdc1	$f22, THREAD_FPR22(\thread)
	sdc1	$f24, THREAD_FPR24(\thread)
	sdc1	$f26, THREAD_FPR26(\thread)
	sdc1	$f28, THREAD_FPR28(\thread)
	sdc1	$f30, THREAD_FPR30(\thread)
	sw	\tmp, THREAD_FCR31(\thread)
	.endm

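/*
 * The odd-numbered FPRs are only present when Status.FR is set.  The
 * .set mips64r2 override lets the assembler accept odd-numbered registers
 * in sdc1/ldc1 even when the kernel itself is built for a 32-bit ISA.
 */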
	.macro	fpu_save_16odd thread
	.set	push
	.set	mips64r2
	sdc1	$f1,  THREAD_FPR1(\thread)
	sdc1	$f3,  THREAD_FPR3(\thread)
	sdc1	$f5,  THREAD_FPR5(\thread)
	sdc1	$f7,  THREAD_FPR7(\thread)
	sdc1	$f9,  THREAD_FPR9(\thread)
	sdc1	$f11, THREAD_FPR11(\thread)
	sdc1	$f13, THREAD_FPR13(\thread)
	sdc1	$f15, THREAD_FPR15(\thread)
	sdc1	$f17, THREAD_FPR17(\thread)
	sdc1	$f19, THREAD_FPR19(\thread)
	sdc1	$f21, THREAD_FPR21(\thread)
	sdc1	$f23, THREAD_FPR23(\thread)
	sdc1	$f25, THREAD_FPR25(\thread)
	sdc1	$f27, THREAD_FPR27(\thread)
	sdc1	$f29, THREAD_FPR29(\thread)
	sdc1	$f31, THREAD_FPR31(\thread)
	.set	pop
	.endm

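/*
 * Save the full FPU context.  \status is the saved CP0 Status value;
 * shifting it left by 5 moves the FR bit (bit 26) into the sign position,
 * so bgez skips the odd-register save when the FPU is in 16-register mode.
 */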
	.macro	fpu_save_double thread status tmp
#if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2)
	sll	\tmp, \status, 5
	bgez	\tmp, 10f
	fpu_save_16odd \thread
10:
#endif
	fpu_save_16even \thread \tmp
	.endm

	.macro	fpu_restore_16even thread tmp=t0
	lw	\tmp, THREAD_FCR31(\thread)
	ldc1	$f0,  THREAD_FPR0(\thread)
	ldc1	$f2,  THREAD_FPR2(\thread)
	ldc1	$f4,  THREAD_FPR4(\thread)
	ldc1	$f6,  THREAD_FPR6(\thread)
	ldc1	$f8,  THREAD_FPR8(\thread)
	ldc1	$f10, THREAD_FPR10(\thread)
	ldc1	$f12, THREAD_FPR12(\thread)
	ldc1	$f14, THREAD_FPR14(\thread)
	ldc1	$f16, THREAD_FPR16(\thread)
	ldc1	$f18, THREAD_FPR18(\thread)
	ldc1	$f20, THREAD_FPR20(\thread)
	ldc1	$f22, THREAD_FPR22(\thread)
	ldc1	$f24, THREAD_FPR24(\thread)
	ldc1	$f26, THREAD_FPR26(\thread)
	ldc1	$f28, THREAD_FPR28(\thread)
	ldc1	$f30, THREAD_FPR30(\thread)
	ctc1	\tmp, fcr31
	.endm

	.macro	fpu_restore_16odd thread
	.set	push
	.set	mips64r2
	ldc1	$f1,  THREAD_FPR1(\thread)
	ldc1	$f3,  THREAD_FPR3(\thread)
	ldc1	$f5,  THREAD_FPR5(\thread)
	ldc1	$f7,  THREAD_FPR7(\thread)
	ldc1	$f9,  THREAD_FPR9(\thread)
	ldc1	$f11, THREAD_FPR11(\thread)
	ldc1	$f13, THREAD_FPR13(\thread)
	ldc1	$f15, THREAD_FPR15(\thread)
	ldc1	$f17, THREAD_FPR17(\thread)
	ldc1	$f19, THREAD_FPR19(\thread)
	ldc1	$f21, THREAD_FPR21(\thread)
	ldc1	$f23, THREAD_FPR23(\thread)
	ldc1	$f25, THREAD_FPR25(\thread)
	ldc1	$f27, THREAD_FPR27(\thread)
	ldc1	$f29, THREAD_FPR29(\thread)
	ldc1	$f31, THREAD_FPR31(\thread)
	.set	pop
	.endm

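/*
 * Restore the full FPU context; the Status.FR test mirrors fpu_save_double
 * above.
 */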
	.macro	fpu_restore_double thread status tmp
#if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2)
	sll	\tmp, \status, 5
	bgez	\tmp, 10f				# 16 register mode?

	fpu_restore_16odd \thread
10:
#endif
	fpu_restore_16even \thread \tmp
	.endm

/*
 * Temporary until all gas have MT ASE support
 */
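/*
 * DMT/EMT disable/re-enable multi-threading within the issuing VPE (the TE
 * bit of VPEControl); DVPE/EVPE disable/re-enable all VPEs on the core (the
 * EVP bit of MVPControl).  The optional \reg receives the previous value of
 * the control register being modified.
 */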
	.macro	DMT	reg=0
	.word	0x41600bc1 | (\reg << 16)
	.endm

	.macro	EMT	reg=0
	.word	0x41600be1 | (\reg << 16)
	.endm

	.macro	DVPE	reg=0
	.word	0x41600001 | (\reg << 16)
	.endm

	.macro	EVPE	reg=0
	.word	0x41600021 | (\reg << 16)
	.endm

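/*
 * MFTR/MTTR move a value between a general register of the current thread
 * context and a register of the thread context selected by
 * VPEControl.TargTC; the rt, rd, u and sel arguments are encoded directly
 * into the corresponding instruction fields.
 */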
	.macro	MFTR	rt=0, rd=0, u=0, sel=0
	 .word	0x41000000 | (\rt << 16) | (\rd << 11) | (\u << 5) | (\sel)
	.endm

	.macro	MTTR	rt=0, rd=0, u=0, sel=0
	 .word	0x41800000 | (\rt << 16) | (\rd << 11) | (\u << 5) | (\sel)
	.endm

#endif /* _ASM_ASMMACRO_H */