xref: /openbmc/linux/arch/mips/include/asm/hazards.h (revision 03ab8e6297acd1bc0eedaa050e2a1635c576fd11)
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 04, 07 Ralf Baechle <ralf@linux-mips.org>
 * Copyright (C) MIPS Technologies, Inc.
 *   written by Ralf Baechle <ralf@linux-mips.org>
 */
#ifndef _ASM_HAZARDS_H
#define _ASM_HAZARDS_H

#include <linux/stringify.h>
#include <asm/compiler.h>

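/*
 * The two encodings below are architecturally no-ops: sll $0, $0, 1 is
 * SSNOP (superscalar no-op) and sll $0, $0, 3 is EHB, the MIPS R2
 * execution hazard barrier.  Pre-R2 CPUs execute both as ordinary nops,
 * which is why the R1 configurations below can emit them unconditionally.
 */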
#define ___ssnop							\
	sll	$0, $0, 1

#define ___ehb								\
	sll	$0, $0, 3

/*
 * TLB hazards
 */
#if (defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR5) || \
     defined(CONFIG_CPU_MIPSR6)) && \
    !defined(CONFIG_CPU_CAVIUM_OCTEON) && !defined(CONFIG_CPU_LOONGSON64)

/*
 * MIPS R2 and later define the ehb instruction for hazard avoidance
 */

#define __mtc0_tlbw_hazard						\
	___ehb

#define __mtc0_tlbr_hazard						\
	___ehb

#define __tlbw_use_hazard						\
	___ehb

#define __tlb_read_hazard						\
	___ehb

#define __tlb_probe_hazard						\
	___ehb

#define __irq_enable_hazard						\
	___ehb

#define __irq_disable_hazard						\
	___ehb

#define __back_to_back_c0_hazard					\
	___ehb

/*
 * gcc has a tradition of miscompiling the previous construct using the
 * address of a label as an argument to inline assembler.  Gas, on the other
 * hand, has the annoying difference between la and dla, which are only
 * usable for 32-bit resp. 64-bit code, so they can't be used without
 * conditional compilation.  The alternative is switching the assembler to
 * 64-bit code, which happens to work right even for 32-bit code...
 * The macro below therefore loads the address of the immediately following
 * label with dla and jumps to it with jr.hb, which clears instruction
 * hazards without otherwise changing control flow.
 */
#define instruction_hazard()						\
do {									\
	unsigned long tmp;						\
									\
	__asm__ __volatile__(						\
	"	.set	push					\n"	\
	"	.set "MIPS_ISA_LEVEL"				\n"	\
	"	dla	%0, 1f					\n"	\
	"	jr.hb	%0					\n"	\
	"	.set	pop					\n"	\
	"1:							\n"	\
	: "=r" (tmp));							\
} while (0)

#elif (defined(CONFIG_CPU_MIPSR1) && !defined(CONFIG_MIPS_ALCHEMY)) || \
	defined(CONFIG_CPU_BMIPS)

/*
 * These are slightly complicated by the fact that we guarantee R1 kernels
 * will run fine on R2 processors, so each sequence ends with an ehb, which
 * is an ordinary nop on R1 but clears the hazard on R2.
 */

#define __mtc0_tlbw_hazard						\
	___ssnop;							\
	___ssnop;							\
	___ehb

#define __mtc0_tlbr_hazard						\
	___ssnop;							\
	___ssnop;							\
	___ehb

#define __tlbw_use_hazard						\
	___ssnop;							\
	___ssnop;							\
	___ssnop;							\
	___ehb

#define __tlb_read_hazard						\
	___ssnop;							\
	___ssnop;							\
	___ssnop;							\
	___ehb

#define __tlb_probe_hazard						\
	___ssnop;							\
	___ssnop;							\
	___ssnop;							\
	___ehb

#define __irq_enable_hazard						\
	___ssnop;							\
	___ssnop;							\
	___ssnop;							\
	___ehb

#define __irq_disable_hazard						\
	___ssnop;							\
	___ssnop;							\
	___ssnop;							\
	___ehb

#define __back_to_back_c0_hazard					\
	___ssnop;							\
	___ssnop;							\
	___ssnop;							\
	___ehb

/*
 * gcc has a tradition of miscompiling the previous construct using the
 * address of a label as an argument to inline assembler.  Gas, on the other
 * hand, has the annoying difference between la and dla, which are only
 * usable for 32-bit resp. 64-bit code, so they can't be used without
 * conditional compilation.  The alternative is switching the assembler to
 * 64-bit code, which happens to work right even for 32-bit code...
 * The macro below therefore loads the address of the immediately following
 * label with dla and jumps to it with jr.hb, which clears instruction
 * hazards without otherwise changing control flow.
 */
#define __instruction_hazard()						\
do {									\
	unsigned long tmp;						\
									\
	__asm__ __volatile__(						\
	"	.set	push					\n"	\
	"	.set	mips64r2				\n"	\
	"	dla	%0, 1f					\n"	\
	"	jr.hb	%0					\n"	\
	"	.set	pop					\n"	\
	"1:							\n"	\
	: "=r" (tmp));							\
} while (0)

#define instruction_hazard()						\
do {									\
	if (cpu_has_mips_r2_r6)						\
		__instruction_hazard();					\
} while (0)

#elif defined(CONFIG_MIPS_ALCHEMY) || defined(CONFIG_CPU_CAVIUM_OCTEON) || \
	defined(CONFIG_CPU_LOONGSON2EF) || defined(CONFIG_CPU_LOONGSON64) || \
	defined(CONFIG_CPU_R10000) || defined(CONFIG_CPU_R5500)

/*
 * R10000 rocks - all hazards are handled in hardware, so this becomes a
 * no-brainer.
 */

#define __mtc0_tlbw_hazard

#define __mtc0_tlbr_hazard

#define __tlbw_use_hazard

#define __tlb_read_hazard

#define __tlb_probe_hazard

#define __irq_enable_hazard

#define __irq_disable_hazard

#define __back_to_back_c0_hazard

#define instruction_hazard() do { } while (0)

#elif defined(CONFIG_CPU_SB1)

/*
 * Mostly like the R4000 for historical reasons
 */
#define __mtc0_tlbw_hazard

#define __mtc0_tlbr_hazard

#define __tlbw_use_hazard

#define __tlb_read_hazard

#define __tlb_probe_hazard

#define __irq_enable_hazard

#define __irq_disable_hazard						\
	___ssnop;							\
	___ssnop;							\
	___ssnop

#define __back_to_back_c0_hazard

#define instruction_hazard() do { } while (0)

#else

/*
 * Finally the catchall case for all other processors, including R4000, R4400,
 * R4600, R4700, R5000, RM7000, NEC VR41xx etc.
 *
 * The taken branch will result in a two cycle penalty for the two killed
 * instructions on R4000 / R4400.  Other processors only have a single cycle
 * hazard, so this is a nice trick to get optimal code for a range of
 * processors.
 */
#define __mtc0_tlbw_hazard						\
	nop;								\
	nop

#define __mtc0_tlbr_hazard						\
	nop;								\
	nop

#define __tlbw_use_hazard						\
	nop;								\
	nop;								\
	nop

#define __tlb_read_hazard						\
	nop;								\
	nop;								\
	nop

#define __tlb_probe_hazard						\
	nop;								\
	nop;								\
	nop

#define __irq_enable_hazard						\
	___ssnop;							\
	___ssnop;							\
	___ssnop

#define __irq_disable_hazard						\
	nop;								\
	nop;								\
	nop

#define __back_to_back_c0_hazard					\
	___ssnop;							\
	___ssnop;							\
	___ssnop

#define instruction_hazard() do { } while (0)

#endif


/* FPU hazards */

#if defined(CONFIG_CPU_SB1)

#define __enable_fpu_hazard						\
	.set	push;							\
	.set	mips64;							\
	.set	noreorder;						\
	___ssnop;							\
	bnezl	$0, .+4;						\
	___ssnop;							\
	.set	pop

#define __disable_fpu_hazard

#elif defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR5) || \
      defined(CONFIG_CPU_MIPSR6)

#define __enable_fpu_hazard						\
	___ehb

#define __disable_fpu_hazard						\
	___ehb

#else

#define __enable_fpu_hazard						\
	nop;								\
	nop;								\
	nop;								\
	nop

#define __disable_fpu_hazard						\
	___ehb

#endif

#ifdef __ASSEMBLY__

#define _ssnop ___ssnop
#define	_ehb ___ehb
#define mtc0_tlbw_hazard __mtc0_tlbw_hazard
#define mtc0_tlbr_hazard __mtc0_tlbr_hazard
#define tlbw_use_hazard __tlbw_use_hazard
#define tlb_read_hazard __tlb_read_hazard
#define tlb_probe_hazard __tlb_probe_hazard
#define irq_enable_hazard __irq_enable_hazard
#define irq_disable_hazard __irq_disable_hazard
#define back_to_back_c0_hazard __back_to_back_c0_hazard
#define enable_fpu_hazard __enable_fpu_hazard
#define disable_fpu_hazard __disable_fpu_hazard
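
/*
 * Illustrative sketch only (not a sequence taken from this tree):
 * handwritten assembly uses the bare macro names defined above around
 * TLB writes, with CP0_ENTRYHI coming from asm/mipsregs.h:
 *
 *	mtc0	k1, CP0_ENTRYHI
 *	mtc0_tlbw_hazard
 *	tlbwr
 *	tlbw_use_hazard
 */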

#else

#define _ssnop()							\
do {									\
	__asm__ __volatile__(						\
	__stringify(___ssnop)						\
	);								\
} while (0)

#define	_ehb()								\
do {									\
	__asm__ __volatile__(						\
	__stringify(___ehb)						\
	);								\
} while (0)


#define mtc0_tlbw_hazard()						\
do {									\
	__asm__ __volatile__(						\
	__stringify(__mtc0_tlbw_hazard)					\
	);								\
} while (0)


#define mtc0_tlbr_hazard()						\
do {									\
	__asm__ __volatile__(						\
	__stringify(__mtc0_tlbr_hazard)					\
	);								\
} while (0)


#define tlbw_use_hazard()						\
do {									\
	__asm__ __volatile__(						\
	__stringify(__tlbw_use_hazard)					\
	);								\
} while (0)
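
/*
 * A minimal sketch of how the C wrappers are typically used, assuming the
 * usual TLB accessors from asm/mipsregs.h (write_c0_entryhi(),
 * tlb_write_indexed()); it is illustrative only:
 *
 *	write_c0_entryhi(entryhi);
 *	mtc0_tlbw_hazard();	// the mtc0 must settle before the tlbwi
 *	tlb_write_indexed();
 *	tlbw_use_hazard();	// and the write before the mapping is used
 */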


#define tlb_read_hazard()						\
do {									\
	__asm__ __volatile__(						\
	__stringify(__tlb_read_hazard)					\
	);								\
} while (0)


#define tlb_probe_hazard()						\
do {									\
	__asm__ __volatile__(						\
	__stringify(__tlb_probe_hazard)					\
	);								\
} while (0)


#define irq_enable_hazard()						\
do {									\
	__asm__ __volatile__(						\
	__stringify(__irq_enable_hazard)				\
	);								\
} while (0)


#define irq_disable_hazard()						\
do {									\
	__asm__ __volatile__(						\
	__stringify(__irq_disable_hazard)				\
	);								\
} while (0)


#define back_to_back_c0_hazard()					\
do {									\
	__asm__ __volatile__(						\
	__stringify(__back_to_back_c0_hazard)				\
	);								\
} while (0)
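
/*
 * A sketch of typical use (illustrative, assuming the coprocessor 0
 * accessors from asm/mipsregs.h): back_to_back_c0_hazard() separates a c0
 * write from a dependent c0 access, e.g.
 *
 *	write_c0_compare(cnt);
 *	back_to_back_c0_hazard();
 *	res = read_c0_count();
 */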


#define enable_fpu_hazard()						\
do {									\
	__asm__ __volatile__(						\
	__stringify(__enable_fpu_hazard)				\
	);								\
} while (0)
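
/*
 * A sketch of typical use (illustrative only): after enabling the FPU via
 * the CU1 bit in c0_status, enable_fpu_hazard() must run before the first
 * FPU instruction is issued, e.g.
 *
 *	set_c0_status(ST0_CU1);
 *	enable_fpu_hazard();
 */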


#define disable_fpu_hazard()						\
do {									\
	__asm__ __volatile__(						\
	__stringify(__disable_fpu_hazard)				\
	);								\
} while (0)

/*
 * MIPS R2 instruction hazard barrier.	Needs to be called as a subroutine.
 */
extern void mips_ihb(void);
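
/*
 * A hedged usage sketch (illustrative, not a call site from this tree):
 * C code that needs to order a CP0 update against subsequently fetched
 * instructions, but cannot inline an ehb, calls mips_ihb() instead.
 * write_c0_config5() below stands in for whatever CP0 write the caller
 * actually performs:
 *
 *	write_c0_config5(cfg);
 *	mips_ihb();		// instruction hazard barrier, as a call
 */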

#endif /* __ASSEMBLY__ */

#endif /* _ASM_HAZARDS_H */