/******************************************************************************
 * arch/ia64/include/asm/native/inst.h
 *
 * Copyright (c) 2008 Isaku Yamahata <yamahata at valinux co jp>
 *                    VA Linux Systems Japan K.K.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 *
 */

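/*
 * Native (bare-metal) side of the paravirt instruction macros: the generic
 * paravirt symbol names below resolve directly to the corresponding native
 * entry points.
 */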
#define DO_SAVE_MIN		IA64_NATIVE_DO_SAVE_MIN

#define __paravirt_switch_to			ia64_native_switch_to
#define __paravirt_leave_syscall		ia64_native_leave_syscall
#define __paravirt_work_processed_syscall	ia64_native_work_processed_syscall
#define __paravirt_leave_kernel			ia64_native_leave_kernel
#define __paravirt_pending_syscall_end		ia64_work_pending_syscall_end
#define __paravirt_work_processed_syscall_target \
						ia64_work_processed_syscall

#define paravirt_fsyscall_table			ia64_native_fsyscall_table
#define paravirt_fsys_bubble_down		ia64_native_fsys_bubble_down

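/*
 * With CONFIG_PARAVIRT_GUEST_ASM_CLOBBER_CHECK enabled, registers that a
 * paravirtualized variant of a macro would be allowed to clobber are filled
 * with PARAVIRT_POISON, and clobberable predicates are forced to true, so
 * that native code which wrongly relies on them fails visibly instead of
 * working by accident.
 */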
#ifdef CONFIG_PARAVIRT_GUEST_ASM_CLOBBER_CHECK
# define PARAVIRT_POISON	0xdeadbeefbaadf00d
# define CLOBBER(clob)				\
	;;					\
	movl clob = PARAVIRT_POISON;		\
	;;
# define CLOBBER_PRED(pred_clob)		\
	;;					\
	cmp.eq pred_clob, p0 = r0, r0		\
	;;
#else
# define CLOBBER(clob)			/* nothing */
# define CLOBBER_PRED(pred_clob)	/* nothing */
#endif

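/*
 * The macros below expand to the plain native instructions; the clob and
 * pred_clob arguments only name registers and predicates that a
 * paravirtualized implementation would be free to clobber (see the clobber
 * check above).  As a rough illustration (register choices are arbitrary,
 * not taken from any particular handler), a fault handler could read the
 * interruption state like this:
 *
 *	MOV_FROM_IFA(r16)		// r16 = cr.ifa, faulting address
 *	MOV_FROM_ITIR(r17)		// r17 = cr.itir, page size/key info
 *	MOV_FROM_IPSR(p0, r18)		// r18 = cr.ipsr (p0 is always true)
 *	MOV_FROM_ISR(r19)		// r19 = cr.isr, interruption status
 */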
#define MOV_FROM_IFA(reg)	\
	mov reg = cr.ifa

#define MOV_FROM_ITIR(reg)	\
	mov reg = cr.itir

#define MOV_FROM_ISR(reg)	\
	mov reg = cr.isr

#define MOV_FROM_IHA(reg)	\
	mov reg = cr.iha

#define MOV_FROM_IPSR(pred, reg)	\
(pred)	mov reg = cr.ipsr

#define MOV_FROM_IIM(reg)	\
	mov reg = cr.iim

#define MOV_FROM_IIP(reg)	\
	mov reg = cr.iip

#define MOV_FROM_IVR(reg, clob)	\
	mov reg = cr.ivr	\
	CLOBBER(clob)

#define MOV_FROM_PSR(pred, reg, clob)	\
(pred)	mov reg = psr			\
	CLOBBER(clob)

#define MOV_FROM_ITC(pred, pred_clob, reg, clob)	\
(pred)	mov reg = ar.itc				\
	CLOBBER(clob)					\
	CLOBBER_PRED(pred_clob)

#define MOV_TO_IFA(reg, clob)	\
	mov cr.ifa = reg	\
	CLOBBER(clob)

#define MOV_TO_ITIR(pred, reg, clob)	\
(pred)	mov cr.itir = reg		\
	CLOBBER(clob)

#define MOV_TO_IHA(pred, reg, clob)	\
(pred)	mov cr.iha = reg		\
	CLOBBER(clob)

#define MOV_TO_IPSR(pred, reg, clob)		\
(pred)	mov cr.ipsr = reg			\
	CLOBBER(clob)

#define MOV_TO_IFS(pred, reg, clob)	\
(pred)	mov cr.ifs = reg		\
	CLOBBER(clob)

#define MOV_TO_IIP(reg, clob)	\
	mov cr.iip = reg	\
	CLOBBER(clob)

#define MOV_TO_KR(kr, reg, clob0, clob1)	\
	mov IA64_KR(kr) = reg			\
	CLOBBER(clob0)				\
	CLOBBER(clob1)

#define ITC_I(pred, reg, clob)	\
(pred)	itc.i reg		\
	CLOBBER(clob)

#define ITC_D(pred, reg, clob)	\
(pred)	itc.d reg		\
	CLOBBER(clob)

#define ITC_I_AND_D(pred_i, pred_d, reg, clob)	\
(pred_i) itc.i reg;				\
(pred_d) itc.d reg				\
	CLOBBER(clob)

#define THASH(pred, reg0, reg1, clob)		\
(pred)	thash reg0 = reg1			\
	CLOBBER(clob)

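/*
 * PSR manipulation helpers: ssm/rsm set and clear bits in the processor
 * status register (interruption collection, interrupt enable, translation,
 * big-endian), with srlz.i/srlz.d serialization where the architecture
 * requires it before dependent instructions may execute.
 */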
#define SSM_PSR_IC_AND_DEFAULT_BITS_AND_SRLZ_I(clob0, clob1)		\
	ssm psr.ic | PSR_DEFAULT_BITS					\
	CLOBBER(clob0)							\
	CLOBBER(clob1)							\
	;;								\
	srlz.i /* guarantee that interruption collection is on */	\
	;;

#define SSM_PSR_IC_AND_SRLZ_D(clob0, clob1)	\
	ssm psr.ic				\
	CLOBBER(clob0)				\
	CLOBBER(clob1)				\
	;;					\
	srlz.d

#define RSM_PSR_IC(clob)	\
	rsm psr.ic		\
	CLOBBER(clob)

#define SSM_PSR_I(pred, pred_clob, clob)	\
(pred)	ssm psr.i				\
	CLOBBER(clob)				\
	CLOBBER_PRED(pred_clob)

#define RSM_PSR_I(pred, clob0, clob1)	\
(pred)	rsm psr.i			\
	CLOBBER(clob0)			\
	CLOBBER(clob1)

#define RSM_PSR_I_IC(clob0, clob1, clob2)	\
	rsm psr.i | psr.ic			\
	CLOBBER(clob0)				\
	CLOBBER(clob1)				\
	CLOBBER(clob2)

#define RSM_PSR_DT		\
	rsm psr.dt

#define RSM_PSR_BE_I(clob0, clob1)	\
	rsm psr.be | psr.i		\
	CLOBBER(clob0)			\
	CLOBBER(clob1)

#define SSM_PSR_DT_AND_SRLZ_I	\
	ssm psr.dt		\
	;;			\
	srlz.i

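/*
 * bsw.0/bsw.1 switch between the two banks of r16-r31, cover allocates a
 * new empty register stack frame, and rfi returns from interruption; the
 * native versions need no work beyond the instruction itself.
 */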
#define BSW_0(clob0, clob1, clob2)	\
	bsw.0				\
	CLOBBER(clob0)			\
	CLOBBER(clob1)			\
	CLOBBER(clob2)

#define BSW_1(clob0, clob1)	\
	bsw.1			\
	CLOBBER(clob0)		\
	CLOBBER(clob1)

#define COVER	\
	cover

#define RFI	\
	rfi
195