/* arch/x86/entry/calling.h (xref revision e872045b) */
#include <linux/jump_label.h>
#include <asm/unwind_hints.h>

/*

 x86 function call convention, 64-bit:
 -------------------------------------
  arguments           |  callee-saved      | extra caller-saved | return
 [callee-clobbered]   |                    | [callee-clobbered] |
 ---------------------------------------------------------------------------
 rdi rsi rdx rcx r8-9 | rbx rbp [*] r12-15 | r10-11             | rax, rdx [**]

 ( rsp is obviously invariant across normal function calls. (gcc can 'merge'
   functions when it sees tail-call optimization possibilities.) rflags is
   clobbered. Leftover arguments are passed on the stack frame.)

 [*]  In the frame-pointers case rbp is fixed to the stack frame.

 [**] for struct return values wider than 64 bits the return convention is a
      bit more complex: up to 128 bits width we return small structures
      straight in rax, rdx. For structures larger than that (3 words or
      larger) the caller puts a pointer to an on-stack return struct
      [allocated in the caller's stack frame] into the first argument - i.e.
      into rdi. All other arguments shift up by one in this case.
      Fortunately this case is rare in the kernel.

For 32-bit we have the following conventions - kernel is built with
-mregparm=3 and -freg-struct-return:

 x86 function calling convention, 32-bit:
 ----------------------------------------
  arguments         | callee-saved        | extra caller-saved | return
 [callee-clobbered] |                     | [callee-clobbered] |
 -------------------------------------------------------------------------
 eax edx ecx        | ebx edi esi ebp [*] | <none>             | eax, edx [**]

 ( here too esp is obviously invariant across normal function calls. eflags
   is clobbered. Leftover arguments are passed on the stack frame. )

 [*]  In the frame-pointers case ebp is fixed to the stack frame.

 [**] We build with -freg-struct-return, which on 32-bit means semantics
      similar to those on 64-bit: edx can be used for a second return value
      (i.e. covering integer and structure sizes up to 64 bits) - after that
      it gets more complex and more expensive: 3-word or larger struct returns
      get done in the caller's frame and the pointer to the return struct goes
      into regparm0, i.e. eax - the other arguments shift up and the
      function's register parameters degenerate to regparm=2 in essence.

*/
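
/*
 * Illustrative mapping (a sketch, not part of the original comment): for a
 * 64-bit call such as 'long f(long a, long b, long c)', the generated caller
 * does roughly the following, with the result coming back in rax:
 *
 *	movq	<a>, %rdi
 *	movq	<b>, %rsi
 *	movq	<c>, %rdx
 *	call	f
 */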

#ifdef CONFIG_X86_64

/*
 * 64-bit system call stack frame layout defines and helpers,
 * for assembly code:
 */

/* The layout forms the "struct pt_regs" on the stack: */
/*
 * C ABI says these regs are callee-preserved. They aren't saved on kernel entry
 * unless syscall needs a complete, fully filled "struct pt_regs".
 */
#define R15		0*8
#define R14		1*8
#define R13		2*8
#define R12		3*8
#define RBP		4*8
#define RBX		5*8
/* These regs are callee-clobbered. Always saved on kernel entry. */
#define R11		6*8
#define R10		7*8
#define R9		8*8
#define R8		9*8
#define RAX		10*8
#define RCX		11*8
#define RDX		12*8
#define RSI		13*8
#define RDI		14*8
/*
 * On syscall entry, this is syscall#. On CPU exception, this is error code.
 * On hw interrupt, it's IRQ number:
 */
#define ORIG_RAX	15*8
/* Return frame for iretq */
#define RIP		16*8
#define CS		17*8
#define EFLAGS		18*8
#define RSP		19*8
#define SS		20*8

#define SIZEOF_PTREGS	21*8
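
/*
 * Illustrative use of these offsets (a sketch, not code from this file):
 * once the full pt_regs frame has been built, entry code can address the
 * individual saved registers relative to %rsp, e.g.:
 *
 *	movq	ORIG_RAX(%rsp), %rsi	# saved syscall nr / error code
 *	movq	RDI(%rsp), %rdi		# first saved argument register
 */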
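/*
 * ALLOC_PT_GPREGS_ON_STACK only makes room for the 15 general purpose
 * register slots (R15..RDI above); ORIG_RAX and the iret frame are normally
 * already on the stack at this point, pushed by hardware or by the entry
 * stub.
 */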
	.macro ALLOC_PT_GPREGS_ON_STACK
	addq	$-(15*8), %rsp
	.endm

	.macro SAVE_C_REGS_HELPER offset=0 rax=1 rcx=1 r8910=1 r11=1
	.if \r11
	movq %r11, 6*8+\offset(%rsp)
	.endif
	.if \r8910
	movq %r10, 7*8+\offset(%rsp)
	movq %r9,  8*8+\offset(%rsp)
	movq %r8,  9*8+\offset(%rsp)
	.endif
	.if \rax
	movq %rax, 10*8+\offset(%rsp)
	.endif
	.if \rcx
	movq %rcx, 11*8+\offset(%rsp)
	.endif
	movq %rdx, 12*8+\offset(%rsp)
	movq %rsi, 13*8+\offset(%rsp)
	movq %rdi, 14*8+\offset(%rsp)
	UNWIND_HINT_REGS offset=\offset extra=0
	.endm
	.macro SAVE_C_REGS offset=0
	SAVE_C_REGS_HELPER \offset, 1, 1, 1, 1
	.endm
	.macro SAVE_C_REGS_EXCEPT_RAX_RCX offset=0
	SAVE_C_REGS_HELPER \offset, 0, 0, 1, 1
	.endm
	.macro SAVE_C_REGS_EXCEPT_R891011
	SAVE_C_REGS_HELPER 0, 1, 1, 0, 0
	.endm
	.macro SAVE_C_REGS_EXCEPT_RCX_R891011
	SAVE_C_REGS_HELPER 0, 1, 0, 0, 0
	.endm
	.macro SAVE_C_REGS_EXCEPT_RAX_RCX_R11
	SAVE_C_REGS_HELPER 0, 0, 0, 1, 0
	.endm

	.macro SAVE_EXTRA_REGS offset=0
	movq %r15, 0*8+\offset(%rsp)
	movq %r14, 1*8+\offset(%rsp)
	movq %r13, 2*8+\offset(%rsp)
	movq %r12, 3*8+\offset(%rsp)
	movq %rbp, 4*8+\offset(%rsp)
	movq %rbx, 5*8+\offset(%rsp)
	UNWIND_HINT_REGS offset=\offset
	.endm
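
/*
 * Putting the save-side macros together (an illustrative sketch only, not a
 * sequence copied from the entry code): with the iret frame and orig_rax
 * already pushed, a handler could build the rest of pt_regs like this:
 *
 *	ALLOC_PT_GPREGS_ON_STACK
 *	SAVE_C_REGS
 *	SAVE_EXTRA_REGS
 *	ENCODE_FRAME_POINTER
 */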

	.macro RESTORE_EXTRA_REGS offset=0
	movq 0*8+\offset(%rsp), %r15
	movq 1*8+\offset(%rsp), %r14
	movq 2*8+\offset(%rsp), %r13
	movq 3*8+\offset(%rsp), %r12
	movq 4*8+\offset(%rsp), %rbp
	movq 5*8+\offset(%rsp), %rbx
	UNWIND_HINT_REGS offset=\offset extra=0
	.endm

	.macro POP_EXTRA_REGS
	popq %r15
	popq %r14
	popq %r13
	popq %r12
	popq %rbp
	popq %rbx
	.endm

	.macro POP_C_REGS
	popq %r11
	popq %r10
	popq %r9
	popq %r8
	popq %rax
	popq %rcx
	popq %rdx
	popq %rsi
	popq %rdi
	.endm
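
/*
 * Matching restore-side sketch (illustrative only): a return path that kept
 * the full pt_regs can undo the above with pops, skip orig_rax, and return:
 *
 *	POP_EXTRA_REGS
 *	POP_C_REGS
 *	addq	$8, %rsp	# skip orig_rax
 *	INTERRUPT_RETURN	# or plain iretq
 */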

	.macro RESTORE_C_REGS_HELPER rstor_rax=1, rstor_rcx=1, rstor_r11=1, rstor_r8910=1, rstor_rdx=1
	.if \rstor_r11
	movq 6*8(%rsp), %r11
	.endif
	.if \rstor_r8910
	movq 7*8(%rsp), %r10
	movq 8*8(%rsp), %r9
	movq 9*8(%rsp), %r8
	.endif
	.if \rstor_rax
	movq 10*8(%rsp), %rax
	.endif
	.if \rstor_rcx
	movq 11*8(%rsp), %rcx
	.endif
	.if \rstor_rdx
	movq 12*8(%rsp), %rdx
	.endif
	movq 13*8(%rsp), %rsi
	movq 14*8(%rsp), %rdi
	UNWIND_HINT_IRET_REGS offset=16*8
	.endm
	.macro RESTORE_C_REGS
	RESTORE_C_REGS_HELPER 1,1,1,1,1
	.endm
	.macro RESTORE_C_REGS_EXCEPT_RAX
	RESTORE_C_REGS_HELPER 0,1,1,1,1
	.endm
	.macro RESTORE_C_REGS_EXCEPT_RCX
	RESTORE_C_REGS_HELPER 1,0,1,1,1
	.endm
	.macro RESTORE_C_REGS_EXCEPT_R11
	RESTORE_C_REGS_HELPER 1,1,0,1,1
	.endm
	.macro RESTORE_C_REGS_EXCEPT_RCX_R11
	RESTORE_C_REGS_HELPER 1,0,0,1,1
	.endm

	.macro REMOVE_PT_GPREGS_FROM_STACK addskip=0
	subq $-(15*8+\addskip), %rsp
	.endm
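
/*
 * Both ALLOC_PT_GPREGS_ON_STACK and REMOVE_PT_GPREGS_FROM_STACK adjust %rsp
 * by adding/subtracting a *negative* constant.  Since the 8-bit immediate is
 * sign-extended (range -128..127), -(15*8+8) = -128 still fits in a single
 * byte where +128 would not - presumably the reason for this form.
 */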
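/* 0xf1 is the (undocumented) ICEBP/INT1 opcode; it raises a #DB debug trap. */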
	.macro icebp
	.byte 0xf1
	.endm

/*
 * This is a sneaky trick to help the unwinder find pt_regs on the stack.  The
 * frame pointer is replaced with an encoded pointer to pt_regs.  The encoding
 * is just setting the LSB, which makes it an invalid stack address and is also
 * a signal to the unwinder that it's a pt_regs pointer in disguise.
 *
 * NOTE: This macro must be used *after* SAVE_EXTRA_REGS because it corrupts
 * the original rbp.
 */
.macro ENCODE_FRAME_POINTER ptregs_offset=0
#ifdef CONFIG_FRAME_POINTER
	.if \ptregs_offset
		leaq \ptregs_offset(%rsp), %rbp
	.else
		mov %rsp, %rbp
	.endif
	orq	$0x1, %rbp
#endif
.endm
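
/*
 * Decoding sketch (illustrative; the real logic lives in the unwinder): a
 * saved "frame pointer" with its low bit set is not a real %rbp value but an
 * encoded pt_regs pointer, recovered by clearing that bit again, e.g.:
 *
 *	andq	$~0x1, %rbp	# clear bit 0, leaving the pt_regs address
 */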

#endif /* CONFIG_X86_64 */

/*
 * This does 'call enter_from_user_mode' unless we can avoid it based on
 * kernel config or using the static jump infrastructure.
 */
.macro CALL_enter_from_user_mode
#ifdef CONFIG_CONTEXT_TRACKING
#ifdef HAVE_JUMP_LABEL
	STATIC_JUMP_IF_FALSE .Lafter_call_\@, context_tracking_enabled, def=0
#endif
	call enter_from_user_mode
.Lafter_call_\@:
#endif
.endm
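
/*
 * Illustrative use (a sketch, not copied from the entry code): a path that
 * has just saved pt_regs on an entry from user mode would notify context
 * tracking before running further kernel code, roughly:
 *
 *	SAVE_C_REGS
 *	SAVE_EXTRA_REGS
 *	ENCODE_FRAME_POINTER
 *	CALL_enter_from_user_mode
 */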