/*
 * Code for the vDSO.  This is the AT_SYSINFO entry point: it uses
 * SYSENTER or SYSCALL when the CPU supports them and falls back to
 * the old int $0x80 method otherwise.
 */

#include <asm/dwarf2.h>
#include <asm/cpufeature.h>
#include <asm/alternative-asm.h>

/*
 * First get the common code for the sigreturn entry points.
 * This must come first.
 */
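/* sigreturn.S supplies __kernel_sigreturn and __kernel_rt_sigreturn. */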
#include "sigreturn.S"

	.text
	.globl __kernel_vsyscall
	.type __kernel_vsyscall,@function
	ALIGN
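/*
 * AT_SYSINFO entry point.  The address of __kernel_vsyscall is handed to
 * userspace in the ELF auxiliary vector, and libc calls it instead of
 * issuing the system call instruction itself.  The usual 32-bit syscall
 * convention applies: number in %eax, arguments in %ebx, %ecx, %edx,
 * %esi, %edi, %ebp, result in %eax.  An illustrative caller (sketch only,
 * not taken from any particular libc; sysinfo_addr is a hypothetical
 * location where the caller stored the AT_SYSINFO value):
 *
 *	movl	$20, %eax		# __NR_getpid
 *	call	*sysinfo_addr		# ends up here
 */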
__kernel_vsyscall:
	CFI_STARTPROC
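	/*
	 * The CFI_* macros (<asm/dwarf2.h>) emit DWARF call-frame
	 * annotations so that unwinders and debuggers can step through
	 * the vDSO.
	 */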
	/*
	 * Reshuffle regs so that any of the possible entry instructions
	 * (SYSENTER, SYSCALL or INT $0x80) will preserve enough state.
	 */
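	/*
	 * SYSEXIT reloads %eip from %edx and %esp from %ecx, and SYSCALL
	 * overwrites %ecx with the return address, so %ecx and %edx cannot
	 * be relied on across a fast-path system call.  Save both on the
	 * user stack, and pass the user %esp in %ecx so the kernel can
	 * locate the saved values (SYSENTER does not save the user stack
	 * pointer at all).
	 */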
	pushl	%edx
	CFI_ADJUST_CFA_OFFSET	4
	CFI_REL_OFFSET		edx, 0
	pushl	%ecx
	CFI_ADJUST_CFA_OFFSET	4
	CFI_REL_OFFSET		ecx, 0
	movl	%esp, %ecx

#ifdef CONFIG_X86_64
	/* If SYSENTER (Intel) or SYSCALL32 (AMD) is available, use it. */
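	/*
	 * Nothing is executed here by default; at boot, the alternatives
	 * patching code replaces this site with SYSENTER on CPUs that set
	 * X86_FEATURE_SYSENTER32 (Intel) or with SYSCALL on CPUs that set
	 * X86_FEATURE_SYSCALL32 (AMD).  If neither is patched in, we fall
	 * through to the INT $0x80 path below.
	 */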
	ALTERNATIVE_2 "", "sysenter", X86_FEATURE_SYSENTER32, \
	                  "syscall",  X86_FEATURE_SYSCALL32
#endif

	/* Enter using int $0x80 */
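	/*
	 * %ecx currently holds the old %esp; the saved user %ecx (the
	 * second syscall argument) is on top of the stack, so reload it
	 * before trapping.
	 */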
	movl	(%esp), %ecx
	int	$0x80
GLOBAL(int80_landing_pad)
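	/*
	 * The kernel's SYSENTER and SYSCALL return paths resume userspace
	 * here rather than after the instruction that entered the kernel;
	 * it finds this address through the int80_landing_pad symbol in
	 * the vDSO image.
	 */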

	/* Restore ECX and EDX in case they were clobbered. */
	popl	%ecx
	CFI_RESTORE		ecx
	CFI_ADJUST_CFA_OFFSET	-4
	popl	%edx
	CFI_RESTORE		edx
	CFI_ADJUST_CFA_OFFSET	-4
	ret
	CFI_ENDPROC

	.size __kernel_vsyscall,.-__kernel_vsyscall
	.previous