/* xref: /openbmc/linux/arch/x86/lib/getuser.S (revision 74be2d3b) */
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * __get_user functions.
 *
 * (C) Copyright 1998 Linus Torvalds
 * (C) Copyright 2005 Andi Kleen
 * (C) Copyright 2008 Glauber Costa
 *
 * These functions have a non-standard call interface
 * to make them more efficient, especially as they
 * return an error value in addition to the "real"
 * return value.
 */

/*
 * __get_user_X
 *
 * Inputs:	%[r|e]ax contains the address.
 *
 * Outputs:	%[r|e]ax is error code (0 or -EFAULT)
 *		%[r|e]dx contains zero-extended value
 *		%ecx contains the high half for 32-bit __get_user_8
 *
 * These functions should not modify any other registers,
 * as they get called from within inline assembly.
 */

#include <linux/linkage.h>
#include <asm/page_types.h>
#include <asm/errno.h>
#include <asm/asm-offsets.h>
#include <asm/thread_info.h>
#include <asm/asm.h>
#include <asm/smap.h>
#include <asm/export.h>

	.text
/*
 * Fetch one byte from user space.
 * In:  %[r|e]ax = user address
 * Out: %[r|e]ax = 0 on success or -EFAULT on fault
 *      %edx     = zero-extended byte (0 on fault)
 */
SYM_FUNC_START(__get_user_1)
	/* Reject kernel addresses: address must be below current->addr_limit. */
	mov PER_CPU_VAR(current_task), %_ASM_DX
	cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
	jae bad_get_user
	/* Spectre v1: mask the address to 0 under bad speculation. */
	sbb %_ASM_DX, %_ASM_DX		/* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX
	ASM_STAC			/* open SMAP window for the user access */
1:	movzbl (%_ASM_AX),%edx		/* may fault; fixed up via exception table */
	xor %eax,%eax			/* success */
	ASM_CLAC
	ret
SYM_FUNC_END(__get_user_1)
EXPORT_SYMBOL(__get_user_1)

/*
 * Fetch two bytes from user space.
 * In:  %[r|e]ax = user address
 * Out: %[r|e]ax = 0 on success or -EFAULT on fault
 *      %edx     = zero-extended 16-bit value (0 on fault)
 */
SYM_FUNC_START(__get_user_2)
	add $1,%_ASM_AX			/* point at last byte; CF set on wraparound */
	jc bad_get_user
	/* The access must end below current->addr_limit. */
	mov PER_CPU_VAR(current_task), %_ASM_DX
	cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
	jae bad_get_user
	/* Spectre v1: mask the address to 0 under bad speculation. */
	sbb %_ASM_DX, %_ASM_DX		/* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX
	ASM_STAC
2:	movzwl -1(%_ASM_AX),%edx	/* -1 undoes the add above; may fault */
	xor %eax,%eax
	ASM_CLAC
	ret
SYM_FUNC_END(__get_user_2)
EXPORT_SYMBOL(__get_user_2)

/*
 * Fetch four bytes from user space.
 * In:  %[r|e]ax = user address
 * Out: %[r|e]ax = 0 on success or -EFAULT on fault
 *      %edx     = zero-extended 32-bit value (0 on fault)
 */
SYM_FUNC_START(__get_user_4)
	add $3,%_ASM_AX			/* point at last byte; CF set on wraparound */
	jc bad_get_user
	/* The access must end below current->addr_limit. */
	mov PER_CPU_VAR(current_task), %_ASM_DX
	cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
	jae bad_get_user
	/* Spectre v1: mask the address to 0 under bad speculation. */
	sbb %_ASM_DX, %_ASM_DX		/* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX
	ASM_STAC
3:	movl -3(%_ASM_AX),%edx		/* -3 undoes the add above; may fault */
	xor %eax,%eax
	ASM_CLAC
	ret
SYM_FUNC_END(__get_user_4)
EXPORT_SYMBOL(__get_user_4)

/*
 * Fetch eight bytes from user space.
 * In:  %[r|e]ax = user address
 * Out: %[r|e]ax = 0 on success or -EFAULT on fault
 *      64-bit: %rdx = value; 32-bit: %edx = low half, %ecx = high half
 *      (all value registers are zeroed on fault)
 */
SYM_FUNC_START(__get_user_8)
#ifdef CONFIG_X86_64
	add $7,%_ASM_AX			/* point at last byte; CF set on wraparound */
	jc bad_get_user
	/* The access must end below current->addr_limit. */
	mov PER_CPU_VAR(current_task), %_ASM_DX
	cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
	jae bad_get_user
	/* Spectre v1: mask the address to 0 under bad speculation. */
	sbb %_ASM_DX, %_ASM_DX		/* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX
	ASM_STAC
4:	movq -7(%_ASM_AX),%rdx		/* -7 undoes the add above; may fault */
	xor %eax,%eax
	ASM_CLAC
	ret
#else
	/* 32-bit: two 32-bit loads; faults go to the 8-byte fixup which
	 * clears both %edx and %ecx. */
	add $7,%_ASM_AX			/* point at last byte; CF set on wraparound */
	jc bad_get_user_8
	mov PER_CPU_VAR(current_task), %_ASM_DX
	cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
	jae bad_get_user_8
	sbb %_ASM_DX, %_ASM_DX		/* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX
	ASM_STAC
4:	movl -7(%_ASM_AX),%edx		/* low half */
5:	movl -3(%_ASM_AX),%ecx		/* high half */
	xor %eax,%eax
	ASM_CLAC
	ret
#endif
SYM_FUNC_END(__get_user_8)
EXPORT_SYMBOL(__get_user_8)


/*
 * Common fault exit: close the SMAP window (when entered via the exception
 * table), zero the value register and return -EFAULT. bad_get_user is the
 * entry used by the in-line range checks, which run before ASM_STAC.
 */
SYM_CODE_START_LOCAL(.Lbad_get_user_clac)
	ASM_CLAC
bad_get_user:
	xor %edx,%edx			/* zero the "value" output */
	mov $(-EFAULT),%_ASM_AX
	ret
SYM_CODE_END(.Lbad_get_user_clac)

#ifdef CONFIG_X86_32
/*
 * Fault exit for the 32-bit __get_user_8 path: like bad_get_user, but also
 * clears %ecx, which carries the high half of the 64-bit value.
 */
SYM_CODE_START_LOCAL(.Lbad_get_user_8_clac)
	ASM_CLAC
bad_get_user_8:
	xor %edx,%edx			/* zero low half */
	xor %ecx,%ecx			/* zero high half */
	mov $(-EFAULT),%_ASM_AX
	ret
SYM_CODE_END(.Lbad_get_user_8_clac)
#endif

/* Exception table: route faults at the tagged user-space loads to the
 * matching fixup, which CLACs and returns -EFAULT. */
	_ASM_EXTABLE_UA(1b, .Lbad_get_user_clac)
	_ASM_EXTABLE_UA(2b, .Lbad_get_user_clac)
	_ASM_EXTABLE_UA(3b, .Lbad_get_user_clac)
#ifdef CONFIG_X86_64
	_ASM_EXTABLE_UA(4b, .Lbad_get_user_clac)
#else
	_ASM_EXTABLE_UA(4b, .Lbad_get_user_8_clac)
	_ASM_EXTABLE_UA(5b, .Lbad_get_user_8_clac)
#endif
