// SPDX-License-Identifier: GPL-2.0
/*
 * User address space access functions.
 * The non-inlined parts of asm-i386/uaccess.h are here.
 *
 * Copyright 1997 Andi Kleen <ak@muc.de>
 * Copyright 1997 Linus Torvalds
 */
#include <linux/export.h>
#include <linux/uaccess.h>
#include <asm/asm.h>

#ifdef CONFIG_X86_INTEL_USERCOPY
/*
 * Alignment at which movsl is preferred for bulk memory copies.
 */
struct movsl_mask movsl_mask __read_mostly;
#endif

static inline int __movsl_is_ok(unsigned long a1, unsigned long a2, unsigned long n)
{
#ifdef CONFIG_X86_INTEL_USERCOPY
	if (n >= 64 && ((a1 ^ a2) & movsl_mask.mask))
		return 0;
#endif
	return 1;
}
#define movsl_is_ok(a1, a2, n) \
	__movsl_is_ok((unsigned long)(a1), (unsigned long)(a2), (n))
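
/*
 * Worked example (illustrative values, not from this file): with
 * movsl_mask.mask == 7, a 128-byte copy from 0x1004 to 0x2000 gives
 * (0x1004 ^ 0x2000) & 7 == 4, so the two buffers can never reach a
 * common 8-byte alignment and movsl_is_ok() returns 0; the unrolled
 * __copy_user_intel() path is used instead of rep; movsl.  Copies
 * shorter than 64 bytes always take the rep; movsl path.
 */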

/*
 * Zero Userspace
 */

#define __do_clear_user(addr,size)					\
do {									\
	int __d0;							\
	might_fault();							\
	__asm__ __volatile__(						\
		ASM_STAC "\n"						\
		"0:	rep; stosl\n"					\
		"	movl %2,%0\n"					\
		"1:	rep; stosb\n"					\
		"2: " ASM_CLAC "\n"					\
		_ASM_EXTABLE_TYPE_REG(0b, 2b, EX_TYPE_UCOPY_LEN4, %2)	\
		_ASM_EXTABLE_UA(1b, 2b)					\
		: "=&c"(size), "=&D" (__d0)				\
		: "r"(size & 3), "0"(size / 4), "1"(addr), "a"(0));	\
} while (0)
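
/*
 * Worked example for the macro above (illustrative size): for
 * size == 13, "rep; stosl" runs with ECX = 13 / 4 = 3 and zeroes 12
 * bytes, then "rep; stosb" runs with ECX = 13 & 3 = 1 and zeroes the
 * last byte.  On a fault in the stosl, the EX_TYPE_UCOPY_LEN4 fixup
 * recomputes the not-yet-cleared length as 4 * ECX plus the byte
 * remainder held in %2, which is what ends up back in "size".
 */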

/**
 * clear_user - Zero a block of memory in user space.
 * @to:   Destination address, in user space.
 * @n:    Number of bytes to zero.
 *
 * Zero a block of memory in user space.
 *
 * Return: number of bytes that could not be cleared.
 * On success, this will be zero.
 */
unsigned long
clear_user(void __user *to, unsigned long n)
{
	might_fault();
	if (access_ok(to, n))
		__do_clear_user(to, n);
	return n;
}
EXPORT_SYMBOL(clear_user);
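
/*
 * Typical caller pattern (hypothetical sketch; 'ubuf' and 'len' are
 * illustrative names, not from this file):
 *
 *	if (clear_user(ubuf, len))
 *		return -EFAULT;
 *
 * A non-zero return means some trailing bytes could not be zeroed.
 */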

/**
 * __clear_user - Zero a block of memory in user space, with less checking.
 * @to:   Destination address, in user space.
 * @n:    Number of bytes to zero.
 *
 * Zero a block of memory in user space.  Caller must check
 * the specified block with access_ok() before calling this function.
 *
 * Return: number of bytes that could not be cleared.
 * On success, this will be zero.
 */
unsigned long
__clear_user(void __user *to, unsigned long n)
{
	__do_clear_user(to, n);
	return n;
}
EXPORT_SYMBOL(__clear_user);
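
/*
 * Sketch of the intended division of labour (an assumption based on
 * the kernel-doc above, not code from this file): a caller that has
 * already validated the range may batch work under one access_ok():
 *
 *	if (!access_ok(ubuf, total))
 *		return -EFAULT;
 *	left = __clear_user(ubuf, total);
 */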

#ifdef CONFIG_X86_INTEL_USERCOPY
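
/*
 * Unrolled copy: each iteration moves 64 bytes through %eax/%edx.
 * The loads at labels 1 and 2 touch the source 32 and 64 bytes ahead
 * of the current position so the next cachelines are fetched early;
 * the "cmpl $67" guard skips the far read-ahead once fewer than 68
 * bytes remain.  The tail is finished with rep; movsl and rep; movsb,
 * and every access has an exception-table entry so that a fault makes
 * the function return the number of bytes left uncopied.
 */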
static unsigned long
__copy_user_intel(void __user *to, const void *from, unsigned long size)
{
	int d0, d1;
	__asm__ __volatile__(
		       "       .align 2,0x90\n"
		       "1:     movl 32(%4), %%eax\n"
		       "       cmpl $67, %0\n"
		       "       jbe 3f\n"
		       "2:     movl 64(%4), %%eax\n"
		       "       .align 2,0x90\n"
		       "3:     movl 0(%4), %%eax\n"
		       "4:     movl 4(%4), %%edx\n"
		       "5:     movl %%eax, 0(%3)\n"
		       "6:     movl %%edx, 4(%3)\n"
		       "7:     movl 8(%4), %%eax\n"
		       "8:     movl 12(%4),%%edx\n"
		       "9:     movl %%eax, 8(%3)\n"
		       "10:    movl %%edx, 12(%3)\n"
		       "11:    movl 16(%4), %%eax\n"
		       "12:    movl 20(%4), %%edx\n"
		       "13:    movl %%eax, 16(%3)\n"
		       "14:    movl %%edx, 20(%3)\n"
		       "15:    movl 24(%4), %%eax\n"
		       "16:    movl 28(%4), %%edx\n"
		       "17:    movl %%eax, 24(%3)\n"
		       "18:    movl %%edx, 28(%3)\n"
		       "19:    movl 32(%4), %%eax\n"
		       "20:    movl 36(%4), %%edx\n"
		       "21:    movl %%eax, 32(%3)\n"
		       "22:    movl %%edx, 36(%3)\n"
		       "23:    movl 40(%4), %%eax\n"
		       "24:    movl 44(%4), %%edx\n"
		       "25:    movl %%eax, 40(%3)\n"
		       "26:    movl %%edx, 44(%3)\n"
		       "27:    movl 48(%4), %%eax\n"
		       "28:    movl 52(%4), %%edx\n"
		       "29:    movl %%eax, 48(%3)\n"
		       "30:    movl %%edx, 52(%3)\n"
		       "31:    movl 56(%4), %%eax\n"
		       "32:    movl 60(%4), %%edx\n"
		       "33:    movl %%eax, 56(%3)\n"
		       "34:    movl %%edx, 60(%3)\n"
		       "       addl $-64, %0\n"
		       "       addl $64, %4\n"
		       "       addl $64, %3\n"
		       "       cmpl $63, %0\n"
		       "       ja  1b\n"
		       "35:    movl  %0, %%eax\n"
		       "       shrl  $2, %0\n"
		       "       andl  $3, %%eax\n"
		       "       cld\n"
		       "99:    rep; movsl\n"
		       "36:    movl %%eax, %0\n"
		       "37:    rep; movsb\n"
		       "100:\n"
		       _ASM_EXTABLE_UA(1b, 100b)
		       _ASM_EXTABLE_UA(2b, 100b)
		       _ASM_EXTABLE_UA(3b, 100b)
		       _ASM_EXTABLE_UA(4b, 100b)
		       _ASM_EXTABLE_UA(5b, 100b)
		       _ASM_EXTABLE_UA(6b, 100b)
		       _ASM_EXTABLE_UA(7b, 100b)
		       _ASM_EXTABLE_UA(8b, 100b)
		       _ASM_EXTABLE_UA(9b, 100b)
		       _ASM_EXTABLE_UA(10b, 100b)
		       _ASM_EXTABLE_UA(11b, 100b)
		       _ASM_EXTABLE_UA(12b, 100b)
		       _ASM_EXTABLE_UA(13b, 100b)
		       _ASM_EXTABLE_UA(14b, 100b)
		       _ASM_EXTABLE_UA(15b, 100b)
		       _ASM_EXTABLE_UA(16b, 100b)
		       _ASM_EXTABLE_UA(17b, 100b)
		       _ASM_EXTABLE_UA(18b, 100b)
		       _ASM_EXTABLE_UA(19b, 100b)
		       _ASM_EXTABLE_UA(20b, 100b)
		       _ASM_EXTABLE_UA(21b, 100b)
		       _ASM_EXTABLE_UA(22b, 100b)
		       _ASM_EXTABLE_UA(23b, 100b)
		       _ASM_EXTABLE_UA(24b, 100b)
		       _ASM_EXTABLE_UA(25b, 100b)
		       _ASM_EXTABLE_UA(26b, 100b)
		       _ASM_EXTABLE_UA(27b, 100b)
		       _ASM_EXTABLE_UA(28b, 100b)
		       _ASM_EXTABLE_UA(29b, 100b)
		       _ASM_EXTABLE_UA(30b, 100b)
		       _ASM_EXTABLE_UA(31b, 100b)
		       _ASM_EXTABLE_UA(32b, 100b)
		       _ASM_EXTABLE_UA(33b, 100b)
		       _ASM_EXTABLE_UA(34b, 100b)
		       _ASM_EXTABLE_UA(35b, 100b)
		       _ASM_EXTABLE_UA(36b, 100b)
		       _ASM_EXTABLE_UA(37b, 100b)
		       _ASM_EXTABLE_TYPE_REG(99b, 100b, EX_TYPE_UCOPY_LEN4, %%eax)
		       : "=&c"(size), "=&D" (d0), "=&S" (d1)
		       :  "1"(to), "2"(from), "0"(size)
		       : "eax", "edx", "memory");
	return size;
}

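/*
 * Variant of the above for cache-bypassing copies: the stores use
 * movnti (non-temporal), so a large copy does not evict useful data
 * from the cache, and the sfence after the loop orders the
 * non-temporal stores before the rep; movsl/movsb tail.  movnti is an
 * SSE2 instruction; see the X86_FEATURE_XMM2 check in the caller
 * below.
 */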
static unsigned long __copy_user_intel_nocache(void *to,
				const void __user *from, unsigned long size)
{
	int d0, d1;

	__asm__ __volatile__(
	       "        .align 2,0x90\n"
	       "0:      movl 32(%4), %%eax\n"
	       "        cmpl $67, %0\n"
	       "        jbe 2f\n"
	       "1:      movl 64(%4), %%eax\n"
	       "        .align 2,0x90\n"
	       "2:      movl 0(%4), %%eax\n"
	       "21:     movl 4(%4), %%edx\n"
	       "        movnti %%eax, 0(%3)\n"
	       "        movnti %%edx, 4(%3)\n"
	       "3:      movl 8(%4), %%eax\n"
	       "31:     movl 12(%4),%%edx\n"
	       "        movnti %%eax, 8(%3)\n"
	       "        movnti %%edx, 12(%3)\n"
	       "4:      movl 16(%4), %%eax\n"
	       "41:     movl 20(%4), %%edx\n"
	       "        movnti %%eax, 16(%3)\n"
	       "        movnti %%edx, 20(%3)\n"
	       "10:     movl 24(%4), %%eax\n"
	       "51:     movl 28(%4), %%edx\n"
	       "        movnti %%eax, 24(%3)\n"
	       "        movnti %%edx, 28(%3)\n"
	       "11:     movl 32(%4), %%eax\n"
	       "61:     movl 36(%4), %%edx\n"
	       "        movnti %%eax, 32(%3)\n"
	       "        movnti %%edx, 36(%3)\n"
	       "12:     movl 40(%4), %%eax\n"
	       "71:     movl 44(%4), %%edx\n"
	       "        movnti %%eax, 40(%3)\n"
	       "        movnti %%edx, 44(%3)\n"
	       "13:     movl 48(%4), %%eax\n"
	       "81:     movl 52(%4), %%edx\n"
	       "        movnti %%eax, 48(%3)\n"
	       "        movnti %%edx, 52(%3)\n"
	       "14:     movl 56(%4), %%eax\n"
	       "91:     movl 60(%4), %%edx\n"
	       "        movnti %%eax, 56(%3)\n"
	       "        movnti %%edx, 60(%3)\n"
	       "        addl $-64, %0\n"
	       "        addl $64, %4\n"
	       "        addl $64, %3\n"
	       "        cmpl $63, %0\n"
	       "        ja  0b\n"
	       "        sfence\n"
	       "5:      movl  %0, %%eax\n"
	       "        shrl  $2, %0\n"
	       "        andl $3, %%eax\n"
	       "        cld\n"
	       "6:      rep; movsl\n"
	       "        movl %%eax,%0\n"
	       "7:      rep; movsb\n"
	       "8:\n"
	       _ASM_EXTABLE_UA(0b, 8b)
	       _ASM_EXTABLE_UA(1b, 8b)
	       _ASM_EXTABLE_UA(2b, 8b)
	       _ASM_EXTABLE_UA(21b, 8b)
	       _ASM_EXTABLE_UA(3b, 8b)
	       _ASM_EXTABLE_UA(31b, 8b)
	       _ASM_EXTABLE_UA(4b, 8b)
	       _ASM_EXTABLE_UA(41b, 8b)
	       _ASM_EXTABLE_UA(10b, 8b)
	       _ASM_EXTABLE_UA(51b, 8b)
	       _ASM_EXTABLE_UA(11b, 8b)
	       _ASM_EXTABLE_UA(61b, 8b)
	       _ASM_EXTABLE_UA(12b, 8b)
	       _ASM_EXTABLE_UA(71b, 8b)
	       _ASM_EXTABLE_UA(13b, 8b)
	       _ASM_EXTABLE_UA(81b, 8b)
	       _ASM_EXTABLE_UA(14b, 8b)
	       _ASM_EXTABLE_UA(91b, 8b)
	       _ASM_EXTABLE_TYPE_REG(6b, 8b, EX_TYPE_UCOPY_LEN4, %%eax)
	       _ASM_EXTABLE_UA(7b, 8b)
	       : "=&c"(size), "=&D" (d0), "=&S" (d1)
	       :  "1"(to), "2"(from), "0"(size)
	       : "eax", "edx", "memory");
	return size;
}

#else

/*
 * Leave these declared but undefined.  There should not be any
 * references to them.
 */
unsigned long __copy_user_intel(void __user *to, const void *from,
					unsigned long size);
#endif /* CONFIG_X86_INTEL_USERCOPY */

/* Generic arbitrary sized copy.  */
#define __copy_user(to, from, size)					\
do {									\
	int __d0, __d1, __d2;						\
	__asm__ __volatile__(						\
		"	cmp  $7,%0\n"					\
		"	jbe  1f\n"					\
		"	movl %1,%0\n"					\
		"	negl %0\n"					\
		"	andl $7,%0\n"					\
		"	subl %0,%3\n"					\
		"4:	rep; movsb\n"					\
		"	movl %3,%0\n"					\
		"	shrl $2,%0\n"					\
		"	andl $3,%3\n"					\
		"	.align 2,0x90\n"				\
		"0:	rep; movsl\n"					\
		"	movl %3,%0\n"					\
		"1:	rep; movsb\n"					\
		"2:\n"							\
		_ASM_EXTABLE_TYPE_REG(4b, 2b, EX_TYPE_UCOPY_LEN1, %3)	\
		_ASM_EXTABLE_TYPE_REG(0b, 2b, EX_TYPE_UCOPY_LEN4, %3)	\
		_ASM_EXTABLE_UA(1b, 2b)					\
		: "=&c"(size), "=&D" (__d0), "=&S" (__d1), "=r"(__d2)	\
		: "3"(size), "0"(size), "1"(to), "2"(from)		\
		: "memory");						\
} while (0)
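
/*
 * Worked example for the macro above (illustrative values): copying
 * size == 23 bytes to a destination address whose low bits are 0x5:
 * the prologue computes (-0x5) & 7 == 3 head bytes and copies them
 * with rep; movsb to bring the destination to 8-byte alignment,
 * leaving 20 bytes; those go out as 20 / 4 == 5 dwords via rep; movsl
 * followed by 20 & 3 == 0 tail bytes.  Copies of 7 bytes or fewer go
 * straight to the bytewise tail copy.
 */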

/*
 * Bulk copy using the low-level machinery above: take the plain
 * rep; movs copy when source and destination can be brought to a
 * common alignment (or the copy is short), otherwise the unrolled
 * Intel variant.  __uaccess_begin_nospec() opens the user-access
 * window around either path.
 */
unsigned long __copy_user_ll(void *to, const void *from, unsigned long n)
{
	__uaccess_begin_nospec();
	if (movsl_is_ok(to, from, n))
		__copy_user(to, from, n);
	else
		n = __copy_user_intel(to, from, n);
	__uaccess_end();
	return n;
}
EXPORT_SYMBOL(__copy_user_ll);

/*
 * Cache-bypassing copy from user space: for copies over 64 bytes on
 * SSE2-capable CPUs, use the movnti-based routine to avoid polluting
 * the cache; otherwise fall back to the ordinary copy.  As the
 * "_nozero" suffix suggests, the uncopied tail of the destination is
 * left untouched when a fault cuts the copy short.
 */
unsigned long __copy_from_user_ll_nocache_nozero(void *to, const void __user *from,
					unsigned long n)
{
	__uaccess_begin_nospec();
#ifdef CONFIG_X86_INTEL_USERCOPY
	if (n > 64 && static_cpu_has(X86_FEATURE_XMM2))
		n = __copy_user_intel_nocache(to, from, n);
	else
		__copy_user(to, from, n);
#else
	__copy_user(to, from, n);
#endif
	__uaccess_end();
	return n;
}
EXPORT_SYMBOL(__copy_from_user_ll_nocache_nozero);
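
/*
 * Hypothetical caller sketch (illustrative, not from this file): the
 * nocache path suits data that is written once and not read back
 * soon, e.g. staging a large user buffer for later DMA:
 *
 *	left = __copy_from_user_ll_nocache_nozero(dst, ubuf, len);
 *	if (left)
 *		return -EFAULT;
 */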