xref: /openbmc/linux/arch/arc/include/asm/uaccess.h (revision 2a598d0b)
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2004, 2007-2010, 2011-2012 Synopsys, Inc. (www.synopsys.com)
 *
 * vineetg: June 2010
 *    -__clear_user(), called multiple times during ELF load, was a byte
 *     loop; converted it to do as much word-wise clearing as possible.
 *
 * vineetg: Dec 2009
 *    -Hand-crafted constant propagation for "constant" copy sizes
 *    -stock kernel shrunk by 33K at -O3
 *
 * vineetg: Sept 2009
 *    -Added option to (UN)inline copy_(to|from)_user to reduce code size
 *    -kernel shrunk by 200K even at -O3 (gcc 4.2.1)
 *    -Enabled when building with -Os
 *
 * Amit Bhor, Sameer Dhavale: Codito Technologies 2004
 */

#ifndef _ASM_ARC_UACCESS_H
#define _ASM_ARC_UACCESS_H

#include <linux/string.h>	/* for generic string functions */

/*********** Single byte/hword/word copies ******************/

#define __get_user_fn(sz, u, k)					\
({								\
	long __ret = 0;	/* success by default */	\
	switch (sz) {						\
	case 1: __arc_get_user_one(*(k), u, "ldb", __ret); break;	\
	case 2: __arc_get_user_one(*(k), u, "ldw", __ret); break;	\
	case 4: __arc_get_user_one(*(k), u, "ld", __ret);  break;	\
	case 8: __arc_get_user_one_64(*(k), u, __ret);     break;	\
	}							\
	__ret;							\
})
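
/*
 * Illustrative sketch only (not part of this header): the generic uaccess
 * layer is expected to expand __get_user()/get_user() into a call of the
 * hook above, roughly like (names hypothetical):
 *
 *	u32 val;
 *	long err = __get_user_fn(sizeof(val), uptr, &val);
 *
 * err == 0 means val now holds the user word; err == -EFAULT means the
 * access faulted and the fixup code zeroed val.
 */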

/*
 * Returns 0 on success, -EFAULT if not.
 * @ret already contains 0 on entry - errors are assumed to be the less
 * likely case (hence the "+r" asm constraint below).
 * On a fault, the fixup code rewrites it to -EFAULT.
 */
#define __arc_get_user_one(dst, src, op, ret)	\
	__asm__ __volatile__(                   \
	"1:	"op"    %1,[%2]\n"		\
	"2:	;nop\n"				\
	"	.section .fixup, \"ax\"\n"	\
	"	.align 4\n"			\
	"3:	# return -EFAULT\n"		\
	"	mov %0, %3\n"			\
	"	# zero out dst ptr\n"		\
	"	mov %1,  0\n"			\
	"	j   2b\n"			\
	"	.previous\n"			\
	"	.section __ex_table, \"a\"\n"	\
	"	.align 4\n"			\
	"	.word 1b,3b\n"			\
	"	.previous\n"			\
						\
	: "+r" (ret), "=r" (dst)		\
	: "r" (src), "ir" (-EFAULT))
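
/*
 * Each ".word 1b,3b" directive above emits one exception-table entry
 * meaning "if the access at label 1 faults, resume at label 3".  A sketch
 * of the record the assembler produces (assuming ARC's absolute-address
 * extable layout):
 *
 *	struct exception_table_entry {
 *		unsigned long insn;	// address of the faulting access
 *		unsigned long fixup;	// address of the recovery code
 *	};
 *
 * On a page fault the kernel searches __ex_table for the faulting PC and,
 * on a hit, transfers control to the fixup instead of oopsing.
 */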

#define __arc_get_user_one_64(dst, src, ret)	\
	__asm__ __volatile__(                   \
	"1:	ld   %1,[%2]\n"			\
	"4:	ld  %R1,[%2, 4]\n"		\
	"2:	;nop\n"				\
	"	.section .fixup, \"ax\"\n"	\
	"	.align 4\n"			\
	"3:	# return -EFAULT\n"		\
	"	mov %0, %3\n"			\
	"	# zero out dst ptr\n"		\
	"	mov %1,  0\n"			\
	"	mov %R1, 0\n"			\
	"	j   2b\n"			\
	"	.previous\n"			\
	"	.section __ex_table, \"a\"\n"	\
	"	.align 4\n"			\
	"	.word 1b,3b\n"			\
	"	.word 4b,3b\n"			\
	"	.previous\n"			\
						\
	: "+r" (ret), "=r" (dst)		\
	: "r" (src), "ir" (-EFAULT))

#define __put_user_fn(sz, u, k)					\
({								\
	long __ret = 0;	/* success by default */	\
	switch (sz) {						\
	case 1: __arc_put_user_one(*(k), u, "stb", __ret); break;	\
	case 2: __arc_put_user_one(*(k), u, "stw", __ret); break;	\
	case 4: __arc_put_user_one(*(k), u, "st", __ret);  break;	\
	case 8: __arc_put_user_one_64(*(k), u, __ret);     break;	\
	}							\
	__ret;							\
})

#define __arc_put_user_one(src, dst, op, ret)	\
	__asm__ __volatile__(                   \
	"1:	"op"    %1,[%2]\n"		\
	"2:	;nop\n"				\
	"	.section .fixup, \"ax\"\n"	\
	"	.align 4\n"			\
	"3:	mov %0, %3\n"			\
	"	j   2b\n"			\
	"	.previous\n"			\
	"	.section __ex_table, \"a\"\n"	\
	"	.align 4\n"			\
	"	.word 1b,3b\n"			\
	"	.previous\n"			\
						\
	: "+r" (ret)				\
	: "r" (src), "r" (dst), "ir" (-EFAULT))

#define __arc_put_user_one_64(src, dst, ret)	\
	__asm__ __volatile__(                   \
	"1:	st   %1,[%2]\n"			\
	"4:	st  %R1,[%2, 4]\n"		\
	"2:	;nop\n"				\
	"	.section .fixup, \"ax\"\n"	\
	"	.align 4\n"			\
	"3:	mov %0, %3\n"			\
	"	j   2b\n"			\
	"	.previous\n"			\
	"	.section __ex_table, \"a\"\n"	\
	"	.align 4\n"			\
	"	.word 1b,3b\n"			\
	"	.word 4b,3b\n"			\
	"	.previous\n"			\
						\
	: "+r" (ret)				\
	: "r" (src), "r" (dst), "ir" (-EFAULT))
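
/*
 * Note that the put-side fixups are simpler than the get-side ones: a
 * faulting store leaves no kernel-side destination to scrub, so the
 * recovery code only sets @ret to -EFAULT and jumps past the faulting
 * instruction.
 */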


static inline unsigned long
raw_copy_from_user(void *to, const void __user *from, unsigned long n)
{
	long res = 0;
	char val;
	unsigned long tmp1, tmp2, tmp3, tmp4;
	unsigned long orig_n = n;

	if (n == 0)
		return 0;

	/* unaligned */
	if (((unsigned long)to & 0x3) || ((unsigned long)from & 0x3)) {

		unsigned char tmp;

		__asm__ __volatile__ (
		"	mov.f   lp_count, %0		\n"
		"	lpnz 2f				\n"
		"1:	ldb.ab  %1, [%3, 1]		\n"
		"	stb.ab  %1, [%2, 1]		\n"
		"	sub     %0,%0,1			\n"
		"2:	;nop				\n"
		"	.section .fixup, \"ax\"		\n"
		"	.align 4			\n"
		"3:	j   2b				\n"
		"	.previous			\n"
		"	.section __ex_table, \"a\"	\n"
		"	.align 4			\n"
		"	.word   1b, 3b			\n"
		"	.previous			\n"

		: "+r" (n),
		/*
		 * Note the '&' earlyclobber modifier: it guarantees that the
		 * temporary register used inside the loop is not allocated to
		 * the same register as FROM or TO.
		 */
		  "=&r" (tmp), "+r" (to), "+r" (from)
		:
		: "lp_count", "memory");

		return n;
	}

	/*
	 * Hand-crafted constant propagation to reduce the code size of the
	 * laddered copy 16x,8,4,2,1
	 */
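	/*
	 * Illustration (hypothetical size): for a constant n == 22 the
	 * compiler evaluates every "if (orig_n / ...)" test below at build
	 * time, so copy_from_user(dst, src, 22) emits only the 16-, 4- and
	 * 2-byte chunks (22 = 16 + 4 + 2); the remaining branches are dead
	 * code and vanish - the 33K -O3 shrink noted in the header comment.
	 */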
	if (__builtin_constant_p(orig_n)) {
		res = orig_n;

		if (orig_n / 16) {
			orig_n = orig_n % 16;

			__asm__ __volatile__(
			"	lsr   lp_count, %7,4		\n"
			"	lp    3f			\n"
			"1:	ld.ab   %3, [%2, 4]		\n"
			"11:	ld.ab   %4, [%2, 4]		\n"
			"12:	ld.ab   %5, [%2, 4]		\n"
			"13:	ld.ab   %6, [%2, 4]		\n"
			"	st.ab   %3, [%1, 4]		\n"
			"	st.ab   %4, [%1, 4]		\n"
			"	st.ab   %5, [%1, 4]		\n"
			"	st.ab   %6, [%1, 4]		\n"
			"	sub     %0,%0,16		\n"
			"3:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   3b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   1b, 4b			\n"
			"	.word   11b,4b			\n"
			"	.word   12b,4b			\n"
			"	.word   13b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from),
			  "=r"(tmp1), "=r"(tmp2), "=r"(tmp3), "=r"(tmp4)
			: "ir"(n)
			: "lp_count", "memory");
		}
		if (orig_n / 8) {
			orig_n = orig_n % 8;

			__asm__ __volatile__(
			"14:	ld.ab   %3, [%2,4]		\n"
			"15:	ld.ab   %4, [%2,4]		\n"
			"	st.ab   %3, [%1,4]		\n"
			"	st.ab   %4, [%1,4]		\n"
			"	sub     %0,%0,8			\n"
			"31:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   31b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   14b,4b			\n"
			"	.word   15b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from),
			  "=r"(tmp1), "=r"(tmp2)
			:
			: "memory");
		}
		if (orig_n / 4) {
			orig_n = orig_n % 4;

			__asm__ __volatile__(
			"16:	ld.ab   %3, [%2,4]		\n"
			"	st.ab   %3, [%1,4]		\n"
			"	sub     %0,%0,4			\n"
			"32:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   32b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   16b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
			:
			: "memory");
		}
		if (orig_n / 2) {
			orig_n = orig_n % 2;

			__asm__ __volatile__(
			"17:	ldw.ab   %3, [%2,2]		\n"
			"	stw.ab   %3, [%1,2]		\n"
			"	sub      %0,%0,2		\n"
			"33:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   33b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   17b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
			:
			: "memory");
		}
		if (orig_n & 1) {
			__asm__ __volatile__(
			"18:	ldb.ab   %3, [%2,2]		\n"
			"	stb.ab   %3, [%1,2]		\n"
			"	sub      %0,%0,1		\n"
			"34:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   34b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   18b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
			:
			: "memory");
		}
	} else {  /* n is NOT constant, so laddered copy of 16x,8,4,2,1  */

		__asm__ __volatile__(
		"	mov %0,%3			\n"
		"	lsr.f   lp_count, %3,4		\n"  /* 16x bytes */
		"	lpnz    3f			\n"
		"1:	ld.ab   %5, [%2, 4]		\n"
		"11:	ld.ab   %6, [%2, 4]		\n"
		"12:	ld.ab   %7, [%2, 4]		\n"
		"13:	ld.ab   %8, [%2, 4]		\n"
		"	st.ab   %5, [%1, 4]		\n"
		"	st.ab   %6, [%1, 4]		\n"
		"	st.ab   %7, [%1, 4]		\n"
		"	st.ab   %8, [%1, 4]		\n"
		"	sub     %0,%0,16		\n"
		"3:	and.f   %3,%3,0xf		\n"  /* stragglers */
		"	bz      34f			\n"
		"	bbit0   %3,3,31f		\n"  /* 8 bytes left */
		"14:	ld.ab   %5, [%2,4]		\n"
		"15:	ld.ab   %6, [%2,4]		\n"
		"	st.ab   %5, [%1,4]		\n"
		"	st.ab   %6, [%1,4]		\n"
		"	sub.f   %0,%0,8			\n"
		"31:	bbit0   %3,2,32f		\n"  /* 4 bytes left */
		"16:	ld.ab   %5, [%2,4]		\n"
		"	st.ab   %5, [%1,4]		\n"
		"	sub.f   %0,%0,4			\n"
		"32:	bbit0   %3,1,33f		\n"  /* 2 bytes left */
		"17:	ldw.ab  %5, [%2,2]		\n"
		"	stw.ab  %5, [%1,2]		\n"
		"	sub.f   %0,%0,2			\n"
		"33:	bbit0   %3,0,34f		\n"
		"18:	ldb.ab  %5, [%2,1]		\n"  /* 1 byte left */
		"	stb.ab  %5, [%1,1]		\n"
		"	sub.f   %0,%0,1			\n"
		"34:	;nop				\n"
		"	.section .fixup, \"ax\"		\n"
		"	.align 4			\n"
		"4:	j   34b				\n"
		"	.previous			\n"
		"	.section __ex_table, \"a\"	\n"
		"	.align 4			\n"
		"	.word   1b, 4b			\n"
		"	.word   11b,4b			\n"
		"	.word   12b,4b			\n"
		"	.word   13b,4b			\n"
		"	.word   14b,4b			\n"
		"	.word   15b,4b			\n"
		"	.word   16b,4b			\n"
		"	.word   17b,4b			\n"
		"	.word   18b,4b			\n"
		"	.previous			\n"
		: "=r" (res), "+r"(to), "+r"(from), "+r"(n), "=r"(val),
		  "=r"(tmp1), "=r"(tmp2), "=r"(tmp3), "=r"(tmp4)
		:
		: "lp_count", "memory");
	}

	return res;
}
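
/*
 * Minimal usage sketch (hypothetical caller, not from this file).  As with
 * every raw_copy_from_user() implementation, the return value is the number
 * of bytes that could NOT be copied; 0 means complete success:
 *
 *	if (raw_copy_from_user(kbuf, ubuf, len))
 *		return -EFAULT;
 */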

static inline unsigned long
raw_copy_to_user(void __user *to, const void *from, unsigned long n)
{
	long res = 0;
	char val;
	unsigned long tmp1, tmp2, tmp3, tmp4;
	unsigned long orig_n = n;

	if (n == 0)
		return 0;

	/* unaligned */
	if (((unsigned long)to & 0x3) || ((unsigned long)from & 0x3)) {

		unsigned char tmp;

		__asm__ __volatile__(
		"	mov.f   lp_count, %0		\n"
		"	lpnz 3f				\n"
		"	ldb.ab  %1, [%3, 1]		\n"
		"1:	stb.ab  %1, [%2, 1]		\n"
		"	sub     %0, %0, 1		\n"
		"3:	;nop				\n"
		"	.section .fixup, \"ax\"		\n"
		"	.align 4			\n"
		"4:	j   3b				\n"
		"	.previous			\n"
		"	.section __ex_table, \"a\"	\n"
		"	.align 4			\n"
		"	.word   1b, 4b			\n"
		"	.previous			\n"

		: "+r" (n),
		/*
		 * Note the '&' earlyclobber modifier: it guarantees that the
		 * temporary register used inside the loop is not allocated to
		 * the same register as FROM or TO.
		 */
		  "=&r" (tmp), "+r" (to), "+r" (from)
		:
		: "lp_count", "memory");

		return n;
	}

	if (__builtin_constant_p(orig_n)) {
		res = orig_n;

		if (orig_n / 16) {
			orig_n = orig_n % 16;

			__asm__ __volatile__(
			"	lsr lp_count, %7,4		\n"
			"	lp  3f				\n"
			"	ld.ab %3, [%2, 4]		\n"
			"	ld.ab %4, [%2, 4]		\n"
			"	ld.ab %5, [%2, 4]		\n"
			"	ld.ab %6, [%2, 4]		\n"
			"1:	st.ab %3, [%1, 4]		\n"
			"11:	st.ab %4, [%1, 4]		\n"
			"12:	st.ab %5, [%1, 4]		\n"
			"13:	st.ab %6, [%1, 4]		\n"
			"	sub   %0, %0, 16		\n"
			"3:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   3b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   1b, 4b			\n"
			"	.word   11b,4b			\n"
			"	.word   12b,4b			\n"
			"	.word   13b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from),
			  "=r"(tmp1), "=r"(tmp2), "=r"(tmp3), "=r"(tmp4)
			: "ir"(n)
			: "lp_count", "memory");
		}
		if (orig_n / 8) {
			orig_n = orig_n % 8;

			__asm__ __volatile__(
			"	ld.ab   %3, [%2,4]		\n"
			"	ld.ab   %4, [%2,4]		\n"
			"14:	st.ab   %3, [%1,4]		\n"
			"15:	st.ab   %4, [%1,4]		\n"
			"	sub     %0, %0, 8		\n"
			"31:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   31b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   14b,4b			\n"
			"	.word   15b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from),
			  "=r"(tmp1), "=r"(tmp2)
			:
			: "memory");
		}
		if (orig_n / 4) {
			orig_n = orig_n % 4;

			__asm__ __volatile__(
			"	ld.ab   %3, [%2,4]		\n"
			"16:	st.ab   %3, [%1,4]		\n"
			"	sub     %0, %0, 4		\n"
			"32:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   32b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   16b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
			:
			: "memory");
		}
		if (orig_n / 2) {
			orig_n = orig_n % 2;

			__asm__ __volatile__(
			"	ldw.ab    %3, [%2,2]		\n"
			"17:	stw.ab    %3, [%1,2]		\n"
			"	sub       %0, %0, 2		\n"
			"33:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   33b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   17b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
			:
			: "memory");
		}
		if (orig_n & 1) {
			__asm__ __volatile__(
			"	ldb.ab  %3, [%2,1]		\n"
			"18:	stb.ab  %3, [%1,1]		\n"
			"	sub     %0, %0, 1		\n"
			"34:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   34b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   18b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
			:
			: "memory");
		}
	} else {  /* n is NOT constant, so laddered copy of 16x,8,4,2,1  */

		__asm__ __volatile__(
		"	mov   %0,%3			\n"
		"	lsr.f lp_count, %3,4		\n"  /* 16x bytes */
		"	lpnz  3f			\n"
		"	ld.ab %5, [%2, 4]		\n"
		"	ld.ab %6, [%2, 4]		\n"
		"	ld.ab %7, [%2, 4]		\n"
		"	ld.ab %8, [%2, 4]		\n"
		"1:	st.ab %5, [%1, 4]		\n"
		"11:	st.ab %6, [%1, 4]		\n"
		"12:	st.ab %7, [%1, 4]		\n"
		"13:	st.ab %8, [%1, 4]		\n"
		"	sub   %0, %0, 16		\n"
		"3:	and.f %3,%3,0xf			\n" /* stragglers */
		"	bz 34f				\n"
		"	bbit0   %3,3,31f		\n" /* 8 bytes left */
		"	ld.ab   %5, [%2,4]		\n"
		"	ld.ab   %6, [%2,4]		\n"
		"14:	st.ab   %5, [%1,4]		\n"
		"15:	st.ab   %6, [%1,4]		\n"
		"	sub.f   %0, %0, 8		\n"
		"31:	bbit0   %3,2,32f		\n"  /* 4 bytes left */
		"	ld.ab   %5, [%2,4]		\n"
		"16:	st.ab   %5, [%1,4]		\n"
		"	sub.f   %0, %0, 4		\n"
		"32:	bbit0 %3,1,33f			\n"  /* 2 bytes left */
		"	ldw.ab    %5, [%2,2]		\n"
		"17:	stw.ab    %5, [%1,2]		\n"
		"	sub.f %0, %0, 2			\n"
		"33:	bbit0 %3,0,34f			\n"
		"	ldb.ab    %5, [%2,1]		\n"  /* 1 byte left */
		"18:	stb.ab  %5, [%1,1]		\n"
		"	sub.f %0, %0, 1			\n"
		"34:	;nop				\n"
		"	.section .fixup, \"ax\"		\n"
		"	.align 4			\n"
		"4:	j   34b				\n"
		"	.previous			\n"
		"	.section __ex_table, \"a\"	\n"
		"	.align 4			\n"
		"	.word   1b, 4b			\n"
		"	.word   11b,4b			\n"
		"	.word   12b,4b			\n"
		"	.word   13b,4b			\n"
		"	.word   14b,4b			\n"
		"	.word   15b,4b			\n"
		"	.word   16b,4b			\n"
		"	.word   17b,4b			\n"
		"	.word   18b,4b			\n"
		"	.previous			\n"
		: "=r" (res), "+r"(to), "+r"(from), "+r"(n), "=r"(val),
		  "=r"(tmp1), "=r"(tmp2), "=r"(tmp3), "=r"(tmp4)
		:
		: "lp_count", "memory");
	}

	return res;
}

static inline unsigned long __arc_clear_user(void __user *to, unsigned long n)
{
	long res = n;
	unsigned char *d_char = to;

	__asm__ __volatile__(
	"	bbit0   %0, 0, 1f		\n"
	"75:	stb.ab  %2, [%0,1]		\n"
	"	sub %1, %1, 1			\n"
	"1:	bbit0   %0, 1, 2f		\n"
	"76:	stw.ab  %2, [%0,2]		\n"
	"	sub %1, %1, 2			\n"
	"2:	asr.f   lp_count, %1, 2		\n"
	"	lpnz    3f			\n"
	"77:	st.ab   %2, [%0,4]		\n"
	"	sub %1, %1, 4			\n"
	"3:	bbit0   %1, 1, 4f		\n"
	"78:	stw.ab  %2, [%0,2]		\n"
	"	sub %1, %1, 2			\n"
	"4:	bbit0   %1, 0, 5f		\n"
	"79:	stb.ab  %2, [%0,1]		\n"
	"	sub %1, %1, 1			\n"
	"5:					\n"
	"	.section .fixup, \"ax\"		\n"
	"	.align 4			\n"
	"3:	j   5b				\n"
	"	.previous			\n"
	"	.section __ex_table, \"a\"	\n"
	"	.align 4			\n"
	"	.word   75b, 3b			\n"
	"	.word   76b, 3b			\n"
	"	.word   77b, 3b			\n"
	"	.word   78b, 3b			\n"
	"	.word   79b, 3b			\n"
	"	.previous			\n"
	: "+r"(d_char), "+r"(res)
	: "i"(0)
	: "lp_count", "memory");

	return res;
}
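
/*
 * Like the copy helpers above, __arc_clear_user() returns the number of
 * bytes left unwritten, so 0 means success.  A minimal caller sketch
 * (hypothetical, for illustration):
 *
 *	if (__clear_user(ubuf, len))
 *		return -EFAULT;
 *
 * Also note the numeric local label "3:" is defined twice above (loop tail
 * and fixup); the assembler resolves each "3b" in the __ex_table words to
 * the most recent definition, i.e. the fixup.
 */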

#ifndef CONFIG_CC_OPTIMIZE_FOR_SIZE

#define INLINE_COPY_TO_USER
#define INLINE_COPY_FROM_USER

#define __clear_user(d, n)		__arc_clear_user(d, n)
#else
extern unsigned long arc_clear_user_noinline(void __user *to,
		unsigned long n);
#define __clear_user(d, n)		arc_clear_user_noinline(d, n)
#endif
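
/*
 * Summary of the wiring (behaviour assumed from the generic uaccess layer):
 * when not optimizing for size, INLINE_COPY_{TO,FROM}_USER makes the
 * generic code emit the copy routines above inline at every call site;
 * with -Os they are built once as out-of-line functions instead, trading
 * speed for the ~200K size saving noted in the header comment.
 */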

#include <asm-generic/uaccess.h>

#endif	/* _ASM_ARC_UACCESS_H */