/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2004, 2007-2010, 2011-2012 Synopsys, Inc. (www.synopsys.com)
 *
 * vineetg: June 2010
 *    -__clear_user( ), called multiple times during ELF load, was converted
 *    from a byte loop to do as much word clearing as possible.
 *
 * vineetg: Dec 2009
 *    -Hand-crafted constant propagation for "constant" copy sizes
 *    -stock kernel shrunk by 33K at -O3
 *
 * vineetg: Sept 2009
 *    -Added option to (UN)inline copy_(to|from)_user to reduce code size
 *    -kernel shrunk by 200K even at -O3 (gcc 4.2.1)
 *    -Enabled when building with -Os
 *
 * Amit Bhor, Sameer Dhavale: Codito Technologies 2004
 */

#ifndef _ASM_ARC_UACCESS_H
#define _ASM_ARC_UACCESS_H

#include <linux/string.h>	/* for generic string functions */

/*********** Single byte/hword/word copies ******************/

#define __get_user_fn(sz, u, k)					\
({								\
	long __ret = 0;	/* success by default */	\
	switch (sz) {						\
	case 1: __arc_get_user_one(*(k), u, "ldb", __ret); break;	\
	case 2: __arc_get_user_one(*(k), u, "ldw", __ret); break;	\
	case 4: __arc_get_user_one(*(k), u, "ld", __ret);  break;	\
	case 8: __arc_get_user_one_64(*(k), u, __ret);     break;	\
	}							\
	__ret;							\
})
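
/*
 * Illustrative sketch: the generic get_user()/__get_user() in
 * <asm-generic/uaccess.h> pass sizeof(*(ptr)) as @sz, so the switch above
 * collapses to a single access at compile time.  For a hypothetical
 * "u32 __user *uptr":
 *
 *	u32 val;
 *	int err = get_user(val, uptr);	// sz == 4 -> the "ld" case
 *	if (err)			// 0 on success, -EFAULT on fault
 *		return err;
 */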

/*
 * Returns 0 on success, -EFAULT otherwise.
 * @ret already contains 0 - given that errors are the less likely case
 * (hence the "+r" asm constraint below).
 * In case of error, the fixup code will make it -EFAULT.
 */
#define __arc_get_user_one(dst, src, op, ret)	\
	__asm__ __volatile__(                   \
	"1:	"op"    %1,[%2]\n"		\
	"2:	;nop\n"				\
	"	.section .fixup, \"ax\"\n"	\
	"	.align 4\n"			\
	"3:	# return -EFAULT\n"		\
	"	mov %0, %3\n"			\
	"	# zero out dst ptr\n"		\
	"	mov %1,  0\n"			\
	"	j   2b\n"			\
	"	.previous\n"			\
	"	.section __ex_table, \"a\"\n"	\
	"	.align 4\n"			\
	"	.word 1b,3b\n"			\
	"	.previous\n"			\
						\
	: "+r" (ret), "=r" (dst)		\
	: "r" (src), "ir" (-EFAULT))

#define __arc_get_user_one_64(dst, src, ret)	\
	__asm__ __volatile__(                   \
	"1:	ld   %1,[%2]\n"			\
	"4:	ld  %R1,[%2, 4]\n"		\
	"2:	;nop\n"				\
	"	.section .fixup, \"ax\"\n"	\
	"	.align 4\n"			\
	"3:	# return -EFAULT\n"		\
	"	mov %0, %3\n"			\
	"	# zero out dst ptr\n"		\
	"	mov %1,  0\n"			\
	"	mov %R1, 0\n"			\
	"	j   2b\n"			\
	"	.previous\n"			\
	"	.section __ex_table, \"a\"\n"	\
	"	.align 4\n"			\
	"	.word 1b,3b\n"			\
	"	.word 4b,3b\n"			\
	"	.previous\n"			\
						\
	: "+r" (ret), "=r" (dst)		\
	: "r" (src), "ir" (-EFAULT))

#define __put_user_fn(sz, u, k)					\
({								\
	long __ret = 0;	/* success by default */	\
	switch (sz) {						\
	case 1: __arc_put_user_one(*(k), u, "stb", __ret); break;	\
	case 2: __arc_put_user_one(*(k), u, "stw", __ret); break;	\
	case 4: __arc_put_user_one(*(k), u, "st", __ret);  break;	\
	case 8: __arc_put_user_one_64(*(k), u, __ret);     break;	\
	}							\
	__ret;							\
})

#define __arc_put_user_one(src, dst, op, ret)	\
	__asm__ __volatile__(                   \
	"1:	"op"    %1,[%2]\n"		\
	"2:	;nop\n"				\
	"	.section .fixup, \"ax\"\n"	\
	"	.align 4\n"			\
	"3:	mov %0, %3\n"			\
	"	j   2b\n"			\
	"	.previous\n"			\
	"	.section __ex_table, \"a\"\n"	\
	"	.align 4\n"			\
	"	.word 1b,3b\n"			\
	"	.previous\n"			\
						\
	: "+r" (ret)				\
	: "r" (src), "r" (dst), "ir" (-EFAULT))

#define __arc_put_user_one_64(src, dst, ret)	\
	__asm__ __volatile__(                   \
	"1:	st   %1,[%2]\n"			\
	"4:	st  %R1,[%2, 4]\n"		\
	"2:	;nop\n"				\
	"	.section .fixup, \"ax\"\n"	\
	"	.align 4\n"			\
	"3:	mov %0, %3\n"			\
	"	j   2b\n"			\
	"	.previous\n"			\
	"	.section __ex_table, \"a\"\n"	\
	"	.align 4\n"			\
	"	.word 1b,3b\n"			\
	"	.word 4b,3b\n"			\
	"	.previous\n"			\
						\
	: "+r" (ret)				\
	: "r" (src), "r" (dst), "ir" (-EFAULT))


static inline unsigned long
raw_copy_from_user(void *to, const void __user *from, unsigned long n)
{
	long res = 0;
	char val;
	unsigned long tmp1, tmp2, tmp3, tmp4;
	unsigned long orig_n = n;

	if (n == 0)
		return 0;

	/* fallback for unaligned access when hardware doesn't support it */
	if (!IS_ENABLED(CONFIG_ARC_USE_UNALIGNED_MEM_ACCESS) &&
	     (((unsigned long)to & 0x3) || ((unsigned long)from & 0x3))) {

		unsigned char tmp;

		__asm__ __volatile__ (
		"	mov.f   lp_count, %0		\n"
		"	lpnz 2f				\n"
		"1:	ldb.ab  %1, [%3, 1]		\n"
		"	stb.ab  %1, [%2, 1]		\n"
		"	sub     %0,%0,1			\n"
		"2:	;nop				\n"
		"	.section .fixup, \"ax\"		\n"
		"	.align 4			\n"
		"3:	j   2b				\n"
		"	.previous			\n"
		"	.section __ex_table, \"a\"	\n"
		"	.align 4			\n"
		"	.word   1b, 3b			\n"
		"	.previous			\n"

		: "+r" (n),
		/*
		 * Note the '&' earlyclobber on the operand below: it makes
		 * sure the temporary register used inside the loop is not
		 * the same as FROM or TO.
		 */
		  "=&r" (tmp), "+r" (to), "+r" (from)
		:
		: "lp_count", "memory");

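		/*
		 * %0 (n) is decremented once per byte copied; on a fault the
		 * fixup path jumps straight past the loop, so what is left
		 * in n is the number of bytes NOT copied (0 on full success).
		 */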
		return n;
	}

	/*
	 * Hand-crafted constant propagation to reduce code sz of the
	 * laddered copy 16x,8,4,2,1
	 */
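	/*
	 * Worked example (illustrative): for a constant n of 41 bytes only
	 * the needed rungs below are emitted - two iterations of the 16-byte
	 * loop (32), the 8-byte rung (40), then the final 1-byte rung; the
	 * 4- and 2-byte rungs compile away entirely.
	 */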
	if (__builtin_constant_p(orig_n)) {
		res = orig_n;

		if (orig_n / 16) {
			orig_n = orig_n % 16;

			__asm__ __volatile__(
			"	lsr   lp_count, %7,4		\n"
			"	lp    3f			\n"
			"1:	ld.ab   %3, [%2, 4]		\n"
			"11:	ld.ab   %4, [%2, 4]		\n"
			"12:	ld.ab   %5, [%2, 4]		\n"
			"13:	ld.ab   %6, [%2, 4]		\n"
			"	st.ab   %3, [%1, 4]		\n"
			"	st.ab   %4, [%1, 4]		\n"
			"	st.ab   %5, [%1, 4]		\n"
			"	st.ab   %6, [%1, 4]		\n"
			"	sub     %0,%0,16		\n"
			"3:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   3b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   1b, 4b			\n"
			"	.word   11b,4b			\n"
			"	.word   12b,4b			\n"
			"	.word   13b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from),
			  "=r"(tmp1), "=r"(tmp2), "=r"(tmp3), "=r"(tmp4)
			: "ir"(n)
			: "lp_count", "memory");
		}
		if (orig_n / 8) {
			orig_n = orig_n % 8;

			__asm__ __volatile__(
			"14:	ld.ab   %3, [%2,4]		\n"
			"15:	ld.ab   %4, [%2,4]		\n"
			"	st.ab   %3, [%1,4]		\n"
			"	st.ab   %4, [%1,4]		\n"
			"	sub     %0,%0,8			\n"
			"31:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   31b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   14b,4b			\n"
			"	.word   15b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from),
			  "=r"(tmp1), "=r"(tmp2)
			:
			: "memory");
		}
		if (orig_n / 4) {
			orig_n = orig_n % 4;

			__asm__ __volatile__(
			"16:	ld.ab   %3, [%2,4]		\n"
			"	st.ab   %3, [%1,4]		\n"
			"	sub     %0,%0,4			\n"
			"32:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   32b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   16b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
			:
			: "memory");
		}
		if (orig_n / 2) {
			orig_n = orig_n % 2;

			__asm__ __volatile__(
			"17:	ldw.ab   %3, [%2,2]		\n"
			"	stw.ab   %3, [%1,2]		\n"
			"	sub      %0,%0,2		\n"
			"33:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   33b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   17b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
			:
			: "memory");
		}
		if (orig_n & 1) {
			__asm__ __volatile__(
			"18:	ldb.ab   %3, [%2,2]		\n"
			"	stb.ab   %3, [%1,2]		\n"
			"	sub      %0,%0,1		\n"
			"34:	; nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   34b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   18b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
			:
			: "memory");
		}
	} else {  /* n is NOT constant, so laddered copy of 16x,8,4,2,1  */

		__asm__ __volatile__(
		"	mov %0,%3			\n"
		"	lsr.f   lp_count, %3,4		\n"  /* 16x bytes */
		"	lpnz    3f			\n"
		"1:	ld.ab   %5, [%2, 4]		\n"
		"11:	ld.ab   %6, [%2, 4]		\n"
		"12:	ld.ab   %7, [%2, 4]		\n"
		"13:	ld.ab   %8, [%2, 4]		\n"
		"	st.ab   %5, [%1, 4]		\n"
		"	st.ab   %6, [%1, 4]		\n"
		"	st.ab   %7, [%1, 4]		\n"
		"	st.ab   %8, [%1, 4]		\n"
		"	sub     %0,%0,16		\n"
		"3:	and.f   %3,%3,0xf		\n"  /* stragglers */
		"	bz      34f			\n"
		"	bbit0   %3,3,31f		\n"  /* 8 bytes left */
		"14:	ld.ab   %5, [%2,4]		\n"
		"15:	ld.ab   %6, [%2,4]		\n"
		"	st.ab   %5, [%1,4]		\n"
		"	st.ab   %6, [%1,4]		\n"
		"	sub.f   %0,%0,8			\n"
		"31:	bbit0   %3,2,32f		\n"  /* 4 bytes left */
		"16:	ld.ab   %5, [%2,4]		\n"
		"	st.ab   %5, [%1,4]		\n"
		"	sub.f   %0,%0,4			\n"
		"32:	bbit0   %3,1,33f		\n"  /* 2 bytes left */
		"17:	ldw.ab  %5, [%2,2]		\n"
		"	stw.ab  %5, [%1,2]		\n"
		"	sub.f   %0,%0,2			\n"
		"33:	bbit0   %3,0,34f		\n"
		"18:	ldb.ab  %5, [%2,1]		\n"  /* 1 byte left */
		"	stb.ab  %5, [%1,1]		\n"
		"	sub.f   %0,%0,1			\n"
		"34:	;nop				\n"
		"	.section .fixup, \"ax\"		\n"
		"	.align 4			\n"
		"4:	j   34b				\n"
		"	.previous			\n"
		"	.section __ex_table, \"a\"	\n"
		"	.align 4			\n"
		"	.word   1b, 4b			\n"
		"	.word   11b,4b			\n"
		"	.word   12b,4b			\n"
		"	.word   13b,4b			\n"
		"	.word   14b,4b			\n"
		"	.word   15b,4b			\n"
		"	.word   16b,4b			\n"
		"	.word   17b,4b			\n"
		"	.word   18b,4b			\n"
		"	.previous			\n"
		: "=r" (res), "+r"(to), "+r"(from), "+r"(n), "=r"(val),
		  "=r"(tmp1), "=r"(tmp2), "=r"(tmp3), "=r"(tmp4)
		:
		: "lp_count", "memory");
	}

	return res;
}
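
/*
 * Calling-convention sketch (illustrative): like the generic helpers,
 * raw_copy_from_user()/raw_copy_to_user() return the number of bytes that
 * could NOT be copied, so 0 means complete success.  A typical caller goes
 * through copy_from_user(), e.g. with hypothetical kbuf/ubuf/len:
 *
 *	if (copy_from_user(kbuf, ubuf, len))
 *		return -EFAULT;
 */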

static inline unsigned long
raw_copy_to_user(void __user *to, const void *from, unsigned long n)
{
	long res = 0;
	char val;
	unsigned long tmp1, tmp2, tmp3, tmp4;
	unsigned long orig_n = n;

	if (n == 0)
		return 0;

	/* fallback for unaligned access when hardware doesn't support it */
	if (!IS_ENABLED(CONFIG_ARC_USE_UNALIGNED_MEM_ACCESS) &&
	     (((unsigned long)to & 0x3) || ((unsigned long)from & 0x3))) {

		unsigned char tmp;

		__asm__ __volatile__(
		"	mov.f   lp_count, %0		\n"
		"	lpnz 3f				\n"
		"	ldb.ab  %1, [%3, 1]		\n"
		"1:	stb.ab  %1, [%2, 1]		\n"
		"	sub     %0, %0, 1		\n"
		"3:	;nop				\n"
		"	.section .fixup, \"ax\"		\n"
		"	.align 4			\n"
		"4:	j   3b				\n"
		"	.previous			\n"
		"	.section __ex_table, \"a\"	\n"
		"	.align 4			\n"
		"	.word   1b, 4b			\n"
		"	.previous			\n"

		: "+r" (n),
		/*
		 * Note the '&' earlyclobber on the operand below: it makes
		 * sure the temporary register used inside the loop is not
		 * the same as FROM or TO.
		 */
		  "=&r" (tmp), "+r" (to), "+r" (from)
		:
		: "lp_count", "memory");

		return n;
	}

	if (__builtin_constant_p(orig_n)) {
		res = orig_n;

		if (orig_n / 16) {
			orig_n = orig_n % 16;

			__asm__ __volatile__(
			"	lsr lp_count, %7,4		\n"
			"	lp  3f				\n"
			"	ld.ab %3, [%2, 4]		\n"
			"	ld.ab %4, [%2, 4]		\n"
			"	ld.ab %5, [%2, 4]		\n"
			"	ld.ab %6, [%2, 4]		\n"
			"1:	st.ab %3, [%1, 4]		\n"
			"11:	st.ab %4, [%1, 4]		\n"
			"12:	st.ab %5, [%1, 4]		\n"
			"13:	st.ab %6, [%1, 4]		\n"
			"	sub   %0, %0, 16		\n"
			"3:;nop					\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   3b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   1b, 4b			\n"
			"	.word   11b,4b			\n"
			"	.word   12b,4b			\n"
			"	.word   13b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from),
			  "=r"(tmp1), "=r"(tmp2), "=r"(tmp3), "=r"(tmp4)
			: "ir"(n)
			: "lp_count", "memory");
		}
		if (orig_n / 8) {
			orig_n = orig_n % 8;

			__asm__ __volatile__(
			"	ld.ab   %3, [%2,4]		\n"
			"	ld.ab   %4, [%2,4]		\n"
			"14:	st.ab   %3, [%1,4]		\n"
			"15:	st.ab   %4, [%1,4]		\n"
			"	sub     %0, %0, 8		\n"
			"31:;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   31b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   14b,4b			\n"
			"	.word   15b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from),
			  "=r"(tmp1), "=r"(tmp2)
			:
			: "memory");
		}
		if (orig_n / 4) {
			orig_n = orig_n % 4;

			__asm__ __volatile__(
			"	ld.ab   %3, [%2,4]		\n"
			"16:	st.ab   %3, [%1,4]		\n"
			"	sub     %0, %0, 4		\n"
			"32:;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   32b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   16b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
			:
			: "memory");
		}
		if (orig_n / 2) {
			orig_n = orig_n % 2;

			__asm__ __volatile__(
			"	ldw.ab    %3, [%2,2]		\n"
			"17:	stw.ab    %3, [%1,2]		\n"
			"	sub       %0, %0, 2		\n"
			"33:;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   33b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   17b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
			:
			: "memory");
		}
		if (orig_n & 1) {
			__asm__ __volatile__(
			"	ldb.ab  %3, [%2,1]		\n"
			"18:	stb.ab  %3, [%1,1]		\n"
			"	sub     %0, %0, 1		\n"
			"34:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   34b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   18b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
			:
			: "memory");
		}
	} else {  /* n is NOT constant, so laddered copy of 16x,8,4,2,1  */

		__asm__ __volatile__(
		"	mov   %0,%3			\n"
		"	lsr.f lp_count, %3,4		\n"  /* 16x bytes */
		"	lpnz  3f			\n"
		"	ld.ab %5, [%2, 4]		\n"
		"	ld.ab %6, [%2, 4]		\n"
		"	ld.ab %7, [%2, 4]		\n"
		"	ld.ab %8, [%2, 4]		\n"
		"1:	st.ab %5, [%1, 4]		\n"
		"11:	st.ab %6, [%1, 4]		\n"
		"12:	st.ab %7, [%1, 4]		\n"
		"13:	st.ab %8, [%1, 4]		\n"
		"	sub   %0, %0, 16		\n"
		"3:	and.f %3,%3,0xf			\n" /* stragglers */
		"	bz 34f				\n"
		"	bbit0   %3,3,31f		\n" /* 8 bytes left */
		"	ld.ab   %5, [%2,4]		\n"
		"	ld.ab   %6, [%2,4]		\n"
		"14:	st.ab   %5, [%1,4]		\n"
		"15:	st.ab   %6, [%1,4]		\n"
		"	sub.f   %0, %0, 8		\n"
		"31:	bbit0   %3,2,32f		\n"  /* 4 bytes left */
		"	ld.ab   %5, [%2,4]		\n"
		"16:	st.ab   %5, [%1,4]		\n"
		"	sub.f   %0, %0, 4		\n"
		"32:	bbit0 %3,1,33f			\n"  /* 2 bytes left */
		"	ldw.ab    %5, [%2,2]		\n"
		"17:	stw.ab    %5, [%1,2]		\n"
		"	sub.f %0, %0, 2			\n"
		"33:	bbit0 %3,0,34f			\n"
		"	ldb.ab    %5, [%2,1]		\n"  /* 1 byte left */
		"18:	stb.ab  %5, [%1,1]		\n"
		"	sub.f %0, %0, 1			\n"
		"34:	;nop				\n"
		"	.section .fixup, \"ax\"		\n"
		"	.align 4			\n"
		"4:	j   34b				\n"
		"	.previous			\n"
		"	.section __ex_table, \"a\"	\n"
		"	.align 4			\n"
		"	.word   1b, 4b			\n"
		"	.word   11b,4b			\n"
		"	.word   12b,4b			\n"
		"	.word   13b,4b			\n"
		"	.word   14b,4b			\n"
		"	.word   15b,4b			\n"
		"	.word   16b,4b			\n"
		"	.word   17b,4b			\n"
		"	.word   18b,4b			\n"
		"	.previous			\n"
		: "=r" (res), "+r"(to), "+r"(from), "+r"(n), "=r"(val),
		  "=r"(tmp1), "=r"(tmp2), "=r"(tmp3), "=r"(tmp4)
		:
		: "lp_count", "memory");
	}

	return res;
}

static inline unsigned long __clear_user(void __user *to, unsigned long n)
{
	long res = n;
	unsigned char *d_char = to;

	__asm__ __volatile__(
	"	bbit0   %0, 0, 1f		\n"
	"75:	stb.ab  %2, [%0,1]		\n"
	"	sub %1, %1, 1			\n"
	"1:	bbit0   %0, 1, 2f		\n"
	"76:	stw.ab  %2, [%0,2]		\n"
	"	sub %1, %1, 2			\n"
	"2:	asr.f   lp_count, %1, 2		\n"
	"	lpnz    3f			\n"
	"77:	st.ab   %2, [%0,4]		\n"
	"	sub %1, %1, 4			\n"
	"3:	bbit0   %1, 1, 4f		\n"
	"78:	stw.ab  %2, [%0,2]		\n"
	"	sub %1, %1, 2			\n"
	"4:	bbit0   %1, 0, 5f		\n"
	"79:	stb.ab  %2, [%0,1]		\n"
	"	sub %1, %1, 1			\n"
	"5:					\n"
	"	.section .fixup, \"ax\"		\n"
	"	.align 4			\n"
	"3:	j   5b				\n"
	"	.previous			\n"
	"	.section __ex_table, \"a\"	\n"
	"	.align 4			\n"
	"	.word   75b, 3b			\n"
	"	.word   76b, 3b			\n"
	"	.word   77b, 3b			\n"
	"	.word   78b, 3b			\n"
	"	.word   79b, 3b			\n"
	"	.previous			\n"
	: "+r"(d_char), "+r"(res)
	: "i"(0)
	: "lp_count", "memory");

	return res;
}
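
/*
 * Usage sketch (illustrative): clear_user() in <asm-generic/uaccess.h>
 * wraps __clear_user() with an access_ok() check; as with the copy
 * helpers, the return value is the number of bytes left un-cleared:
 *
 *	if (clear_user(ubuf, len))	// hypothetical ubuf/len
 *		return -EFAULT;
 */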

#define INLINE_COPY_TO_USER
#define INLINE_COPY_FROM_USER
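/*
 * Note: defining INLINE_COPY_{TO,FROM}_USER makes <linux/uaccess.h> emit
 * _copy_{to,from}_user() as inline wrappers around the raw_copy_*()
 * routines above instead of using the out-of-line library versions
 * (see the Sept 2009 size/speed note in the header comment).
 */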

#define __clear_user			__clear_user

#include <asm-generic/uaccess.h>

#endif