/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __M68K_UACCESS_H
#define __M68K_UACCESS_H

#ifdef CONFIG_MMU

/*
 * User space memory access functions
 */
#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/extable.h>
#include <asm-generic/access_ok.h>

/*
 * Not all variants of the 68k family support the notion of address spaces.
 * The traditional 680x0 parts do, and they use the sfc/dfc registers and
 * the "moves" instruction to access user space from kernel space. Other
 * family members like ColdFire don't support this, and only have a single
 * address space, and use the usual "move" instruction for user space access.
 *
 * Outside of this difference the user space access functions are the same.
 * So let's keep the code simple and just define what we need to use.
 */
#ifdef CONFIG_CPU_HAS_ADDRESS_SPACES
#define	MOVES	"moves"
#else
#define	MOVES	"move"
#endif

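/*
 * Store a single value to user space.  The __ex_table entries map
 * each faulting access back to the .fixup code, which loads the
 * error code into the result register and resumes after the store.
 */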
#define __put_user_asm(inst, res, x, ptr, bwl, reg, err) \
asm volatile ("\n"					\
	"1:	"inst"."#bwl"	%2,%1\n"		\
	"2:\n"						\
	"	.section .fixup,\"ax\"\n"		\
	"	.even\n"				\
	"10:	moveq.l	%3,%0\n"			\
	"	jra 2b\n"				\
	"	.previous\n"				\
	"\n"						\
	"	.section __ex_table,\"a\"\n"		\
	"	.align	4\n"				\
	"	.long	1b,10b\n"			\
	"	.long	2b,10b\n"			\
	"	.previous"				\
	: "+d" (res), "=m" (*(ptr))			\
	: #reg (x), "i" (err))

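/*
 * 64-bit store: the two halves of the value are written with two
 * longword moves, each covered by its own exception table entry.
 */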
#define __put_user_asm8(inst, res, x, ptr)			\
do {								\
	const void *__pu_ptr = (const void __force *)(ptr);	\
								\
	asm volatile ("\n"					\
		"1:	"inst".l %2,(%1)+\n"			\
		"2:	"inst".l %R2,(%1)\n"			\
		"3:\n"						\
		"	.section .fixup,\"ax\"\n"		\
		"	.even\n"				\
		"10:	movel %3,%0\n"				\
		"	jra 3b\n"				\
		"	.previous\n"				\
		"\n"						\
		"	.section __ex_table,\"a\"\n"		\
		"	.align 4\n"				\
		"	.long 1b,10b\n"				\
		"	.long 2b,10b\n"				\
		"	.long 3b,10b\n"				\
		"	.previous"				\
		: "+d" (res), "+a" (__pu_ptr)			\
		: "r" (x), "i" (-EFAULT)			\
		: "memory");					\
} while (0)

/*
 * These are the main single-value transfer routines.  They automatically
 * use the right size if we just have the right pointer type.
 */

#define __put_user(x, ptr)						\
({									\
	typeof(*(ptr)) __pu_val = (x);					\
	int __pu_err = 0;						\
	__chk_user_ptr(ptr);						\
	switch (sizeof (*(ptr))) {					\
	case 1:								\
		__put_user_asm(MOVES, __pu_err, __pu_val, ptr, b, d, -EFAULT); \
		break;							\
	case 2:								\
		__put_user_asm(MOVES, __pu_err, __pu_val, ptr, w, r, -EFAULT); \
		break;							\
	case 4:								\
		__put_user_asm(MOVES, __pu_err, __pu_val, ptr, l, r, -EFAULT); \
		break;							\
	case 8:								\
		__put_user_asm8(MOVES, __pu_err, __pu_val, ptr);	\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
	__pu_err;							\
})
#define put_user(x, ptr)	__put_user(x, ptr)

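/*
 * For example (a hypothetical helper returning a 32-bit result to
 * user space):
 *
 *	static int put_result(u32 __user *uresult, u32 value)
 *	{
 *		return put_user(value, uresult);
 *	}
 */
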
#define __get_user_asm(inst, res, x, ptr, type, bwl, reg, err) ({	\
	type __gu_val;							\
	asm volatile ("\n"						\
		"1:	"inst"."#bwl"	%2,%1\n"			\
		"2:\n"							\
		"	.section .fixup,\"ax\"\n"			\
		"	.even\n"					\
		"10:	move.l	%3,%0\n"				\
		"	sub.l	%1,%1\n"				\
		"	jra	2b\n"					\
		"	.previous\n"					\
		"\n"							\
		"	.section __ex_table,\"a\"\n"			\
		"	.align	4\n"					\
		"	.long	1b,10b\n"				\
		"	.previous"					\
		: "+d" (res), "=&" #reg (__gu_val)			\
		: "m" (*(ptr)), "i" (err));				\
	(x) = (__force typeof(*(ptr)))(__force unsigned long)__gu_val;	\
})

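/*
 * 64-bit load: both longwords are read through a union so the
 * result can be assigned back as the pointer's own 64-bit type.
 */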
#define __get_user_asm8(inst, res, x, ptr)				\
do {									\
	const void *__gu_ptr = (const void __force *)(ptr);		\
	union {								\
		u64 l;							\
		__typeof__(*(ptr)) t;					\
	} __gu_val;							\
									\
	asm volatile ("\n"						\
		"1:	"inst".l (%2)+,%1\n"				\
		"2:	"inst".l (%2),%R1\n"				\
		"3:\n"							\
		"	.section .fixup,\"ax\"\n"			\
		"	.even\n"					\
		"10:	move.l	%3,%0\n"				\
		"	sub.l	%1,%1\n"				\
		"	sub.l	%R1,%R1\n"				\
		"	jra	3b\n"					\
		"	.previous\n"					\
		"\n"							\
		"	.section __ex_table,\"a\"\n"			\
		"	.align	4\n"					\
		"	.long	1b,10b\n"				\
		"	.long	2b,10b\n"				\
		"	.previous"					\
		: "+d" (res), "=&r" (__gu_val.l),			\
		  "+a" (__gu_ptr)					\
		: "i" (-EFAULT)						\
		: "memory");						\
	(x) = __gu_val.t;						\
} while (0)

#define __get_user(x, ptr)						\
({									\
	int __gu_err = 0;						\
	__chk_user_ptr(ptr);						\
	switch (sizeof(*(ptr))) {					\
	case 1:								\
		__get_user_asm(MOVES, __gu_err, x, ptr, u8, b, d, -EFAULT); \
		break;							\
	case 2:								\
		__get_user_asm(MOVES, __gu_err, x, ptr, u16, w, r, -EFAULT); \
		break;							\
	case 4:								\
		__get_user_asm(MOVES, __gu_err, x, ptr, u32, l, r, -EFAULT); \
		break;							\
	case 8:								\
		__get_user_asm8(MOVES, __gu_err, x, ptr);		\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
	__gu_err;							\
})
#define get_user(x, ptr) __get_user(x, ptr)

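/*
 * For example (a hypothetical ioctl handler reading a 32-bit
 * argument from user space):
 *
 *	u32 arg;
 *
 *	if (get_user(arg, (u32 __user *)uarg))
 *		return -EFAULT;
 *
 * get_user() and put_user() return 0 on success and -EFAULT on a
 * faulting access.
 */
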
unsigned long __generic_copy_from_user(void *to, const void __user *from, unsigned long n);
unsigned long __generic_copy_to_user(void __user *to, const void *from, unsigned long n);

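/*
 * Token pasting maps a constant byte count onto the matching move
 * size suffix ("b", "w" or "l"); __suffix0 expands to nothing, so
 * unused move slots drop out of the copy routines below via their
 * .ifnc tests.
 */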
#define __suffix0
#define __suffix1 b
#define __suffix2 w
#define __suffix4 l

#define ____constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3, s1, s2, s3)\
	asm volatile ("\n"						\
		"1:	"MOVES"."#s1"	(%2)+,%3\n"			\
		"	move."#s1"	%3,(%1)+\n"			\
		"	.ifnc	\""#s2"\",\"\"\n"			\
		"2:	"MOVES"."#s2"	(%2)+,%3\n"			\
		"	move."#s2"	%3,(%1)+\n"			\
		"	.ifnc	\""#s3"\",\"\"\n"			\
		"3:	"MOVES"."#s3"	(%2)+,%3\n"			\
		"	move."#s3"	%3,(%1)+\n"			\
		"	.endif\n"					\
		"	.endif\n"					\
		"4:\n"							\
		"	.section __ex_table,\"a\"\n"			\
		"	.align	4\n"					\
		"	.long	1b,10f\n"				\
		"	.ifnc	\""#s2"\",\"\"\n"			\
		"	.long	2b,20f\n"				\
		"	.ifnc	\""#s3"\",\"\"\n"			\
		"	.long	3b,30f\n"				\
		"	.endif\n"					\
		"	.endif\n"					\
		"	.previous\n"					\
		"\n"							\
		"	.section .fixup,\"ax\"\n"			\
		"	.even\n"					\
		"10:	addq.l #"#n1",%0\n"				\
		"	.ifnc	\""#s2"\",\"\"\n"			\
		"20:	addq.l #"#n2",%0\n"				\
		"	.ifnc	\""#s3"\",\"\"\n"			\
		"30:	addq.l #"#n3",%0\n"				\
		"	.endif\n"					\
		"	.endif\n"					\
		"	jra	4b\n"					\
		"	.previous\n"					\
		: "+d" (res), "+&a" (to), "+a" (from), "=&d" (tmp)	\
		: : "memory")

#define ___constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3, s1, s2, s3)\
	____constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3, s1, s2, s3)
#define __constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3)	\
	___constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3,  \
					__suffix##n1, __suffix##n2, __suffix##n3)

static __always_inline unsigned long
__constant_copy_from_user(void *to, const void __user *from, unsigned long n)
{
	unsigned long res = 0, tmp;

	switch (n) {
	case 1:
		__constant_copy_from_user_asm(res, to, from, tmp, 1, 0, 0);
		break;
	case 2:
		__constant_copy_from_user_asm(res, to, from, tmp, 2, 0, 0);
		break;
	case 3:
		__constant_copy_from_user_asm(res, to, from, tmp, 2, 1, 0);
		break;
	case 4:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 0, 0);
		break;
	case 5:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 1, 0);
		break;
	case 6:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 2, 0);
		break;
	case 7:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 2, 1);
		break;
	case 8:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 4, 0);
		break;
	case 9:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 4, 1);
		break;
	case 10:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 4, 2);
		break;
	case 12:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 4, 4);
		break;
	default:
		/* we limit the inlined version to 3 moves */
		return __generic_copy_from_user(to, from, n);
	}

	return res;
}

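/*
 * For example, a constant n of 6 is copied with one longword and
 * one word move (the "4, 2, 0" case above).  Only sizes needing at
 * most three moves are inlined; everything else falls back to
 * __generic_copy_from_user().
 */
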
#define __constant_copy_to_user_asm(res, to, from, tmp, n, s1, s2, s3)	\
	asm volatile ("\n"						\
		"	move."#s1"	(%2)+,%3\n"			\
		"11:	"MOVES"."#s1"	%3,(%1)+\n"			\
		"12:	move."#s2"	(%2)+,%3\n"			\
		"21:	"MOVES"."#s2"	%3,(%1)+\n"			\
		"22:\n"							\
		"	.ifnc	\""#s3"\",\"\"\n"			\
		"	move."#s3"	(%2)+,%3\n"			\
		"31:	"MOVES"."#s3"	%3,(%1)+\n"			\
		"32:\n"							\
		"	.endif\n"					\
		"4:\n"							\
		"\n"							\
		"	.section __ex_table,\"a\"\n"			\
		"	.align	4\n"					\
		"	.long	11b,5f\n"				\
		"	.long	12b,5f\n"				\
		"	.long	21b,5f\n"				\
		"	.long	22b,5f\n"				\
		"	.ifnc	\""#s3"\",\"\"\n"			\
		"	.long	31b,5f\n"				\
		"	.long	32b,5f\n"				\
		"	.endif\n"					\
		"	.previous\n"					\
		"\n"							\
		"	.section .fixup,\"ax\"\n"			\
		"	.even\n"					\
		"5:	moveq.l	#"#n",%0\n"				\
		"	jra	4b\n"					\
		"	.previous\n"					\
		: "+d" (res), "+a" (to), "+a" (from), "=&d" (tmp)	\
		: : "memory")

static __always_inline unsigned long
__constant_copy_to_user(void __user *to, const void *from, unsigned long n)
{
	unsigned long res = 0, tmp;

	switch (n) {
	case 1:
		__put_user_asm(MOVES, res, *(u8 *)from, (u8 __user *)to,
				b, d, 1);
		break;
	case 2:
		__put_user_asm(MOVES, res, *(u16 *)from, (u16 __user *)to,
				w, r, 2);
		break;
	case 3:
		__constant_copy_to_user_asm(res, to, from, tmp, 3, w, b,);
		break;
	case 4:
		__put_user_asm(MOVES, res, *(u32 *)from, (u32 __user *)to,
				l, r, 4);
		break;
	case 5:
		__constant_copy_to_user_asm(res, to, from, tmp, 5, l, b,);
		break;
	case 6:
		__constant_copy_to_user_asm(res, to, from, tmp, 6, l, w,);
		break;
	case 7:
		__constant_copy_to_user_asm(res, to, from, tmp, 7, l, w, b);
		break;
	case 8:
		__constant_copy_to_user_asm(res, to, from, tmp, 8, l, l,);
		break;
	case 9:
		__constant_copy_to_user_asm(res, to, from, tmp, 9, l, l, b);
		break;
	case 10:
		__constant_copy_to_user_asm(res, to, from, tmp, 10, l, l, w);
		break;
	case 12:
		__constant_copy_to_user_asm(res, to, from, tmp, 12, l, l, l);
		break;
	default:
		/* limit the inlined version to 3 moves */
		return __generic_copy_to_user(to, from, n);
	}

	return res;
}

static inline unsigned long
raw_copy_from_user(void *to, const void __user *from, unsigned long n)
{
	if (__builtin_constant_p(n))
		return __constant_copy_from_user(to, from, n);
	return __generic_copy_from_user(to, from, n);
}

static inline unsigned long
raw_copy_to_user(void __user *to, const void *from, unsigned long n)
{
	if (__builtin_constant_p(n))
		return __constant_copy_to_user(to, from, n);
	return __generic_copy_to_user(to, from, n);
}
#define INLINE_COPY_FROM_USER
#define INLINE_COPY_TO_USER

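/*
 * With INLINE_COPY_{FROM,TO}_USER defined, the generic
 * copy_{from,to}_user() wrappers in <linux/uaccess.h> inline these
 * raw copies, so a constant-sized copy_from_user() call compiles
 * down to the unrolled moves above.
 */
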
#define __get_kernel_nofault(dst, src, type, err_label)			\
do {									\
	type *__gk_dst = (type *)(dst);					\
	type *__gk_src = (type *)(src);					\
	int __gk_err = 0;						\
									\
	switch (sizeof(type)) {						\
	case 1:								\
		__get_user_asm("move", __gk_err, *__gk_dst, __gk_src,	\
				u8, b, d, -EFAULT);			\
		break;							\
	case 2:								\
		__get_user_asm("move", __gk_err, *__gk_dst, __gk_src,	\
				u16, w, r, -EFAULT);			\
		break;							\
	case 4:								\
		__get_user_asm("move", __gk_err, *__gk_dst, __gk_src,	\
				u32, l, r, -EFAULT);			\
		break;							\
	case 8:								\
		__get_user_asm8("move", __gk_err, *__gk_dst, __gk_src);	\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
	if (unlikely(__gk_err))						\
		goto err_label;						\
} while (0)

#define __put_kernel_nofault(dst, src, type, err_label)			\
do {									\
	type __pk_src = *(type *)(src);					\
	type *__pk_dst = (type *)(dst);					\
	int __pk_err = 0;						\
									\
	switch (sizeof(type)) {						\
	case 1:								\
		__put_user_asm("move", __pk_err, __pk_src, __pk_dst,	\
				b, d, -EFAULT);				\
		break;							\
	case 2:								\
		__put_user_asm("move", __pk_err, __pk_src, __pk_dst,	\
				w, r, -EFAULT);				\
		break;							\
	case 4:								\
		__put_user_asm("move", __pk_err, __pk_src, __pk_dst,	\
				l, r, -EFAULT);				\
		break;							\
	case 8:								\
		__put_user_asm8("move", __pk_err, __pk_src, __pk_dst);	\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
	if (unlikely(__pk_err))						\
		goto err_label;						\
} while (0)

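/*
 * The kernel-space variants use the plain "move" instruction and
 * branch to the caller's label on a fault.  For example (a
 * hypothetical helper probing a kernel address):
 *
 *	long read_kernel_word(const void *addr, u32 *val)
 *	{
 *		__get_kernel_nofault(val, addr, u32, fault);
 *		return 0;
 *	fault:
 *		return -EFAULT;
 *	}
 */
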
extern long strncpy_from_user(char *dst, const char __user *src, long count);
extern __must_check long strnlen_user(const char __user *str, long n);

unsigned long __clear_user(void __user *to, unsigned long n);

#define clear_user	__clear_user

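/*
 * clear_user() returns the number of bytes that could not be
 * cleared, so any non-zero result means a fault.  For example
 * (ubuf and len being a hypothetical user buffer and length):
 *
 *	if (clear_user(ubuf, len))
 *		return -EFAULT;
 */
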
#else /* !CONFIG_MMU */
#include <asm-generic/uaccess.h>
#endif

#endif /* __M68K_UACCESS_H */