/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_GENERIC_UACCESS_H
#define __ASM_GENERIC_UACCESS_H

/*
 * User space memory access functions, these should work
 * on any machine that has kernel and user data in the same
 * address space, e.g. all NOMMU machines.
 */
#include <linux/string.h>
#include <asm-generic/access_ok.h>

#ifdef CONFIG_UACCESS_MEMCPY
#include <asm/unaligned.h>

static __always_inline int
__get_user_fn(size_t size, const void __user *from, void *to)
{
	BUILD_BUG_ON(!__builtin_constant_p(size));

	switch (size) {
	case 1:
		*(u8 *)to = *((u8 __force *)from);
		return 0;
	case 2:
		*(u16 *)to = get_unaligned((u16 __force *)from);
		return 0;
	case 4:
		*(u32 *)to = get_unaligned((u32 __force *)from);
		return 0;
	case 8:
		*(u64 *)to = get_unaligned((u64 __force *)from);
		return 0;
	default:
		BUILD_BUG();
		return 0;
	}
}
#define __get_user_fn(sz, u, k)	__get_user_fn(sz, u, k)

static __always_inline int
__put_user_fn(size_t size, void __user *to, void *from)
{
	BUILD_BUG_ON(!__builtin_constant_p(size));

	switch (size) {
	case 1:
		*(u8 __force *)to = *(u8 *)from;
		return 0;
	case 2:
		put_unaligned(*(u16 *)from, (u16 __force *)to);
		return 0;
	case 4:
		put_unaligned(*(u32 *)from, (u32 __force *)to);
		return 0;
	case 8:
		put_unaligned(*(u64 *)from, (u64 __force *)to);
		return 0;
	default:
		BUILD_BUG();
		return 0;
	}
}
#define __put_user_fn(sz, u, k)	__put_user_fn(sz, u, k)

#define __get_kernel_nofault(dst, src, type, err_label)			\
do {									\
	*((type *)dst) = get_unaligned((type *)(src));			\
	if (0) /* make sure the label looks used to the compiler */	\
		goto err_label;						\
} while (0)

#define __put_kernel_nofault(dst, src, type, err_label)			\
do {									\
	put_unaligned(*((type *)src), (type *)(dst));			\
	if (0) /* make sure the label looks used to the compiler */	\
		goto err_label;						\
} while (0)

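/*
 * Usage sketch for the nofault helpers above (illustrative only, the
 * caller names are hypothetical): they are expanded per access by the
 * generic copy_{from,to}_kernel_nofault() implementation.  There is no
 * fixup path in this generic version, so err_label is referenced only
 * to keep the compiler happy, e.g.:
 *
 *	u32 val;
 *
 *	if (copy_from_kernel_nofault(&val, kernel_ptr, sizeof(val)))
 *		return -EFAULT;
 */
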
static inline __must_check unsigned long
raw_copy_from_user(void *to, const void __user *from, unsigned long n)
{
	memcpy(to, (const void __force *)from, n);
	return 0;
}

static inline __must_check unsigned long
raw_copy_to_user(void __user *to, const void *from, unsigned long n)
{
	memcpy((void __force *)to, from, n);
	return 0;
}
#define INLINE_COPY_FROM_USER
#define INLINE_COPY_TO_USER
#endif /* CONFIG_UACCESS_MEMCPY */

/*
 * These are the main single-value transfer routines.  They automatically
 * use the right size if we just have the right pointer type.
 * This version just falls back to copy_{from,to}_user, which should
 * provide a fast-path for small values.
 */
#define __put_user(x, ptr) \
({								\
	__typeof__(*(ptr)) __x = (x);				\
	int __pu_err = -EFAULT;					\
	__chk_user_ptr(ptr);					\
	switch (sizeof (*(ptr))) {				\
	case 1:							\
	case 2:							\
	case 4:							\
	case 8:							\
		__pu_err = __put_user_fn(sizeof (*(ptr)),	\
					 ptr, &__x);		\
		break;						\
	default:						\
		__put_user_bad();				\
		break;						\
	}							\
	__pu_err;						\
})

#define put_user(x, ptr)					\
({								\
	void __user *__p = (ptr);				\
	might_fault();						\
	access_ok(__p, sizeof(*ptr)) ?				\
		__put_user((x), ((__typeof__(*(ptr)) __user *)__p)) :	\
		-EFAULT;					\
})

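/*
 * Usage sketch for the single-value routines above (hypothetical caller,
 * shown for illustration only): put_user() performs the access_ok()
 * check itself and returns 0 on success or -EFAULT on failure, so a
 * typical handler can copy one value out to user space in a single step:
 *
 *	static long foo_read_counter(u32 __user *argp, u32 counter)
 *	{
 *		return put_user(counter, argp);
 *	}
 *
 * __put_user() skips the access_ok() check and must only be used on
 * pointers the caller has already validated.
 */
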
#ifndef __put_user_fn

static inline int __put_user_fn(size_t size, void __user *ptr, void *x)
{
	return unlikely(raw_copy_to_user(ptr, x, size)) ? -EFAULT : 0;
}

#define __put_user_fn(sz, u, k)	__put_user_fn(sz, u, k)

#endif

extern int __put_user_bad(void) __attribute__((noreturn));

#define __get_user(x, ptr)					\
({								\
	int __gu_err = -EFAULT;					\
	__chk_user_ptr(ptr);					\
	switch (sizeof(*(ptr))) {				\
	case 1: {						\
		unsigned char __x = 0;				\
		__gu_err = __get_user_fn(sizeof (*(ptr)),	\
					 ptr, &__x);		\
		(x) = *(__force __typeof__(*(ptr)) *) &__x;	\
		break;						\
	};							\
	case 2: {						\
		unsigned short __x = 0;				\
		__gu_err = __get_user_fn(sizeof (*(ptr)),	\
					 ptr, &__x);		\
		(x) = *(__force __typeof__(*(ptr)) *) &__x;	\
		break;						\
	};							\
	case 4: {						\
		unsigned int __x = 0;				\
		__gu_err = __get_user_fn(sizeof (*(ptr)),	\
					 ptr, &__x);		\
		(x) = *(__force __typeof__(*(ptr)) *) &__x;	\
		break;						\
	};							\
	case 8: {						\
		unsigned long long __x = 0;			\
		__gu_err = __get_user_fn(sizeof (*(ptr)),	\
					 ptr, &__x);		\
		(x) = *(__force __typeof__(*(ptr)) *) &__x;	\
		break;						\
	};							\
	default:						\
		__get_user_bad();				\
		break;						\
	}							\
	__gu_err;						\
})

#define get_user(x, ptr)					\
({								\
	const void __user *__p = (ptr);				\
	might_fault();						\
	access_ok(__p, sizeof(*ptr)) ?				\
		__get_user((x), (__typeof__(*(ptr)) __user *)__p) :\
		((x) = (__typeof__(*(ptr)))0, -EFAULT);		\
})

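/*
 * Usage sketch (hypothetical, for illustration): get_user() fetches a
 * single value after checking access_ok(); when that check fails it
 * sets the destination to 0 and returns -EFAULT, so the variable is
 * never left uninitialised:
 *
 *	u32 val;
 *
 *	if (get_user(val, (u32 __user *)arg))
 *		return -EFAULT;
 *
 * As with __put_user(), the __get_user() variant omits the access_ok()
 * check and is only safe on already-validated pointers.
 */
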
#ifndef __get_user_fn
static inline int __get_user_fn(size_t size, const void __user *ptr, void *x)
{
	return unlikely(raw_copy_from_user(x, ptr, size)) ? -EFAULT : 0;
}

#define __get_user_fn(sz, u, k)	__get_user_fn(sz, u, k)

#endif

extern int __get_user_bad(void) __attribute__((noreturn));

/*
 * Zero Userspace
 */
#ifndef __clear_user
static inline __must_check unsigned long
__clear_user(void __user *to, unsigned long n)
{
	memset((void __force *)to, 0, n);
	return 0;
}
#endif

static inline __must_check unsigned long
clear_user(void __user *to, unsigned long n)
{
	might_fault();
	if (!access_ok(to, n))
		return n;

	return __clear_user(to, n);
}

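/*
 * Usage sketch (hypothetical): clear_user() returns the number of bytes
 * that could not be zeroed, so 0 means complete success:
 *
 *	if (clear_user(buf, len))
 *		return -EFAULT;
 *
 * __clear_user() does the same work without the access_ok() check and
 * may only be used on pointers the caller has already validated.
 */
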
#include <asm/extable.h>

__must_check long strncpy_from_user(char *dst, const char __user *src,
				    long count);
__must_check long strnlen_user(const char __user *src, long n);

#endif /* __ASM_GENERIC_UACCESS_H */