xref: /openbmc/linux/arch/x86/include/asm/uaccess_32.h (revision b78412b8)
#ifndef _ASM_X86_UACCESS_32_H
#define _ASM_X86_UACCESS_32_H

/*
 * User space memory access functions
 */
#include <linux/string.h>
#include <asm/asm.h>
#include <asm/page.h>

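/*
 * Out-of-line copy loops, defined in arch/x86/lib/usercopy_32.c. Both
 * return the number of bytes that could NOT be copied; zero means
 * success. The "nocache_nozero" variant uses non-temporal stores and
 * does not zero the destination tail when a fault cuts the copy short.
 */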
unsigned long __must_check __copy_user_ll
		(void *to, const void *from, unsigned long n);
unsigned long __must_check __copy_from_user_ll_nocache_nozero
		(void *to, const void __user *from, unsigned long n);

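/*
 * Kernel -> user copy. The __force cast strips the __user address-space
 * annotation for sparse; the actual fault handling lives in
 * __copy_user_ll's exception tables.
 */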
static __always_inline unsigned long __must_check
raw_copy_to_user(void __user *to, const void *from, unsigned long n)
{
	return __copy_user_ll((__force void *)to, from, n);
}

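/*
 * User -> kernel copy. When the size is a compile-time constant of 1,
 * 2 or 4 bytes, inline a single __get_user_asm_nozero() access between
 * __uaccess_begin()/__uaccess_end() (STAC/CLAC on SMAP-capable CPUs)
 * instead of calling out to __copy_user_ll. The "nozero" form leaves
 * the destination untouched if the load faults; the trailing 1/2/4
 * macro argument is the uncopied-byte count returned in that case.
 */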
static __always_inline unsigned long
raw_copy_from_user(void *to, const void __user *from, unsigned long n)
{
	if (__builtin_constant_p(n)) {
		unsigned long ret;

		switch (n) {
		case 1:
			ret = 0;
			__uaccess_begin();
			__get_user_asm_nozero(*(u8 *)to, from, ret,
					      "b", "b", "=q", 1);
			__uaccess_end();
			return ret;
		case 2:
			ret = 0;
			__uaccess_begin();
			__get_user_asm_nozero(*(u16 *)to, from, ret,
					      "w", "w", "=r", 2);
			__uaccess_end();
			return ret;
		case 4:
			ret = 0;
			__uaccess_begin();
			__get_user_asm_nozero(*(u32 *)to, from, ret,
					      "l", "k", "=r", 4);
			__uaccess_end();
			return ret;
		}
	}
	return __copy_user_ll(to, (__force const void *)from, n);
}
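
/*
 * Minimal usage sketch (hypothetical caller, not part of this header):
 * with a constant size the switch above reduces the copy to a single
 * inlined mov bracketed by STAC/CLAC, e.g.
 *
 *	u32 val;
 *	if (raw_copy_from_user(&val, uptr, sizeof(val)))
 *		return -EFAULT;
 *
 * A nonzero return is the number of bytes left uncopied.
 */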
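/*
 * Cache-bypassing variant for atomic context (callable with pagefaults
 * disabled, hence "inatomic"): it defers to the non-temporal copy loop,
 * so large copies do not pollute the CPU caches.
 */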
static __always_inline unsigned long
__copy_from_user_inatomic_nocache(void *to, const void __user *from,
				  unsigned long n)
{
	return __copy_from_user_ll_nocache_nozero(to, from, n);
}

#endif /* _ASM_X86_UACCESS_32_H */