xref: /openbmc/linux/arch/csky/include/asm/uaccess.h (revision 3381df09)
1 /* SPDX-License-Identifier: GPL-2.0 */
2 // Copyright (C) 2018 Hangzhou C-SKY Microsystems co.,ltd.
3 
4 #ifndef __ASM_CSKY_UACCESS_H
5 #define __ASM_CSKY_UACCESS_H
6 
7 /*
8  * User space memory access functions
9  */
10 #include <linux/compiler.h>
11 #include <linux/errno.h>
12 #include <linux/types.h>
13 #include <linux/sched.h>
14 #include <linux/string.h>
15 #include <linux/version.h>
16 #include <asm/segment.h>
17 
18 static inline int access_ok(const void *addr, unsigned long size)
19 {
20 	unsigned long limit = current_thread_info()->addr_limit.seg;
21 
22 	return (((unsigned long)addr < limit) &&
23 		((unsigned long)(addr + size) < limit));
24 }
25 
/* Non-zero when @addr itself lies below the current addr_limit. */
#define __addr_ok(addr) (access_ok(addr, 0))
27 
28 extern int __put_user_bad(void);
29 
30 /*
31  * Tell gcc we read from memory instead of writing: this is because
32  * we do not write to any memory gcc knows about, so there are no
33  * aliasing issues.
34  */
35 
36 /*
37  * These are the main single-value transfer routines.  They automatically
38  * use the right size if we just have the right pointer type.
39  *
40  * This gets kind of ugly. We want to return _two_ values in "get_user()"
41  * and yet we don't want to do any pointers, because that is too much
42  * of a performance impact. Thus we have a few rather ugly macros here,
43  * and hide all the ugliness from the user.
44  *
45  * The "__xxx" versions of the user access functions are versions that
46  * do not verify the address space, that must have been done previously
47  * with a separate "access_ok()" call (this is used when we do multiple
48  * accesses to the same area of user memory).
49  *
50  * As we use the same address space for kernel and user data on
51  * Ckcore, we can just do these as direct assignments.  (Of course, the
52  * exception handling means that it's no longer "just"...)
53  */
54 
/*
 * put_user: store a simple value to user space, validating the range
 * with access_ok() first.  Evaluates to 0 on success, -EFAULT on error.
 */
#define put_user(x, ptr) \
	__put_user_check((x), (ptr), sizeof(*(ptr)))

/*
 * __put_user: as put_user() but without the access_ok() check; the
 * caller must have validated the range beforehand.
 */
#define __put_user(x, ptr) \
	__put_user_nocheck((x), (ptr), sizeof(*(ptr)))

/* Cast helper.  NOTE(review): appears unused within this header. */
#define __ptr(x) ((unsigned long *)(x))

/*
 * get_user: fetch a simple value from user space, validating the range
 * with access_ok() first.  Evaluates to 0 on success, -EFAULT on error.
 */
#define get_user(x, ptr) \
	__get_user_check((x), (ptr), sizeof(*(ptr)))

/*
 * __get_user: as get_user() but without the access_ok() check.
 */
#define __get_user(x, ptr) \
	__get_user_nocheck((x), (ptr), sizeof(*(ptr)))
68 
/*
 * Store value @x of width @size at user address @ptr with no
 * access_ok() check.  Evaluates to 0 on success or -EFAULT if the
 * store faults.  A NULL @ptr is skipped and still reports success
 * (__pu_err stays 0) — callers relying on an error for NULL beware.
 */
#define __put_user_nocheck(x, ptr, size)				\
({									\
	long __pu_err = 0;						\
	typeof(*(ptr)) *__pu_addr = (ptr);				\
	typeof(*(ptr)) __pu_val = (typeof(*(ptr)))(x);			\
	if (__pu_addr)							\
		__put_user_size(__pu_val, (__pu_addr), (size),		\
				__pu_err);				\
	__pu_err;							\
})
79 
/*
 * Store value @x at user address @ptr after validating the range with
 * access_ok().  __pu_err starts at -EFAULT, so a failed range check or
 * NULL @ptr falls through to -EFAULT; __put_user_size() resets it to 0
 * and only sets -EFAULT again if the store actually faults.
 */
#define __put_user_check(x, ptr, size)					\
({									\
	long __pu_err = -EFAULT;					\
	typeof(*(ptr)) *__pu_addr = (ptr);				\
	typeof(*(ptr)) __pu_val = (typeof(*(ptr)))(x);			\
	if (access_ok(__pu_addr, size) && __pu_addr)	\
		__put_user_size(__pu_val, __pu_addr, (size), __pu_err);	\
	__pu_err;							\
})
89 
/*
 * Size dispatcher for user stores: selects the 1/2/4/8-byte asm
 * helper.  @retval is cleared to 0 here and set to -EFAULT by the
 * helper on a fault.  Any other @size calls the undefined
 * __put_user_bad(), producing a link-time error (note that @retval is
 * left at 0 in that branch).
 */
#define __put_user_size(x, ptr, size, retval)		\
do {							\
	retval = 0;					\
	switch (size) {                                 \
	case 1:						\
		__put_user_asm_b(x, ptr, retval);	\
		break;					\
	case 2:						\
		__put_user_asm_h(x, ptr, retval);	\
		break;					\
	case 4:						\
		__put_user_asm_w(x, ptr, retval);	\
		break;					\
	case 8:						\
		__put_user_asm_64(x, ptr, retval);	\
		break;					\
	default:					\
		__put_user_bad();			\
	}	                                        \
} while (0)
110 
/*
 * We don't tell gcc that we are accessing memory, but this is OK
 * because we do not write to any memory gcc knows about, so there
 * are no aliasing issues.
 *
 * Note that PC at a fault is the address *after* the faulting
 * instruction.
 */
/*
 * 1-byte user store.  The "stb" at label 1 is registered in
 * __ex_table with fixup label 2: on a fault, the handler resumes at 2,
 * which loads -EFAULT into @err.  On success @err is left unchanged.
 */
#define __put_user_asm_b(x, ptr, err)			\
do {							\
	int errcode;					\
	asm volatile(					\
	"1:     stb   %1, (%2,0)	\n"		\
	"       br    3f		\n"		\
	"2:     mov   %0, %3		\n"		\
	"       br    3f		\n"		\
	".section __ex_table, \"a\"	\n"		\
	".align   2			\n"		\
	".long    1b,2b			\n"		\
	".previous			\n"		\
	"3:				\n"		\
	: "=r"(err), "=r"(x), "=r"(ptr), "=r"(errcode)	\
	: "0"(err), "1"(x), "2"(ptr), "3"(-EFAULT)	\
	: "memory");					\
} while (0)
136 
/*
 * 2-byte user store ("sth"); same fixup scheme as __put_user_asm_b:
 * a fault at label 1 resumes at label 2, which sets @err to -EFAULT.
 */
#define __put_user_asm_h(x, ptr, err)			\
do {							\
	int errcode;					\
	asm volatile(					\
	"1:     sth   %1, (%2,0)	\n"		\
	"       br    3f		\n"		\
	"2:     mov   %0, %3		\n"		\
	"       br    3f		\n"		\
	".section __ex_table, \"a\"	\n"		\
	".align   2			\n"		\
	".long    1b,2b			\n"		\
	".previous			\n"		\
	"3:				\n"		\
	: "=r"(err), "=r"(x), "=r"(ptr), "=r"(errcode)	\
	: "0"(err), "1"(x), "2"(ptr), "3"(-EFAULT)	\
	: "memory");					\
} while (0)
154 
/*
 * 4-byte user store ("stw"); same fixup scheme as __put_user_asm_b:
 * a fault at label 1 resumes at label 2, which sets @err to -EFAULT.
 */
#define __put_user_asm_w(x, ptr, err)			\
do {							\
	int errcode;					\
	asm volatile(					\
	"1:     stw   %1, (%2,0)	\n"		\
	"       br    3f		\n"		\
	"2:     mov   %0, %3		\n"		\
	"       br    3f		\n"		\
	".section __ex_table,\"a\"	\n"		\
	".align   2			\n"		\
	".long    1b, 2b		\n"		\
	".previous			\n"		\
	"3:				\n"		\
	: "=r"(err), "=r"(x), "=r"(ptr), "=r"(errcode)	\
	: "0"(err), "1"(x), "2"(ptr), "3"(-EFAULT)	\
	: "memory");					\
} while (0)
172 
/*
 * 8-byte user store.  The 64-bit value is staged in a stack temporary
 * (@src, addressed through @psrc) and copied with two word loads and
 * two word stores.  Both "stw"s (labels 1 and 2) have __ex_table
 * entries pointing at label 3, which sets @err to -EFAULT on a fault.
 * Only the user-side stores need fixups; the kernel-side loads from
 * @psrc cannot fault.
 */
#define __put_user_asm_64(x, ptr, err)				\
do {								\
	int tmp;						\
	int errcode;						\
	typeof(*(ptr))src = (typeof(*(ptr)))x;			\
	typeof(*(ptr))*psrc = &src;				\
								\
	asm volatile(						\
	"     ldw     %3, (%1, 0)     \n"			\
	"1:   stw     %3, (%2, 0)     \n"			\
	"     ldw     %3, (%1, 4)     \n"			\
	"2:   stw     %3, (%2, 4)     \n"			\
	"     br      4f              \n"			\
	"3:   mov     %0, %4          \n"			\
	"     br      4f              \n"			\
	".section __ex_table, \"a\"   \n"			\
	".align   2                   \n"			\
	".long    1b, 3b              \n"			\
	".long    2b, 3b              \n"			\
	".previous                    \n"			\
	"4:                           \n"			\
	: "=r"(err), "=r"(psrc), "=r"(ptr),			\
	  "=r"(tmp), "=r"(errcode)				\
	: "0"(err), "1"(psrc), "2"(ptr), "3"(0), "4"(-EFAULT)	\
	: "memory");						\
} while (0)
199 
/*
 * Fetch a value of width @size from user address @ptr into @x with no
 * access_ok() check.  Evaluates to 0 on success or -EFAULT on a
 * faulting load (@x is zeroed by the asm fixup in that case).
 * __gu_err needs no initializer: every path through __get_user_size()
 * assigns it (the asm helper seeds it with 0 via the "0"(0) operand).
 */
#define __get_user_nocheck(x, ptr, size)			\
({								\
	long  __gu_err;						\
	__get_user_size(x, (ptr), (size), __gu_err);		\
	__gu_err;						\
})
206 
/*
 * Fetch a value from user address @ptr into @x after validating the
 * range with access_ok().  Starts at -EFAULT so a failed range check
 * or NULL @ptr reports -EFAULT without touching @x.
 * NOTE(review): error variable is int here but long on the put_user
 * side — harmless, though inconsistent.
 */
#define __get_user_check(x, ptr, size)				\
({								\
	int __gu_err = -EFAULT;					\
	const __typeof__(*(ptr)) __user *__gu_ptr = (ptr);	\
	if (access_ok(__gu_ptr, size) && __gu_ptr)	\
		__get_user_size(x, __gu_ptr, size, __gu_err);	\
	__gu_err;						\
})
215 
/*
 * Size dispatcher for user loads: picks the 1/2/4-byte load mnemonic
 * for the common asm helper.  Unlike the put side, there is no 8-byte
 * case: a 64-bit get_user zeroes @x and triggers a link-time error via
 * the undefined __get_user_bad().
 */
#define __get_user_size(x, ptr, size, retval)			\
do {								\
	switch (size) {						\
	case 1:							\
		__get_user_asm_common((x), ptr, "ldb", retval);	\
		break;						\
	case 2:							\
		__get_user_asm_common((x), ptr, "ldh", retval);	\
		break;						\
	case 4:							\
		__get_user_asm_common((x), ptr, "ldw", retval);	\
		break;						\
	default:						\
		x = 0;						\
		(retval) = __get_user_bad();			\
	}							\
} while (0)
233 
/*
 * Shared user-load helper; @ins is the load mnemonic ("ldb"/"ldh"/
 * "ldw") pasted into the template.  @err is seeded with 0 through the
 * "0"(0) input.  If the load at label 1 faults, the __ex_table entry
 * resumes at label 2, which sets @err to -EFAULT and zeroes @x so the
 * caller never sees stale data.
 */
#define __get_user_asm_common(x, ptr, ins, err)			\
do {								\
	int errcode;						\
	asm volatile(						\
	"1:   " ins " %1, (%4,0)	\n"			\
	"       br    3f		\n"			\
	/* Fix up codes */					\
	"2:     mov   %0, %2		\n"			\
	"       movi  %1, 0		\n"			\
	"       br    3f		\n"			\
	".section __ex_table,\"a\"      \n"			\
	".align   2			\n"			\
	".long    1b, 2b		\n"			\
	".previous			\n"			\
	"3:				\n" 			\
	: "=r"(err), "=r"(x), "=r"(errcode)			\
	: "0"(0), "r"(ptr), "2"(-EFAULT)			\
	: "memory");						\
} while (0)
253 
254 extern int __get_user_bad(void);
255 
/*
 * Copy @n bytes from kernel memory @from to user memory @to.
 *
 * Strategy: if either pointer is not 4-byte aligned, fall straight to
 * the byte loop at label 5.  Otherwise copy in 16-byte word bursts
 * (label 1), then single words (label 3), then trailing bytes
 * (label 5).  Only the user-side stores (labels 2, 9, 10, 11, 4, 6)
 * carry __ex_table fixups; kernel-side loads cannot fault.  A faulting
 * store jumps to label 7 and exits with @n holding the count not yet
 * subtracted — i.e. the bytes not confirmed copied.
 */
#define __copy_user(to, from, n)			\
do {							\
	int w0, w1, w2, w3;				\
	asm volatile(					\
	"0:     cmpnei  %1, 0           \n"		\
	"       bf      8f              \n"		\
	"       mov     %3, %1          \n"		\
	"       or      %3, %2          \n"		\
	"       andi    %3, 3           \n"		\
	"       cmpnei  %3, 0           \n"		\
	"       bf      1f              \n"		\
	"       br      5f              \n"		\
	"1:     cmplti  %0, 16          \n" /* 4W */	\
	"       bt      3f              \n"		\
	"       ldw     %3, (%2, 0)     \n"		\
	"       ldw     %4, (%2, 4)     \n"		\
	"       ldw     %5, (%2, 8)     \n"		\
	"       ldw     %6, (%2, 12)    \n"		\
	"2:     stw     %3, (%1, 0)     \n"		\
	"9:     stw     %4, (%1, 4)     \n"		\
	"10:    stw     %5, (%1, 8)     \n"		\
	"11:    stw     %6, (%1, 12)    \n"		\
	"       addi    %2, 16          \n"		\
	"       addi    %1, 16          \n"		\
	"       subi    %0, 16          \n"		\
	"       br      1b              \n"		\
	"3:     cmplti  %0, 4           \n" /* 1W */	\
	"       bt      5f              \n"		\
	"       ldw     %3, (%2, 0)     \n"		\
	"4:     stw     %3, (%1, 0)     \n"		\
	"       addi    %2, 4           \n"		\
	"       addi    %1, 4           \n"		\
	"       subi    %0, 4           \n"		\
	"       br      3b              \n"		\
	"5:     cmpnei  %0, 0           \n"  /* 1B */   \
	"       bf      8f              \n"		\
	"       ldb     %3, (%2, 0)     \n"		\
	"6:     stb     %3, (%1, 0)     \n"		\
	"       addi    %2,  1          \n"		\
	"       addi    %1,  1          \n"		\
	"       subi    %0,  1          \n"		\
	"       br      5b              \n"		\
	"7:     br      8f              \n"		\
	".section __ex_table, \"a\"     \n"		\
	".align   2                     \n"		\
	".long    2b, 7b                \n"		\
	".long    9b, 7b                \n"		\
	".long   10b, 7b                \n"		\
	".long   11b, 7b                \n"		\
	".long    4b, 7b                \n"		\
	".long    6b, 7b                \n"		\
	".previous                      \n"		\
	"8:                             \n"		\
	: "=r"(n), "=r"(to), "=r"(from), "=r"(w0),	\
	  "=r"(w1), "=r"(w2), "=r"(w3)			\
	: "0"(n), "1"(to), "2"(from)			\
	: "memory");					\
} while (0)
314 
/*
 * Copy @n bytes from user memory @from to kernel memory @to.
 *
 * Mirrors __copy_user's alignment/burst structure, but here the
 * user-side LOADS (labels 2, 10, 11, 12, 4, 6) carry the __ex_table
 * fixups.  On a faulting load, control resumes at label 8, which
 * zero-fills the remaining destination bytes (the loop at label 9)
 * before exiting — satisfying the copy_from_user contract that the
 * uncopied tail of the kernel buffer must not hold stale data.
 */
#define __copy_user_zeroing(to, from, n)		\
do {							\
	int tmp;					\
	int nsave;					\
	asm volatile(					\
	"0:     cmpnei  %1, 0           \n"		\
	"       bf      7f              \n"		\
	"       mov     %3, %1          \n"		\
	"       or      %3, %2          \n"		\
	"       andi    %3, 3           \n"		\
	"       cmpnei  %3, 0           \n"		\
	"       bf      1f              \n"		\
	"       br      5f              \n"		\
	"1:     cmplti  %0, 16          \n"		\
	"       bt      3f              \n"		\
	"2:     ldw     %3, (%2, 0)     \n"		\
	"10:    ldw     %4, (%2, 4)     \n"		\
	"       stw     %3, (%1, 0)     \n"		\
	"       stw     %4, (%1, 4)     \n"		\
	"11:    ldw     %3, (%2, 8)     \n"		\
	"12:    ldw     %4, (%2, 12)    \n"		\
	"       stw     %3, (%1, 8)     \n"		\
	"       stw     %4, (%1, 12)    \n"		\
	"       addi    %2, 16          \n"		\
	"       addi    %1, 16          \n"		\
	"       subi    %0, 16          \n"		\
	"       br      1b              \n"		\
	"3:     cmplti  %0, 4           \n"		\
	"       bt      5f              \n"		\
	"4:     ldw     %3, (%2, 0)     \n"		\
	"       stw     %3, (%1, 0)     \n"		\
	"       addi    %2, 4           \n"		\
	"       addi    %1, 4           \n"		\
	"       subi    %0, 4           \n"		\
	"       br      3b              \n"		\
	"5:     cmpnei  %0, 0           \n"		\
	"       bf      7f              \n"		\
	"6:     ldb     %3, (%2, 0)     \n"		\
	"       stb     %3, (%1, 0)     \n"		\
	"       addi    %2,  1          \n"		\
	"       addi    %1,  1          \n"		\
	"       subi    %0,  1          \n"		\
	"       br      5b              \n"		\
	"8:     mov     %3, %0          \n"		\
	"       movi    %4, 0           \n"		\
	"9:     stb     %4, (%1, 0)     \n"		\
	"       addi    %1, 1           \n"		\
	"       subi    %3, 1           \n"		\
	"       cmpnei  %3, 0           \n"		\
	"       bt      9b              \n"		\
	"       br      7f              \n"		\
	".section __ex_table, \"a\"     \n"		\
	".align   2                     \n"		\
	".long    2b, 8b                \n"		\
	".long   10b, 8b                \n"		\
	".long   11b, 8b                \n"		\
	".long   12b, 8b                \n"		\
	".long    4b, 8b                \n"		\
	".long    6b, 8b                \n"		\
	".previous                      \n"		\
	"7:                             \n"		\
	: "=r"(n), "=r"(to), "=r"(from), "=r"(nsave),	\
	  "=r"(tmp)					\
	: "0"(n), "1"(to), "2"(from)			\
	: "memory");					\
} while (0)
381 
382 unsigned long raw_copy_from_user(void *to, const void *from, unsigned long n);
383 unsigned long raw_copy_to_user(void *to, const void *from, unsigned long n);
384 
385 unsigned long clear_user(void *to, unsigned long n);
386 unsigned long __clear_user(void __user *to, unsigned long n);
387 
388 long strncpy_from_user(char *dst, const char *src, long count);
389 long __strncpy_from_user(char *dst, const char *src, long count);
390 
391 /*
392  * Return the size of a string (including the ending 0)
393  *
394  * Return 0 on exception, a value greater than N if too long
395  */
396 long strnlen_user(const char *src, long n);
397 
/* Legacy helper: length of a user string, bounded at 32767 bytes. */
#define strlen_user(str) strnlen_user(str, 32767)
399 
/*
 * One exception-table entry, as emitted by the __ex_table sections in
 * the asm helpers above: if a fault occurs at address @insn, the fault
 * handler resumes execution at @nextinsn (the fixup code).
 */
struct exception_table_entry {
	unsigned long insn;	/* address of the faulting instruction */
	unsigned long nextinsn;	/* address of the fixup to resume at */
};
404 
405 extern int fixup_exception(struct pt_regs *regs);
406 
407 #endif /* __ASM_CSKY_UACCESS_H */
408