xref: /openbmc/linux/arch/csky/include/asm/uaccess.h (revision 31e67366)
1 /* SPDX-License-Identifier: GPL-2.0 */
2 
3 #ifndef __ASM_CSKY_UACCESS_H
4 #define __ASM_CSKY_UACCESS_H
5 
6 /*
7  * User space memory access functions
8  */
9 #include <linux/compiler.h>
10 #include <linux/errno.h>
11 #include <linux/types.h>
12 #include <linux/sched.h>
13 #include <linux/string.h>
14 #include <linux/version.h>
15 #include <asm/segment.h>
16 
17 static inline int access_ok(const void *addr, unsigned long size)
18 {
19 	unsigned long limit = current_thread_info()->addr_limit.seg;
20 
21 	return (((unsigned long)addr < limit) &&
22 		((unsigned long)(addr + size) < limit));
23 }
24 
/* Non-zero when @addr itself lies below the current address limit. */
#define __addr_ok(addr) (access_ok(addr, 0))

/*
 * Declared but (conventionally) never defined, so that a reference from
 * an invalid put_user size fails at link time — TODO confirm there is
 * no definition elsewhere in the tree.
 */
extern int __put_user_bad(void);
28 
29 /*
30  * Tell gcc we read from memory instead of writing: this is because
31  * we do not write to any memory gcc knows about, so there are no
32  * aliasing issues.
33  */
34 
35 /*
36  * These are the main single-value transfer routines.  They automatically
37  * use the right size if we just have the right pointer type.
38  *
39  * This gets kind of ugly. We want to return _two_ values in "get_user()"
40  * and yet we don't want to do any pointers, because that is too much
41  * of a performance impact. Thus we have a few rather ugly macros here,
42  * and hide all the ugliness from the user.
43  *
44  * The "__xxx" versions of the user access functions are versions that
45  * do not verify the address space, that must have been done previously
46  * with a separate "access_ok()" call (this is used when we do multiple
47  * accesses to the same area of user memory).
48  *
49  * As we use the same address space for kernel and user data on
50  * Ckcore, we can just do these as direct assignments.  (Of course, the
51  * exception handling means that it's no longer "just"...)
52  */
53 
/*
 * Checked store of @x to user pointer @ptr; size taken from *(ptr).
 * Evaluates to 0 on success, -EFAULT on a bad address.
 */
#define put_user(x, ptr) \
	__put_user_check((x), (ptr), sizeof(*(ptr)))

/*
 * Unchecked variant: caller must have validated the range with a prior
 * access_ok() call (see the comment block above).
 */
#define __put_user(x, ptr) \
	__put_user_nocheck((x), (ptr), sizeof(*(ptr)))

/* Cast helper: view @x as a pointer to unsigned long. */
#define __ptr(x) ((unsigned long *)(x))

/*
 * Checked load from user pointer @ptr into @x; size taken from *(ptr).
 * Evaluates to 0 on success, -EFAULT on a bad address.
 */
#define get_user(x, ptr) \
	__get_user_check((x), (ptr), sizeof(*(ptr)))

/*
 * Unchecked variant: caller must have validated the range with a prior
 * access_ok() call.
 */
#define __get_user(x, ptr) \
	__get_user_nocheck((x), (ptr), sizeof(*(ptr)))
67 
/*
 * Store @x (converted to the pointee type) at user address @ptr without
 * any access_ok() check.  Evaluates to 0 on success or -EFAULT if the
 * store faults (set by the asm in __put_user_size).
 * NOTE(review): a NULL @ptr is skipped silently and still yields 0.
 */
#define __put_user_nocheck(x, ptr, size)				\
({									\
	long __pu_err = 0;						\
	typeof(*(ptr)) *__pu_addr = (ptr);				\
	typeof(*(ptr)) __pu_val = (typeof(*(ptr)))(x);			\
	if (__pu_addr)							\
		__put_user_size(__pu_val, (__pu_addr), (size),		\
				__pu_err);				\
	__pu_err;							\
})
78 
/*
 * Store @x at user address @ptr after validating the range with
 * access_ok().  Evaluates to 0 on success; -EFAULT when the range is
 * outside the address limit, @ptr is NULL, or the store itself faults.
 * (__put_user_size resets the error to 0 before the store.)
 */
#define __put_user_check(x, ptr, size)					\
({									\
	long __pu_err = -EFAULT;					\
	typeof(*(ptr)) *__pu_addr = (ptr);				\
	typeof(*(ptr)) __pu_val = (typeof(*(ptr)))(x);			\
	if (access_ok(__pu_addr, size) && __pu_addr)	\
		__put_user_size(__pu_val, __pu_addr, (size), __pu_err);	\
	__pu_err;							\
})
88 
/*
 * Dispatch a user store by operand size (1/2/4/8 bytes).  @retval is
 * first cleared, then set to -EFAULT by the asm helper on a fault.
 * Any other size references the undefined __put_user_bad(), turning a
 * bad sizeof into a link-time error.
 */
#define __put_user_size(x, ptr, size, retval)		\
do {							\
	retval = 0;					\
	switch (size) {                                 \
	case 1:						\
		__put_user_asm_b(x, ptr, retval);	\
		break;					\
	case 2:						\
		__put_user_asm_h(x, ptr, retval);	\
		break;					\
	case 4:						\
		__put_user_asm_w(x, ptr, retval);	\
		break;					\
	case 8:						\
		__put_user_asm_64(x, ptr, retval);	\
		break;					\
	default:					\
		__put_user_bad();			\
	}	                                        \
} while (0)
109 
110 /*
111  * We don't tell gcc that we are accessing memory, but this is OK
112  * because we do not write to any memory gcc knows about, so there
113  * are no aliasing issues.
114  *
115  * Note that PC at a fault is the address *after* the faulting
116  * instruction.
117  */
/*
 * Store one byte to user space.  Label 1 is the faulting stb; the
 * __ex_table entry (1b,2b) redirects a fault at 1 to label 2, which
 * copies -EFAULT (held in %3) into @err.  On success @err keeps its
 * incoming value (0, pre-set by __put_user_size).
 */
#define __put_user_asm_b(x, ptr, err)			\
do {							\
	int errcode;					\
	asm volatile(					\
	"1:     stb   %1, (%2,0)	\n"		\
	"       br    3f		\n"		\
	"2:     mov   %0, %3		\n"		\
	"       br    3f		\n"		\
	".section __ex_table, \"a\"	\n"		\
	".align   2			\n"		\
	".long    1b,2b			\n"		\
	".previous			\n"		\
	"3:				\n"		\
	: "=r"(err), "=r"(x), "=r"(ptr), "=r"(errcode)	\
	: "0"(err), "1"(x), "2"(ptr), "3"(-EFAULT)	\
	: "memory");					\
} while (0)
135 
/*
 * Store one halfword (2 bytes) to user space.  Same fixup scheme as
 * __put_user_asm_b: a fault at label 1 lands on label 2, which sets
 * @err = -EFAULT via the exception table.
 */
#define __put_user_asm_h(x, ptr, err)			\
do {							\
	int errcode;					\
	asm volatile(					\
	"1:     sth   %1, (%2,0)	\n"		\
	"       br    3f		\n"		\
	"2:     mov   %0, %3		\n"		\
	"       br    3f		\n"		\
	".section __ex_table, \"a\"	\n"		\
	".align   2			\n"		\
	".long    1b,2b			\n"		\
	".previous			\n"		\
	"3:				\n"		\
	: "=r"(err), "=r"(x), "=r"(ptr), "=r"(errcode)	\
	: "0"(err), "1"(x), "2"(ptr), "3"(-EFAULT)	\
	: "memory");					\
} while (0)
153 
/*
 * Store one word (4 bytes) to user space.  Same fixup scheme as the
 * byte/halfword variants: a fault at label 1 lands on label 2, which
 * sets @err = -EFAULT via the exception table.
 */
#define __put_user_asm_w(x, ptr, err)			\
do {							\
	int errcode;					\
	asm volatile(					\
	"1:     stw   %1, (%2,0)	\n"		\
	"       br    3f		\n"		\
	"2:     mov   %0, %3		\n"		\
	"       br    3f		\n"		\
	".section __ex_table,\"a\"	\n"		\
	".align   2			\n"		\
	".long    1b, 2b		\n"		\
	".previous			\n"		\
	"3:				\n"		\
	: "=r"(err), "=r"(x), "=r"(ptr), "=r"(errcode)	\
	: "0"(err), "1"(x), "2"(ptr), "3"(-EFAULT)	\
	: "memory");					\
} while (0)
171 
/*
 * Store a 64-bit value to user space.  The value is first copied into
 * a stack temporary (@src) so it can be read back word-by-word through
 * @psrc; the two user-side stw's at labels 1 and 2 each carry an
 * exception-table entry pointing at label 3, which sets @err = -EFAULT.
 * The kernel-side ldw's need no fixup.
 */
#define __put_user_asm_64(x, ptr, err)				\
do {								\
	int tmp;						\
	int errcode;						\
	typeof(*(ptr))src = (typeof(*(ptr)))x;			\
	typeof(*(ptr))*psrc = &src;				\
								\
	asm volatile(						\
	"     ldw     %3, (%1, 0)     \n"			\
	"1:   stw     %3, (%2, 0)     \n"			\
	"     ldw     %3, (%1, 4)     \n"			\
	"2:   stw     %3, (%2, 4)     \n"			\
	"     br      4f              \n"			\
	"3:   mov     %0, %4          \n"			\
	"     br      4f              \n"			\
	".section __ex_table, \"a\"   \n"			\
	".align   2                   \n"			\
	".long    1b, 3b              \n"			\
	".long    2b, 3b              \n"			\
	".previous                    \n"			\
	"4:                           \n"			\
	: "=r"(err), "=r"(psrc), "=r"(ptr),			\
	  "=r"(tmp), "=r"(errcode)				\
	: "0"(err), "1"(psrc), "2"(ptr), "3"(0), "4"(-EFAULT)	\
	: "memory");						\
} while (0)
198 
/*
 * Load from user address @ptr into @x without an access_ok() check.
 * Evaluates to 0 on success, -EFAULT on a fault.
 * Note: __gu_err starts uninitialized but is always written —
 * sizes 1/2/4 initialize it to 0 inside the asm ("0"(0) constraint in
 * __get_user_asm_common), and the default branch assigns it.
 */
#define __get_user_nocheck(x, ptr, size)			\
({								\
	long  __gu_err;						\
	__get_user_size(x, (ptr), (size), __gu_err);		\
	__gu_err;						\
})
205 
/*
 * Load from user address @ptr into @x after validating the range with
 * access_ok().  Evaluates to 0 on success, -EFAULT otherwise.
 * NOTE(review): when access_ok() fails, @x is left unmodified, whereas
 * a faulting load zeroes it (see __get_user_asm_common).
 */
#define __get_user_check(x, ptr, size)				\
({								\
	int __gu_err = -EFAULT;					\
	const __typeof__(*(ptr)) __user *__gu_ptr = (ptr);	\
	if (access_ok(__gu_ptr, size) && __gu_ptr)	\
		__get_user_size(x, __gu_ptr, size, __gu_err);	\
	__gu_err;						\
})
214 
/*
 * Dispatch a user load by operand size.  Only 1/2/4 bytes are
 * supported (no 64-bit get_user, unlike __put_user_size); any other
 * size zeroes @x and references the undefined __get_user_bad() so the
 * misuse fails at link time.
 */
#define __get_user_size(x, ptr, size, retval)			\
do {								\
	switch (size) {						\
	case 1:							\
		__get_user_asm_common((x), ptr, "ldb", retval);	\
		break;						\
	case 2:							\
		__get_user_asm_common((x), ptr, "ldh", retval);	\
		break;						\
	case 4:							\
		__get_user_asm_common((x), ptr, "ldw", retval);	\
		break;						\
	default:						\
		x = 0;						\
		(retval) = __get_user_bad();			\
	}							\
} while (0)
232 
/*
 * Shared load helper: @ins is the load mnemonic ("ldb"/"ldh"/"ldw").
 * @err is initialized to 0 inside the asm (the "0"(0) input).  A fault
 * at label 1 is redirected by the exception table to label 2, which
 * sets @err = -EFAULT and zeroes @x.
 */
#define __get_user_asm_common(x, ptr, ins, err)			\
do {								\
	int errcode;						\
	asm volatile(						\
	"1:   " ins " %1, (%4,0)	\n"			\
	"       br    3f		\n"			\
	/* Fix up codes */					\
	"2:     mov   %0, %2		\n"			\
	"       movi  %1, 0		\n"			\
	"       br    3f		\n"			\
	".section __ex_table,\"a\"      \n"			\
	".align   2			\n"			\
	".long    1b, 2b		\n"			\
	".previous			\n"			\
	"3:				\n" 			\
	: "=r"(err), "=r"(x), "=r"(errcode)			\
	: "0"(0), "r"(ptr), "2"(-EFAULT)			\
	: "memory");						\
} while (0)
252 
253 extern int __get_user_bad(void);
254 
/*
 * Copy @n bytes from kernel buffer @from to user buffer @to.
 *
 * Fast path: when both pointers are 4-byte aligned (checked by
 * or/andi 3 on %1|%2), copies 16-byte (4-word) chunks at labels
 * 2/9/10/11, then single words at label 4, then trailing bytes at
 * label 6.  Only the user-side stores carry exception-table entries;
 * on a fault the fixup code adjusts the count in %0, so the macro
 * leaves the number of bytes NOT copied in @n (callers read it back
 * through the "=r"(n) output).
 *
 * NOTE(review): a fault in the middle of a 4-word block lands on the
 * cascading "subi %0, 4" fixups at labels 7/8/12 — confirm the
 * reported remainder is exact for partially-stored 16-byte chunks.
 */
#define ___copy_to_user(to, from, n)			\
do {							\
	int w0, w1, w2, w3;				\
	asm volatile(					\
	"0:     cmpnei  %1, 0           \n"		\
	"       bf      8f              \n"		\
	"       mov     %3, %1          \n"		\
	"       or      %3, %2          \n"		\
	"       andi    %3, 3           \n"		\
	"       cmpnei  %3, 0           \n"		\
	"       bf      1f              \n"		\
	"       br      5f              \n"		\
	"1:     cmplti  %0, 16          \n" /* 4W */	\
	"       bt      3f              \n"		\
	"       ldw     %3, (%2, 0)     \n"		\
	"       ldw     %4, (%2, 4)     \n"		\
	"       ldw     %5, (%2, 8)     \n"		\
	"       ldw     %6, (%2, 12)    \n"		\
	"2:     stw     %3, (%1, 0)     \n"		\
	"9:     stw     %4, (%1, 4)     \n"		\
	"10:    stw     %5, (%1, 8)     \n"		\
	"11:    stw     %6, (%1, 12)    \n"		\
	"       addi    %2, 16          \n"		\
	"       addi    %1, 16          \n"		\
	"       subi    %0, 16          \n"		\
	"       br      1b              \n"		\
	"3:     cmplti  %0, 4           \n" /* 1W */	\
	"       bt      5f              \n"		\
	"       ldw     %3, (%2, 0)     \n"		\
	"4:     stw     %3, (%1, 0)     \n"		\
	"       addi    %2, 4           \n"		\
	"       addi    %1, 4           \n"		\
	"       subi    %0, 4           \n"		\
	"       br      3b              \n"		\
	"5:     cmpnei  %0, 0           \n"  /* 1B */   \
	"       bf      13f             \n"		\
	"       ldb     %3, (%2, 0)     \n"		\
	"6:     stb     %3, (%1, 0)     \n"		\
	"       addi    %2,  1          \n"		\
	"       addi    %1,  1          \n"		\
	"       subi    %0,  1          \n"		\
	"       br      5b              \n"		\
	"7:     subi	%0,  4          \n"		\
	"8:     subi	%0,  4          \n"		\
	"12:    subi	%0,  4          \n"		\
	"       br      13f             \n"		\
	".section __ex_table, \"a\"     \n"		\
	".align   2                     \n"		\
	".long    2b, 13f               \n"		\
	".long    4b, 13f               \n"		\
	".long    6b, 13f               \n"		\
	".long    9b, 12b               \n"		\
	".long   10b, 8b                \n"		\
	".long   11b, 7b                \n"		\
	".previous                      \n"		\
	"13:                            \n"		\
	: "=r"(n), "=r"(to), "=r"(from), "=r"(w0),	\
	  "=r"(w1), "=r"(w2), "=r"(w3)			\
	: "0"(n), "1"(to), "2"(from)			\
	: "memory");					\
} while (0)
316 
/*
 * Copy @n bytes from user buffer @from to kernel buffer @to.
 *
 * Same structure as ___copy_to_user: 16-byte chunks when both pointers
 * are 4-byte aligned, then single words, then bytes.  Here the
 * user-side LOADS (labels 2/10/11/12, 4, 6) are the faulting
 * instructions covered by the exception table; the fixups at labels
 * 8/9/13 store words that were already loaded before adjusting the
 * residual count in %0 (left in @n on return).
 *
 * NOTE(review): the fixup paths branch with "bf 7f" on a condition
 * flag left over from the main loop's compare — verify the residual
 * count left in @n is exact for faults inside a 16-byte chunk.
 */
#define ___copy_from_user(to, from, n)			\
do {							\
	int tmp;					\
	int nsave;					\
	asm volatile(					\
	"0:     cmpnei  %1, 0           \n"		\
	"       bf      7f              \n"		\
	"       mov     %3, %1          \n"		\
	"       or      %3, %2          \n"		\
	"       andi    %3, 3           \n"		\
	"       cmpnei  %3, 0           \n"		\
	"       bf      1f              \n"		\
	"       br      5f              \n"		\
	"1:     cmplti  %0, 16          \n"		\
	"       bt      3f              \n"		\
	"2:     ldw     %3, (%2, 0)     \n"		\
	"10:    ldw     %4, (%2, 4)     \n"		\
	"       stw     %3, (%1, 0)     \n"		\
	"       stw     %4, (%1, 4)     \n"		\
	"11:    ldw     %3, (%2, 8)     \n"		\
	"12:    ldw     %4, (%2, 12)    \n"		\
	"       stw     %3, (%1, 8)     \n"		\
	"       stw     %4, (%1, 12)    \n"		\
	"       addi    %2, 16          \n"		\
	"       addi    %1, 16          \n"		\
	"       subi    %0, 16          \n"		\
	"       br      1b              \n"		\
	"3:     cmplti  %0, 4           \n"		\
	"       bt      5f              \n"		\
	"4:     ldw     %3, (%2, 0)     \n"		\
	"       stw     %3, (%1, 0)     \n"		\
	"       addi    %2, 4           \n"		\
	"       addi    %1, 4           \n"		\
	"       subi    %0, 4           \n"		\
	"       br      3b              \n"		\
	"5:     cmpnei  %0, 0           \n"		\
	"       bf      7f              \n"		\
	"6:     ldb     %3, (%2, 0)     \n"		\
	"       stb     %3, (%1, 0)     \n"		\
	"       addi    %2,  1          \n"		\
	"       addi    %1,  1          \n"		\
	"       subi    %0,  1          \n"		\
	"       br      5b              \n"		\
	"8:     stw     %3, (%1, 0)     \n"		\
	"       subi    %0, 4           \n"		\
	"       bf      7f              \n"		\
	"9:     subi    %0, 8           \n"		\
	"       bf      7f              \n"		\
	"13:    stw     %3, (%1, 8)     \n"		\
	"       subi    %0, 12          \n"		\
	"       bf      7f              \n"		\
	".section __ex_table, \"a\"     \n"		\
	".align   2                     \n"		\
	".long    2b, 7f                \n"		\
	".long    4b, 7f                \n"		\
	".long    6b, 7f                \n"		\
	".long   10b, 8b                \n"		\
	".long   11b, 9b                \n"		\
	".long   12b,13b                \n"		\
	".previous                      \n"		\
	"7:                             \n"		\
	: "=r"(n), "=r"(to), "=r"(from), "=r"(nsave),	\
	  "=r"(tmp)					\
	: "0"(n), "1"(to), "2"(from)			\
	: "memory");					\
} while (0)
383 
/*
 * Bulk copy primitives, implemented out of line (presumably in
 * arch/csky/lib) on top of the ___copy_* macros above.  By kernel
 * convention they return the number of bytes NOT copied (0 on
 * success) — confirm against the out-of-line definitions.
 */
unsigned long raw_copy_from_user(void *to, const void *from, unsigned long n);
unsigned long raw_copy_to_user(void *to, const void *from, unsigned long n);

/* Zero @n bytes of user memory; clear_user() validates the range first. */
unsigned long clear_user(void *to, unsigned long n);
unsigned long __clear_user(void __user *to, unsigned long n);

/* Copy a NUL-terminated string from user space, at most @count bytes. */
long strncpy_from_user(char *dst, const char *src, long count);
long __strncpy_from_user(char *dst, const char *src, long count);

/*
 * Return the size of a string (including the ending 0)
 *
 * Return 0 on exception, a value greater than N if too long
 */
long strnlen_user(const char *src, long n);

/* Legacy helper: bounded strlen with a fixed 32767-byte cap. */
#define strlen_user(str) strnlen_user(str, 32767)
401 
/*
 * One entry of the __ex_table section emitted by the asm above:
 * if a fault occurs at address @insn, execution resumes at @nextinsn.
 */
struct exception_table_entry {
	unsigned long insn;
	unsigned long nextinsn;
};

/*
 * Search the exception table for regs->pc and apply the fixup;
 * presumably returns non-zero when a fixup was applied — confirm
 * against the arch fault-handling code.
 */
extern int fixup_exception(struct pt_regs *regs);
408 
409 #endif /* __ASM_CSKY_UACCESS_H */
410