/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __M68K_UACCESS_H
#define __M68K_UACCESS_H

#ifdef CONFIG_MMU

/*
 * User space memory access functions
 */
#include <linux/compiler.h>
#include <linux/types.h>
#include <linux/errno.h>
#include <linux/build_bug.h>
#include <asm/extable.h>
#include <asm-generic/access_ok.h>

/*
 * Not all variants of the 68k family support the notion of address spaces.
 * The traditional 680x0 parts do, and they use the sfc/dfc registers and
 * the "moves" instruction to access user space from kernel space. Other
 * family members like ColdFire don't support this, and only have a single
 * address space, and use the usual "move" instruction for user space access.
 *
 * Outside of this difference the user space access functions are the same.
 * So let's keep the code simple and just define what we need to use.
 */
#ifdef CONFIG_CPU_HAS_ADDRESS_SPACES
#define MOVES "moves"
#else
#define MOVES "move"
#endif

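/*
 * __put_user_asm() stores a single 1-, 2- or 4-byte value at a user
 * address with one MOVES/move instruction.  On a fault the exception
 * table redirects to the fixup code at label 10, which loads "err"
 * into the result and resumes execution after the store.
 */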
#define __put_user_asm(inst, res, x, ptr, bwl, reg, err) \
asm volatile ("\n" \
	"1: "inst"."#bwl" %2,%1\n" \
	"2:\n" \
	" .section .fixup,\"ax\"\n" \
	" .even\n" \
	"10: moveq.l %3,%0\n" \
	" jra 2b\n" \
	" .previous\n" \
	"\n" \
	" .section __ex_table,\"a\"\n" \
	" .align 4\n" \
	" .long 1b,10b\n" \
	" .long 2b,10b\n" \
	" .previous" \
	: "+d" (res), "=m" (*(ptr)) \
	: #reg (x), "i" (err))

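/*
 * __put_user_asm8() stores a 64-bit value as two longword moves
 * through a postincremented address register; "%R2" names the second
 * register of the pair holding operand 2.  A fault on either half
 * leaves -EFAULT in the result.
 */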
#define __put_user_asm8(inst, res, x, ptr) \
do { \
	const void *__pu_ptr = (const void __force *)(ptr); \
\
	asm volatile ("\n" \
		"1: "inst".l %2,(%1)+\n" \
		"2: "inst".l %R2,(%1)\n" \
		"3:\n" \
		" .section .fixup,\"ax\"\n" \
		" .even\n" \
		"10: move.l %3,%0\n" \
		" jra 3b\n" \
		" .previous\n" \
		"\n" \
		" .section __ex_table,\"a\"\n" \
		" .align 4\n" \
		" .long 1b,10b\n" \
		" .long 2b,10b\n" \
		" .long 3b,10b\n" \
		" .previous" \
		: "+d" (res), "+a" (__pu_ptr) \
		: "r" (x), "i" (-EFAULT) \
		: "memory"); \
} while (0)

/*
 * These are the main single-value transfer routines.  They automatically
 * use the right size if we just have the right pointer type.
 */

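/*
 * Typical use (illustrative sketch only, not code from this file):
 *
 *	int __user *uptr;	// hypothetical user pointer
 *	int val;
 *
 *	if (get_user(val, uptr))
 *		return -EFAULT;
 *	if (put_user(val + 1, uptr))
 *		return -EFAULT;
 *
 * Both evaluate to 0 on success and -EFAULT on a faulting access.
 */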
#define __put_user(x, ptr) \
({ \
	typeof(*(ptr)) __pu_val = (x); \
	int __pu_err = 0; \
	__chk_user_ptr(ptr); \
	switch (sizeof (*(ptr))) { \
	case 1: \
		__put_user_asm(MOVES, __pu_err, __pu_val, ptr, b, d, -EFAULT); \
		break; \
	case 2: \
		__put_user_asm(MOVES, __pu_err, __pu_val, ptr, w, r, -EFAULT); \
		break; \
	case 4: \
		__put_user_asm(MOVES, __pu_err, __pu_val, ptr, l, r, -EFAULT); \
		break; \
	case 8: \
		__put_user_asm8(MOVES, __pu_err, __pu_val, ptr); \
		break; \
	default: \
		BUILD_BUG(); \
	} \
	__pu_err; \
})
#define put_user(x, ptr) __put_user(x, ptr)

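/*
 * __get_user_asm() loads a single 1-, 2- or 4-byte value from a user
 * address.  The fixup code stores "err" in the result and zeroes the
 * destination register, so a failed access never leaves stale data
 * in the returned value.
 */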
#define __get_user_asm(inst, res, x, ptr, type, bwl, reg, err) ({ \
	type __gu_val; \
	asm volatile ("\n" \
		"1: "inst"."#bwl" %2,%1\n" \
		"2:\n" \
		" .section .fixup,\"ax\"\n" \
		" .even\n" \
		"10: move.l %3,%0\n" \
		" sub.l %1,%1\n" \
		" jra 2b\n" \
		" .previous\n" \
		"\n" \
		" .section __ex_table,\"a\"\n" \
		" .align 4\n" \
		" .long 1b,10b\n" \
		" .previous" \
		: "+d" (res), "=&" #reg (__gu_val) \
		: "m" (*(ptr)), "i" (err)); \
	(x) = (__force typeof(*(ptr)))(__force unsigned long)__gu_val; \
})

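/*
 * __get_user_asm8() reads a 64-bit value as two longword loads
 * through a postincremented address register.  The union lets the
 * halves be assembled in a u64 and then assigned back using the
 * pointer's real type; on a fault both halves are zeroed and the
 * result is set to -EFAULT.
 */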
#define __get_user_asm8(inst, res, x, ptr) \
do { \
	const void *__gu_ptr = (const void __force *)(ptr); \
	union { \
		u64 l; \
		__typeof__(*(ptr)) t; \
	} __gu_val; \
\
	asm volatile ("\n" \
		"1: "inst".l (%2)+,%1\n" \
		"2: "inst".l (%2),%R1\n" \
		"3:\n" \
		" .section .fixup,\"ax\"\n" \
		" .even\n" \
		"10: move.l %3,%0\n" \
		" sub.l %1,%1\n" \
		" sub.l %R1,%R1\n" \
		" jra 3b\n" \
		" .previous\n" \
		"\n" \
		" .section __ex_table,\"a\"\n" \
		" .align 4\n" \
		" .long 1b,10b\n" \
		" .long 2b,10b\n" \
		" .previous" \
		: "+d" (res), "=&r" (__gu_val.l), \
		  "+a" (__gu_ptr) \
		: "i" (-EFAULT) \
		: "memory"); \
	(x) = __gu_val.t; \
} while (0)

#define __get_user(x, ptr) \
({ \
	int __gu_err = 0; \
	__chk_user_ptr(ptr); \
	switch (sizeof(*(ptr))) { \
	case 1: \
		__get_user_asm(MOVES, __gu_err, x, ptr, u8, b, d, -EFAULT); \
		break; \
	case 2: \
		__get_user_asm(MOVES, __gu_err, x, ptr, u16, w, r, -EFAULT); \
		break; \
	case 4: \
		__get_user_asm(MOVES, __gu_err, x, ptr, u32, l, r, -EFAULT); \
		break; \
	case 8: \
		__get_user_asm8(MOVES, __gu_err, x, ptr); \
		break; \
	default: \
		BUILD_BUG(); \
	} \
	__gu_err; \
})
#define get_user(x, ptr) __get_user(x, ptr)
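/*
 * As with put_user() above, get_user() maps straight onto the
 * double-underscore version: no separate access_ok() check is
 * performed here.
 */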

unsigned long __generic_copy_from_user(void *to, const void __user *from, unsigned long n);
unsigned long __generic_copy_to_user(void __user *to, const void *from, unsigned long n);

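/*
 * Map a constant byte count (0, 1, 2 or 4) to the matching 68k
 * instruction size suffix.  __suffix0 expands to nothing, which the
 * .ifnc directives below use to drop the unused moves entirely.
 */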
#define __suffix0
#define __suffix1 b
#define __suffix2 w
#define __suffix4 l

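/*
 * Copy up to three items of size s1/s2/s3 with MOVES loads and plain
 * move stores.  The fixup entries fall through to one another, so a
 * fault on item 1 adds all of n1+n2+n3 (the bytes not copied) to the
 * result, a fault on item 2 adds n2+n3, and so on.
 */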
#define ____constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3, s1, s2, s3)\
	asm volatile ("\n" \
		"1: "MOVES"."#s1" (%2)+,%3\n" \
		" move."#s1" %3,(%1)+\n" \
		" .ifnc \""#s2"\",\"\"\n" \
		"2: "MOVES"."#s2" (%2)+,%3\n" \
		" move."#s2" %3,(%1)+\n" \
		" .ifnc \""#s3"\",\"\"\n" \
		"3: "MOVES"."#s3" (%2)+,%3\n" \
		" move."#s3" %3,(%1)+\n" \
		" .endif\n" \
		" .endif\n" \
		"4:\n" \
		" .section __ex_table,\"a\"\n" \
		" .align 4\n" \
		" .long 1b,10f\n" \
		" .ifnc \""#s2"\",\"\"\n" \
		" .long 2b,20f\n" \
		" .ifnc \""#s3"\",\"\"\n" \
		" .long 3b,30f\n" \
		" .endif\n" \
		" .endif\n" \
		" .previous\n" \
		"\n" \
		" .section .fixup,\"ax\"\n" \
		" .even\n" \
		"10: addq.l #"#n1",%0\n" \
		" .ifnc \""#s2"\",\"\"\n" \
		"20: addq.l #"#n2",%0\n" \
		" .ifnc \""#s3"\",\"\"\n" \
		"30: addq.l #"#n3",%0\n" \
		" .endif\n" \
		" .endif\n" \
		" jra 4b\n" \
		" .previous\n" \
		: "+d" (res), "+&a" (to), "+a" (from), "=&d" (tmp) \
		: : "memory")

#define ___constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3, s1, s2, s3)\
	____constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3, s1, s2, s3)
#define __constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3) \
	___constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3, \
					__suffix##n1, __suffix##n2, __suffix##n3)

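/*
 * Inline fixed-size copies of up to 12 bytes using at most three
 * moves; any other size falls back to the out-of-line generic
 * routine.  Returns the number of bytes left uncopied, as the
 * raw_copy_from_user() contract requires.
 */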
static __always_inline unsigned long
__constant_copy_from_user(void *to, const void __user *from, unsigned long n)
{
	unsigned long res = 0, tmp;

	switch (n) {
	case 1:
		__constant_copy_from_user_asm(res, to, from, tmp, 1, 0, 0);
		break;
	case 2:
		__constant_copy_from_user_asm(res, to, from, tmp, 2, 0, 0);
		break;
	case 3:
		__constant_copy_from_user_asm(res, to, from, tmp, 2, 1, 0);
		break;
	case 4:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 0, 0);
		break;
	case 5:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 1, 0);
		break;
	case 6:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 2, 0);
		break;
	case 7:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 2, 1);
		break;
	case 8:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 4, 0);
		break;
	case 9:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 4, 1);
		break;
	case 10:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 4, 2);
		break;
	case 12:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 4, 4);
		break;
	default:
		/* we limit the inlined version to 3 moves */
		return __generic_copy_from_user(to, from, n);
	}

	return res;
}

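/*
 * Mirror image of the copy-from-user helper above: plain move loads
 * from kernel memory, MOVES stores to user space.  On any fault the
 * fixup conservatively reports the full size "n" as uncopied.
 */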
#define __constant_copy_to_user_asm(res, to, from, tmp, n, s1, s2, s3) \
	asm volatile ("\n" \
		" move."#s1" (%2)+,%3\n" \
		"11: "MOVES"."#s1" %3,(%1)+\n" \
		"12: move."#s2" (%2)+,%3\n" \
		"21: "MOVES"."#s2" %3,(%1)+\n" \
		"22:\n" \
		" .ifnc \""#s3"\",\"\"\n" \
		" move."#s3" (%2)+,%3\n" \
		"31: "MOVES"."#s3" %3,(%1)+\n" \
		"32:\n" \
		" .endif\n" \
		"4:\n" \
		"\n" \
		" .section __ex_table,\"a\"\n" \
		" .align 4\n" \
		" .long 11b,5f\n" \
		" .long 12b,5f\n" \
		" .long 21b,5f\n" \
		" .long 22b,5f\n" \
		" .ifnc \""#s3"\",\"\"\n" \
		" .long 31b,5f\n" \
		" .long 32b,5f\n" \
		" .endif\n" \
		" .previous\n" \
		"\n" \
		" .section .fixup,\"ax\"\n" \
		" .even\n" \
		"5: moveq.l #"#n",%0\n" \
		" jra 4b\n" \
		" .previous\n" \
		: "+d" (res), "+a" (to), "+a" (from), "=&d" (tmp) \
		: : "memory")

static __always_inline unsigned long
__constant_copy_to_user(void __user *to, const void *from, unsigned long n)
{
	unsigned long res = 0, tmp;

	switch (n) {
	case 1:
		__put_user_asm(MOVES, res, *(u8 *)from, (u8 __user *)to,
				b, d, 1);
		break;
	case 2:
		__put_user_asm(MOVES, res, *(u16 *)from, (u16 __user *)to,
				w, r, 2);
		break;
	case 3:
		__constant_copy_to_user_asm(res, to, from, tmp, 3, w, b,);
		break;
	case 4:
		__put_user_asm(MOVES, res, *(u32 *)from, (u32 __user *)to,
				l, r, 4);
		break;
	case 5:
		__constant_copy_to_user_asm(res, to, from, tmp, 5, l, b,);
		break;
	case 6:
		__constant_copy_to_user_asm(res, to, from, tmp, 6, l, w,);
		break;
	case 7:
		__constant_copy_to_user_asm(res, to, from, tmp, 7, l, w, b);
		break;
	case 8:
		__constant_copy_to_user_asm(res, to, from, tmp, 8, l, l,);
		break;
	case 9:
		__constant_copy_to_user_asm(res, to, from, tmp, 9, l, l, b);
		break;
	case 10:
		__constant_copy_to_user_asm(res, to, from, tmp, 10, l, l, w);
		break;
	case 12:
		__constant_copy_to_user_asm(res, to, from, tmp, 12, l, l, l);
		break;
	default:
		/* limit the inlined version to 3 moves */
		return __generic_copy_to_user(to, from, n);
	}

	return res;
}

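/*
 * raw_copy_{from,to}_user() use the inlined constant-size variants
 * when the length is known at compile time and fall back to the
 * out-of-line versions otherwise.  Both return the number of bytes
 * not copied, so 0 means success.
 */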
static inline unsigned long
raw_copy_from_user(void *to, const void __user *from, unsigned long n)
{
	if (__builtin_constant_p(n))
		return __constant_copy_from_user(to, from, n);
	return __generic_copy_from_user(to, from, n);
}

static inline unsigned long
raw_copy_to_user(void __user *to, const void *from, unsigned long n)
{
	if (__builtin_constant_p(n))
		return __constant_copy_to_user(to, from, n);
	return __generic_copy_to_user(to, from, n);
}
#define INLINE_COPY_FROM_USER
#define INLINE_COPY_TO_USER

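/*
 * The *_kernel_nofault() helpers reuse the user-access asm, but with
 * a plain "move" rather than MOVES: source and destination are both
 * kernel addresses, so no address-space override is wanted.
 */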
#define __get_kernel_nofault(dst, src, type, err_label) \
do { \
	type *__gk_dst = (type *)(dst); \
	type *__gk_src = (type *)(src); \
	int __gk_err = 0; \
\
	switch (sizeof(type)) { \
	case 1: \
		__get_user_asm("move", __gk_err, *__gk_dst, __gk_src, \
				u8, b, d, -EFAULT); \
		break; \
	case 2: \
		__get_user_asm("move", __gk_err, *__gk_dst, __gk_src, \
				u16, w, r, -EFAULT); \
		break; \
	case 4: \
		__get_user_asm("move", __gk_err, *__gk_dst, __gk_src, \
				u32, l, r, -EFAULT); \
		break; \
	case 8: \
		__get_user_asm8("move", __gk_err, *__gk_dst, __gk_src); \
		break; \
	default: \
		BUILD_BUG(); \
	} \
	if (unlikely(__gk_err)) \
		goto err_label; \
} while (0)

#define __put_kernel_nofault(dst, src, type, err_label) \
do { \
	type __pk_src = *(type *)(src); \
	type *__pk_dst = (type *)(dst); \
	int __pk_err = 0; \
\
	switch (sizeof(type)) { \
	case 1: \
		__put_user_asm("move", __pk_err, __pk_src, __pk_dst, \
				b, d, -EFAULT); \
		break; \
	case 2: \
		__put_user_asm("move", __pk_err, __pk_src, __pk_dst, \
				w, r, -EFAULT); \
		break; \
	case 4: \
		__put_user_asm("move", __pk_err, __pk_src, __pk_dst, \
				l, r, -EFAULT); \
		break; \
	case 8: \
		__put_user_asm8("move", __pk_err, __pk_src, __pk_dst); \
		break; \
	default: \
		BUILD_BUG(); \
	} \
	if (unlikely(__pk_err)) \
		goto err_label; \
} while (0)

extern long strncpy_from_user(char *dst, const char __user *src, long count);
extern __must_check long strnlen_user(const char __user *str, long n);

unsigned long __clear_user(void __user *to, unsigned long n);

#define clear_user __clear_user

#else /* !CONFIG_MMU */
#include <asm-generic/uaccess.h>
#endif /* CONFIG_MMU */

#endif /* __M68K_UACCESS_H */