/*
 * User space memory access functions
 *
 * Copyright (C) 1999, 2002 Niibe Yutaka
 * Copyright (C) 2003 - 2008 Paul Mundt
 *
 * Based on:
 *	MIPS implementation version 1.15 by
 *	Copyright (C) 1996, 1997, 1998 by Ralf Baechle
 *	and i386 version.
 */
#ifndef __ASM_SH_UACCESS_32_H
#define __ASM_SH_UACCESS_32_H

/*
 * Dispatch a user-space load of 'size' bytes to the matching
 * mov.{b,w,l} accessor.  'retval' is cleared up front and (on MMU
 * builds) set to -EFAULT by the accessor's fixup if the load faults.
 * Unsupported sizes become a link-time error via __get_user_unknown().
 */
#define __get_user_size(x,ptr,size,retval)		\
do {							\
	retval = 0;					\
	switch (size) {					\
	case 1:						\
		__get_user_asm(x, ptr, retval, "b");	\
		break;					\
	case 2:						\
		__get_user_asm(x, ptr, retval, "w");	\
		break;					\
	case 4:						\
		__get_user_asm(x, ptr, retval, "l");	\
		break;					\
	default:					\
		__get_user_unknown();			\
		break;					\
	}						\
} while (0)

#ifdef CONFIG_MMU
/*
 * Single user-space load with fault handling.
 *
 * The load at label 1 may fault; the __ex_table entry (".long 1b, 3b")
 * routes a fault on it to the fixup at label 3, which zeroes the
 * destination register, loads err = -EFAULT in the jmp delay slot, and
 * resumes at label 2 (the instruction following the load).
 */
#define __get_user_asm(x, addr, err, insn) \
({ \
__asm__ __volatile__( \
	"1:\n\t" \
	"mov." insn "	%2, %1\n\t" \
	"2:\n" \
	".section	.fixup,\"ax\"\n" \
	"3:\n\t" \
	"mov	#0, %1\n\t" \
	"mov.l	4f, %0\n\t" \
	"jmp	@%0\n\t" \
	" mov	%3, %0\n\t" \
	".balign	4\n" \
	"4:	.long	2b\n\t" \
	".previous\n" \
	".section	__ex_table,\"a\"\n\t" \
	".long	1b, 3b\n\t" \
	".previous" \
	:"=&r" (err), "=&r" (x) \
	:"m" (__m(addr)), "i" (-EFAULT), "0" (err)); })
#else
/* No MMU: the access cannot fault, so load directly; err is untouched
 * (it was already cleared by __get_user_size()). */
#define __get_user_asm(x, addr, err, insn)		\
do {							\
	__asm__ __volatile__ (				\
		"mov." insn "	%1, %0\n\t"		\
		: "=&r" (x)				\
		: "m" (__m(addr))			\
	);						\
} while (0)
#endif /* CONFIG_MMU */

/* Never defined anywhere; referencing it turns an unsupported access
 * size into a link-time failure. */
extern void __get_user_unknown(void);

/*
 * Dispatch a user-space store of 'size' bytes to the matching
 * accessor; 64-bit stores go through the two-word __put_user_u64()
 * helper.  'retval' is cleared up front and set to -EFAULT on a
 * faulting store (MMU builds).
 */
#define __put_user_size(x,ptr,size,retval)		\
do {							\
	retval = 0;					\
	switch (size) {					\
	case 1:						\
		__put_user_asm(x, ptr, retval, "b");	\
		break;					\
	case 2:						\
		__put_user_asm(x, ptr, retval, "w");	\
		break;					\
	case 4:						\
		__put_user_asm((u32)x, ptr,		\
				retval, "l");		\
		break;					\
	case 8:						\
		__put_user_u64(x, ptr, retval);		\
		break;					\
	default:					\
		__put_user_unknown();			\
	}						\
} while (0)

#ifdef CONFIG_MMU
/*
 * Single user-space store with fault handling; mirror image of
 * __get_user_asm().  A fault on the store at label 1 is routed to the
 * fixup at label 3, which sets err = -EFAULT (in the jmp delay slot)
 * and resumes at label 2.  "memory" is clobbered because the store
 * goes through user memory the compiler cannot see.
 */
#define __put_user_asm(x, addr, err, insn)		\
do {							\
	__asm__ __volatile__ (				\
		"1:\n\t"				\
		"mov." insn "	%1, %2\n\t"		\
		"2:\n"					\
		".section	.fixup,\"ax\"\n"	\
		"3:\n\t"				\
		"mov.l	4f, %0\n\t"			\
		"jmp	@%0\n\t"			\
		" mov	%3, %0\n\t"			\
		".balign	4\n"			\
		"4:	.long	2b\n\t"			\
		".previous\n"				\
		".section	__ex_table,\"a\"\n\t"	\
		".long	1b, 3b\n\t"			\
		".previous"				\
		: "=&r" (err)				\
		: "r" (x), "m" (__m(addr)), "i" (-EFAULT), \
		  "0" (err)				\
		: "memory"				\
	);						\
} while (0)
#else
/* No MMU: the store cannot fault; err is left as cleared by the
 * caller. */
#define __put_user_asm(x, addr, err, insn)		\
do {							\
	__asm__ __volatile__ (				\
		"mov." insn "	%0, %1\n\t"		\
		: /* no outputs */			\
		: "r" (x), "m" (__m(addr))		\
		: "memory"				\
	);						\
} while (0)
#endif /* CONFIG_MMU */

#if defined(CONFIG_CPU_LITTLE_ENDIAN)
/*
 * Store a 64-bit value as two mov.l stores, low word first on
 * little-endian.  NOTE(review): %R1/%S1 are assumed to be the GCC/SH
 * operand modifiers selecting the less/more significant 32-bit half of
 * the 64-bit 'val', and %T2 the second memory word of 'addr' --
 * confirm against the SH backend documentation.  A fault is routed via
 * the __ex_table entry to the fixup at label 3, which sets
 * retval = -EFAULT and resumes at label 2.
 */
#define __put_user_u64(val,addr,retval)		\
({							\
	__asm__ __volatile__(				\
		"1:\n\t"				\
		"mov.l	%R1,%2\n\t"			\
		"mov.l	%S1,%T2\n\t"			\
		"2:\n"					\
		".section	.fixup,\"ax\"\n"	\
		"3:\n\t"				\
		"mov.l	4f,%0\n\t"			\
		"jmp	@%0\n\t"			\
		" mov	%3,%0\n\t"			\
		".balign	4\n"			\
		"4:	.long	2b\n\t"			\
		".previous\n"				\
		".section	__ex_table,\"a\"\n\t"	\
		".long	1b, 3b\n\t"			\
		".previous"				\
		: "=r" (retval)				\
		: "r" (val), "m" (__m(addr)), "i" (-EFAULT), "0" (retval) \
		: "memory"); })
#else
/* Big-endian variant: identical except the high half (%S1) is stored
 * to the lower address first. */
#define __put_user_u64(val,addr,retval)		\
({							\
	__asm__ __volatile__(				\
		"1:\n\t"				\
		"mov.l	%S1,%2\n\t"			\
		"mov.l	%R1,%T2\n\t"			\
		"2:\n"					\
		".section	.fixup,\"ax\"\n"	\
		"3:\n\t"				\
		"mov.l	4f,%0\n\t"			\
		"jmp	@%0\n\t"			\
		" mov	%3,%0\n\t"			\
		".balign	4\n"			\
		"4:	.long	2b\n\t"			\
		".previous\n"				\
		".section	__ex_table,\"a\"\n\t"	\
		".long	1b, 3b\n\t"			\
		".previous"				\
		: "=r" (retval)				\
		: "r" (val), "m" (__m(addr)), "i" (-EFAULT), "0" (retval) \
		: "memory"); })
#endif

/* Never defined; link-time failure for unsupported store sizes. */
extern void __put_user_unknown(void);

/*
 * Copy a NUL-terminated string from user space (__src) to kernel space
 * (__dest), examining at most __count bytes.
 *
 * Loop (label 9): load a byte from __src with post-increment, and if
 * it is NUL take the bt/s delayed branch to label 2 -- the store at
 * label 1 sits in the branch delay slot, so the terminating NUL is
 * still written to __dest but the counter is not decremented for it.
 * Otherwise store the byte, decrement the counter with dt, and loop
 * while it is non-zero, bumping __dest in the bf/s delay slot.
 *
 * Result: res = __count - remaining counter, i.e. the number of bytes
 * copied excluding the NUL.  A faulting user load (label 9) is routed
 * through the __ex_table entry to the fixup at label 4, which returns
 * -EFAULT.
 */
static inline int
__strncpy_from_user(unsigned long __dest, unsigned long __user __src, int __count)
{
	__kernel_size_t res;
	unsigned long __dummy, _d, _s, _c;

	__asm__ __volatile__(
		"9:\n"
		"mov.b	@%2+, %1\n\t"
		"cmp/eq	#0, %1\n\t"
		"bt/s	2f\n"
		"1:\n"
		"mov.b	%1, @%3\n\t"
		"dt	%4\n\t"
		"bf/s	9b\n\t"
		" add	#1, %3\n\t"
		"2:\n\t"
		"sub	%4, %0\n"
		"3:\n"
		".section .fixup,\"ax\"\n"
		"4:\n\t"
		"mov.l	5f, %1\n\t"
		"jmp	@%1\n\t"
		" mov	%9, %0\n\t"
		".balign 4\n"
		"5:	.long 3b\n"
		".previous\n"
		".section __ex_table,\"a\"\n"
		"	.balign 4\n"
		"	.long 9b,4b\n"
		".previous"
		: "=r" (res), "=&z" (__dummy), "=r" (_s), "=r" (_d), "=r"(_c)
		: "0" (__count), "2" (__src), "3" (__dest), "4" (__count),
		  "i" (-EFAULT)
		: "memory", "t");

	return res;
}

/*
 * Return the size of a string (including the ending 0 even when we have
 * exceeded the maximum string length).
 *
 * res (tied to register z) starts at 0 and indexes @(%0,%3) = __s[res].
 * The bt/s delay slot increments res unconditionally, so on a normal
 * NUL hit the result includes the terminator, and when the scan
 * reaches __n the result is __n + 1 (length "exceeded").  A faulting
 * load is routed via the __ex_table entry to the fixup at label 3,
 * which returns 0.
 */
static inline long __strnlen_user(const char __user *__s, long __n)
{
	unsigned long res;
	unsigned long __dummy;

	__asm__ __volatile__(
		"1:\t"
		"mov.b	@(%0,%3), %1\n\t"
		"cmp/eq	%4, %0\n\t"
		"bt/s	2f\n\t"
		" add	#1, %0\n\t"
		"tst	%1, %1\n\t"
		"bf	1b\n\t"
		"2:\n"
		".section .fixup,\"ax\"\n"
		"3:\n\t"
		"mov.l	4f, %1\n\t"
		"jmp	@%1\n\t"
		" mov	#0, %0\n"
		".balign 4\n"
		"4:	.long 2b\n"
		".previous\n"
		".section __ex_table,\"a\"\n"
		"	.balign 4\n"
		"	.long 1b,3b\n"
		".previous"
		: "=z" (res), "=&r" (__dummy)
		: "0" (0), "r" (__s), "r" (__n)
		: "t");
	return res;
}

#endif /* __ASM_SH_UACCESS_32_H */