/*
 * User space memory access functions
 *
 * Copyright (C) 1999, 2002  Niibe Yutaka
 * Copyright (C) 2003 - 2008  Paul Mundt
 *
 *  Based on:
 *     MIPS implementation version 1.15 by
 *        Copyright (C) 1996, 1997, 1998 by Ralf Baechle
 *     and i386 version.
 */
#ifndef __ASM_SH_UACCESS_32_H
#define __ASM_SH_UACCESS_32_H

/*
 * __get_user_size() dispatches on the access width and expands to the
 * matching mov.b/mov.w/mov.l user-space load.  An unsupported size
 * expands to a call to __get_user_unknown(), which is declared but never
 * defined, so the mistake is caught at link time.
 */
#define __get_user_size(x,ptr,size,retval)			\
do {								\
	retval = 0;						\
	switch (size) {						\
	case 1:							\
		__get_user_asm(x, ptr, retval, "b");		\
		break;						\
	case 2:							\
		__get_user_asm(x, ptr, retval, "w");		\
		break;						\
	case 4:							\
		__get_user_asm(x, ptr, retval, "l");		\
		break;						\
	default:						\
		__get_user_unknown();				\
		break;						\
	}							\
} while (0)

#ifdef CONFIG_MMU
/*
 * With an MMU the load at label 1: may fault.  The __ex_table entry
 * (1b, 3b) sends a faulting access to the fixup code at 3:, which zeroes
 * the destination, loads -EFAULT into the error register and jumps back
 * to 2:, the instruction following the access.
 */
#define __get_user_asm(x, addr, err, insn)			\
({								\
	__asm__ __volatile__(					\
		"1:\n\t"					\
		"mov." insn "	%2, %1\n\t"			\
		"2:\n"						\
		".section	.fixup,\"ax\"\n"		\
		"3:\n\t"					\
		"mov	#0, %1\n\t"				\
		"mov.l	4f, %0\n\t"				\
		"jmp	@%0\n\t"				\
		" mov	%3, %0\n\t"				\
		".balign	4\n"				\
		"4:	.long	2b\n\t"				\
		".previous\n"					\
		".section	__ex_table,\"a\"\n\t"		\
		".long	1b, 3b\n\t"				\
		".previous"					\
		: "=&r" (err), "=&r" (x)			\
		: "m" (__m(addr)), "i" (-EFAULT), "0" (err)); })
#else
/* No MMU: the access is a plain load with no fault fixup. */
#define __get_user_asm(x, addr, err, insn)		\
do {							\
	__asm__ __volatile__ (				\
		"mov." insn "	%1, %0\n\t"		\
		: "=&r" (x)				\
		: "m" (__m(addr))			\
	);						\
} while (0)
#endif /* CONFIG_MMU */

extern void __get_user_unknown(void);

/*
 * __put_user_size() mirrors __get_user_size() for stores, and in addition
 * handles 8-byte values via __put_user_u64().
 */
#define __put_user_size(x,ptr,size,retval)		\
do {							\
	retval = 0;					\
	switch (size) {					\
	case 1:						\
		__put_user_asm(x, ptr, retval, "b");	\
		break;					\
	case 2:						\
		__put_user_asm(x, ptr, retval, "w");	\
		break;					\
	case 4:						\
		__put_user_asm(x, ptr, retval, "l");	\
		break;					\
	case 8:						\
		__put_user_u64(x, ptr, retval);		\
		break;					\
	default:					\
		__put_user_unknown();			\
	}						\
} while (0)

#ifdef CONFIG_MMU
/*
 * Store to user space using the same fixup/__ex_table scheme as
 * __get_user_asm(); on a fault only the error code is set, since there
 * is no destination register to clear.
 */
#define __put_user_asm(x, addr, err, insn)			\
do {								\
	__asm__ __volatile__ (					\
		"1:\n\t"					\
		"mov." insn "	%1, %2\n\t"			\
		"2:\n"						\
		".section	.fixup,\"ax\"\n"		\
		"3:\n\t"					\
		"mov.l	4f, %0\n\t"				\
		"jmp	@%0\n\t"				\
		" mov	%3, %0\n\t"				\
		".balign	4\n"				\
		"4:	.long	2b\n\t"				\
		".previous\n"					\
		".section	__ex_table,\"a\"\n\t"		\
		".long	1b, 3b\n\t"				\
		".previous"					\
		: "=&r" (err)					\
		: "r" (x), "m" (__m(addr)), "i" (-EFAULT),	\
		  "0" (err)					\
		: "memory"					\
	);							\
} while (0)
#else
/* No MMU: the store is emitted directly, with no fault fixup. */
#define __put_user_asm(x, addr, err, insn)		\
do {							\
	__asm__ __volatile__ (				\
		"mov." insn "	%0, %1\n\t"		\
		: /* no outputs */			\
		: "r" (x), "m" (__m(addr))		\
		: "memory"				\
	);						\
} while (0)
#endif /* CONFIG_MMU */

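/*
 * 64-bit user stores: the value occupies a register pair.  In SH inline
 * assembly %R1/%S1 select the least/most significant word of operand 1,
 * and %T2 refers to the word following memory operand 2 (addr + 4), so
 * the two mov.l stores below are ordered to match the configured
 * endianness.
 */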
#if defined(CONFIG_CPU_LITTLE_ENDIAN)
#define __put_user_u64(val,addr,retval)				\
({								\
	__asm__ __volatile__(					\
		"1:\n\t"					\
		"mov.l	%R1,%2\n\t"				\
		"mov.l	%S1,%T2\n\t"				\
		"2:\n"						\
		".section	.fixup,\"ax\"\n"		\
		"3:\n\t"					\
		"mov.l	4f,%0\n\t"				\
		"jmp	@%0\n\t"				\
		" mov	%3,%0\n\t"				\
		".balign	4\n"				\
		"4:	.long	2b\n\t"				\
		".previous\n"					\
		".section	__ex_table,\"a\"\n\t"		\
		".long	1b, 3b\n\t"				\
		".previous"					\
		: "=r" (retval)					\
		: "r" (val), "m" (__m(addr)), "i" (-EFAULT), "0" (retval) \
		: "memory"); })
#else
#define __put_user_u64(val,addr,retval)				\
({								\
	__asm__ __volatile__(					\
		"1:\n\t"					\
		"mov.l	%S1,%2\n\t"				\
		"mov.l	%R1,%T2\n\t"				\
		"2:\n"						\
		".section	.fixup,\"ax\"\n"		\
		"3:\n\t"					\
		"mov.l	4f,%0\n\t"				\
		"jmp	@%0\n\t"				\
		" mov	%3,%0\n\t"				\
		".balign	4\n"				\
		"4:	.long	2b\n\t"				\
		".previous\n"					\
		".section	__ex_table,\"a\"\n\t"		\
		".long	1b, 3b\n\t"				\
		".previous"					\
		: "=r" (retval)					\
		: "r" (val), "m" (__m(addr)), "i" (-EFAULT), "0" (retval) \
		: "memory"); })
#endif

extern void __put_user_unknown(void);

#endif /* __ASM_SH_UACCESS_32_H */
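/*
 * Illustration only, not part of the original header: the __get_user()
 * and __put_user() wrappers in the SH <asm/uaccess.h> are the intended
 * callers of the size-dispatch macros above.  A minimal sketch of such a
 * caller, using hypothetical temporary names, looks roughly like:
 *
 *	long __gu_err;
 *	unsigned long __gu_val;
 *
 *	__get_user_size(__gu_val, ptr, sizeof(*(ptr)), __gu_err);
 *	x = (__typeof__(*(ptr)))__gu_val;
 *
 * where __gu_err ends up 0 on success, or -EFAULT (with __gu_val zeroed)
 * if the access faulted.
 */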