#ifndef _ASM_ARM_FUTEX_H
#define _ASM_ARM_FUTEX_H

#ifdef __KERNEL__

#ifdef CONFIG_SMP

#include <asm-generic/futex.h>

#else /* !SMP, we can work around lack of atomic ops by disabling preemption */

#include <linux/futex.h>
#include <linux/preempt.h>
#include <linux/uaccess.h>
#include <asm/errno.h>
#include <asm/domain.h>

#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)	\
	__asm__ __volatile__(					\
	"1:	" T(ldr) "	%1, [%2]\n"			\
	"	" insn "\n"					\
	"2:	" T(str) "	%0, [%2]\n"			\
	"	mov	%0, #0\n"				\
	"3:\n"							\
	"	.pushsection __ex_table,\"a\"\n"		\
	"	.align	3\n"					\
	"	.long	1b, 4f, 2b, 4f\n"			\
	"	.popsection\n"					\
	"	.pushsection .fixup,\"ax\"\n"			\
	"4:	mov	%0, %4\n"				\
	"	b	3b\n"					\
	"	.popsection"					\
	: "=&r" (ret), "=&r" (oldval)				\
	: "r" (uaddr), "r" (oparg), "Ir" (-EFAULT)		\
	: "cc", "memory")

static inline int
futex_atomic_op_inuser (int encoded_op, u32 __user *uaddr)
{
	int op = (encoded_op >> 28) & 7;
	int cmp = (encoded_op >> 24) & 15;
	int oparg = (encoded_op << 8) >> 20;
	int cmparg = (encoded_op << 20) >> 20;
	int oldval = 0, ret;

	if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
		oparg = 1 << oparg;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	pagefault_disable();	/* implies preempt_disable() */

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("mov	%0, %3", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("add	%0, %1, %3", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("orr	%0, %1, %3", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op("and	%0, %1, %3", ret, oldval, uaddr, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("eor	%0, %1, %3", ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	pagefault_enable();	/* subsumes preempt_enable() */

	if (!ret) {
		switch (cmp) {
		case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
		case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
		case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
		case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
		case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
		case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
		default: ret = -ENOSYS;
		}
	}
	return ret;
}

static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0;
	u32 val;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	__asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
	"1:	" T(ldr) "	%1, [%4]\n"
	"	teq	%1, %2\n"
	"	it	eq	@ explicit IT needed for the 2b label\n"
	"2:	" T(streq) "	%3, [%4]\n"
	"3:\n"
	"	.pushsection __ex_table,\"a\"\n"
	"	.align	3\n"
	"	.long	1b, 4f, 2b, 4f\n"
	"	.popsection\n"
	"	.pushsection .fixup,\"ax\"\n"
	"4:	mov	%0, %5\n"
	"	b	3b\n"
	"	.popsection"
	: "+r" (ret), "=&r" (val)
	: "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
	: "cc", "memory");

	*uval = val;
	return ret;
}

#endif /* !SMP */

#endif /* __KERNEL__ */
#endif /* _ASM_ARM_FUTEX_H */