#ifndef _ASM_ALPHA_FUTEX_H
#define _ASM_ALPHA_FUTEX_H

#ifdef __KERNEL__

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <asm/errno.h>
#include <asm/barrier.h>

/*
 * One load-locked/store-conditional (ldl_l/stl_c) pass over the user futex
 * word: %0 receives the old value, 'insn' computes the new value into %1,
 * and a failed stl_c branches to the out-of-line retry at label 4.  The
 * EXC() entries make a fault at label 1 or 2 resume at label 3 with
 * -EFAULT in %1 (ret); on the success path "mov $31,%1" clears ret.
 */
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)	\
	__asm__ __volatile__(					\
		__ASM_SMP_MB					\
	"1:	ldl_l %0,0(%2)\n"				\
		insn						\
	"2:	stl_c %1,0(%2)\n"				\
	"	beq %1,4f\n"					\
	"	mov $31,%1\n"					\
	"3:	.subsection 2\n"				\
	"4:	br 1b\n"					\
	"	.previous\n"					\
	EXC(1b,3b,$31,%1)					\
	EXC(2b,3b,$31,%1)					\
	:	"=&r" (oldval), "=&r"(ret)			\
	:	"r" (uaddr), "r"(oparg)				\
	:	"memory")

static inline int arch_futex_atomic_op_inuser(int op, int oparg, int *oval,
		u32 __user *uaddr)
{
	int oldval = 0, ret;

	pagefault_disable();

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("mov %3,%1\n", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("addl %0,%3,%1\n", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("or %0,%3,%1\n", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op("andnot %0,%3,%1\n", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("xor %0,%3,%1\n", ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	pagefault_enable();

	if (!ret)
		*oval = oldval;

	return ret;
}

/*
 * Atomically compare the user word at 'uaddr' with 'oldval' and, if they
 * match, store 'newval'.  The value actually observed is returned through
 * 'uval'; the return value is 0 on success or -EFAULT if the user access
 * faulted (the EXC() fixups resume at label 3 with -EFAULT in %0).
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0, cmp;
	u32 prev;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	__asm__ __volatile__ (
		__ASM_SMP_MB
	"1:	ldl_l %1,0(%3)\n"
	"	cmpeq %1,%4,%2\n"
	"	beq %2,3f\n"
	"	mov %5,%2\n"
	"2:	stl_c %2,0(%3)\n"
	"	beq %2,4f\n"
	"3:	.subsection 2\n"
	"4:	br 1b\n"
	"	.previous\n"
	EXC(1b,3b,$31,%0)
	EXC(2b,3b,$31,%0)
	:	"+r"(ret), "=&r"(prev), "=&r"(cmp)
	:	"r"(uaddr), "r"((long)(int)oldval), "r"(newval)
	:	"memory");

	*uval = prev;
	return ret;
}

#endif /* __KERNEL__ */
#endif /* _ASM_ALPHA_FUTEX_H */
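
/*
 * Illustrative sketch only, not part of this header: one way a caller could
 * use futex_atomic_cmpxchg_inatomic() above.  The helper name
 * futex_try_claim() and the -EAGAIN convention on a value mismatch are
 * assumptions made for this example; only the cmpxchg signature and its
 * 0 / -EFAULT return semantics come from the code above.
 */
#if 0	/* example, not compiled */
static inline int futex_try_claim(u32 __user *uaddr, u32 expected, u32 desired)
{
	u32 curval;
	int err;

	/*
	 * If *uaddr currently holds 'expected', atomically replace it with
	 * 'desired'; 'curval' receives whatever value was actually observed.
	 */
	err = futex_atomic_cmpxchg_inatomic(&curval, uaddr, expected, desired);
	if (err)
		return err;	/* -EFAULT: fault the page in and retry */

	return curval == expected ? 0 : -EAGAIN;
}
#endif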