xref: /openbmc/linux/arch/alpha/include/asm/futex.h (revision d236d361)
1 #ifndef _ASM_ALPHA_FUTEX_H
2 #define _ASM_ALPHA_FUTEX_H
3 
4 #ifdef __KERNEL__
5 
6 #include <linux/futex.h>
7 #include <linux/uaccess.h>
8 #include <asm/errno.h>
9 #include <asm/barrier.h>
10 
/*
 * Atomically perform "oldval = *uaddr; *uaddr = insn(oldval, oparg)" on a
 * 32-bit user word using an Alpha load-locked/store-conditional loop.
 *
 * Register mapping: %0 = oldval (output), %1 = ret (output; also serves as
 * the scratch register holding the new value for the conditional store),
 * %2 = uaddr, %3 = oparg.  `insn` is an asm fragment that computes the new
 * value into %1 from %0 and %3.
 *
 * On success ret is forced to 0 (register $31 always reads as zero on
 * Alpha).  On a fault at either user access (1b or 2b), the EXC
 * exception-table fixup resumes at 3b with %1 (ret) set to -EFAULT and the
 * $31 "value" slot discarded.  The retry branch lives in .subsection 2 so
 * the expected no-contention path stays straight-line.
 */
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)	\
	__asm__ __volatile__(					\
		__ASM_SMP_MB	/* order vs. prior accesses on SMP */	\
	"1:	ldl_l	%0,0(%2)\n"	/* load-locked old value */	\
		insn		/* new value -> %1 */		\
	"2:	stl_c	%1,0(%2)\n"	/* conditional store */	\
	"	beq	%1,4f\n"	/* lost reservation: retry */	\
	"	mov	$31,%1\n"	/* success: ret = 0 */	\
	"3:	.subsection 2\n"	/* out-of-line retry path */	\
	"4:	br	1b\n"					\
	"	.previous\n"					\
	EXC(1b,3b,%1,$31)	/* fault at 1b -> 3b, %1 = -EFAULT */	\
	EXC(2b,3b,%1,$31)	/* fault at 2b -> 3b, %1 = -EFAULT */	\
	:	"=&r" (oldval), "=&r"(ret)			\
	:	"r" (uaddr), "r"(oparg)				\
	:	"memory")
27 
/*
 * futex_atomic_op_inuser - run one encoded futex atomic op on a user word.
 * @encoded_op: packed operation word: op in bits 28-31, cmp in bits 24-27,
 *              oparg in bits 12-23, cmparg in bits 0-11.
 * @uaddr:      userspace address of the 32-bit futex word.
 *
 * The paired left-then-right shifts are the kernel's idiom for
 * sign-extending the two 12-bit argument fields (relies on arithmetic
 * right shift of signed ints).  If FUTEX_OP_OPARG_SHIFT is set, oparg is
 * reinterpreted as a shift count (oparg = 1 << oparg).
 *
 * The read-modify-write itself runs between pagefault_disable() and
 * pagefault_enable(), so a fault inside the asm cannot sleep; the EXC
 * fixups in __futex_atomic_op turn such a fault into ret = -EFAULT.
 *
 * Returns: the boolean result of "oldval <cmp> cmparg" on success,
 * -EFAULT if the user address is inaccessible or faults, or -ENOSYS for
 * an unrecognized op or cmp code.
 */
static inline int futex_atomic_op_inuser (int encoded_op, u32 __user *uaddr)
{
	int op = (encoded_op >> 28) & 7;
	int cmp = (encoded_op >> 24) & 15;
	int oparg = (encoded_op << 8) >> 20;	/* sign-extend bits 12-23 */
	int cmparg = (encoded_op << 20) >> 20;	/* sign-extend bits 0-11 */
	int oldval = 0, ret;
	if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
		oparg = 1 << oparg;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	pagefault_disable();

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("mov %3,%1\n", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("addl %0,%3,%1\n", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("or %0,%3,%1\n", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op("andnot %0,%3,%1\n", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("xor %0,%3,%1\n", ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	pagefault_enable();

	/* On success, fold in the comparison against the pre-op value. */
	if (!ret) {
		switch (cmp) {
		case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
		case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
		case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
		case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
		case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
		case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
		default: ret = -ENOSYS;
		}
	}
	return ret;
}
78 
/*
 * futex_atomic_cmpxchg_inatomic - compare-and-exchange on a user futex word.
 * @uval:   out-parameter; receives the value actually read from @uaddr.
 * @uaddr:  userspace address of the 32-bit futex word.
 * @oldval: expected current value.
 * @newval: value to store iff *uaddr == @oldval.
 *
 * LL/SC loop: ldl_l loads the current value (sign-extended 32-bit load),
 * cmpeq compares it against @oldval — on mismatch we fall out via 3f
 * without storing — otherwise stl_c attempts the store and we retry
 * (out of line, via .subsection 2) if the reservation was lost.
 *
 * @oldval is passed as (long)(int) so the comparison operand is
 * sign-extended the same way ldl_l sign-extends the loaded word.
 * On a fault at either user access the EXC fixup resumes at 3b with
 * ret set to -EFAULT.
 *
 * Returns: 0 on success (including a value mismatch), -EFAULT if the
 * address is inaccessible or faults.  Callers compare *uval with @oldval
 * to detect whether the exchange happened.
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0, cmp;
	u32 prev;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	__asm__ __volatile__ (
		__ASM_SMP_MB		/* order vs. prior accesses on SMP */
	"1:	ldl_l	%1,0(%3)\n"	/* prev = load-locked *uaddr */
	"	cmpeq	%1,%4,%2\n"	/* cmp = (prev == oldval) */
	"	beq	%2,3f\n"	/* mismatch: done, no store */
	"	mov	%5,%2\n"	/* reuse cmp as store scratch = newval */
	"2:	stl_c	%2,0(%3)\n"	/* conditional store */
	"	beq	%2,4f\n"	/* lost reservation: retry */
	"3:	.subsection 2\n"	/* out-of-line retry path */
	"4:	br	1b\n"
	"	.previous\n"
	EXC(1b,3b,%0,$31)		/* fault at 1b -> 3b, ret = -EFAULT */
	EXC(2b,3b,%0,$31)		/* fault at 2b -> 3b, ret = -EFAULT */
	:	"+r"(ret), "=&r"(prev), "=&r"(cmp)
	:	"r"(uaddr), "r"((long)(int)oldval), "r"(newval)
	:	"memory");

	*uval = prev;
	return ret;
}
109 
110 #endif /* __KERNEL__ */
111 #endif /* _ASM_ALPHA_FUTEX_H */
112