xref: /openbmc/linux/arch/sparc/include/asm/futex_64.h (revision 8569c914)
#ifndef _SPARC64_FUTEX_H
#define _SPARC64_FUTEX_H

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <asm/errno.h>
#include <asm/system.h>

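/*
 * Added commentary (not in the original source): sparc64 has no atomic
 * read-modify-write user accessor, so each futex op below is built from
 * a load / compute / 32-bit "casa" retry loop.  The %asi register selects
 * the user address space for lduwa/casa, the loop repeats until the casa
 * finds the word unchanged since the lduwa, and the .fixup/__ex_table
 * entries turn a fault on either user access into a -EFAULT return.  On
 * success "ret" is 0 and "oldval" holds the previous user-space value.
 */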
#define __futex_cas_op(insn, ret, oldval, uaddr, oparg)	\
	__asm__ __volatile__(				\
	"\n1:	lduwa	[%3] %%asi, %2\n"		\
	"	" insn "\n"				\
	"2:	casa	[%3] %%asi, %2, %1\n"		\
	"	cmp	%2, %1\n"			\
	"	bne,pn	%%icc, 1b\n"			\
	"	 mov	0, %0\n"			\
	"3:\n"						\
	"	.section .fixup,#alloc,#execinstr\n"	\
	"	.align	4\n"				\
	"4:	sethi	%%hi(3b), %0\n"			\
	"	jmpl	%0 + %%lo(3b), %%g0\n"		\
	"	 mov	%5, %0\n"			\
	"	.previous\n"				\
	"	.section __ex_table,\"a\"\n"		\
	"	.align	4\n"				\
	"	.word	1b, 4b\n"			\
	"	.word	2b, 4b\n"			\
	"	.previous\n"				\
	: "=&r" (ret), "=&r" (oldval), "=&r" (tem)	\
	: "r" (uaddr), "r" (oparg), "i" (-EFAULT)	\
	: "memory")

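/*
 * Added commentary (not in the original source): encoded_op packs four
 * fields, roughly op:4 | cmp:4 | oparg:12 | cmparg:12 from the most
 * significant bits down, with oparg and cmparg sign-extended and the
 * FUTEX_OP_OPARG_SHIFT flag in the top nibble turning the operand into
 * 1 << oparg.  As a worked example, FUTEX_OP(FUTEX_OP_ADD, 1,
 * FUTEX_OP_CMP_EQ, 0) decodes here to op = FUTEX_OP_ADD, oparg = 1,
 * cmp = FUTEX_OP_CMP_EQ, cmparg = 0: atomically add 1 to *uaddr and
 * return whether the old value was 0.
 */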
static inline int futex_atomic_op_inuser(int encoded_op, int __user *uaddr)
{
	int op = (encoded_op >> 28) & 7;
	int cmp = (encoded_op >> 24) & 15;
	int oparg = (encoded_op << 8) >> 20;
	int cmparg = (encoded_op << 20) >> 20;
	int oldval = 0, ret, tem;

	if (unlikely(!access_ok(VERIFY_WRITE, uaddr, sizeof(int))))
		return -EFAULT;
	if (unlikely((((unsigned long) uaddr) & 0x3UL)))
		return -EINVAL;

	if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
		oparg = 1 << oparg;

	pagefault_disable();

	switch (op) {
	case FUTEX_OP_SET:
		__futex_cas_op("mov\t%4, %1", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_cas_op("add\t%2, %4, %1", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_cas_op("or\t%2, %4, %1", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_cas_op("andn\t%2, %4, %1", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_cas_op("xor\t%2, %4, %1", ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	pagefault_enable();

	if (!ret) {
		switch (cmp) {
		case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
		case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
		case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
		case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
		case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
		case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
		default: ret = -ENOSYS;
		}
	}
	return ret;
}

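/*
 * Added commentary (not in the original source): a single casa tries to
 * replace *uaddr with newval if it still contains oldval, and the value
 * actually observed in user memory is returned (a fault returns -EFAULT
 * through the fixup instead).  A caller is expected to compare the result
 * against the expected value to see whether the exchange happened,
 * roughly:
 *
 *	curval = futex_atomic_cmpxchg_inatomic(uaddr, uval, newval);
 *	if (curval == uval)
 *		... the exchange took place ...
 *
 * (illustrative sketch only; curval and uval are just local names here).
 */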
static inline int
futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval, int newval)
{
	__asm__ __volatile__(
	"\n1:	casa	[%3] %%asi, %2, %0\n"
	"2:\n"
	"	.section .fixup,#alloc,#execinstr\n"
	"	.align	4\n"
	"3:	sethi	%%hi(2b), %0\n"
	"	jmpl	%0 + %%lo(2b), %%g0\n"
	"	 mov	%4, %0\n"
	"	.previous\n"
	"	.section __ex_table,\"a\"\n"
	"	.align	4\n"
	"	.word	1b, 3b\n"
	"	.previous\n"
	: "=r" (newval)
	: "0" (newval), "r" (oldval), "r" (uaddr), "i" (-EFAULT)
	: "memory");

	return newval;
}

#endif /* !(_SPARC64_FUTEX_H) */