/* xref: /openbmc/linux/arch/openrisc/include/asm/futex.h (revision 6b5fc336) */
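/*
 * Futex operations for OpenRISC, built on the l.lwa/l.swa
 * load-linked/store-conditional instruction pair.  Both helpers below
 * check user access up front and route page faults through the kernel
 * exception tables rather than the normal fault path.
 */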
#ifndef __ASM_OPENRISC_FUTEX_H
#define __ASM_OPENRISC_FUTEX_H

#ifdef __KERNEL__

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <asm/errno.h>

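/*
 * __futex_atomic_op() is a load-linked/store-conditional
 * read-modify-write loop on a user-space futex word:
 *
 *	1:  l.lwa loads the old value into %0 and sets the reservation,
 *	    insn computes the new value into %1,
 *	2:  l.swa stores it back, setting the flag only if the
 *	    reservation still holds; l.bnf retries from 1: on failure.
 *	    The delay-slot l.ori clears %1 (ret) to 0 for the success
 *	    path; insn recomputes %1 on every retry.
 *
 * A fault at 1: or 2: is caught via the __ex_table entries and lands
 * in the fixup at 4:, which sets ret to -EFAULT and jumps past the
 * loop to 3:.
 */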
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg) \
({								\
	__asm__ __volatile__ (					\
		"1:	l.lwa	%0, %2			\n"	\
			insn				"\n"	\
		"2:	l.swa	%2, %1			\n"	\
		"	l.bnf	1b			\n"	\
		"	 l.ori	%1, r0, 0		\n"	\
		"3:					\n"	\
		".section .fixup,\"ax\"			\n"	\
		"4:	l.j	3b			\n"	\
		"	 l.addi	%1, r0, %3		\n"	\
		".previous				\n"	\
		".section __ex_table,\"a\"		\n"	\
		".word	1b,4b,2b,4b			\n"	\
		".previous				\n"	\
		: "=&r" (oldval), "=&r" (ret), "+m" (*uaddr)	\
		: "i" (-EFAULT), "r" (oparg)			\
		: "cc", "memory"				\
		);						\
})

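/*
 * encoded_op packs the whole futex operation into a single word:
 *
 *	bit  31		oparg is a shift count (oparg = 1 << oparg)
 *	bits 30-28	op	(FUTEX_OP_SET/ADD/OR/ANDN/XOR)
 *	bits 27-24	cmp	(FUTEX_OP_CMP_EQ/NE/LT/GE/LE/GT)
 *	bits 23-12	oparg	(sign-extended)
 *	bits 11-0	cmparg	(sign-extended)
 *
 * The return value is a negative error, or the 0/1 result of applying
 * the cmp operator to the old futex value and cmparg.
 */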
static inline int
futex_atomic_op_inuser(int encoded_op, u32 __user *uaddr)
{
	int op = (encoded_op >> 28) & 7;
	int cmp = (encoded_op >> 24) & 15;
	int oparg = (encoded_op << 8) >> 20;
	int cmparg = (encoded_op << 20) >> 20;
	int oldval = 0, ret;

	if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
		oparg = 1 << oparg;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	/*
	 * No sleeping page-fault handling while the atomic op runs;
	 * a fault is caught by the fixup code and turned into -EFAULT.
	 */
	pagefault_disable();

	switch (op) {
	case FUTEX_OP_SET:
		/* OR-ing oparg with itself simply moves it into %1 */
		__futex_atomic_op("l.or %1,%4,%4", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("l.add %1,%0,%4", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("l.or %1,%0,%4", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op("l.and %1,%0,%4", ret, oldval, uaddr, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("l.xor %1,%0,%4", ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	pagefault_enable();

	if (!ret) {
		switch (cmp) {
		case FUTEX_OP_CMP_EQ:
			ret = (oldval == cmparg);
			break;
		case FUTEX_OP_CMP_NE:
			ret = (oldval != cmparg);
			break;
		case FUTEX_OP_CMP_LT:
			ret = (oldval < cmparg);
			break;
		case FUTEX_OP_CMP_GE:
			ret = (oldval >= cmparg);
			break;
		case FUTEX_OP_CMP_LE:
			ret = (oldval <= cmparg);
			break;
		case FUTEX_OP_CMP_GT:
			ret = (oldval > cmparg);
			break;
		default:
			ret = -ENOSYS;
		}
	}
	return ret;
}

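/*
 * futex_atomic_cmpxchg_inatomic() - if *uaddr == oldval, atomically
 * replace it with newval.  l.sfeq sets the flag when the loaded value
 * matches oldval; l.bnf skips the store when it does not.  The l.swa
 * store succeeds only while the l.lwa reservation is intact, otherwise
 * the loop retries.  The value read is always returned through *uval;
 * the return value is 0, or -EFAULT if the user access faulted.
 */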
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0;
	u32 prev;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	__asm__ __volatile__ (
		"1:	l.lwa	%1, %2		\n"
		"	l.sfeq	%1, %3		\n"
		"	l.bnf	3f		\n"
		"	 l.nop			\n"
		"2:	l.swa	%2, %4		\n"
		"	l.bnf	1b		\n"
		"	 l.nop			\n"
		"3:				\n"
		".section .fixup,\"ax\"		\n"
		"4:	l.j	3b		\n"
		"	 l.addi	%0, r0, %5	\n"
		".previous			\n"
		".section __ex_table,\"a\"	\n"
		".word	1b,4b,2b,4b		\n"
		".previous			\n"
		: "+r" (ret), "=&r" (prev), "+m" (*uaddr)
		: "r" (oldval), "r" (newval), "i" (-EFAULT)
		: "cc", "memory"
		);

	*uval = prev;
	return ret;
}

#endif /* __KERNEL__ */

#endif /* __ASM_OPENRISC_FUTEX_H */