xref: /openbmc/linux/arch/arm/include/asm/futex.h (revision 7dd65feb)
1 #ifndef _ASM_ARM_FUTEX_H
2 #define _ASM_ARM_FUTEX_H
3 
4 #ifdef __KERNEL__
5 
6 #ifdef CONFIG_SMP
7 
8 #include <asm-generic/futex.h>
9 
10 #else /* !SMP, we can work around lack of atomic ops by disabling preemption */
11 
12 #include <linux/futex.h>
13 #include <linux/preempt.h>
14 #include <linux/uaccess.h>
15 #include <asm/errno.h>
16 
/*
 * __futex_atomic_op(insn, ret, oldval, uaddr, oparg)
 *
 * Read-modify-write helper for futex_atomic_op_inuser() on this !SMP
 * path: load the user word at uaddr into oldval (%1), apply "insn"
 * (which computes the new value into %0 from %1 and/or %3), and store
 * %0 back to the user word.  ldrt/strt are the unprivileged
 * load/store forms, so the access is performed with user-mode
 * permissions.  The sequence is NOT atomic against other CPUs; it is
 * only safe because the caller runs it with page faults (and hence
 * preemption) disabled.
 *
 * A fault at label 1 (load) or 2 (store) is redirected via the
 * __ex_table entries to fixup label 4, which sets ret (%0) to -EFAULT
 * and jumps past the store.  On success ret is cleared to 0.
 */
17 #define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)	\
18 	__asm__ __volatile__(					\
19 	"1:	ldrt	%1, [%2]\n"				\
20 	"	" insn "\n"					\
21 	"2:	strt	%0, [%2]\n"				\
22 	"	mov	%0, #0\n"				\
23 	"3:\n"							\
24 	"	.section __ex_table,\"a\"\n"			\
25 	"	.align	3\n"					\
26 	"	.long	1b, 4f, 2b, 4f\n"			\
27 	"	.previous\n"					\
28 	"	.section .fixup,\"ax\"\n"			\
29 	"4:	mov	%0, %4\n"				\
30 	"	b	3b\n"					\
31 	"	.previous"					\
32 	: "=&r" (ret), "=&r" (oldval)				\
33 	: "r" (uaddr), "r" (oparg), "Ir" (-EFAULT)		\
34 	: "cc", "memory")
35 
/*
 * futex_atomic_op_inuser - decode and execute a FUTEX_WAKE_OP
 * operation on the user word at uaddr (UP-only implementation).
 *
 * encoded_op layout, as decoded below:
 *   bits 28-31: op     (FUTEX_OP_SET/ADD/OR/ANDN/XOR; bit 31 doubles
 *                       as the FUTEX_OP_OPARG_SHIFT flag)
 *   bits 24-27: cmp    (FUTEX_OP_CMP_*)
 *   bits 12-23: oparg  (sign-extended 12-bit operand)
 *   bits  0-11: cmparg (sign-extended 12-bit comparison argument)
 *
 * Returns the boolean result of "oldval <cmp> cmparg" on success,
 * -EFAULT if uaddr is not writable or the user access faults, or
 * -ENOSYS for an unrecognised op or cmp code.
 */
36 static inline int
37 futex_atomic_op_inuser (int encoded_op, int __user *uaddr)
38 {
39 	int op = (encoded_op >> 28) & 7;
40 	int cmp = (encoded_op >> 24) & 15;
41 	int oparg = (encoded_op << 8) >> 20;	/* sign-extends the 12-bit field (relies on arithmetic >> of signed int, as kernel code does throughout) */
42 	int cmparg = (encoded_op << 20) >> 20;	/* ditto for the low 12 bits */
43 	int oldval = 0, ret;
44 
45 	if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
46 		oparg = 1 << oparg;	/* OPARG_SHIFT: oparg is a shift count, not a value */
47 
48 	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(int)))
49 		return -EFAULT;
50 
51 	pagefault_disable();	/* implies preempt_disable() */
52 
53 	switch (op) {
54 	case FUTEX_OP_SET:
55 		__futex_atomic_op("mov	%0, %3", ret, oldval, uaddr, oparg);
56 		break;
57 	case FUTEX_OP_ADD:
58 		__futex_atomic_op("add	%0, %1, %3", ret, oldval, uaddr, oparg);
59 		break;
60 	case FUTEX_OP_OR:
61 		__futex_atomic_op("orr	%0, %1, %3", ret, oldval, uaddr, oparg);
62 		break;
63 	case FUTEX_OP_ANDN:
64 		__futex_atomic_op("and	%0, %1, %3", ret, oldval, uaddr, ~oparg);	/* ANDN: clear the oparg bits */
65 		break;
66 	case FUTEX_OP_XOR:
67 		__futex_atomic_op("eor	%0, %1, %3", ret, oldval, uaddr, oparg);
68 		break;
69 	default:
70 		ret = -ENOSYS;
71 	}
72 
73 	pagefault_enable();	/* subsumes preempt_enable() */
74 
75 	if (!ret) {
76 		switch (cmp) {	/* asm succeeded: evaluate the requested comparison on the old value */
77 		case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
78 		case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
79 		case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
80 		case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
81 		case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
82 		case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
83 		default: ret = -ENOSYS;
84 		}
85 	}
86 	return ret;
87 }
88 
/*
 * futex_atomic_cmpxchg_inatomic - compare-and-exchange on the user
 * word at uaddr (UP-only: correctness depends on page faults, and
 * therefore preemption, being disabled across the asm rather than on
 * an atomic instruction).
 *
 * Loads *uaddr with ldrt; if the value equals oldval, conditionally
 * stores newval with streqt (the explicit IT is needed on Thumb-2 so
 * the conditional store at label 2 assembles, per the asm comment).
 *
 * Returns the value read from *uaddr — the caller detects success by
 * comparing the result with oldval — or -EFAULT if either user access
 * faults (routed through the __ex_table entries to fixup label 4).
 */
89 static inline int
90 futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval, int newval)
91 {
92 	int val;
93 
94 	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(int)))
95 		return -EFAULT;
96 
97 	pagefault_disable();	/* implies preempt_disable() */
98 
99 	__asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
100 	"1:	ldrt	%0, [%3]\n"
101 	"	teq	%0, %1\n"
102 	"	it	eq	@ explicit IT needed for the 2b label\n"
103 	"2:	streqt	%2, [%3]\n"
104 	"3:\n"
105 	"	.section __ex_table,\"a\"\n"
106 	"	.align	3\n"
107 	"	.long	1b, 4f, 2b, 4f\n"
108 	"	.previous\n"
109 	"	.section .fixup,\"ax\"\n"
110 	"4:	mov	%0, %4\n"
111 	"	b	3b\n"
112 	"	.previous"
113 	: "=&r" (val)
114 	: "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
115 	: "cc", "memory");
116 
117 	pagefault_enable();	/* subsumes preempt_enable() */
118 
119 	return val;
120 }
121 
122 #endif /* !SMP */
123 
124 #endif /* __KERNEL__ */
125 #endif /* _ASM_ARM_FUTEX_H */
126