/* xref: /openbmc/linux/arch/arm/include/asm/futex.h (revision 711aab1d) */
#ifndef _ASM_ARM_FUTEX_H
#define _ASM_ARM_FUTEX_H

#ifdef __KERNEL__

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <asm/errno.h>

/*
 * Exception-table fragment shared by the futex asm sequences below.
 *
 * Registers both user-access labels (1b: the load, 2b: the store) in
 * __ex_table so that a faulting access is redirected to the local fixup
 * at 4:, which moves -EFAULT (supplied via @err_reg, an asm operand
 * string such as "%5") into the "ret" operand (%0) and resumes at 3:.
 * The fixup lives in .text.fixup, matching the kernel's standard
 * user-access fault-handling scheme.
 */
#define __futex_atomic_ex_table(err_reg)			\
	"3:\n"							\
	"	.pushsection __ex_table,\"a\"\n"		\
	"	.align	3\n"					\
	"	.long	1b, 4f, 2b, 4f\n"			\
	"	.popsection\n"					\
	"	.pushsection .text.fixup,\"ax\"\n"		\
	"	.align	2\n"					\
	"4:	mov	%0, " err_reg "\n"			\
	"	b	3b\n"					\
	"	.popsection"

#ifdef CONFIG_SMP

/*
 * SMP variant: atomically perform
 *	oldval = *uaddr; *uaddr = <insn>(oldval, oparg)
 * on a user-space word using an ldrex/strex loop, retrying until the
 * exclusive store succeeds.  @insn computes the new value into %0 from
 * oldval (%1) and oparg (%4).  On success @ret is set to 0; a fault on
 * either user access is fixed up to set ret = -EFAULT via
 * __futex_atomic_ex_table.  smp_mb() orders the operation against prior
 * accesses, prefetchw() pre-faults the line for the exclusive store
 * (prefetching itself cannot fault), and the user access window is
 * bracketed by uaccess_save_and_enable()/uaccess_restore().
 */
#define __futex_atomic_op(insn, ret, oldval, tmp, uaddr, oparg)	\
({								\
	unsigned int __ua_flags;				\
	smp_mb();						\
	prefetchw(uaddr);					\
	__ua_flags = uaccess_save_and_enable();			\
	__asm__ __volatile__(					\
	"1:	ldrex	%1, [%3]\n"				\
	"	" insn "\n"					\
	"2:	strex	%2, %0, [%3]\n"				\
	"	teq	%2, #0\n"				\
	"	bne	1b\n"					\
	"	mov	%0, #0\n"				\
	__futex_atomic_ex_table("%5")				\
	: "=&r" (ret), "=&r" (oldval), "=&r" (tmp)		\
	: "r" (uaddr), "r" (oparg), "Ir" (-EFAULT)		\
	: "cc", "memory");					\
	uaccess_restore(__ua_flags);				\
})
/*
 * futex_atomic_cmpxchg_inatomic - compare-and-exchange a user futex
 * word (SMP, ldrex/strex version).
 *
 * If *uaddr == @oldval, store @newval to *uaddr; the value actually
 * read is always returned through @uval.  Returns 0 on success or
 * -EFAULT if the user access faulted (set by the exception-table
 * fixup).  The strex retry loop restarts on a lost exclusive
 * reservation; the explicit IT block keeps the 2b label on the
 * conditional store valid in Thumb-2 builds.
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	unsigned int __ua_flags;
	int ret;
	u32 val;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	/* Barriers before and after give the op full-barrier semantics. */
	smp_mb();
	/* Prefetching cannot fault */
	prefetchw(uaddr);
	__ua_flags = uaccess_save_and_enable();
	__asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
	"1:	ldrex	%1, [%4]\n"
	"	teq	%1, %2\n"
	"	ite	eq	@ explicit IT needed for the 2b label\n"
	"2:	strexeq	%0, %3, [%4]\n"
	"	movne	%0, #0\n"
	"	teq	%0, #0\n"
	"	bne	1b\n"
	__futex_atomic_ex_table("%5")
	: "=&r" (ret), "=&r" (val)
	: "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
	: "cc", "memory");
	uaccess_restore(__ua_flags);
	smp_mb();

	*uval = val;
	return ret;
}

#else /* !SMP, we can work around lack of atomic ops by disabling preemption */

#include <linux/preempt.h>
#include <asm/domain.h>

/*
 * UP variant: same contract as the SMP __futex_atomic_op, but uses
 * plain TUSER() user-access loads/stores (see asm/domain.h) instead of
 * ldrex/strex.  Atomicity of the read-modify-write relies on the
 * caller having disabled preemption (sufficient on !SMP; see the
 * arch_futex_atomic_op_inuser caller).  @ret is set to 0 on success or
 * -EFAULT by the exception-table fixup on a faulting user access.
 */
#define __futex_atomic_op(insn, ret, oldval, tmp, uaddr, oparg)	\
({								\
	unsigned int __ua_flags = uaccess_save_and_enable();	\
	__asm__ __volatile__(					\
	"1:	" TUSER(ldr) "	%1, [%3]\n"			\
	"	" insn "\n"					\
	"2:	" TUSER(str) "	%0, [%3]\n"			\
	"	mov	%0, #0\n"				\
	__futex_atomic_ex_table("%5")				\
	: "=&r" (ret), "=&r" (oldval), "=&r" (tmp)		\
	: "r" (uaddr), "r" (oparg), "Ir" (-EFAULT)		\
	: "cc", "memory");					\
	uaccess_restore(__ua_flags);				\
})
/*
 * futex_atomic_cmpxchg_inatomic - compare-and-exchange a user futex
 * word (UP version).
 *
 * Without exclusive-access primitives, atomicity comes from disabling
 * preemption around a plain user load and a conditional user store
 * (sufficient on !SMP).  If *uaddr == @oldval, @newval is stored; the
 * value read is returned through @uval.  ret starts at 0 and is only
 * overwritten with -EFAULT by the exception-table fixup, so the
 * function returns 0 on success or -EFAULT on a faulting user access.
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	unsigned int __ua_flags;
	int ret = 0;
	u32 val;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	preempt_disable();
	__ua_flags = uaccess_save_and_enable();
	__asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
	"1:	" TUSER(ldr) "	%1, [%4]\n"
	"	teq	%1, %2\n"
	"	it	eq	@ explicit IT needed for the 2b label\n"
	"2:	" TUSER(streq) "	%3, [%4]\n"
	__futex_atomic_ex_table("%5")
	: "+r" (ret), "=&r" (val)
	: "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
	: "cc", "memory");
	uaccess_restore(__ua_flags);

	*uval = val;
	preempt_enable();

	return ret;
}

#endif /* !SMP */

/*
 * arch_futex_atomic_op_inuser - perform a futex arithmetic op in-place
 * on the user word at @uaddr.
 *
 * Atomically applies @op (FUTEX_OP_SET/ADD/OR/ANDN/XOR) with @oparg to
 * *uaddr via __futex_atomic_op and, only on success, stores the
 * previous value through @oval.  Returns 0 on success, -EFAULT if the
 * user access faulted, or -ENOSYS for an unrecognised op.
 */
static inline int
arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *uaddr)
{
	int oldval = 0, ret, tmp;

#ifndef CONFIG_SMP
	/*
	 * The UP __futex_atomic_op relies on the caller being
	 * non-preemptible for atomicity of the load/modify/store.
	 */
	preempt_disable();
#endif
	/* Faults must take the exception-table fixup, not the pager. */
	pagefault_disable();

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("mov	%0, %4", ret, oldval, tmp, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("add	%0, %1, %4", ret, oldval, tmp, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("orr	%0, %1, %4", ret, oldval, tmp, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		/* ANDN is implemented as AND with the complemented operand. */
		__futex_atomic_op("and	%0, %1, %4", ret, oldval, tmp, uaddr, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("eor	%0, %1, %4", ret, oldval, tmp, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	pagefault_enable();
#ifndef CONFIG_SMP
	preempt_enable();
#endif

	if (!ret)
		*oval = oldval;

	return ret;
}

#endif /* __KERNEL__ */
#endif /* _ASM_ARM_FUTEX_H */