xref: /openbmc/linux/arch/x86/include/asm/futex.h (revision a2cce7a9)
#ifndef _ASM_X86_FUTEX_H
#define _ASM_X86_FUTEX_H

#ifdef __KERNEL__

#include <linux/futex.h>
#include <linux/uaccess.h>

#include <asm/asm.h>
#include <asm/errno.h>
#include <asm/processor.h>
#include <asm/smap.h>

/*
 * Single-instruction atomic read-modify-write on the user-space word at
 * *uaddr: "insn" runs under STAC/CLAC (for SMAP), the previous value of
 * the word ends up in oldval, and a fault is turned into -EFAULT in ret
 * via the exception-table fixup.
 */
#define __futex_atomic_op1(insn, ret, oldval, uaddr, oparg)	\
	asm volatile("\t" ASM_STAC "\n"				\
		     "1:\t" insn "\n"				\
		     "2:\t" ASM_CLAC "\n"			\
		     "\t.section .fixup,\"ax\"\n"		\
		     "3:\tmov\t%3, %1\n"			\
		     "\tjmp\t2b\n"				\
		     "\t.previous\n"				\
		     _ASM_EXTABLE(1b, 3b)			\
		     : "=r" (oldval), "=r" (ret), "+m" (*uaddr)	\
		     : "i" (-EFAULT), "0" (oparg), "1" (0))

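/*
 * Illustration only, not part of the original header: a hedged userspace
 * sketch of what __futex_atomic_op1 performs for its two users
 * (FUTEX_OP_SET via xchgl and FUTEX_OP_ADD via lock xaddl), written with
 * the GCC/Clang __atomic builtins.  Unlike the asm above it has no user
 * access or fault handling, and the name futex_op1_sketch is made up.
 */
static inline int futex_op1_sketch(int is_set, unsigned int *uaddr,
				   unsigned int oparg, unsigned int *oldval)
{
	if (is_set)	/* xchgl: store oparg, hand back the previous value */
		*oldval = __atomic_exchange_n(uaddr, oparg, __ATOMIC_SEQ_CST);
	else		/* lock xaddl: add oparg, hand back the previous value */
		*oldval = __atomic_fetch_add(uaddr, oparg, __ATOMIC_SEQ_CST);
	return 0;	/* the real macro reports -EFAULT on a faulting access */
}
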
/*
 * Read-modify-write for operations that have no single-instruction form
 * (or, and-not, xor): load the old value, compute the new value in the
 * temporary "tem", and retry with LOCK CMPXCHG until the store succeeds
 * without interference.  A fault on either user access becomes -EFAULT.
 */
#define __futex_atomic_op2(insn, ret, oldval, uaddr, oparg)	\
	asm volatile("\t" ASM_STAC "\n"				\
		     "1:\tmovl	%2, %0\n"			\
		     "\tmovl\t%0, %3\n"				\
		     "\t" insn "\n"				\
		     "2:\t" LOCK_PREFIX "cmpxchgl %3, %2\n"	\
		     "\tjnz\t1b\n"				\
		     "3:\t" ASM_CLAC "\n"			\
		     "\t.section .fixup,\"ax\"\n"		\
		     "4:\tmov\t%5, %1\n"			\
		     "\tjmp\t3b\n"				\
		     "\t.previous\n"				\
		     _ASM_EXTABLE(1b, 4b)			\
		     _ASM_EXTABLE(2b, 4b)			\
		     : "=&a" (oldval), "=&r" (ret),		\
		       "+m" (*uaddr), "=&r" (tem)		\
		     : "r" (oparg), "i" (-EFAULT), "1" (0))

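/*
 * Illustration only, not part of the original header: a hedged userspace
 * sketch of the load/compute/compare-exchange retry loop that
 * __futex_atomic_op2 encodes in asm, shown for the "or" case with the
 * GCC/Clang __atomic builtins.  The name futex_op2_or_sketch is made up,
 * and there is no fault handling.
 */
static inline int futex_op2_or_sketch(unsigned int *uaddr, unsigned int oparg,
				      unsigned int *oldval)
{
	unsigned int old = __atomic_load_n(uaddr, __ATOMIC_RELAXED);
	unsigned int new;

	do {		/* recompute and retry if another writer intervened */
		new = old | oparg;
	} while (!__atomic_compare_exchange_n(uaddr, &old, new, 0,
					      __ATOMIC_RELAXED,
					      __ATOMIC_RELAXED));
	*oldval = old;	/* value the word held just before our update */
	return 0;	/* the real macro reports -EFAULT on a faulting access */
}
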
/*
 * Decode the FUTEX_OP() encoding (operation, comparison and their two
 * arguments), apply the operation atomically to the user word at uaddr
 * with page faults disabled, and then return the result of comparing the
 * old value against cmparg, or a negative error code.
 */
static inline int futex_atomic_op_inuser(int encoded_op, u32 __user *uaddr)
{
	int op = (encoded_op >> 28) & 7;
	int cmp = (encoded_op >> 24) & 15;
	int oparg = (encoded_op << 8) >> 20;
	int cmparg = (encoded_op << 20) >> 20;
	int oldval = 0, ret, tem;

	if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
		oparg = 1 << oparg;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	pagefault_disable();

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op1("xchgl %0, %2", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op1(LOCK_PREFIX "xaddl %0, %2", ret, oldval,
				   uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op2("orl %4, %3", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		/* oparg is pre-complemented so plain andl implements and-not */
		__futex_atomic_op2("andl %4, %3", ret, oldval, uaddr, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op2("xorl %4, %3", ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	pagefault_enable();

	if (!ret) {
		switch (cmp) {
		case FUTEX_OP_CMP_EQ:
			ret = (oldval == cmparg);
			break;
		case FUTEX_OP_CMP_NE:
			ret = (oldval != cmparg);
			break;
		case FUTEX_OP_CMP_LT:
			ret = (oldval < cmparg);
			break;
		case FUTEX_OP_CMP_GE:
			ret = (oldval >= cmparg);
			break;
		case FUTEX_OP_CMP_LE:
			ret = (oldval <= cmparg);
			break;
		case FUTEX_OP_CMP_GT:
			ret = (oldval > cmparg);
			break;
		default:
			ret = -ENOSYS;
		}
	}
	return ret;
}
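
/*
 * Worked example, not part of the original header: the decode arithmetic
 * above applied to a word packed the way FUTEX_OP() in <linux/futex.h>
 * packs it (op in bits 31-28, cmp in bits 27-24, oparg in bits 23-12,
 * cmparg in bits 11-0).  The signed shifts sign-extend the two 12-bit
 * argument fields.  The helper name is made up and exists only to show
 * the arithmetic.
 */
static inline void __futex_decode_example(void)
{
	/* FUTEX_OP(FUTEX_OP_ADD, 1, FUTEX_OP_CMP_GT, 0) == 0x14001000 */
	int encoded_op = (1 << 28) | (4 << 24) | (1 << 12) | 0;

	int op     = (encoded_op >> 28) & 7;	/* 1 == FUTEX_OP_ADD    */
	int cmp    = (encoded_op >> 24) & 15;	/* 4 == FUTEX_OP_CMP_GT */
	int oparg  = (encoded_op << 8) >> 20;	/* 1                    */
	int cmparg = (encoded_op << 20) >> 20;	/* 0                    */

	(void)op; (void)cmp; (void)oparg; (void)cmparg;
}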

/*
 * Atomically compare-and-exchange the user word at uaddr: *uval is set to
 * the value found there, and newval is stored only if that value equalled
 * oldval.  Returns 0 on success or -EFAULT if the user access faults.
 */
static inline int futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
						u32 oldval, u32 newval)
{
	return user_atomic_cmpxchg_inatomic(uval, uaddr, oldval, newval);
}
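
/*
 * Illustration only, not part of the original header: the semantics the
 * wrapper above asks user_atomic_cmpxchg_inatomic() to provide, sketched
 * with the GCC/Clang __atomic builtins and without the user-access and
 * fault handling the real helper adds.  The name cmpxchg_sketch is made
 * up.
 */
static inline int cmpxchg_sketch(u32 *uval, u32 *uaddr, u32 oldval, u32 newval)
{
	u32 expected = oldval;

	/* Store newval only if *uaddr still holds oldval; learn the old word either way. */
	__atomic_compare_exchange_n(uaddr, &expected, newval, 0,
				    __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
	*uval = expected;	/* value that was found at uaddr */
	return 0;		/* the real helper returns -EFAULT on a fault */
}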

#endif /* __KERNEL__ */
#endif /* _ASM_X86_FUTEX_H */