/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_FUTEX_H
#define _ASM_X86_FUTEX_H

#ifdef __KERNEL__

#include <linux/futex.h>
#include <linux/uaccess.h>

#include <asm/asm.h>
#include <asm/errno.h>
#include <asm/processor.h>
#include <asm/smap.h>

/*
 * __futex_atomic_op1 - run a single atomic instruction on a user futex word.
 *
 * @insn is one x86 instruction (e.g. "xchgl", LOCK_PREFIX "xaddl") that both
 * reads the old value into %0 (oldval) and stores oparg into *uaddr.  SMAP is
 * opened with STAC before the access and closed with CLAC after it.  If the
 * user access faults, the exception-table fixup at label 3 stores -EFAULT into
 * %1 (ret) and resumes at label 2; on success ret keeps its "1" (0) input.
 */
#define __futex_atomic_op1(insn, ret, oldval, uaddr, oparg)	\
	asm volatile("\t" ASM_STAC "\n"				\
		     "1:\t" insn "\n"				\
		     "2:\t" ASM_CLAC "\n"			\
		     "\t.section .fixup,\"ax\"\n"		\
		     "3:\tmov\t%3, %1\n"			\
		     "\tjmp\t2b\n"				\
		     "\t.previous\n"				\
		     _ASM_EXTABLE(1b, 3b)			\
		     : "=r" (oldval), "=r" (ret), "+m" (*uaddr)	\
		     : "i" (-EFAULT), "0" (oparg), "1" (0))

/*
 * __futex_atomic_op2 - read-modify-write a user futex word via a cmpxchg loop.
 *
 * For ops with no single-instruction form (or/andn/xor): load *uaddr into
 * %0/eax (oldval), copy it to the scratch register %3 (tem, named by the
 * caller), apply @insn to tem, then LOCK CMPXCHG tem back into *uaddr.
 * cmpxchgl compares *uaddr against eax, so a concurrent modification makes it
 * fail and the jnz retries from the reload at label 1.  A fault at either
 * user access (labels 1 and 2) lands in the fixup at label 4, which sets
 * ret = -EFAULT and exits via label 3.  oldval is forced into eax ("=&a")
 * because cmpxchg implicitly uses it; both outputs are earlyclobber since
 * they are written before all inputs are consumed.
 */
#define __futex_atomic_op2(insn, ret, oldval, uaddr, oparg)	\
	asm volatile("\t" ASM_STAC "\n"				\
		     "1:\tmovl	%2, %0\n"			\
		     "\tmovl\t%0, %3\n"				\
		     "\t" insn "\n"				\
		     "2:\t" LOCK_PREFIX "cmpxchgl %3, %2\n"	\
		     "\tjnz\t1b\n"				\
		     "3:\t" ASM_CLAC "\n"			\
		     "\t.section .fixup,\"ax\"\n"		\
		     "4:\tmov\t%5, %1\n"			\
		     "\tjmp\t3b\n"				\
		     "\t.previous\n"				\
		     _ASM_EXTABLE(1b, 4b)			\
		     _ASM_EXTABLE(2b, 4b)			\
		     : "=&a" (oldval), "=&r" (ret),		\
		       "+m" (*uaddr), "=&r" (tem)		\
		     : "r" (oparg), "i" (-EFAULT), "1" (0))

/*
 * arch_futex_atomic_op_inuser - atomically apply a FUTEX_OP_* to *uaddr.
 * @op:    FUTEX_OP_SET/ADD/OR/ANDN/XOR operation selector
 * @oparg: operand for the operation (negated here for ANDN: x & ~oparg)
 * @oval:  out: previous value of the futex word, written only on success
 * @uaddr: user-space address of the 32-bit futex word
 *
 * Returns 0 on success, -EFAULT if the user access faulted, or -ENOSYS for
 * an unknown @op.  Page faults are disabled around the access, so a fault
 * takes the exception-table path (-EFAULT) instead of sleeping; the caller
 * is expected to handle the fault and retry.
 */
static inline int arch_futex_atomic_op_inuser(int op, int oparg, int *oval,
		u32 __user *uaddr)
{
	/* tem is the scratch register named inside __futex_atomic_op2. */
	int oldval = 0, ret, tem;

	pagefault_disable();

	switch (op) {
	case FUTEX_OP_SET:
		/* xchgl is implicitly locked; no LOCK_PREFIX needed. */
		__futex_atomic_op1("xchgl %0, %2", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op1(LOCK_PREFIX "xaddl %0, %2", ret, oldval,
				   uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op2("orl %4, %3", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		/* ANDN is defined as oldval & ~oparg, hence the negation. */
		__futex_atomic_op2("andl %4, %3", ret, oldval, uaddr, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op2("xorl %4, %3", ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	pagefault_enable();

	if (!ret)
		*oval = oldval;

	return ret;
}

/*
 * futex_atomic_cmpxchg_inatomic - cmpxchg a user futex word in atomic context.
 * @uval:   out: value read from *uaddr
 * @uaddr:  user-space address of the futex word
 * @oldval: expected current value
 * @newval: value to store if *uaddr == oldval
 *
 * Thin wrapper delegating to the generic x86 uaccess helper; returns its
 * result (0 on success, -EFAULT on fault).
 */
static inline int futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
						u32 oldval, u32 newval)
{
	return user_atomic_cmpxchg_inatomic(uval, uaddr, oldval, newval);
}

#endif
#endif /* _ASM_X86_FUTEX_H */