/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
 */
#ifndef _ASM_FUTEX_H
#define _ASM_FUTEX_H

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <asm/barrier.h>
#include <asm/compiler.h>
#include <asm/errno.h>

/*
 * Perform an atomic read-modify-write on the user word at *uaddr using an
 * LL/SC retry loop.  A fault on the ll.w/sc.w is redirected through the
 * exception table to label 4, which stores -EFAULT in @ret.
 */
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)		\
{									\
	__asm__ __volatile__(						\
	"1:	ll.w	%1, %4 # __futex_atomic_op		\n"	\
	"	" insn	"					\n"	\
	"2:	sc.w	$t0, %2					\n"	\
	"	beq	$t0, $zero, 1b				\n"	\
	"3:							\n"	\
	"	.section .fixup,\"ax\"				\n"	\
	"4:	li.w	%0, %6					\n"	\
	"	b	3b					\n"	\
	"	.previous					\n"	\
	"	.section __ex_table,\"a\"			\n"	\
	"	"__UA_ADDR "\t1b, 4b				\n"	\
	"	"__UA_ADDR "\t2b, 4b				\n"	\
	"	.previous					\n"	\
	: "=r" (ret), "=&r" (oldval),					\
	  "=ZC" (*uaddr)						\
	: "0" (0), "ZC" (*uaddr), "Jr" (oparg),				\
	  "i" (-EFAULT)							\
	: "memory", "t0");						\
}

static inline int
arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *uaddr)
{
	int oldval = 0, ret = 0;

	/* Page faults are disabled; a faulting access returns -EFAULT. */
	pagefault_disable();

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("move $t0, %z5", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("add.w $t0, %1, %z5", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("or $t0, %1, %z5", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op("and $t0, %1, %z5", ret, oldval, uaddr, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("xor $t0, %1, %z5", ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	pagefault_enable();

	if (!ret)
		*oval = oldval;

	return ret;
}

/*
 * Atomically compare the user word at *uaddr with @oldval and, if they match,
 * store @newval.  The value observed at *uaddr is returned through @uval.
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr, u32 oldval, u32 newval)
{
	int ret = 0;
	u32 val = 0;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	__asm__ __volatile__(
	"# futex_atomic_cmpxchg_inatomic			\n"
	"1:	ll.w	%1, %3					\n"
	"	bne	%1, %z4, 3f				\n"
	"	or	$t0, %z5, $zero				\n"
	"2:	sc.w	$t0, %2					\n"
	"	beq	$zero, $t0, 1b				\n"
	"3:							\n"
	__WEAK_LLSC_MB
	"	.section .fixup,\"ax\"				\n"
	"4:	li.d	%0, %6					\n"
	"	b	3b					\n"
	"	.previous					\n"
	"	.section __ex_table,\"a\"			\n"
	"	"__UA_ADDR "\t1b, 4b				\n"
	"	"__UA_ADDR "\t2b, 4b				\n"
	"	.previous					\n"
	: "+r" (ret), "=&r" (val), "=" GCC_OFF_SMALL_ASM() (*uaddr)
	: GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oldval), "Jr" (newval),
	  "i" (-EFAULT)
	: "memory", "t0");

	*uval = val;

	return ret;
}

#endif /* _ASM_FUTEX_H */