/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (c) 2006 Ralf Baechle (ralf@linux-mips.org)
 * Copyright (c) 2018 Jim Wilson (jimw@sifive.com)
 */

#ifndef _ASM_RISCV_FUTEX_H
#define _ASM_RISCV_FUTEX_H

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <linux/errno.h>
#include <asm/asm.h>

/* We don't even really need the extable code, but for now keep it simple */
#ifndef CONFIG_MMU
#define __enable_user_access()		do { } while (0)
#define __disable_user_access()		do { } while (0)
#endif

/*
 * Run a single atomic memory operation (AMO) on the user futex word.
 * A fault on the access branches to the fixup code, which sets ret to
 * -EFAULT and resumes execution just after the AMO.
 */
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)	\
{								\
	uintptr_t tmp;						\
	__enable_user_access();					\
	__asm__ __volatile__ (					\
	"1:	" insn "				\n"	\
	"2:						\n"	\
	"	.section .fixup,\"ax\"			\n"	\
	"	.balign 4				\n"	\
	"3:	li %[r],%[e]				\n"	\
	"	jump 2b,%[t]				\n"	\
	"	.previous				\n"	\
	"	.section __ex_table,\"a\"		\n"	\
	"	.balign " RISCV_SZPTR "			\n"	\
	"	" RISCV_PTR " 1b, 3b			\n"	\
	"	.previous				\n"	\
	: [r] "+r" (ret), [ov] "=&r" (oldval),		\
	  [u] "+m" (*uaddr), [t] "=&r" (tmp)		\
	: [op] "Jr" (oparg), [e] "i" (-EFAULT)		\
	: "memory");						\
	__disable_user_access();				\
}

static inline int
arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *uaddr)
{
	int oldval = 0, ret = 0;

	pagefault_disable();

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("amoswap.w.aqrl %[ov],%z[op],%[u]",
				  ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("amoadd.w.aqrl %[ov],%z[op],%[u]",
				  ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("amoor.w.aqrl %[ov],%z[op],%[u]",
				  ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op("amoand.w.aqrl %[ov],%z[op],%[u]",
				  ret, oldval, uaddr, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("amoxor.w.aqrl %[ov],%z[op],%[u]",
				  ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	pagefault_enable();

	if (!ret)
		*oval = oldval;

	return ret;
}

static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0;
	u32 val;
	uintptr_t tmp;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	/*
	 * Compare-and-exchange the user futex word with an LR/SC loop.
	 * A fault on either the load-reserved or the store-conditional
	 * branches to the fixup code, which sets ret to -EFAULT.
	 */
	__enable_user_access();
	__asm__ __volatile__ (
	"1:	lr.w.aqrl %[v],%[u]			\n"
	"	bne %[v],%z[ov],3f			\n"
	"2:	sc.w.aqrl %[t],%z[nv],%[u]		\n"
	"	bnez %[t],1b				\n"
	"3:						\n"
	"	.section .fixup,\"ax\"			\n"
	"	.balign 4				\n"
	"4:	li %[r],%[e]				\n"
	"	jump 3b,%[t]				\n"
	"	.previous				\n"
	"	.section __ex_table,\"a\"		\n"
	"	.balign " RISCV_SZPTR "			\n"
	"	" RISCV_PTR " 1b, 4b			\n"
	"	" RISCV_PTR " 2b, 4b			\n"
	"	.previous				\n"
	: [r] "+r" (ret), [v] "=&r" (val), [u] "+m" (*uaddr), [t] "=&r" (tmp)
	: [ov] "Jr" (oldval), [nv] "Jr" (newval), [e] "i" (-EFAULT)
	: "memory");
	__disable_user_access();

	*uval = val;
	return ret;
}

#endif /* _ASM_RISCV_FUTEX_H */