/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2004, 2007-2010, 2011-2012 Synopsys, Inc. (www.synopsys.com)
 *
 * Vineetg: August 2010: From Android kernel work
 */

#ifndef _ASM_FUTEX_H
#define _ASM_FUTEX_H

#include <linux/futex.h>
#include <linux/preempt.h>
#include <linux/uaccess.h>
#include <asm/errno.h>

#ifdef CONFIG_ARC_HAS_LLSC

/*
 * Atomic r-m-w of a user-space futex word using LLOCK/SCOND:
 * %0 = ret (0 on success), %1 = oldval. A faulting user access is
 * redirected via __ex_table to the fixup at 4:, which returns -EFAULT.
 */
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)	\
								\
	smp_mb();						\
	__asm__ __volatile__(					\
	"1:	llock	%1, [%2]		\n"		\
		insn				"\n"		\
	"2:	scond	%0, [%2]		\n"		\
	"	bnz	1b			\n"		\
	"	mov	%0, 0			\n"		\
	"3:					\n"		\
	"	.section .fixup,\"ax\"		\n"		\
	"	.align	4			\n"		\
	"4:	mov	%0, %4			\n"		\
	"	j	3b			\n"		\
	"	.previous			\n"		\
	"	.section __ex_table,\"a\"	\n"		\
	"	.align	4			\n"		\
	"	.word	1b, 4b			\n"		\
	"	.word	2b, 4b			\n"		\
	"	.previous			\n"		\
								\
	: "=&r" (ret), "=&r" (oldval)				\
	: "r" (uaddr), "r" (oparg), "ir" (-EFAULT)		\
	: "cc", "memory");					\
	smp_mb()						\

#else	/* !CONFIG_ARC_HAS_LLSC */

/*
 * Plain ld/st variant: the r-m-w is only atomic because the caller
 * disables preemption around it (see arch_futex_atomic_op_inuser()).
 */
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)	\
								\
	smp_mb();						\
	__asm__ __volatile__(					\
	"1:	ld	%1, [%2]		\n"		\
		insn				"\n"		\
	"2:	st	%0, [%2]		\n"		\
	"	mov	%0, 0			\n"		\
	"3:					\n"		\
	"	.section .fixup,\"ax\"		\n"		\
	"	.align	4			\n"		\
	"4:	mov	%0, %4			\n"		\
	"	j	3b			\n"		\
	"	.previous			\n"		\
	"	.section __ex_table,\"a\"	\n"		\
	"	.align	4			\n"		\
	"	.word	1b, 4b			\n"		\
	"	.word	2b, 4b			\n"		\
	"	.previous			\n"		\
								\
	: "=&r" (ret), "=&r" (oldval)				\
	: "r" (uaddr), "r" (oparg), "ir" (-EFAULT)		\
	: "cc", "memory");					\
	smp_mb()						\

#endif	/* CONFIG_ARC_HAS_LLSC */

static inline int arch_futex_atomic_op_inuser(int op, int oparg, int *oval,
					      u32 __user *uaddr)
{
	int oldval = 0, ret;

#ifndef CONFIG_ARC_HAS_LLSC
	preempt_disable();	/* to guarantee atomic r-m-w of futex op */
#endif
	pagefault_disable();

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("mov %0, %3", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		/* oldval = *uaddr; *uaddr = oldval + oparg */
		__futex_atomic_op("add %0, %1, %3", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("or %0, %1, %3", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op("bic %0, %1, %3", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("xor %0, %1, %3", ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	pagefault_enable();
#ifndef CONFIG_ARC_HAS_LLSC
	preempt_enable();
#endif

	if (!ret)
		*oval = oldval;

	return ret;
}

/*
 * cmpxchg of futex (pagefaults disabled by caller)
 * Return 0 for success, -EFAULT otherwise
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr, u32 expval,
			      u32 newval)
{
	int ret = 0;
	u32 existval;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

#ifndef CONFIG_ARC_HAS_LLSC
	preempt_disable();	/* to guarantee atomic r-m-w of futex op */
#endif
	smp_mb();

	__asm__ __volatile__(
#ifdef CONFIG_ARC_HAS_LLSC
	"1:	llock	%1, [%4]		\n"
	"	brne	%1, %2, 3f		\n"
	"2:	scond	%3, [%4]		\n"
	"	bnz	1b			\n"
#else
	"1:	ld	%1, [%4]		\n"
	"	brne	%1, %2, 3f		\n"
	"2:	st	%3, [%4]		\n"
#endif
	"3:					\n"
	"	.section .fixup,\"ax\"		\n"
	"4:	mov	%0, %5			\n"
	"	j	3b			\n"
	"	.previous			\n"
	"	.section __ex_table,\"a\"	\n"
	"	.align	4			\n"
	"	.word	1b, 4b			\n"
	"	.word	2b, 4b			\n"
	"	.previous			\n"
	: "+&r"(ret), "=&r"(existval)
	: "r"(expval), "r"(newval), "r"(uaddr), "ir"(-EFAULT)
	: "cc", "memory");

	smp_mb();

#ifndef CONFIG_ARC_HAS_LLSC
	preempt_enable();
#endif
	*uval = existval;
	return ret;
}

#endif	/* _ASM_FUTEX_H */