/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Based on arch/arm/include/asm/atomic.h
 *
 * Copyright (C) 1996 Russell King.
 * Copyright (C) 2002 Deep Blue Solutions Ltd.
 * Copyright (C) 2012 ARM Ltd.
 */
#ifndef __ASM_ATOMIC_H
#define __ASM_ATOMIC_H

#include <linux/compiler.h>
#include <linux/types.h>

#include <asm/barrier.h>
#include <asm/cmpxchg.h>
#include <asm/lse.h>

#define ATOMIC_OP(op)							\
static inline void arch_##op(int i, atomic_t *v)			\
{									\
	__lse_ll_sc_body(op, i, v);					\
}

ATOMIC_OP(atomic_andnot)
ATOMIC_OP(atomic_or)
ATOMIC_OP(atomic_xor)
ATOMIC_OP(atomic_add)
ATOMIC_OP(atomic_and)
ATOMIC_OP(atomic_sub)

#undef ATOMIC_OP

#define ATOMIC_FETCH_OP(name, op)					\
static inline int arch_##op##name(int i, atomic_t *v)			\
{									\
	return __lse_ll_sc_body(op##name, i, v);			\
}

#define ATOMIC_FETCH_OPS(op)						\
	ATOMIC_FETCH_OP(_relaxed, op)					\
	ATOMIC_FETCH_OP(_acquire, op)					\
	ATOMIC_FETCH_OP(_release, op)					\
	ATOMIC_FETCH_OP(        , op)

ATOMIC_FETCH_OPS(atomic_fetch_andnot)
ATOMIC_FETCH_OPS(atomic_fetch_or)
ATOMIC_FETCH_OPS(atomic_fetch_xor)
ATOMIC_FETCH_OPS(atomic_fetch_add)
ATOMIC_FETCH_OPS(atomic_fetch_and)
ATOMIC_FETCH_OPS(atomic_fetch_sub)
ATOMIC_FETCH_OPS(atomic_add_return)
ATOMIC_FETCH_OPS(atomic_sub_return)

#undef ATOMIC_FETCH_OP
#undef ATOMIC_FETCH_OPS

#define ATOMIC64_OP(op)							\
static inline void arch_##op(long i, atomic64_t *v)			\
{									\
	__lse_ll_sc_body(op, i, v);					\
}

ATOMIC64_OP(atomic64_andnot)
ATOMIC64_OP(atomic64_or)
ATOMIC64_OP(atomic64_xor)
ATOMIC64_OP(atomic64_add)
ATOMIC64_OP(atomic64_and)
ATOMIC64_OP(atomic64_sub)

#undef ATOMIC64_OP

#define ATOMIC64_FETCH_OP(name, op)					\
static inline long arch_##op##name(long i, atomic64_t *v)		\
{									\
	return __lse_ll_sc_body(op##name, i, v);			\
}

#define ATOMIC64_FETCH_OPS(op)						\
	ATOMIC64_FETCH_OP(_relaxed, op)					\
	ATOMIC64_FETCH_OP(_acquire, op)					\
	ATOMIC64_FETCH_OP(_release, op)					\
	ATOMIC64_FETCH_OP(        , op)

ATOMIC64_FETCH_OPS(atomic64_fetch_andnot)
ATOMIC64_FETCH_OPS(atomic64_fetch_or)
ATOMIC64_FETCH_OPS(atomic64_fetch_xor)
ATOMIC64_FETCH_OPS(atomic64_fetch_add)
ATOMIC64_FETCH_OPS(atomic64_fetch_and)
ATOMIC64_FETCH_OPS(atomic64_fetch_sub)
ATOMIC64_FETCH_OPS(atomic64_add_return)
ATOMIC64_FETCH_OPS(atomic64_sub_return)

#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_FETCH_OPS

static inline long arch_atomic64_dec_if_positive(atomic64_t *v)
{
	return __lse_ll_sc_body(atomic64_dec_if_positive, v);
}
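
/*
 * Added illustration: the functions generated above are thin wrappers
 * around __lse_ll_sc_body() from <asm/lse.h>, which selects the LSE
 * implementation when the CPU provides the ARMv8.1 atomics and falls
 * back to the exclusive load/store (LL/SC) variant otherwise. For
 * example, ATOMIC_FETCH_OPS(atomic_fetch_add) expands to four wrappers
 * along the lines of:
 *
 *	static inline int arch_atomic_fetch_add_relaxed(int i, atomic_t *v)
 *	{
 *		return __lse_ll_sc_body(atomic_fetch_add_relaxed, i, v);
 *	}
 *
 * plus the _acquire, _release and fully ordered forms.
 */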

#define arch_atomic_read(v)			__READ_ONCE((v)->counter)
#define arch_atomic_set(v, i)			__WRITE_ONCE(((v)->counter), (i))

#define arch_atomic_add_return_relaxed		arch_atomic_add_return_relaxed
#define arch_atomic_add_return_acquire		arch_atomic_add_return_acquire
#define arch_atomic_add_return_release		arch_atomic_add_return_release
#define arch_atomic_add_return			arch_atomic_add_return

#define arch_atomic_sub_return_relaxed		arch_atomic_sub_return_relaxed
#define arch_atomic_sub_return_acquire		arch_atomic_sub_return_acquire
#define arch_atomic_sub_return_release		arch_atomic_sub_return_release
#define arch_atomic_sub_return			arch_atomic_sub_return

#define arch_atomic_fetch_add_relaxed		arch_atomic_fetch_add_relaxed
#define arch_atomic_fetch_add_acquire		arch_atomic_fetch_add_acquire
#define arch_atomic_fetch_add_release		arch_atomic_fetch_add_release
#define arch_atomic_fetch_add			arch_atomic_fetch_add

#define arch_atomic_fetch_sub_relaxed		arch_atomic_fetch_sub_relaxed
#define arch_atomic_fetch_sub_acquire		arch_atomic_fetch_sub_acquire
#define arch_atomic_fetch_sub_release		arch_atomic_fetch_sub_release
#define arch_atomic_fetch_sub			arch_atomic_fetch_sub

#define arch_atomic_fetch_and_relaxed		arch_atomic_fetch_and_relaxed
#define arch_atomic_fetch_and_acquire		arch_atomic_fetch_and_acquire
#define arch_atomic_fetch_and_release		arch_atomic_fetch_and_release
#define arch_atomic_fetch_and			arch_atomic_fetch_and

#define arch_atomic_fetch_andnot_relaxed	arch_atomic_fetch_andnot_relaxed
#define arch_atomic_fetch_andnot_acquire	arch_atomic_fetch_andnot_acquire
#define arch_atomic_fetch_andnot_release	arch_atomic_fetch_andnot_release
#define arch_atomic_fetch_andnot		arch_atomic_fetch_andnot

#define arch_atomic_fetch_or_relaxed		arch_atomic_fetch_or_relaxed
#define arch_atomic_fetch_or_acquire		arch_atomic_fetch_or_acquire
#define arch_atomic_fetch_or_release		arch_atomic_fetch_or_release
#define arch_atomic_fetch_or			arch_atomic_fetch_or

#define arch_atomic_fetch_xor_relaxed		arch_atomic_fetch_xor_relaxed
#define arch_atomic_fetch_xor_acquire		arch_atomic_fetch_xor_acquire
#define arch_atomic_fetch_xor_release		arch_atomic_fetch_xor_release
#define arch_atomic_fetch_xor			arch_atomic_fetch_xor

#define arch_atomic_xchg_relaxed(v, new) \
	arch_xchg_relaxed(&((v)->counter), (new))
#define arch_atomic_xchg_acquire(v, new) \
	arch_xchg_acquire(&((v)->counter), (new))
#define arch_atomic_xchg_release(v, new) \
	arch_xchg_release(&((v)->counter), (new))
#define arch_atomic_xchg(v, new) \
	arch_xchg(&((v)->counter), (new))

#define arch_atomic_cmpxchg_relaxed(v, old, new) \
	arch_cmpxchg_relaxed(&((v)->counter), (old), (new))
#define arch_atomic_cmpxchg_acquire(v, old, new) \
	arch_cmpxchg_acquire(&((v)->counter), (old), (new))
#define arch_atomic_cmpxchg_release(v, old, new) \
	arch_cmpxchg_release(&((v)->counter), (old), (new))
#define arch_atomic_cmpxchg(v, old, new) \
	arch_cmpxchg(&((v)->counter), (old), (new))

#define arch_atomic_andnot			arch_atomic_andnot
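
/*
 * Added illustration: the arch_atomic_xchg*() and arch_atomic_cmpxchg*()
 * macros above forward to the arch_xchg*()/arch_cmpxchg*() helpers from
 * <asm/cmpxchg.h>, which dispatch on the size of the pointed-to counter;
 * this is why the 64-bit section below can reuse the same macros
 * unchanged for atomic64_t.
 */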

/*
 * 64-bit arch_atomic operations.
 */
#define ATOMIC64_INIT				ATOMIC_INIT
#define arch_atomic64_read			arch_atomic_read
#define arch_atomic64_set			arch_atomic_set

#define arch_atomic64_add_return_relaxed	arch_atomic64_add_return_relaxed
#define arch_atomic64_add_return_acquire	arch_atomic64_add_return_acquire
#define arch_atomic64_add_return_release	arch_atomic64_add_return_release
#define arch_atomic64_add_return		arch_atomic64_add_return

#define arch_atomic64_sub_return_relaxed	arch_atomic64_sub_return_relaxed
#define arch_atomic64_sub_return_acquire	arch_atomic64_sub_return_acquire
#define arch_atomic64_sub_return_release	arch_atomic64_sub_return_release
#define arch_atomic64_sub_return		arch_atomic64_sub_return

#define arch_atomic64_fetch_add_relaxed		arch_atomic64_fetch_add_relaxed
#define arch_atomic64_fetch_add_acquire		arch_atomic64_fetch_add_acquire
#define arch_atomic64_fetch_add_release		arch_atomic64_fetch_add_release
#define arch_atomic64_fetch_add			arch_atomic64_fetch_add

#define arch_atomic64_fetch_sub_relaxed		arch_atomic64_fetch_sub_relaxed
#define arch_atomic64_fetch_sub_acquire		arch_atomic64_fetch_sub_acquire
#define arch_atomic64_fetch_sub_release		arch_atomic64_fetch_sub_release
#define arch_atomic64_fetch_sub			arch_atomic64_fetch_sub

#define arch_atomic64_fetch_and_relaxed		arch_atomic64_fetch_and_relaxed
#define arch_atomic64_fetch_and_acquire		arch_atomic64_fetch_and_acquire
#define arch_atomic64_fetch_and_release		arch_atomic64_fetch_and_release
#define arch_atomic64_fetch_and			arch_atomic64_fetch_and

#define arch_atomic64_fetch_andnot_relaxed	arch_atomic64_fetch_andnot_relaxed
#define arch_atomic64_fetch_andnot_acquire	arch_atomic64_fetch_andnot_acquire
#define arch_atomic64_fetch_andnot_release	arch_atomic64_fetch_andnot_release
#define arch_atomic64_fetch_andnot		arch_atomic64_fetch_andnot

#define arch_atomic64_fetch_or_relaxed		arch_atomic64_fetch_or_relaxed
#define arch_atomic64_fetch_or_acquire		arch_atomic64_fetch_or_acquire
#define arch_atomic64_fetch_or_release		arch_atomic64_fetch_or_release
#define arch_atomic64_fetch_or			arch_atomic64_fetch_or

#define arch_atomic64_fetch_xor_relaxed		arch_atomic64_fetch_xor_relaxed
#define arch_atomic64_fetch_xor_acquire		arch_atomic64_fetch_xor_acquire
#define arch_atomic64_fetch_xor_release		arch_atomic64_fetch_xor_release
#define arch_atomic64_fetch_xor			arch_atomic64_fetch_xor

#define arch_atomic64_xchg_relaxed		arch_atomic_xchg_relaxed
#define arch_atomic64_xchg_acquire		arch_atomic_xchg_acquire
#define arch_atomic64_xchg_release		arch_atomic_xchg_release
#define arch_atomic64_xchg			arch_atomic_xchg

#define arch_atomic64_cmpxchg_relaxed		arch_atomic_cmpxchg_relaxed
#define arch_atomic64_cmpxchg_acquire		arch_atomic_cmpxchg_acquire
#define arch_atomic64_cmpxchg_release		arch_atomic_cmpxchg_release
#define arch_atomic64_cmpxchg			arch_atomic_cmpxchg

#define arch_atomic64_andnot			arch_atomic64_andnot

#define arch_atomic64_dec_if_positive		arch_atomic64_dec_if_positive

#define ARCH_ATOMIC

#endif /* __ASM_ATOMIC_H */