/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Based on arch/arm/include/asm/atomic.h
 *
 * Copyright (C) 1996 Russell King.
 * Copyright (C) 2002 Deep Blue Solutions Ltd.
 * Copyright (C) 2012 ARM Ltd.
 */

#ifndef __ASM_ATOMIC_LSE_H
#define __ASM_ATOMIC_LSE_H

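/*
 * Out-of-line LSE atomics: each operation maps onto one of the single-
 * instruction atomics introduced by the ARMv8.1 Large System Extensions
 * (STADD, LDADD, CAS, CASP, ...) instead of an LL/SC loop. The
 * arch_atomic_*() wrappers select between these and the LL/SC
 * implementations at runtime, depending on whether the CPU supports LSE.
 */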
#define ATOMIC_OP(op, asm_op)						\
static __always_inline void						\
__lse_atomic_##op(int i, atomic_t *v)					\
{									\
	asm volatile(							\
	__LSE_PREAMBLE							\
	"	" #asm_op "	%w[i], %[v]\n"				\
	: [v] "+Q" (v->counter)						\
	: [i] "r" (i));							\
}

ATOMIC_OP(andnot, stclr)
ATOMIC_OP(or, stset)
ATOMIC_OP(xor, steor)
ATOMIC_OP(add, stadd)

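/* LSE has no subtract instructions; subtract by adding the negation. */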
static __always_inline void __lse_atomic_sub(int i, atomic_t *v)
{
	__lse_atomic_add(-i, v);
}

#undef ATOMIC_OP
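
/*
 * The fetch_<op> forms return the value the counter held before the
 * operation. Each op is generated in four orderings, selected by the
 * suffix appended to the instruction mnemonic: "" (relaxed), "a"
 * (acquire), "l" (release) and "al" (fully ordered), e.g. ldadd,
 * ldadda, ldaddl and ldaddal for fetch_add. The ordered forms also
 * clobber "memory" so the compiler does not reorder accesses around
 * them.
 */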

#define ATOMIC_FETCH_OP(name, mb, op, asm_op, cl...)			\
static __always_inline int						\
__lse_atomic_fetch_##op##name(int i, atomic_t *v)			\
{									\
	int old;							\
									\
	asm volatile(							\
	__LSE_PREAMBLE							\
	"	" #asm_op #mb "	%w[i], %w[old], %[v]"			\
	: [v] "+Q" (v->counter),					\
	  [old] "=r" (old)						\
	: [i] "r" (i)							\
	: cl);								\
									\
	return old;							\
}

#define ATOMIC_FETCH_OPS(op, asm_op)					\
	ATOMIC_FETCH_OP(_relaxed,   , op, asm_op)			\
	ATOMIC_FETCH_OP(_acquire,  a, op, asm_op, "memory")		\
	ATOMIC_FETCH_OP(_release,  l, op, asm_op, "memory")		\
	ATOMIC_FETCH_OP(        , al, op, asm_op, "memory")

ATOMIC_FETCH_OPS(andnot, ldclr)
ATOMIC_FETCH_OPS(or, ldset)
ATOMIC_FETCH_OPS(xor, ldeor)
ATOMIC_FETCH_OPS(add, ldadd)

#undef ATOMIC_FETCH_OP
#undef ATOMIC_FETCH_OPS

#define ATOMIC_FETCH_OP_SUB(name)					\
static __always_inline int						\
__lse_atomic_fetch_sub##name(int i, atomic_t *v)			\
{									\
	return __lse_atomic_fetch_add##name(-i, v);			\
}

ATOMIC_FETCH_OP_SUB(_relaxed)
ATOMIC_FETCH_OP_SUB(_acquire)
ATOMIC_FETCH_OP_SUB(_release)
ATOMIC_FETCH_OP_SUB(        )

#undef ATOMIC_FETCH_OP_SUB

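/*
 * add_return/sub_return are built on the fetch forms: re-apply the
 * operand to the returned old value to obtain the new one.
 */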
#define ATOMIC_OP_ADD_SUB_RETURN(name)					\
static __always_inline int						\
__lse_atomic_add_return##name(int i, atomic_t *v)			\
{									\
	return __lse_atomic_fetch_add##name(i, v) + i;			\
}									\
									\
static __always_inline int						\
__lse_atomic_sub_return##name(int i, atomic_t *v)			\
{									\
	return __lse_atomic_fetch_sub##name(i, v) - i;			\
}

ATOMIC_OP_ADD_SUB_RETURN(_relaxed)
ATOMIC_OP_ADD_SUB_RETURN(_acquire)
ATOMIC_OP_ADD_SUB_RETURN(_release)
ATOMIC_OP_ADD_SUB_RETURN(        )

#undef ATOMIC_OP_ADD_SUB_RETURN

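/*
 * LSE has no atomic AND instruction: LDCLR/STCLR clear the bits that
 * are set in the operand (i.e. andnot), so and is expressed as andnot
 * of the complemented operand: v & i == v & ~(~i).
 */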
static __always_inline void __lse_atomic_and(int i, atomic_t *v)
{
	return __lse_atomic_andnot(~i, v);
}

#define ATOMIC_FETCH_OP_AND(name, mb, cl...)				\
static __always_inline int						\
__lse_atomic_fetch_and##name(int i, atomic_t *v)			\
{									\
	return __lse_atomic_fetch_andnot##name(~i, v);			\
}

ATOMIC_FETCH_OP_AND(_relaxed,   )
ATOMIC_FETCH_OP_AND(_acquire,  a, "memory")
ATOMIC_FETCH_OP_AND(_release,  l, "memory")
ATOMIC_FETCH_OP_AND(        , al, "memory")

#undef ATOMIC_FETCH_OP_AND

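/*
 * The atomic64_t operations below mirror the 32-bit ones exactly, but
 * operate on the full 64-bit X registers (no %w operand modifier).
 */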
#define ATOMIC64_OP(op, asm_op)						\
static __always_inline void						\
__lse_atomic64_##op(s64 i, atomic64_t *v)				\
{									\
	asm volatile(							\
	__LSE_PREAMBLE							\
	"	" #asm_op "	%[i], %[v]\n"				\
	: [v] "+Q" (v->counter)						\
	: [i] "r" (i));							\
}

ATOMIC64_OP(andnot, stclr)
ATOMIC64_OP(or, stset)
ATOMIC64_OP(xor, steor)
ATOMIC64_OP(add, stadd)

static __always_inline void __lse_atomic64_sub(s64 i, atomic64_t *v)
{
	__lse_atomic64_add(-i, v);
}

#undef ATOMIC64_OP

#define ATOMIC64_FETCH_OP(name, mb, op, asm_op, cl...)			\
static __always_inline long						\
__lse_atomic64_fetch_##op##name(s64 i, atomic64_t *v)			\
{									\
	s64 old;							\
									\
	asm volatile(							\
	__LSE_PREAMBLE							\
	"	" #asm_op #mb "	%[i], %[old], %[v]"			\
	: [v] "+Q" (v->counter),					\
	  [old] "=r" (old)						\
	: [i] "r" (i)							\
	: cl);								\
									\
	return old;							\
}

#define ATOMIC64_FETCH_OPS(op, asm_op)					\
	ATOMIC64_FETCH_OP(_relaxed,   , op, asm_op)			\
	ATOMIC64_FETCH_OP(_acquire,  a, op, asm_op, "memory")		\
	ATOMIC64_FETCH_OP(_release,  l, op, asm_op, "memory")		\
	ATOMIC64_FETCH_OP(        , al, op, asm_op, "memory")

ATOMIC64_FETCH_OPS(andnot, ldclr)
ATOMIC64_FETCH_OPS(or, ldset)
ATOMIC64_FETCH_OPS(xor, ldeor)
ATOMIC64_FETCH_OPS(add, ldadd)

#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_FETCH_OPS

#define ATOMIC64_FETCH_OP_SUB(name)					\
static __always_inline long						\
__lse_atomic64_fetch_sub##name(s64 i, atomic64_t *v)			\
{									\
	return __lse_atomic64_fetch_add##name(-i, v);			\
}

ATOMIC64_FETCH_OP_SUB(_relaxed)
ATOMIC64_FETCH_OP_SUB(_acquire)
ATOMIC64_FETCH_OP_SUB(_release)
ATOMIC64_FETCH_OP_SUB(        )

#undef ATOMIC64_FETCH_OP_SUB

#define ATOMIC64_OP_ADD_SUB_RETURN(name)				\
static __always_inline long						\
__lse_atomic64_add_return##name(s64 i, atomic64_t *v)			\
{									\
	return __lse_atomic64_fetch_add##name(i, v) + i;		\
}									\
									\
static __always_inline long						\
__lse_atomic64_sub_return##name(s64 i, atomic64_t *v)			\
{									\
	return __lse_atomic64_fetch_sub##name(i, v) - i;		\
}

ATOMIC64_OP_ADD_SUB_RETURN(_relaxed)
ATOMIC64_OP_ADD_SUB_RETURN(_acquire)
ATOMIC64_OP_ADD_SUB_RETURN(_release)
ATOMIC64_OP_ADD_SUB_RETURN(        )

#undef ATOMIC64_OP_ADD_SUB_RETURN

static __always_inline void __lse_atomic64_and(s64 i, atomic64_t *v)
{
	return __lse_atomic64_andnot(~i, v);
}

#define ATOMIC64_FETCH_OP_AND(name, mb, cl...)				\
static __always_inline long						\
__lse_atomic64_fetch_and##name(s64 i, atomic64_t *v)			\
{									\
	return __lse_atomic64_fetch_andnot##name(~i, v);		\
}

ATOMIC64_FETCH_OP_AND(_relaxed,   )
ATOMIC64_FETCH_OP_AND(_acquire,  a, "memory")
ATOMIC64_FETCH_OP_AND(_release,  l, "memory")
ATOMIC64_FETCH_OP_AND(        , al, "memory")

#undef ATOMIC64_FETCH_OP_AND

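/*
 * No LSE instruction implements a conditional decrement, so this is a
 * CAS loop: load the counter, bail out if the decrement would take it
 * negative, otherwise CASAL in the new value, retrying when the two sub
 * instructions show that another CPU changed the counter under us. The
 * [ret] operand deliberately reuses the register holding v, which is
 * why the result is returned as (long)v.
 */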
static __always_inline s64 __lse_atomic64_dec_if_positive(atomic64_t *v)
{
	unsigned long tmp;

	asm volatile(
	__LSE_PREAMBLE
	"1:	ldr	%x[tmp], %[v]\n"
	"	subs	%[ret], %x[tmp], #1\n"
	"	b.lt	2f\n"
	"	casal	%x[tmp], %[ret], %[v]\n"
	"	sub	%x[tmp], %x[tmp], #1\n"
	"	sub	%x[tmp], %x[tmp], %[ret]\n"
	"	cbnz	%x[tmp], 1b\n"
	"2:"
	: [ret] "+&r" (v), [v] "+Q" (v->counter), [tmp] "=&r" (tmp)
	:
	: "cc", "memory");

	return (long)v;
}

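/*
 * CAS compares the value at [v] with old and, if they are equal, stores
 * new; either way the value observed in memory is written back into the
 * old register, which is what the caller receives. The w/x and b/h
 * parameters select the register width and the access size, and the mb
 * suffix the memory ordering, as for the ops above.
 */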
#define __CMPXCHG_CASE(w, sfx, name, sz, mb, cl...)			\
static __always_inline u##sz						\
__lse__cmpxchg_case_##name##sz(volatile void *ptr,			\
					      u##sz old,		\
					      u##sz new)		\
{									\
	asm volatile(							\
	__LSE_PREAMBLE							\
	"	cas" #mb #sfx "	%" #w "[old], %" #w "[new], %[v]\n"	\
	: [v] "+Q" (*(u##sz *)ptr),					\
	  [old] "+r" (old)						\
	: [new] "rZ" (new)						\
	: cl);								\
									\
	return old;							\
}

__CMPXCHG_CASE(w, b,     ,  8,   )
__CMPXCHG_CASE(w, h,     , 16,   )
__CMPXCHG_CASE(w,  ,     , 32,   )
__CMPXCHG_CASE(x,  ,     , 64,   )
__CMPXCHG_CASE(w, b, acq_,  8,  a, "memory")
__CMPXCHG_CASE(w, h, acq_, 16,  a, "memory")
__CMPXCHG_CASE(w,  , acq_, 32,  a, "memory")
__CMPXCHG_CASE(x,  , acq_, 64,  a, "memory")
__CMPXCHG_CASE(w, b, rel_,  8,  l, "memory")
__CMPXCHG_CASE(w, h, rel_, 16,  l, "memory")
__CMPXCHG_CASE(w,  , rel_, 32,  l, "memory")
__CMPXCHG_CASE(x,  , rel_, 64,  l, "memory")
__CMPXCHG_CASE(w, b, mb_,   8, al, "memory")
__CMPXCHG_CASE(w, h, mb_,  16, al, "memory")
__CMPXCHG_CASE(w,  , mb_,  32, al, "memory")
__CMPXCHG_CASE(x,  , mb_,  64, al, "memory")

#undef __CMPXCHG_CASE

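/*
 * CASP operates on two pairs of consecutive registers (the compare
 * value and the new value), each pair starting at an even-numbered
 * register; the explicit x0-x4 register variables pin the operands so
 * that this constraint is met.
 */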
#define __CMPXCHG128(name, mb, cl...)					\
static __always_inline u128						\
__lse__cmpxchg128##name(volatile u128 *ptr, u128 old, u128 new)		\
{									\
	union __u128_halves r, o = { .full = (old) },			\
			       n = { .full = (new) };			\
	register unsigned long x0 asm ("x0") = o.low;			\
	register unsigned long x1 asm ("x1") = o.high;			\
	register unsigned long x2 asm ("x2") = n.low;			\
	register unsigned long x3 asm ("x3") = n.high;			\
	register unsigned long x4 asm ("x4") = (unsigned long)ptr;	\
									\
	asm volatile(							\
	__LSE_PREAMBLE							\
	"	casp" #mb "\t%[old1], %[old2], %[new1], %[new2], %[v]\n"\
	: [old1] "+&r" (x0), [old2] "+&r" (x1),				\
	  [v] "+Q" (*(u128 *)ptr)					\
	: [new1] "r" (x2), [new2] "r" (x3), [ptr] "r" (x4),		\
	  [oldval1] "r" (o.low), [oldval2] "r" (o.high)			\
	: cl);								\
									\
	r.low = x0; r.high = x1;					\
									\
	return r.full;							\
}

__CMPXCHG128(   ,   )
__CMPXCHG128(_mb, al, "memory")

#undef __CMPXCHG128

#endif	/* __ASM_ATOMIC_LSE_H */