/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2014 Regents of the University of California
 */

#ifndef _ASM_RISCV_CMPXCHG_H
#define _ASM_RISCV_CMPXCHG_H

#include <linux/bug.h>

#include <asm/barrier.h>
#include <asm/fence.h>

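/*
 * The __xchg_*() macros implement atomic exchange with the AMO swap
 * instructions (amoswap.w / amoswap.d).  The _relaxed, _acquire,
 * _release and fully ordered flavours differ only in the ordering
 * annotations and fences emitted around the AMO; the relaxed variant
 * below implies no ordering at all.
 */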
#define __xchg_relaxed(ptr, new, size)					\
({									\
	__typeof__(ptr) __ptr = (ptr);					\
	__typeof__(new) __new = (new);					\
	__typeof__(*(ptr)) __ret;					\
	switch (size) {							\
	case 4:								\
		__asm__ __volatile__ (					\
			"	amoswap.w %0, %2, %1\n"			\
			: "=r" (__ret), "+A" (*__ptr)			\
			: "r" (__new)					\
			: "memory");					\
		break;							\
	case 8:								\
		__asm__ __volatile__ (					\
			"	amoswap.d %0, %2, %1\n"			\
			: "=r" (__ret), "+A" (*__ptr)			\
			: "r" (__new)					\
			: "memory");					\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
	__ret;								\
})

#define arch_xchg_relaxed(ptr, x)					\
({									\
	__typeof__(*(ptr)) _x_ = (x);					\
	(__typeof__(*(ptr))) __xchg_relaxed((ptr),			\
					    _x_, sizeof(*(ptr)));	\
})

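/*
 * Acquire variant: the swap itself is unordered, but the
 * RISCV_ACQUIRE_BARRIER that follows it prevents later memory accesses
 * from being reordered before the exchange.
 */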
#define __xchg_acquire(ptr, new, size)					\
({									\
	__typeof__(ptr) __ptr = (ptr);					\
	__typeof__(new) __new = (new);					\
	__typeof__(*(ptr)) __ret;					\
	switch (size) {							\
	case 4:								\
		__asm__ __volatile__ (					\
			"	amoswap.w %0, %2, %1\n"			\
			RISCV_ACQUIRE_BARRIER				\
			: "=r" (__ret), "+A" (*__ptr)			\
			: "r" (__new)					\
			: "memory");					\
		break;							\
	case 8:								\
		__asm__ __volatile__ (					\
			"	amoswap.d %0, %2, %1\n"			\
			RISCV_ACQUIRE_BARRIER				\
			: "=r" (__ret), "+A" (*__ptr)			\
			: "r" (__new)					\
			: "memory");					\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
	__ret;								\
})

#define arch_xchg_acquire(ptr, x)					\
({									\
	__typeof__(*(ptr)) _x_ = (x);					\
	(__typeof__(*(ptr))) __xchg_acquire((ptr),			\
					    _x_, sizeof(*(ptr)));	\
})

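/*
 * Release variant: the RISCV_RELEASE_BARRIER ahead of the swap ensures
 * that all earlier memory accesses are ordered before the new value
 * becomes visible.
 */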
#define __xchg_release(ptr, new, size)					\
({									\
	__typeof__(ptr) __ptr = (ptr);					\
	__typeof__(new) __new = (new);					\
	__typeof__(*(ptr)) __ret;					\
	switch (size) {							\
	case 4:								\
		__asm__ __volatile__ (					\
			RISCV_RELEASE_BARRIER				\
			"	amoswap.w %0, %2, %1\n"			\
			: "=r" (__ret), "+A" (*__ptr)			\
			: "r" (__new)					\
			: "memory");					\
		break;							\
	case 8:								\
		__asm__ __volatile__ (					\
			RISCV_RELEASE_BARRIER				\
			"	amoswap.d %0, %2, %1\n"			\
			: "=r" (__ret), "+A" (*__ptr)			\
			: "r" (__new)					\
			: "memory");					\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
	__ret;								\
})

#define arch_xchg_release(ptr, x)					\
({									\
	__typeof__(*(ptr)) _x_ = (x);					\
	(__typeof__(*(ptr))) __xchg_release((ptr),			\
					    _x_, sizeof(*(ptr)));	\
})

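/*
 * Fully ordered variant: the .aqrl annotation makes the AMO both an
 * acquire and a release operation, which is what the plain arch_xchg()
 * interface requires.
 */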
#define __arch_xchg(ptr, new, size)					\
({									\
	__typeof__(ptr) __ptr = (ptr);					\
	__typeof__(new) __new = (new);					\
	__typeof__(*(ptr)) __ret;					\
	switch (size) {							\
	case 4:								\
		__asm__ __volatile__ (					\
			"	amoswap.w.aqrl %0, %2, %1\n"		\
			: "=r" (__ret), "+A" (*__ptr)			\
			: "r" (__new)					\
			: "memory");					\
		break;							\
	case 8:								\
		__asm__ __volatile__ (					\
			"	amoswap.d.aqrl %0, %2, %1\n"		\
			: "=r" (__ret), "+A" (*__ptr)			\
			: "r" (__new)					\
			: "memory");					\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
	__ret;								\
})

#define arch_xchg(ptr, x)						\
({									\
	__typeof__(*(ptr)) _x_ = (x);					\
	(__typeof__(*(ptr))) __arch_xchg((ptr), _x_, sizeof(*(ptr)));	\
})

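/* Size-checked wrappers around the fully ordered arch_xchg(). */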
#define xchg32(ptr, x)							\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 4);				\
	arch_xchg((ptr), (x));						\
})

#define xchg64(ptr, x)							\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	arch_xchg((ptr), (x));						\
})

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */
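/*
 * Illustrative use (the names below are hypothetical, not part of this
 * header):
 *
 *	old = arch_cmpxchg(&lock_word, expected, new);
 *	if (old == expected)
 *		... the store took place ...
 *
 * The LR/SC loops below retry whenever the store-conditional fails
 * (i.e. the reservation was lost) and bail out without storing as soon
 * as the loaded value differs from the expected one.  In the 32-bit
 * cases the expected value is cast to (long) so that it is
 * sign-extended to match the sign-extended result of lr.w.
 */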
#define __cmpxchg_relaxed(ptr, old, new, size)				\
({									\
	__typeof__(ptr) __ptr = (ptr);					\
	__typeof__(*(ptr)) __old = (old);				\
	__typeof__(*(ptr)) __new = (new);				\
	__typeof__(*(ptr)) __ret;					\
	register unsigned int __rc;					\
	switch (size) {							\
	case 4:								\
		__asm__ __volatile__ (					\
			"0:	lr.w %0, %2\n"				\
			"	bne  %0, %z3, 1f\n"			\
			"	sc.w %1, %z4, %2\n"			\
			"	bnez %1, 0b\n"				\
			"1:\n"						\
			: "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr)	\
			: "rJ" ((long)__old), "rJ" (__new)		\
			: "memory");					\
		break;							\
	case 8:								\
		__asm__ __volatile__ (					\
			"0:	lr.d %0, %2\n"				\
			"	bne %0, %z3, 1f\n"			\
			"	sc.d %1, %z4, %2\n"			\
			"	bnez %1, 0b\n"				\
			"1:\n"						\
			: "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr)	\
			: "rJ" (__old), "rJ" (__new)			\
			: "memory");					\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
	__ret;								\
})

#define arch_cmpxchg_relaxed(ptr, o, n)					\
({									\
	__typeof__(*(ptr)) _o_ = (o);					\
	__typeof__(*(ptr)) _n_ = (n);					\
	(__typeof__(*(ptr))) __cmpxchg_relaxed((ptr),			\
					_o_, _n_, sizeof(*(ptr)));	\
})

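/*
 * Acquire variant: the same LR/SC loop as above, with a
 * RISCV_ACQUIRE_BARRIER executed on the success path after the
 * store-conditional; a failed comparison branches past the barrier.
 */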
#define __cmpxchg_acquire(ptr, old, new, size)				\
({									\
	__typeof__(ptr) __ptr = (ptr);					\
	__typeof__(*(ptr)) __old = (old);				\
	__typeof__(*(ptr)) __new = (new);				\
	__typeof__(*(ptr)) __ret;					\
	register unsigned int __rc;					\
	switch (size) {							\
	case 4:								\
		__asm__ __volatile__ (					\
			"0:	lr.w %0, %2\n"				\
			"	bne  %0, %z3, 1f\n"			\
			"	sc.w %1, %z4, %2\n"			\
			"	bnez %1, 0b\n"				\
			RISCV_ACQUIRE_BARRIER				\
			"1:\n"						\
			: "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr)	\
			: "rJ" ((long)__old), "rJ" (__new)		\
			: "memory");					\
		break;							\
	case 8:								\
		__asm__ __volatile__ (					\
			"0:	lr.d %0, %2\n"				\
			"	bne %0, %z3, 1f\n"			\
			"	sc.d %1, %z4, %2\n"			\
			"	bnez %1, 0b\n"				\
			RISCV_ACQUIRE_BARRIER				\
			"1:\n"						\
			: "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr)	\
			: "rJ" (__old), "rJ" (__new)			\
			: "memory");					\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
	__ret;								\
})

#define arch_cmpxchg_acquire(ptr, o, n)					\
({									\
	__typeof__(*(ptr)) _o_ = (o);					\
	__typeof__(*(ptr)) _n_ = (n);					\
	(__typeof__(*(ptr))) __cmpxchg_acquire((ptr),			\
					_o_, _n_, sizeof(*(ptr)));	\
})

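/*
 * Release variant: a RISCV_RELEASE_BARRIER in front of the LR/SC loop
 * orders all earlier memory accesses before the update.
 */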
#define __cmpxchg_release(ptr, old, new, size)				\
({									\
	__typeof__(ptr) __ptr = (ptr);					\
	__typeof__(*(ptr)) __old = (old);				\
	__typeof__(*(ptr)) __new = (new);				\
	__typeof__(*(ptr)) __ret;					\
	register unsigned int __rc;					\
	switch (size) {							\
	case 4:								\
		__asm__ __volatile__ (					\
			RISCV_RELEASE_BARRIER				\
			"0:	lr.w %0, %2\n"				\
			"	bne  %0, %z3, 1f\n"			\
			"	sc.w %1, %z4, %2\n"			\
			"	bnez %1, 0b\n"				\
			"1:\n"						\
			: "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr)	\
			: "rJ" ((long)__old), "rJ" (__new)		\
			: "memory");					\
		break;							\
	case 8:								\
		__asm__ __volatile__ (					\
			RISCV_RELEASE_BARRIER				\
			"0:	lr.d %0, %2\n"				\
			"	bne %0, %z3, 1f\n"			\
			"	sc.d %1, %z4, %2\n"			\
			"	bnez %1, 0b\n"				\
			"1:\n"						\
			: "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr)	\
			: "rJ" (__old), "rJ" (__new)			\
			: "memory");					\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
	__ret;								\
})

#define arch_cmpxchg_release(ptr, o, n)					\
({									\
	__typeof__(*(ptr)) _o_ = (o);					\
	__typeof__(*(ptr)) _n_ = (n);					\
	(__typeof__(*(ptr))) __cmpxchg_release((ptr),			\
					_o_, _n_, sizeof(*(ptr)));	\
})

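/*
 * Fully ordered variant: the store-conditional carries a .rl
 * annotation and a trailing "fence rw, rw" runs on the success path,
 * giving the full ordering expected from arch_cmpxchg(); a failed
 * comparison branches past the fence.
 */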
#define __cmpxchg(ptr, old, new, size)					\
({									\
	__typeof__(ptr) __ptr = (ptr);					\
	__typeof__(*(ptr)) __old = (old);				\
	__typeof__(*(ptr)) __new = (new);				\
	__typeof__(*(ptr)) __ret;					\
	register unsigned int __rc;					\
	switch (size) {							\
	case 4:								\
		__asm__ __volatile__ (					\
			"0:	lr.w %0, %2\n"				\
			"	bne  %0, %z3, 1f\n"			\
			"	sc.w.rl %1, %z4, %2\n"			\
			"	bnez %1, 0b\n"				\
			"	fence rw, rw\n"				\
			"1:\n"						\
			: "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr)	\
			: "rJ" ((long)__old), "rJ" (__new)		\
			: "memory");					\
		break;							\
	case 8:								\
		__asm__ __volatile__ (					\
			"0:	lr.d %0, %2\n"				\
			"	bne %0, %z3, 1f\n"			\
			"	sc.d.rl %1, %z4, %2\n"			\
			"	bnez %1, 0b\n"				\
			"	fence rw, rw\n"				\
			"1:\n"						\
			: "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr)	\
			: "rJ" (__old), "rJ" (__new)			\
			: "memory");					\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
	__ret;								\
})

#define arch_cmpxchg(ptr, o, n)						\
({									\
	__typeof__(*(ptr)) _o_ = (o);					\
	__typeof__(*(ptr)) _n_ = (n);					\
	(__typeof__(*(ptr))) __cmpxchg((ptr),				\
				       _o_, _n_, sizeof(*(ptr)));	\
})

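/*
 * arch_cmpxchg_local() and arch_cmpxchg64_local() only need to be
 * atomic with respect to the local CPU, so the relaxed implementation
 * is sufficient.  arch_cmpxchg64() insists on a 64-bit operand and
 * keeps the fully ordered semantics.
 */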
#define arch_cmpxchg_local(ptr, o, n)					\
	(__cmpxchg_relaxed((ptr), (o), (n), sizeof(*(ptr))))

#define arch_cmpxchg64(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	arch_cmpxchg((ptr), (o), (n));					\
})

#define arch_cmpxchg64_local(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	arch_cmpxchg_relaxed((ptr), (o), (n));				\
})

#endif /* _ASM_RISCV_CMPXCHG_H */