xref: /openbmc/linux/arch/sh/include/asm/cmpxchg-llsc.h (revision ee43a844)
#ifndef __ASM_SH_CMPXCHG_LLSC_H
#define __ASM_SH_CMPXCHG_LLSC_H

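/*
 * Atomically exchange the 32-bit value at @m with @val using the
 * SH-4A LL/SC pair: movli.l load-links the word, movco.l attempts the
 * conditional store, and bf loops back to retry if the reservation was
 * lost (T clear).  synco orders the access against later memory
 * operations.  Returns the previous value of *@m.
 */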
static inline unsigned long xchg_u32(volatile u32 *m, unsigned long val)
{
	unsigned long retval;
	unsigned long tmp;

	__asm__ __volatile__ (
		"1:					\n\t"
		"movli.l	@%1, %0	! xchg_u32	\n\t"
		"mov		%0, %2			\n\t"
		"mov		%4, %0			\n\t"
		"movco.l	%0, @%1			\n\t"
		"bf		1b			\n\t"
		"synco					\n\t"
		: "=&z"(tmp), "=r" (m), "=&r" (retval)
		: "1" (m), "r" (val)
		: "t", "memory"
	);

	return retval;
}

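/*
 * Byte exchange built on the same 32-bit LL/SC sequence: @val is
 * masked to 8 bits and stored to the word at @m (the location is
 * accessed as a full 32-bit word), and the previous word contents
 * are returned.
 */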
static inline unsigned long xchg_u8(volatile u8 *m, unsigned long val)
{
	unsigned long retval;
	unsigned long tmp;

	__asm__ __volatile__ (
		"1:					\n\t"
		"movli.l	@%1, %0	! xchg_u8	\n\t"
		"mov		%0, %2			\n\t"
		"mov		%4, %0			\n\t"
		"movco.l	%0, @%1			\n\t"
		"bf		1b			\n\t"
		"synco					\n\t"
		: "=&z"(tmp), "=r" (m), "=&r" (retval)
		: "1" (m), "r" (val & 0xff)
		: "t", "memory"
	);

	return retval;
}

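/*
 * Compare-and-exchange on the 32-bit value at @m: the word is
 * load-linked and compared against @old; @new is stored only on a
 * match, otherwise the observed value is written back so the
 * store-conditional completes either way.  The loop retries until
 * movco.l succeeds.  Returns the value observed at @m.
 */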
static inline unsigned long
__cmpxchg_u32(volatile int *m, unsigned long old, unsigned long new)
{
	unsigned long retval;
	unsigned long tmp;

	__asm__ __volatile__ (
		"1:						\n\t"
		"movli.l	@%1, %0	! __cmpxchg_u32		\n\t"
		"mov		%0, %2				\n\t"
		"cmp/eq		%2, %4				\n\t"
		"bf		2f				\n\t"
		"mov		%5, %0				\n\t"
		"2:						\n\t"
		"movco.l	%0, @%1				\n\t"
		"bf		1b				\n\t"
		"synco						\n\t"
		: "=&z" (tmp), "=r" (m), "=&r" (retval)
		: "1" (m), "r" (old), "r" (new)
		: "t", "memory"
	);

	return retval;
}

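/*
 * Illustrative (hypothetical) caller, assuming the arch cmpxchg()
 * wrapper dispatches 4-byte operands to __cmpxchg_u32():
 *
 *	unsigned long seen = __cmpxchg_u32(&owner, 0, new_owner);
 *	if (seen == 0)
 *		... the compare-and-exchange succeeded ...
 */
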
#endif /* __ASM_SH_CMPXCHG_LLSC_H */