#ifndef __ASM_SH_ATOMIC_LLSC_H
#define __ASM_SH_ATOMIC_LLSC_H

/*
 * SH-4A note:
 *
 * We basically get atomic_xxx_return() for free compared with
 * atomic_xxx(). movli.l/movco.l require r0 due to the instruction
 * encoding, so the retval is automatically set without having to
 * do any special work.
 */
/*
 * To get proper branch prediction for the main line, we must branch
 * forward to code at the end of this object's .text section, then
 * branch back to restart the operation.
 */

#define ATOMIC_OP(op)							\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	unsigned long tmp;						\
									\
	__asm__ __volatile__ (						\
"1:	movli.l @%2, %0		! atomic_" #op "\n"			\
"	" #op "	%1, %0				\n"			\
"	movco.l	%0, @%2				\n"			\
"	bf	1b				\n"			\
	: "=&z" (tmp)							\
	: "r" (i), "r" (&v->counter)					\
	: "t");								\
}
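
/*
 * For illustration only (not part of the build): ATOMIC_OP(add)
 * above expands to roughly
 *
 *	static inline void atomic_add(int i, atomic_t *v)
 *	{
 *		unsigned long tmp;
 *
 *		__asm__ __volatile__ (
 *	"1:	movli.l @%2, %0		! atomic_add\n"
 *	"	add	%1, %0				\n"
 *	"	movco.l	%0, @%2				\n"
 *	"	bf	1b				\n"
 *		: "=&z" (tmp)
 *		: "r" (i), "r" (&v->counter)
 *		: "t");
 *	}
 *
 * movli.l loads v->counter into r0 (the "z" constraint), movco.l
 * attempts the paired store, and on failure the T bit ("t" clobber)
 * comes back clear, so "bf 1b" retries the whole sequence.
 */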

#define ATOMIC_OP_RETURN(op)						\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	unsigned long temp;						\
									\
	__asm__ __volatile__ (						\
"1:	movli.l @%2, %0		! atomic_" #op "_return	\n"		\
"	" #op "	%1, %0					\n"		\
"	movco.l	%0, @%2					\n"		\
"	bf	1b					\n"		\
"	synco						\n"		\
	: "=&z" (temp)							\
	: "r" (i), "r" (&v->counter)					\
	: "t");								\
									\
	return temp;							\
}
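
/*
 * Behaviour sketch for the _return form (values purely illustrative):
 * the op is applied to r0 before movco.l, so the *new* counter value
 * is returned, and "synco" supplies the full barrier expected of
 * value-returning atomics:
 *
 *	atomic_t v = ATOMIC_INIT(1);
 *	int n = atomic_add_return(2, &v);
 *
 * leaves v at 3 and returns n == 3.
 */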

#define ATOMIC_FETCH_OP(op)						\
static inline int atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	unsigned long res, temp;					\
									\
	__asm__ __volatile__ (						\
"1:	movli.l @%3, %0		! atomic_fetch_" #op "	\n"		\
"	mov %0, %1					\n"		\
"	" #op "	%2, %0					\n"		\
"	movco.l	%0, @%3					\n"		\
"	bf	1b					\n"		\
"	synco						\n"		\
	: "=&z" (temp), "=&r" (res)					\
	: "r" (i), "r" (&v->counter)					\
	: "t");								\
									\
	return res;							\
}
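
/*
 * The fetch_##op forms return the *old* value instead. Note that the
 * old value has to be parked in a general register ("=&r" (res)): the
 * "z" constraint only ever describes r0, which %0 (temp) already
 * occupies. A behaviour sketch (values purely illustrative):
 *
 *	atomic_t v = ATOMIC_INIT(1);
 *	int old = atomic_fetch_add(2, &v);
 *
 * leaves v at 3 and returns old == 1.
 */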

#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op) ATOMIC_FETCH_OP(op)

ATOMIC_OPS(add)
ATOMIC_OPS(sub)

#undef ATOMIC_OPS
#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_FETCH_OP(op)

ATOMIC_OPS(and)
ATOMIC_OPS(or)
ATOMIC_OPS(xor)
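
/*
 * Usage sketch for the bitwise forms (FLAG_BUSY, FLAG_DIRTY and
 * "state" are hypothetical names, not provided by this header):
 *
 *	static atomic_t state = ATOMIC_INIT(0);
 *
 *	atomic_or(FLAG_BUSY, &state);			! set a flag
 *	atomic_and(~FLAG_BUSY, &state);			! clear it again
 *	old = atomic_fetch_or(FLAG_DIRTY, &state);	! old flags in "old"
 */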

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

#endif /* __ASM_SH_ATOMIC_LLSC_H */