#ifndef __ASM_SH_ATOMIC_LLSC_H
#define __ASM_SH_ATOMIC_LLSC_H

/*
 * SH-4A note:
 *
 * We basically get atomic_xxx_return() for free compared with
 * atomic_xxx(). movli.l/movco.l require r0 due to the instruction
 * encoding, so the retval is automatically set without having to
 * do any special work.
 */
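/*
 * The "=&z" output constraint used below is GCC's SH machine
 * constraint for r0; that is what satisfies the movli.l/movco.l
 * encoding requirement without any explicit register shuffling.
 */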
/*
 * To get proper branch prediction for the main line, we must branch
 * forward to code at the end of this object's .text section, then
 * branch back to restart the operation.
 */

#define ATOMIC_OP(op)							\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	unsigned long tmp;						\
									\
	__asm__ __volatile__ (						\
"1:	movli.l @%2, %0		! atomic_" #op "\n"			\
"	" #op "	%1, %0				\n"			\
"	movco.l	%0, @%2				\n"			\
"	bf	1b				\n"			\
	: "=&z" (tmp)							\
	: "r" (i), "r" (&v->counter)					\
	: "t");								\
}
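
/*
 * For illustration (register numbers other than r0 are the compiler's
 * choice), ATOMIC_OP(add) defines an atomic_add() whose loop is:
 *
 *	1: movli.l @rN, r0	! load-linked from &v->counter
 *	   add     rM, r0	! rM holds i
 *	   movco.l r0, @rN	! store-conditional; sets T on success
 *	   bf      1b		! T clear: reservation lost, retry
 */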

#define ATOMIC_OP_RETURN(op)						\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	unsigned long temp;						\
									\
	__asm__ __volatile__ (						\
"1:	movli.l @%2, %0		! atomic_" #op "_return	\n"		\
"	" #op "	%1, %0					\n"		\
"	movco.l	%0, @%2					\n"		\
"	bf	1b					\n"		\
"	synco						\n"		\
	: "=&z" (temp)							\
	: "r" (i), "r" (&v->counter)					\
	: "t");								\
									\
	return temp;							\
}
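
/*
 * The trailing synco is the SH-4A barrier instruction; it supplies
 * the ordering expected of value-returning atomics. The result is the
 * post-op value, e.g. atomic_add_return(i, v) returns v->counter
 * after i has been added.
 */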

#define ATOMIC_FETCH_OP(op)						\
static inline int atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	unsigned long res, temp;					\
									\
	__asm__ __volatile__ (						\
"1:	movli.l @%3, %0		! atomic_fetch_" #op "	\n"		\
"	mov %0, %1					\n"		\
"	" #op "	%2, %0					\n"		\
"	movco.l	%0, @%3					\n"		\
"	bf	1b					\n"		\
"	synco						\n"		\
	: "=&z" (temp), "=&r" (res)					\
	: "r" (i), "r" (&v->counter)					\
	: "t");								\
									\
	return res;							\
}
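
/*
 * Fetch ops return the counter's value from *before* the operation:
 * "mov %0, %1" snapshots the freshly loaded value into res ahead of
 * the modify step. Both outputs are earlyclobber ("&") so the
 * compiler cannot allocate res on top of the i or &v->counter inputs,
 * which are still needed on every retry of the loop.
 */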

#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op) ATOMIC_FETCH_OP(op)

ATOMIC_OPS(add)
ATOMIC_OPS(sub)

#undef ATOMIC_OPS
#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_FETCH_OP(op)

ATOMIC_OPS(and)
ATOMIC_OPS(or)
ATOMIC_OPS(xor)
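
/*
 * Taken together, the instantiations above provide add/sub in all
 * three flavours and and/or/xor in the plain and fetch flavours; the
 * bitwise ops have no *_return form in the kernel's atomic API, so
 * callers that need a result use the fetch_* variants.
 */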

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

#endif /* __ASM_SH_ATOMIC_LLSC_H */