xref: /openbmc/linux/arch/sh/include/asm/atomic-llsc.h (revision c6470150dff9aff682063890c9b8eac71b695def)
1 f15cbe6fSPaul Mundt #ifndef __ASM_SH_ATOMIC_LLSC_H
2 f15cbe6fSPaul Mundt #define __ASM_SH_ATOMIC_LLSC_H
3 f15cbe6fSPaul Mundt 
4 f15cbe6fSPaul Mundt /*
5 f15cbe6fSPaul Mundt  * SH-4A note:
6 f15cbe6fSPaul Mundt  *
7 f15cbe6fSPaul Mundt  * We basically get atomic_xxx_return() for free compared with
8 f15cbe6fSPaul Mundt  * atomic_xxx(). movli.l/movco.l require r0 due to the instruction
9 f15cbe6fSPaul Mundt  * encoding, so the retval is automatically set without having to
10 f15cbe6fSPaul Mundt  * do any special work.
11 f15cbe6fSPaul Mundt  */
/*
 * To get proper branch prediction for the main line, we must branch
 * forward to code at the end of this object's .text section, then
 * branch back to restart the operation.
 *
 * NOTE(review): the LL/SC loops below actually just retry with a
 * backward "bf 1b" -- this note looks carried over from a non-LL/SC
 * variant; confirm before relying on it.
 */
17 f15cbe6fSPaul Mundt 
/*
 * ATOMIC_OP(op) - generate atomic_##op(i, v): v->counter = v->counter op i,
 * atomically, with no return value.
 *
 * movli.l/movco.l are the SH-4A load-linked/store-conditional pair:
 * movli.l loads v->counter and takes a reservation, movco.l stores the
 * updated value only if the reservation still holds and records the
 * outcome in the T flag; "bf 1b" (branch if T false) retries the whole
 * sequence until the conditional store succeeds.  The "=&z" constraint
 * pins the temporary to r0, which both instructions require by their
 * encoding (see the note at the top of this file); "&" marks it
 * early-clobber since it is written before the inputs are consumed.
 */
#define ATOMIC_OP(op)							\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	unsigned long tmp;						\
									\
	__asm__ __volatile__ (						\
"1:	movli.l @%2, %0		! atomic_" #op "\n"			\
"	" #op "	%1, %0				\n"			\
"	movco.l	%0, @%2				\n"			\
"	bf	1b				\n"			\
	: "=&z" (tmp)							\
	: "r" (i), "r" (&v->counter)					\
	: "t");	/* T flag is clobbered by movco.l/bf */			\
}
32 f15cbe6fSPaul Mundt 
/*
 * ATOMIC_OP_RETURN(op) - generate atomic_##op##_return(i, v): apply the
 * operation as above and return the NEW value of v->counter.
 *
 * Same movli.l/movco.l retry loop as ATOMIC_OP; because the result must
 * live in r0 ("z" constraint) anyway, the return value falls out for
 * free (see the note at the top of this file).  The trailing "synco"
 * is the SH-4A synchronization instruction -- presumably supplying the
 * ordering that value-returning atomics are expected to provide; it is
 * deliberately absent from the non-returning ATOMIC_OP variant.
 */
#define ATOMIC_OP_RETURN(op)						\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	unsigned long temp;						\
									\
	__asm__ __volatile__ (						\
"1:	movli.l @%2, %0		! atomic_" #op "_return	\n"		\
"	" #op "	%1, %0					\n"		\
"	movco.l	%0, @%2					\n"		\
"	bf	1b					\n"		\
"	synco						\n"		\
	: "=&z" (temp)							\
	: "r" (i), "r" (&v->counter)					\
	: "t");								\
									\
	return temp;	/* updated counter value */			\
}
50 f15cbe6fSPaul Mundt 
/*
 * Instantiate atomic_add()/atomic_add_return() and
 * atomic_sub()/atomic_sub_return(), then drop the generator macros so
 * they cannot leak into other translation units.
 */
#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op)

ATOMIC_OPS(add)
ATOMIC_OPS(sub)

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
59 *c6470150SPeter Zijlstra 
/*
 * atomic_clear_mask - atomically clear bits: v->counter &= ~mask.
 *
 * Same LL/SC retry loop as the ATOMIC_OP macros above.  Note the
 * complement is taken in C (~mask is passed as input operand %1) so the
 * asm body only needs a plain "and".
 */
static inline void atomic_clear_mask(unsigned int mask, atomic_t *v)
{
	unsigned long tmp;

	__asm__ __volatile__ (
"1:	movli.l @%2, %0		! atomic_clear_mask	\n"
"	and	%1, %0					\n"
"	movco.l	%0, @%2					\n"
"	bf	1b					\n"
	: "=&z" (tmp)
	: "r" (~mask), "r" (&v->counter)
	: "t");
}
73 f15cbe6fSPaul Mundt 
/*
 * atomic_set_mask - atomically set bits: v->counter |= mask.
 *
 * Counterpart of atomic_clear_mask(); identical LL/SC retry loop with
 * "or" instead of "and", and the mask passed through uninverted.
 */
static inline void atomic_set_mask(unsigned int mask, atomic_t *v)
{
	unsigned long tmp;

	__asm__ __volatile__ (
"1:	movli.l @%2, %0		! atomic_set_mask	\n"
"	or	%1, %0					\n"
"	movco.l	%0, @%2					\n"
"	bf	1b					\n"
	: "=&z" (tmp)
	: "r" (mask), "r" (&v->counter)
	: "t");
}
87 f15cbe6fSPaul Mundt 
88 f15cbe6fSPaul Mundt #endif /* __ASM_SH_ATOMIC_LLSC_H */
89