xref: /openbmc/linux/arch/xtensa/include/asm/atomic.h (revision 7c7084f3)
/*
 * include/asm-xtensa/atomic.h
 *
 * Atomic operations that C can't guarantee us.  Useful for resource counting.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2001 - 2008 Tensilica Inc.
 */

#ifndef _XTENSA_ATOMIC_H
#define _XTENSA_ATOMIC_H

#include <linux/stringify.h>
#include <linux/types.h>
#include <asm/processor.h>
#include <asm/cmpxchg.h>
#include <asm/barrier.h>

/*
 * This Xtensa implementation assumes that the right mechanism
 * for exclusion is locking interrupts to level EXCM_LEVEL.
 *
 * Locking interrupts looks like this:
 *
 *    rsil a14, TOPLEVEL
 *    <code>
 *    wsr  a14, PS
 *    rsync
 *
 * Note that a14 is used here because register allocation is done
 * by the compiler and is not guaranteed, and a window overflow
 * must not occur between the rsil and wsr instructions. By using
 * a14 in the rsil, the machine is guaranteed to be in a state
 * where no register reference will cause an overflow.
 */

/**
 * arch_atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define arch_atomic_read(v)		READ_ONCE((v)->counter)

/**
 * arch_atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define arch_atomic_set(v,i)		WRITE_ONCE((v)->counter, (i))
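
/*
 * Note: plain READ_ONCE()/WRITE_ONCE() suffice here because a naturally
 * aligned 32-bit access is a single l32i/s32i instruction on Xtensa and
 * is therefore indivisible with respect to other accesses of that word.
 */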

#if XCHAL_HAVE_EXCLUSIVE
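/*
 * Exclusive-access variant: l32ex loads the word and arms the exclusive
 * monitor, s32ex performs the store only if the monitor is still armed,
 * and getex moves the store result into the register (nonzero on
 * success) so that beqz can retry the sequence after a lost race.
 */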
#define ATOMIC_OP(op)							\
static inline void arch_atomic_##op(int i, atomic_t *v)			\
{									\
	unsigned long tmp;						\
	int result;							\
									\
	__asm__ __volatile__(						\
			"1:     l32ex   %[tmp], %[addr]\n"		\
			"       " #op " %[result], %[tmp], %[i]\n"	\
			"       s32ex   %[result], %[addr]\n"		\
			"       getex   %[result]\n"			\
			"       beqz    %[result], 1b\n"		\
			: [result] "=&a" (result), [tmp] "=&a" (tmp)	\
			: [i] "a" (i), [addr] "a" (v)			\
			: "memory"					\
			);						\
}									\

#define ATOMIC_OP_RETURN(op)						\
static inline int arch_atomic_##op##_return(int i, atomic_t *v)		\
{									\
	unsigned long tmp;						\
	int result;							\
									\
	__asm__ __volatile__(						\
			"1:     l32ex   %[tmp], %[addr]\n"		\
			"       " #op " %[result], %[tmp], %[i]\n"	\
			"       s32ex   %[result], %[addr]\n"		\
			"       getex   %[result]\n"			\
			"       beqz    %[result], 1b\n"		\
			"       " #op " %[result], %[tmp], %[i]\n"	\
			: [result] "=&a" (result), [tmp] "=&a" (tmp)	\
			: [i] "a" (i), [addr] "a" (v)			\
			: "memory"					\
			);						\
									\
	return result;							\
}

#define ATOMIC_FETCH_OP(op)						\
static inline int arch_atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	unsigned long tmp;						\
	int result;							\
									\
	__asm__ __volatile__(						\
			"1:     l32ex   %[tmp], %[addr]\n"		\
			"       " #op " %[result], %[tmp], %[i]\n"	\
			"       s32ex   %[result], %[addr]\n"		\
			"       getex   %[result]\n"			\
			"       beqz    %[result], 1b\n"		\
			: [result] "=&a" (result), [tmp] "=&a" (tmp)	\
			: [i] "a" (i), [addr] "a" (v)			\
			: "memory"					\
			);						\
									\
	return tmp;							\
}

#elif XCHAL_HAVE_S32C1I
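/*
 * Compare-and-swap variant: the value expected in memory is placed in
 * the SCOMPARE1 special register; s32c1i stores the new value only if
 * memory still matches SCOMPARE1 and always returns the value found in
 * memory, so bne detects interference and retries.
 */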
#define ATOMIC_OP(op)							\
static inline void arch_atomic_##op(int i, atomic_t * v)		\
{									\
	unsigned long tmp;						\
	int result;							\
									\
	__asm__ __volatile__(						\
			"1:     l32i    %[tmp], %[mem]\n"		\
			"       wsr     %[tmp], scompare1\n"		\
			"       " #op " %[result], %[tmp], %[i]\n"	\
			"       s32c1i  %[result], %[mem]\n"		\
			"       bne     %[result], %[tmp], 1b\n"	\
			: [result] "=&a" (result), [tmp] "=&a" (tmp),	\
			  [mem] "+m" (*v)				\
			: [i] "a" (i)					\
			: "memory"					\
			);						\
}									\

#define ATOMIC_OP_RETURN(op)						\
static inline int arch_atomic_##op##_return(int i, atomic_t * v)	\
{									\
	unsigned long tmp;						\
	int result;							\
									\
	__asm__ __volatile__(						\
			"1:     l32i    %[tmp], %[mem]\n"		\
			"       wsr     %[tmp], scompare1\n"		\
			"       " #op " %[result], %[tmp], %[i]\n"	\
			"       s32c1i  %[result], %[mem]\n"		\
			"       bne     %[result], %[tmp], 1b\n"	\
			"       " #op " %[result], %[result], %[i]\n"	\
			: [result] "=&a" (result), [tmp] "=&a" (tmp),	\
			  [mem] "+m" (*v)				\
			: [i] "a" (i)					\
			: "memory"					\
			);						\
									\
	return result;							\
}

#define ATOMIC_FETCH_OP(op)						\
static inline int arch_atomic_fetch_##op(int i, atomic_t * v)		\
{									\
	unsigned long tmp;						\
	int result;							\
									\
	__asm__ __volatile__(						\
			"1:     l32i    %[tmp], %[mem]\n"		\
			"       wsr     %[tmp], scompare1\n"		\
			"       " #op " %[result], %[tmp], %[i]\n"	\
			"       s32c1i  %[result], %[mem]\n"		\
			"       bne     %[result], %[tmp], 1b\n"	\
			: [result] "=&a" (result), [tmp] "=&a" (tmp),	\
			  [mem] "+m" (*v)				\
			: [i] "a" (i)					\
			: "memory"					\
			);						\
									\
	return result;							\
}

#else /* XCHAL_HAVE_S32C1I */

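/*
 * Fallback variant for cores with neither exclusive access nor s32c1i:
 * rsil raises the interrupt level around a plain load/op/store and wsr
 * restores PS afterwards. This excludes only interrupts on the local
 * core, which is sufficient for uniprocessor configurations.
 */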
#define ATOMIC_OP(op)							\
static inline void arch_atomic_##op(int i, atomic_t * v)		\
{									\
	unsigned int vval;						\
									\
	__asm__ __volatile__(						\
			"       rsil    a14, "__stringify(TOPLEVEL)"\n"	\
			"       l32i    %[result], %[mem]\n"		\
			"       " #op " %[result], %[result], %[i]\n"	\
			"       s32i    %[result], %[mem]\n"		\
			"       wsr     a14, ps\n"			\
			"       rsync\n"				\
			: [result] "=&a" (vval), [mem] "+m" (*v)	\
			: [i] "a" (i)					\
			: "a14", "memory"				\
			);						\
}									\

#define ATOMIC_OP_RETURN(op)						\
static inline int arch_atomic_##op##_return(int i, atomic_t * v)	\
{									\
	unsigned int vval;						\
									\
	__asm__ __volatile__(						\
			"       rsil    a14,"__stringify(TOPLEVEL)"\n"	\
			"       l32i    %[result], %[mem]\n"		\
			"       " #op " %[result], %[result], %[i]\n"	\
			"       s32i    %[result], %[mem]\n"		\
			"       wsr     a14, ps\n"			\
			"       rsync\n"				\
			: [result] "=&a" (vval), [mem] "+m" (*v)	\
			: [i] "a" (i)					\
			: "a14", "memory"				\
			);						\
									\
	return vval;							\
}

#define ATOMIC_FETCH_OP(op)						\
static inline int arch_atomic_fetch_##op(int i, atomic_t * v)		\
{									\
	unsigned int tmp, vval;						\
									\
	__asm__ __volatile__(						\
			"       rsil    a14,"__stringify(TOPLEVEL)"\n"	\
			"       l32i    %[result], %[mem]\n"		\
			"       " #op " %[tmp], %[result], %[i]\n"	\
			"       s32i    %[tmp], %[mem]\n"		\
			"       wsr     a14, ps\n"			\
			"       rsync\n"				\
			: [result] "=&a" (vval), [tmp] "=&a" (tmp),	\
			  [mem] "+m" (*v)				\
			: [i] "a" (i)					\
			: "a14", "memory"				\
			);						\
									\
	return vval;							\
}

#endif /* XCHAL_HAVE_S32C1I */

#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_FETCH_OP(op) ATOMIC_OP_RETURN(op)

ATOMIC_OPS(add)
ATOMIC_OPS(sub)

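/*
 * Defining each operation as itself tells the generic fallback layer
 * (include/linux/atomic/atomic-arch-fallback.h) that this architecture
 * implements it, so no generic fallback is generated for it.
 */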
#define arch_atomic_add_return			arch_atomic_add_return
#define arch_atomic_sub_return			arch_atomic_sub_return
#define arch_atomic_fetch_add			arch_atomic_fetch_add
#define arch_atomic_fetch_sub			arch_atomic_fetch_sub

#undef ATOMIC_OPS
#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_FETCH_OP(op)

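/*
 * The bitwise ops have no *_return form in the generic atomic API, so
 * only the void and fetch_ variants are instantiated for them.
 */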
ATOMIC_OPS(and)
ATOMIC_OPS(or)
ATOMIC_OPS(xor)

#define arch_atomic_fetch_and			arch_atomic_fetch_and
#define arch_atomic_fetch_or			arch_atomic_fetch_or
#define arch_atomic_fetch_xor			arch_atomic_fetch_xor
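
/*
 * Minimal usage sketch (hypothetical caller, not part of this header):
 * kernel code reaches these operations through the generic
 * <linux/atomic.h> wrappers, which drop the arch_ prefix and add
 * instrumentation:
 *
 *	atomic_t cnt = ATOMIC_INIT(0);
 *	atomic_add(3, &cnt);			// cnt is now 3
 *	int old = atomic_fetch_and(1, &cnt);	// old == 3, cnt is now 1
 */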

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

#endif /* _XTENSA_ATOMIC_H */