/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ARCH_MIPS_LOCAL_H
#define _ARCH_MIPS_LOCAL_H

#include <linux/percpu.h>
#include <linux/bitops.h>
#include <linux/atomic.h>
#include <asm/cmpxchg.h>
#include <asm/compiler.h>
#include <asm/war.h>

typedef struct
{
	atomic_long_t a;
} local_t;

#define LOCAL_INIT(i)	{ ATOMIC_LONG_INIT(i) }

#define local_read(l)	atomic_long_read(&(l)->a)
#define local_set(l, i) atomic_long_set(&(l)->a, (i))

#define local_add(i, l) atomic_long_add((i), (&(l)->a))
#define local_sub(i, l) atomic_long_sub((i), (&(l)->a))
#define local_inc(l)	atomic_long_inc(&(l)->a)
#define local_dec(l)	atomic_long_dec(&(l)->a)

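/*
 * Usage sketch (illustrative; the names below are hypothetical):
 * local_t is intended for per-CPU counters that are only modified
 * from their owning CPU.  A typical event counter looks like:
 *
 *	static DEFINE_PER_CPU(local_t, nr_events) = LOCAL_INIT(0);
 *
 *	static void record_event(void)
 *	{
 *		local_inc(this_cpu_ptr(&nr_events));
 *	}
 *
 * A global total is obtained by summing per_cpu(nr_events, cpu) over
 * all possible CPUs; such a sum is approximate while updates run.
 */
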
/*
 * Same as above, but return the result value
 */
static __inline__ long local_add_return(long i, local_t * l)
{
	unsigned long result;

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	arch=r4000				\n"
		"1:"	__LL	"%1, %2		# local_add_return	\n"
		"	addu	%0, %1, %3				\n"
			__SC	"%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	"MIPS_ISA_ARCH_LEVEL"			\n"
		"1:"	__LL	"%1, %2		# local_add_return	\n"
		"	addu	%0, %1, %3				\n"
			__SC	"%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = l->a.counter;
		result += i;
		l->a.counter = result;
		local_irq_restore(flags);
	}

	return result;
}
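
/*
 * Note on the three variants above: the first uses the branch-likely
 * instruction (beqzl), the workaround for the ll/sc errata on
 * affected R10000 revisions (R10000_LLSC_WAR); the second is the
 * plain ll/sc retry loop for other LL/SC-capable CPUs; the last
 * falls back to disabling interrupts on CPUs without ll/sc.  The
 * ll/sc variants retry from the ll until the sc (store conditional)
 * succeeds, so the result is a single atomic read-modify-write.
 */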

static __inline__ long local_sub_return(long i, local_t * l)
{
	unsigned long result;

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	arch=r4000				\n"
		"1:"	__LL	"%1, %2		# local_sub_return	\n"
		"	subu	%0, %1, %3				\n"
			__SC	"%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	"MIPS_ISA_ARCH_LEVEL"			\n"
		"1:"	__LL	"%1, %2		# local_sub_return	\n"
		"	subu	%0, %1, %3				\n"
			__SC	"%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = l->a.counter;
		result -= i;
		l->a.counter = result;
		local_irq_restore(flags);
	}

	return result;
}

#define local_cmpxchg(l, o, n) \
	((long)cmpxchg_local(&((l)->a.counter), (o), (n)))
#define local_xchg(l, n) (atomic_long_xchg((&(l)->a), (n)))

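/*
 * Example (hypothetical): code running on the owning CPU can drain
 * its statistics counter with local_xchg(), fetching the current
 * count and resetting it to zero in a single atomic step:
 *
 *	long drained = local_xchg(this_cpu_ptr(&nr_events), 0);
 */
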
/**
 * local_add_unless - add unless the number is a given value
 * @l: pointer of type local_t
 * @a: the amount to add to l...
 * @u: ...unless l is equal to u.
 *
 * Atomically adds @a to @l, so long as it was not @u.
 * Returns non-zero if @l was not @u, and zero otherwise.
 */
#define local_add_unless(l, a, u)				\
({								\
	long c, old;						\
	c = local_read(l);					\
	while (c != (u) && (old = local_cmpxchg((l), c, c + (a))) != c) \
		c = old;					\
	c != (u);						\
})
#define local_inc_not_zero(l) local_add_unless((l), 1, 0)
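
/*
 * Example (hypothetical): local_inc_not_zero() suits a "take a
 * reference only while the count is still live" pattern:
 *
 *	if (!local_inc_not_zero(&obj->refs))
 *		return NULL;	// count already dropped to zero
 */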

#define local_dec_return(l) local_sub_return(1, (l))
#define local_inc_return(l) local_add_return(1, (l))
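
/*
 * Example (hypothetical): the returned value makes it easy to act on
 * every Nth event, e.g. flushing a batch of per-CPU work:
 *
 *	if ((local_inc_return(this_cpu_ptr(&nr_events)) % 64) == 0)
 *		flush_events();	// hypothetical helper
 */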

/*
 * local_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @l: pointer of type local_t
 *
 * Atomically subtracts @i from @l and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define local_sub_and_test(i, l) (local_sub_return((i), (l)) == 0)

/*
 * local_inc_and_test - increment and test
 * @l: pointer of type local_t
 *
 * Atomically increments @l by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define local_inc_and_test(l) (local_inc_return(l) == 0)

/*
 * local_dec_and_test - decrement by 1 and test
 * @l: pointer of type local_t
 *
 * Atomically decrements @l by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define local_dec_and_test(l) (local_sub_return(1, (l)) == 0)

/*
 * local_add_negative - add and test if negative
 * @l: pointer of type local_t
 * @i: integer value to add
 *
 * Atomically adds @i to @l and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define local_add_negative(i, l) (local_add_return(i, (l)) < 0)

/* Use these for per-cpu local_t variables: on some archs they are
 * much more efficient than the atomic operations above.  Note that,
 * like the operations above, they take a pointer of type local_t.
 */

#define __local_inc(l)		((l)->a.counter++)
#define __local_dec(l)		((l)->a.counter--)
#define __local_add(i, l)	((l)->a.counter+=(i))
#define __local_sub(i, l)	((l)->a.counter-=(i))
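
/*
 * Example (hypothetical): the non-atomic variants are safe only when
 * nothing else can touch the counter concurrently, e.g. a counter
 * updated in just one non-reentrant context on its CPU:
 *
 *	static DEFINE_PER_CPU(local_t, nr_ticks);
 *
 *	// called with interrupts disabled on this CPU:
 *	__local_inc(this_cpu_ptr(&nr_ticks));
 */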

#endif /* _ARCH_MIPS_LOCAL_H */