/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ARCH_POWERPC_LOCAL_H
#define _ARCH_POWERPC_LOCAL_H

#include <linux/percpu.h>
#include <linux/atomic.h>

typedef struct
{
	atomic_long_t a;
} local_t;

#define LOCAL_INIT(i)	{ ATOMIC_LONG_INIT(i) }

#define local_read(l)	atomic_long_read(&(l)->a)
#define local_set(l,i)	atomic_long_set(&(l)->a, (i))

#define local_add(i,l)	atomic_long_add((i),(&(l)->a))
#define local_sub(i,l)	atomic_long_sub((i),(&(l)->a))
#define local_inc(l)	atomic_long_inc(&(l)->a)
#define local_dec(l)	atomic_long_dec(&(l)->a)

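/*
 * Example (illustrative sketch, not part of this header): a per-CPU
 * event counter.  The name "my_events" is hypothetical; DEFINE_PER_CPU
 * and this_cpu_ptr come from <linux/percpu.h>.
 *
 *	static DEFINE_PER_CPU(local_t, my_events) = LOCAL_INIT(0);
 *
 *	static void record_event(void)
 *	{
 *		local_inc(this_cpu_ptr(&my_events));
 *	}
 */
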
static __inline__ long local_add_return(long a, local_t *l)
{
	long t;

	__asm__ __volatile__(
"1:"	PPC_LLARX(%0,0,%2,0) "			# local_add_return\n\
	add	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
	PPC_STLCX	"%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t)
	: "r" (a), "r" (&(l->a.counter))
	: "cc", "memory");

	return t;
}
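
/*
 * In C terms, the larx/stcx. loop above behaves roughly like the
 * sketch below ("store_conditional" is a hypothetical stand-in for
 * stcx., which fails if the reservation taken by the load was lost):
 *
 *	long t;
 *	do {
 *		t = l->a.counter + a;	// larx: load with reservation
 *	} while (!store_conditional(&l->a.counter, t));
 *	return t;			// the new value
 */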

#define local_add_negative(a, l)	(local_add_return((a), (l)) < 0)

static __inline__ long local_sub_return(long a, local_t *l)
{
	long t;

	__asm__ __volatile__(
"1:"	PPC_LLARX(%0,0,%2,0) "			# local_sub_return\n\
	subf	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
	PPC_STLCX	"%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t)
	: "r" (a), "r" (&(l->a.counter))
	: "cc", "memory");

	return t;
}

static __inline__ long local_inc_return(local_t *l)
{
	long t;

	__asm__ __volatile__(
"1:"	PPC_LLARX(%0,0,%1,0) "			# local_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
	PPC_STLCX	"%0,0,%1 \n\
	bne-	1b"
	: "=&r" (t)
	: "r" (&(l->a.counter))
	: "cc", "xer", "memory");

	return t;
}

/*
 * local_inc_and_test - increment and test
 * @l: pointer of type local_t
 *
 * Atomically increments @l by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define local_inc_and_test(l) (local_inc_return(l) == 0)

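/*
 * Example (sketch): per-CPU sequence numbers.  "seq" is a hypothetical
 * per-CPU variable; local_inc_return() gives each caller on a given CPU
 * a distinct value even if an interrupt handler also bumps the counter.
 *
 *	static DEFINE_PER_CPU(local_t, seq);
 *
 *	static long next_seq(void)
 *	{
 *		return local_inc_return(this_cpu_ptr(&seq));
 *	}
 */
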
static __inline__ long local_dec_return(local_t *l)
{
	long t;

	__asm__ __volatile__(
"1:"	PPC_LLARX(%0,0,%1,0) "			# local_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
	PPC_STLCX	"%0,0,%1\n\
	bne-	1b"
	: "=&r" (t)
	: "r" (&(l->a.counter))
	: "cc", "xer", "memory");

	return t;
}

#define local_cmpxchg(l, o, n) \
	(cmpxchg_local(&((l)->a.counter), (o), (n)))
#define local_xchg(l, n) (xchg_local(&((l)->a.counter), (n)))

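/*
 * Example (sketch): claim a per-CPU slot exactly once, even against an
 * interrupt handler on the same CPU.  "slot" is a hypothetical local_t
 * initialized to 0.
 *
 *	if (local_cmpxchg(&slot, 0, 1) == 0) {
 *		// this context won; the 0 -> 1 transition happened here
 *	}
 */
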
/**
 * local_add_unless - add unless the number is a given value
 * @l: pointer of type local_t
 * @a: the amount to add to @l...
 * @u: ...unless @l is equal to @u.
 *
 * Atomically adds @a to @l, so long as it was not @u.
 * Returns non-zero if @l was not @u, and zero otherwise.
 */
static __inline__ int local_add_unless(local_t *l, long a, long u)
{
	long t;

	__asm__ __volatile__ (
"1:"	PPC_LLARX(%0,0,%1,0) "			# local_add_unless\n\
	cmpw	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
	PPC405_ERR77(0,%1)
	PPC_STLCX	"%0,0,%1 \n\
	bne-	1b \n"
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&(l->a.counter)), "r" (a), "r" (u)
	: "cc", "memory");

	return t != u;
}

#define local_inc_not_zero(l) local_add_unless((l), 1, 0)

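/*
 * Example (sketch): take a reference only while the count is non-zero,
 * mirroring the atomic_inc_not_zero() pattern.  "obj->ref" is a
 * hypothetical local_t field.
 *
 *	if (!local_inc_not_zero(&obj->ref))
 *		return NULL;	// object already dead, don't touch it
 */
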
#define local_sub_and_test(a, l)	(local_sub_return((a), (l)) == 0)
#define local_dec_and_test(l)		(local_dec_return((l)) == 0)

/*
 * Atomically test *l and decrement if it is greater than 0.
 * The function returns the old value of *l minus 1.
 */
static __inline__ long local_dec_if_positive(local_t *l)
{
	long t;

	__asm__ __volatile__(
"1:"	PPC_LLARX(%0,0,%1,0) "			# local_dec_if_positive\n\
	cmpwi	%0,1\n\
	addi	%0,%0,-1\n\
	blt-	2f\n"
	PPC405_ERR77(0,%1)
	PPC_STLCX	"%0,0,%1\n\
	bne-	1b"
	"\n\
2:"	: "=&b" (t)
	: "r" (&(l->a.counter))
	: "cc", "memory");

	return t;
}

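/*
 * Example (sketch): consume one token if any remain.  "tokens" is a
 * hypothetical per-CPU local_t; a negative return means the count was
 * already <= 0 and nothing was written back.
 *
 *	if (local_dec_if_positive(this_cpu_ptr(&tokens)) < 0)
 *		return -EBUSY;	// no token taken
 */
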
/* Use these for per-cpu local_t variables: on some archs they are
 * much more efficient than these naive implementations.  Note they take
 * a variable, not an address.
 */

#define __local_inc(l)		((l)->a.counter++)
#define __local_dec(l)		((l)->a.counter--)
#define __local_add(i,l)	((l)->a.counter+=(i))
#define __local_sub(i,l)	((l)->a.counter-=(i))

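/*
 * Example (sketch): the non-atomic forms are enough once interrupts on
 * this CPU cannot interleave, e.g. under local_irq_save().  "stat" is
 * a hypothetical per-CPU local_t.
 *
 *	unsigned long flags;
 *
 *	local_irq_save(flags);
 *	__local_inc(this_cpu_ptr(&stat));
 *	local_irq_restore(flags);
 */
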
#endif /* _ARCH_POWERPC_LOCAL_H */