xref: /openbmc/linux/arch/x86/include/asm/local.h (revision 9f380456)
#ifndef _ASM_X86_LOCAL_H
#define _ASM_X86_LOCAL_H

#include <linux/percpu.h>

#include <linux/atomic.h>
#include <asm/asm.h>

typedef struct {
	atomic_long_t a;
} local_t;

#define LOCAL_INIT(i)	{ ATOMIC_LONG_INIT(i) }

#define local_read(l)	atomic_long_read(&(l)->a)
#define local_set(l, i)	atomic_long_set(&(l)->a, (i))
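
/*
 * Illustrative usage sketch (not part of the original header): local_t
 * is designed for fast per-CPU counters that are only ever written by
 * their owning CPU.  A hypothetical counter could be declared as:
 *
 *	static DEFINE_PER_CPU(local_t, hypothetical_count) = LOCAL_INIT(0);
 *
 *	long v = local_read(this_cpu_ptr(&hypothetical_count));
 *
 * Any CPU may local_read() another CPU's copy via per_cpu(), but only
 * the owning CPU may modify it.
 */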

/**
 * local_inc - increment local variable
 * @l: pointer to type local_t
 *
 * Atomically increments @l by 1.
 */
static inline void local_inc(local_t *l)
{
	asm volatile(_ASM_INC "%0"
		     : "+m" (l->a.counter));
}

/**
 * local_dec - decrement local variable
 * @l: pointer to type local_t
 *
 * Atomically decrements @l by 1.
 */
static inline void local_dec(local_t *l)
{
	asm volatile(_ASM_DEC "%0"
		     : "+m" (l->a.counter));
}

/**
 * local_add - add long to local variable
 * @i: long value to add
 * @l: pointer to type local_t
 *
 * Atomically adds @i to @l.
 */
static inline void local_add(long i, local_t *l)
{
	asm volatile(_ASM_ADD "%1,%0"
		     : "+m" (l->a.counter)
		     : "ir" (i));
}

/**
 * local_sub - subtract long from local variable
 * @i: long value to subtract
 * @l: pointer to type local_t
 *
 * Atomically subtracts @i from @l.
 */
static inline void local_sub(long i, local_t *l)
{
	asm volatile(_ASM_SUB "%1,%0"
		     : "+m" (l->a.counter)
		     : "ir" (i));
}
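
/*
 * Illustrative sketch: each operation above is a single instruction, so
 * it is safe against interrupts hitting the same CPU and needs no lock
 * prefix.  A hypothetical interrupt handler could count events as:
 *
 *	static irqreturn_t hypothetical_handler(int irq, void *dev_id)
 *	{
 *		local_inc(this_cpu_ptr(&hypothetical_count));
 *		return IRQ_HANDLED;
 *	}
 *
 * Outside interrupt context the caller must pin itself to one CPU
 * first, e.g. with get_cpu_var()/put_cpu_var().
 */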

/**
 * local_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @l: pointer to type local_t
 *
 * Atomically subtracts @i from @l and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline int local_sub_and_test(long i, local_t *l)
{
	unsigned char c;

	asm volatile(_ASM_SUB "%2,%0; sete %1"
		     : "+m" (l->a.counter), "=qm" (c)
		     : "ir" (i) : "memory");
	return c;
}

/**
 * local_dec_and_test - decrement and test
 * @l: pointer to type local_t
 *
 * Atomically decrements @l by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline int local_dec_and_test(local_t *l)
{
	unsigned char c;

	asm volatile(_ASM_DEC "%0; sete %1"
		     : "+m" (l->a.counter), "=qm" (c)
		     : : "memory");
	return c != 0;
}
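
/*
 * Illustrative sketch: local_dec_and_test() makes "do something every
 * Nth event" cheap, here with a hypothetical per-CPU batch counter and
 * a hypothetical refill helper:
 *
 *	local_set(this_cpu_ptr(&hypothetical_batch), BATCH_SIZE);
 *	...
 *	if (local_dec_and_test(this_cpu_ptr(&hypothetical_batch)))
 *		hypothetical_flush_and_refill();
 */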

/**
 * local_inc_and_test - increment and test
 * @l: pointer to type local_t
 *
 * Atomically increments @l by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline int local_inc_and_test(local_t *l)
{
	unsigned char c;

	asm volatile(_ASM_INC "%0; sete %1"
		     : "+m" (l->a.counter), "=qm" (c)
		     : : "memory");
	return c != 0;
}

/**
 * local_add_negative - add and test if negative
 * @i: integer value to add
 * @l: pointer to type local_t
 *
 * Atomically adds @i to @l and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline int local_add_negative(long i, local_t *l)
{
	unsigned char c;

	asm volatile(_ASM_ADD "%2,%0; sets %1"
		     : "+m" (l->a.counter), "=qm" (c)
		     : "ir" (i) : "memory");
	return c;
}
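
/*
 * Illustrative sketch: local_add_negative() applies a signed delta and
 * branches on the sign of the result, e.g. to notice a hypothetical
 * per-CPU budget going into deficit:
 *
 *	if (local_add_negative(-nr_pages, this_cpu_ptr(&hypothetical_budget)))
 *		hypothetical_handle_deficit();
 */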

/**
 * local_add_return - add and return
 * @i: integer value to add
 * @l: pointer to type local_t
 *
 * Atomically adds @i to @l and returns @i + @l
 */
static inline long local_add_return(long i, local_t *l)
{
	long __i;
#ifdef CONFIG_M386
	unsigned long flags;
	if (unlikely(boot_cpu_data.x86 <= 3))
		goto no_xadd;
#endif
	/* Modern 486+ processor */
	__i = i;
	asm volatile(_ASM_XADD "%0, %1;"
		     : "+r" (i), "+m" (l->a.counter)
		     : : "memory");
	return i + __i;

#ifdef CONFIG_M386
no_xadd: /* Legacy 386 processor */
	local_irq_save(flags);
	__i = local_read(l);
	local_set(l, i + __i);
	local_irq_restore(flags);
	return i + __i;
#endif
}
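
/*
 * Illustrative sketch: because local_add_return() yields the post-add
 * value, it can hand out per-CPU sequence numbers without locking:
 *
 *	long seq = local_add_return(1, this_cpu_ptr(&hypothetical_seq));
 *
 * local_inc_return(), defined below, is shorthand for exactly this.
 */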

static inline long local_sub_return(long i, local_t *l)
{
	return local_add_return(-i, l);
}

#define local_inc_return(l)  (local_add_return(1, l))
#define local_dec_return(l)  (local_sub_return(1, l))

#define local_cmpxchg(l, o, n) \
	(cmpxchg_local(&((l)->a.counter), (o), (n)))
/* Always has a lock prefix */
#define local_xchg(l, n) (xchg(&((l)->a.counter), (n)))
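
/*
 * Illustrative sketch: local_cmpxchg() supports lock-free
 * read-modify-write loops on the local CPU, e.g. saturating a counter
 * at a hypothetical ceiling:
 *
 *	long old, new;
 *	do {
 *		old = local_read(l);
 *		new = old < HYPOTHETICAL_MAX ? old + 1 : old;
 *	} while (local_cmpxchg(l, old, new) != old);
 *
 * local_add_unless() below is built on this same pattern.
 */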

/**
 * local_add_unless - add unless the number is a given value
 * @l: pointer of type local_t
 * @a: the amount to add to l...
 * @u: ...unless l is equal to u.
 *
 * Atomically adds @a to @l, so long as it was not @u.
 * Returns non-zero if @l was not @u, and zero otherwise.
 */
#define local_add_unless(l, a, u)				\
({								\
	long c, old;						\
	c = local_read((l));					\
	for (;;) {						\
		if (unlikely(c == (u)))				\
			break;					\
		old = local_cmpxchg((l), c, c + (a));		\
		if (likely(old == c))				\
			break;					\
		c = old;					\
	}							\
	c != (u);						\
})
#define local_inc_not_zero(l) local_add_unless((l), 1, 0)
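
/*
 * Illustrative sketch: local_inc_not_zero() takes a "reference" only
 * while the count is still non-zero, e.g. on a hypothetical per-CPU
 * state that the owning CPU is tearing down:
 *
 *	if (!local_inc_not_zero(this_cpu_ptr(&hypothetical_active)))
 *		return -EBUSY;
 *
 * As with all local_t operations, only the owning CPU may do this.
 */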

/*
 * On x86_32, these are no better than the atomic variants.
 * On x86-64 these are better than the atomic variants on SMP kernels
 * because they don't use a lock prefix.
 */
#define __local_inc(l)		local_inc(l)
#define __local_dec(l)		local_dec(l)
#define __local_add(i, l)	local_add((i), (l))
#define __local_sub(i, l)	local_sub((i), (l))
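
/*
 * Illustrative sketch: the __local_* forms are meant for code that is
 * already fully serialized (e.g. interrupts disabled), where not even
 * the single-instruction guarantee is required:
 *
 *	local_irq_save(flags);
 *	__local_inc(this_cpu_ptr(&hypothetical_count));
 *	local_irq_restore(flags);
 *
 * On x86 they simply alias the local_* forms, which are already
 * lock-free.
 */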

#endif /* _ASM_X86_LOCAL_H */