xref: /openbmc/linux/arch/mips/include/asm/bitops.h (revision c4f7ac64)
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (c) 1994 - 1997, 99, 2000, 06, 07  Ralf Baechle (ralf@linux-mips.org)
 * Copyright (c) 1999, 2000  Silicon Graphics, Inc.
 */
#ifndef _ASM_BITOPS_H
#define _ASM_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/bits.h>
#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/byteorder.h>		/* sigh ... */
#include <asm/compiler.h>
#include <asm/cpu-features.h>
#include <asm/isa-rev.h>
#include <asm/llsc.h>
#include <asm/sgidefs.h>
#include <asm/war.h>

/*
 * __bit_op - atomically read-modify-write a word using an LL/SC retry loop
 *
 * @insn operates on the value loaded into %0; the store-conditional is
 * retried until it succeeds.
 */
#define __bit_op(mem, insn, inputs...) do {			\
	unsigned long __temp;					\
								\
	asm volatile(						\
	"	.set		push			\n"	\
	"	.set		" MIPS_ISA_LEVEL "	\n"	\
	"	" __SYNC(full, loongson3_war) "		\n"	\
	"1:	" __LL		"%0, %1			\n"	\
	"	" insn		"			\n"	\
	"	" __SC		"%0, %1			\n"	\
	"	" __SC_BEQZ	"%0, 1b			\n"	\
	"	.set		pop			\n"	\
	: "=&r"(__temp), "+" GCC_OFF_SMALL_ASM()(mem)		\
	: inputs						\
	: __LLSC_CLOBBER);					\
} while (0)

/*
 * __test_bit_op - as __bit_op(), but also return the original word
 *
 * The word is loaded into @ll_dst ("%0" or "%1"); @insn must leave the
 * new value in %1. The macro evaluates to the original value in %0.
 */
#define __test_bit_op(mem, ll_dst, insn, inputs...) ({		\
	unsigned long __orig, __temp;				\
								\
	asm volatile(						\
	"	.set		push			\n"	\
	"	.set		" MIPS_ISA_LEVEL "	\n"	\
	"	" __SYNC(full, loongson3_war) "		\n"	\
	"1:	" __LL		ll_dst ", %2		\n"	\
	"	" insn		"			\n"	\
	"	" __SC		"%1, %2			\n"	\
	"	" __SC_BEQZ	"%1, 1b			\n"	\
	"	.set		pop			\n"	\
	: "=&r"(__orig), "=&r"(__temp),				\
	  "+" GCC_OFF_SMALL_ASM()(mem)				\
	: inputs						\
	: __LLSC_CLOBBER);					\
								\
	__orig;							\
})
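
/*
 * For readers unfamiliar with LL/SC: the loops generated by the macros
 * above have the same effect as a compare-and-swap retry loop. A minimal,
 * illustrative C sketch (not kernel code; the name cas_or is made up):
 *
 *	static unsigned long cas_or(unsigned long *mem, unsigned long bits)
 *	{
 *		unsigned long old, new;
 *
 *		do {
 *			old = *mem;
 *			new = old | bits;
 *		} while (!__atomic_compare_exchange_n(mem, &old, new, false,
 *						      __ATOMIC_RELAXED,
 *						      __ATOMIC_RELAXED));
 *		return old;
 *	}
 */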

/*
 * These are the "slower" versions of the functions and are in bitops.c.
 * These functions call raw_local_irq_{save,restore}().
 */
void __mips_set_bit(unsigned long nr, volatile unsigned long *addr);
void __mips_clear_bit(unsigned long nr, volatile unsigned long *addr);
void __mips_change_bit(unsigned long nr, volatile unsigned long *addr);
int __mips_test_and_set_bit_lock(unsigned long nr,
				 volatile unsigned long *addr);
int __mips_test_and_clear_bit(unsigned long nr,
			      volatile unsigned long *addr);
int __mips_test_and_change_bit(unsigned long nr,
			       volatile unsigned long *addr);

/*
 * set_bit - Atomically set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * This function is atomic and may not be reordered.  See __set_bit()
 * if you do not require the atomic guarantees.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
{
	volatile unsigned long *m = &addr[BIT_WORD(nr)];
	int bit = nr % BITS_PER_LONG;

	if (!kernel_uses_llsc) {
		__mips_set_bit(nr, addr);
		return;
	}

	if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(bit) && (bit >= 16)) {
		__bit_op(*m, __INS "%0, %3, %2, 1", "i"(bit), "r"(~0));
		return;
	}

	__bit_op(*m, "or\t%0, %2", "ir"(BIT(bit)));
}
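
/*
 * Example (illustrative only; bitmap and bit number are hypothetical):
 *
 *	static DECLARE_BITMAP(pending, 64);
 *
 *	set_bit(3, pending);	atomically marks entry 3 as pending
 *
 * Note that set_bit() implies no memory barrier; pair it with
 * smp_mb__before_atomic()/smp_mb__after_atomic() where ordering matters.
 */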

/*
 * clear_bit - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit() is atomic and may not be reordered.  However, it does
 * not contain a memory barrier, so if it is used for locking purposes,
 * you should call smp_mb__before_atomic() and/or smp_mb__after_atomic()
 * in order to ensure changes are visible on other processors.
 */
static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
{
	volatile unsigned long *m = &addr[BIT_WORD(nr)];
	int bit = nr % BITS_PER_LONG;

	if (!kernel_uses_llsc) {
		__mips_clear_bit(nr, addr);
		return;
	}

	if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(bit)) {
		__bit_op(*m, __INS "%0, $0, %2, 1", "i"(bit));
		return;
	}

	__bit_op(*m, "and\t%0, %2", "ir"(~BIT(bit)));
}
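
/*
 * Example (illustrative only; the flag names are hypothetical): clearing
 * a flag that other CPUs test, with the barriers the comment above asks
 * for:
 *
 *	smp_mb__before_atomic();
 *	clear_bit(IRQ_PENDING, &dev->flags);
 *	smp_mb__after_atomic();
 */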

/*
 * clear_bit_unlock - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit_unlock() is atomic and implies release semantics before the
 * memory operation. It can be used for an unlock.
 */
static inline void clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
{
	smp_mb__before_atomic();
	clear_bit(nr, addr);
}
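
/*
 * Example (illustrative only; names are hypothetical): releasing a bit
 * lock taken with test_and_set_bit_lock() below; all stores made while
 * the lock was held are ordered before the bit is cleared:
 *
 *	obj->owner = NULL;
 *	clear_bit_unlock(LOCK_BIT, &obj->state);
 */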

/*
 * change_bit - Toggle a bit in memory
 * @nr: Bit to change
 * @addr: Address to start counting from
 *
 * change_bit() is atomic and may not be reordered.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
{
	volatile unsigned long *m = &addr[BIT_WORD(nr)];
	int bit = nr % BITS_PER_LONG;

	if (!kernel_uses_llsc) {
		__mips_change_bit(nr, addr);
		return;
	}

	__bit_op(*m, "xor\t%0, %2", "ir"(BIT(bit)));
}
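
/*
 * Example (illustrative only; names are hypothetical): atomically
 * toggling a polarity bit that may also be flipped from interrupt
 * context:
 *
 *	change_bit(POLARITY_BIT, &port->ctrl);
 */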

/*
 * test_and_set_bit_lock - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and implies acquire ordering semantics
 * after the memory operation.
 */
static inline int test_and_set_bit_lock(unsigned long nr,
	volatile unsigned long *addr)
{
	volatile unsigned long *m = &addr[BIT_WORD(nr)];
	int bit = nr % BITS_PER_LONG;
	unsigned long res, orig;

	if (!kernel_uses_llsc) {
		res = __mips_test_and_set_bit_lock(nr, addr);
	} else {
		orig = __test_bit_op(*m, "%0",
				     "or\t%1, %0, %3",
				     "ir"(BIT(bit)));
		res = (orig & BIT(bit)) != 0;
	}

	smp_llsc_mb();

	return res;
}
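
/*
 * Example (illustrative only; names are hypothetical): a minimal bit
 * spinlock built from the acquire/release pair. Real users should prefer
 * the helpers in <linux/bit_spinlock.h>, which build on these primitives:
 *
 *	while (test_and_set_bit_lock(LOCK_BIT, &obj->state))
 *		cpu_relax();
 *	... critical section ...
 *	clear_bit_unlock(LOCK_BIT, &obj->state);
 */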

/*
 * test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_set_bit(unsigned long nr,
	volatile unsigned long *addr)
{
	smp_mb__before_atomic();
	return test_and_set_bit_lock(nr, addr);
}
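
/*
 * Example (illustrative only; names are hypothetical): making sure a
 * piece of work is queued at most once; only the caller that saw the bit
 * clear proceeds:
 *
 *	if (!test_and_set_bit(WORK_QUEUED, &dev->flags))
 *		schedule_work(&dev->work);
 */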

/*
 * test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_clear_bit(unsigned long nr,
	volatile unsigned long *addr)
{
	volatile unsigned long *m = &addr[BIT_WORD(nr)];
	int bit = nr % BITS_PER_LONG;
	unsigned long res, orig;

	smp_mb__before_atomic();

	if (!kernel_uses_llsc) {
		res = __mips_test_and_clear_bit(nr, addr);
	} else if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(nr)) {
		res = __test_bit_op(*m, "%1",
				    __EXT "%0, %1, %3, 1;"
				    __INS "%1, $0, %3, 1",
				    "i"(bit));
	} else {
		orig = __test_bit_op(*m, "%0",
				     "or\t%1, %0, %3;"
				     "xor\t%1, %1, %3",
				     "ir"(BIT(bit)));
		res = (orig & BIT(bit)) != 0;
	}

	smp_llsc_mb();

	return res;
}
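
/*
 * Example (illustrative only; names are hypothetical): consuming an event
 * flag exactly once even if several CPUs race to handle it:
 *
 *	if (test_and_clear_bit(EVENT_BIT, &dev->events))
 *		handle_event(dev);
 */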

/*
 * test_and_change_bit - Change a bit and return its old value
 * @nr: Bit to change
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_change_bit(unsigned long nr,
	volatile unsigned long *addr)
{
	volatile unsigned long *m = &addr[BIT_WORD(nr)];
	int bit = nr % BITS_PER_LONG;
	unsigned long res, orig;

	smp_mb__before_atomic();

	if (!kernel_uses_llsc) {
		res = __mips_test_and_change_bit(nr, addr);
	} else {
		orig = __test_bit_op(*m, "%0",
				     "xor\t%1, %0, %3",
				     "ir"(BIT(bit)));
		res = (orig & BIT(bit)) != 0;
	}

	smp_llsc_mb();

	return res;
}
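
/*
 * Example (illustrative only; names are hypothetical): flipping a phase
 * bit while learning which phase was left:
 *
 *	if (test_and_change_bit(PHASE_BIT, &dev->state))
 *		... bit was set, now clear ...
 *	else
 *		... bit was clear, now set ...
 */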

#undef __bit_op
#undef __test_bit_op

#include <asm-generic/bitops/non-atomic.h>

/*
 * __clear_bit_unlock - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * __clear_bit_unlock() is non-atomic and implies release semantics before
 * the memory operation. It can be used for an unlock if no other CPUs can
 * concurrently modify other bits in the word.
 */
static inline void __clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
{
	smp_mb__before_llsc();
	__clear_bit(nr, addr);
	nudge_writes();
}
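
/*
 * Example (illustrative only; names are hypothetical): this cheaper,
 * non-atomic unlock is safe only because the lock holder owns the whole
 * word, so no other CPU can be modifying other bits in it concurrently:
 *
 *	__clear_bit_unlock(LOCK_BIT, &obj->state);
 */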

/*
 * Return the bit position (0..BITS_PER_LONG-1) of the most significant 1
 * bit in a word. The result is undefined if no 1 bit exists.
 */
static __always_inline unsigned long __fls(unsigned long word)
{
	int num;

	if (BITS_PER_LONG == 32 && !__builtin_constant_p(word) &&
	    __builtin_constant_p(cpu_has_clo_clz) && cpu_has_clo_clz) {
		__asm__(
		"	.set	push					\n"
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"	clz	%0, %1					\n"
		"	.set	pop					\n"
		: "=r" (num)
		: "r" (word));

		return 31 - num;
	}

	if (BITS_PER_LONG == 64 && !__builtin_constant_p(word) &&
	    __builtin_constant_p(cpu_has_mips64) && cpu_has_mips64) {
		__asm__(
		"	.set	push					\n"
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"	dclz	%0, %1					\n"
		"	.set	pop					\n"
		: "=r" (num)
		: "r" (word));

		return 63 - num;
	}

	num = BITS_PER_LONG - 1;

#if BITS_PER_LONG == 64
	if (!(word & (~0ul << 32))) {
		num -= 32;
		word <<= 32;
	}
#endif
	if (!(word & (~0ul << (BITS_PER_LONG-16)))) {
		num -= 16;
		word <<= 16;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-8)))) {
		num -= 8;
		word <<= 8;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-4)))) {
		num -= 4;
		word <<= 4;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-2)))) {
		num -= 2;
		word <<= 2;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-1))))
		num -= 1;
	return num;
}
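
/*
 * Sample values, for reference:
 *
 *	__fls(1)          == 0
 *	__fls(0x80000000) == 31
 *	__fls(~0ul)       == BITS_PER_LONG - 1
 */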

/*
 * __ffs - find first bit in word.
 * @word: The word to search
 *
 * Returns 0..BITS_PER_LONG-1.
 * Undefined if no bit exists, so code should check against 0 first.
 */
static __always_inline unsigned long __ffs(unsigned long word)
{
	return __fls(word & -word);
}
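
/*
 * The (word & -word) trick isolates the lowest set bit: in two's
 * complement, negation flips every bit above the lowest 1 and preserves
 * that 1, so the AND leaves exactly one bit set and __fls() of the result
 * is its index. For example, __ffs(0x18) == __fls(0x8) == 3.
 */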

/*
 * fls - find last bit set.
 * @word: The word to search
 *
 * This is defined the same way as ffs.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */
static inline int fls(unsigned int x)
{
	int r;

	if (!__builtin_constant_p(x) &&
	    __builtin_constant_p(cpu_has_clo_clz) && cpu_has_clo_clz) {
		__asm__(
		"	.set	push					\n"
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"	clz	%0, %1					\n"
		"	.set	pop					\n"
		: "=r" (x)
		: "r" (x));

		return 32 - x;
	}

	r = 32;
	if (!x)
		return 0;
	if (!(x & 0xffff0000u)) {
		x <<= 16;
		r -= 16;
	}
	if (!(x & 0xff000000u)) {
		x <<= 8;
		r -= 8;
	}
	if (!(x & 0xf0000000u)) {
		x <<= 4;
		r -= 4;
	}
	if (!(x & 0xc0000000u)) {
		x <<= 2;
		r -= 2;
	}
	if (!(x & 0x80000000u)) {
		x <<= 1;
		r -= 1;
	}
	return r;
}
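
/*
 * Example (illustrative only): fls() is handy for rounding up to a power
 * of two; for size >= 1, fls(size - 1) bits are enough, so:
 *
 *	order = fls(size - 1);		then (1u << order) >= size
 */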

#include <asm-generic/bitops/fls64.h>

/*
 * ffs - find first bit set.
 * @word: The word to search
 *
 * This is defined the same way as the libc and compiler builtin ffs
 * routines, and therefore differs in spirit from ffz() below (man ffs).
 */
static inline int ffs(int word)
{
	if (!word)
		return 0;

	return fls(word & -word);
}
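
/*
 * Relationship to the helpers above, for a nonzero 32-bit word:
 *
 *	ffs(word) == __ffs(word) + 1
 *	fls(word) == __fls(word) + 1
 */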

#include <asm-generic/bitops/ffz.h>
#include <asm-generic/bitops/find.h>

#ifdef __KERNEL__

#include <asm-generic/bitops/sched.h>

#include <asm/arch_hweight.h>
#include <asm-generic/bitops/const_hweight.h>

#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic.h>

#endif /* __KERNEL__ */

#endif /* _ASM_BITOPS_H */