xref: /openbmc/linux/arch/s390/include/asm/bitops.h (revision fb8d6c8d)
/* SPDX-License-Identifier: GPL-2.0 */
/*
 *    Copyright IBM Corp. 1999,2013
 *
 *    Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>,
 *
 * The description below was taken in large parts from the powerpc
 * bitops header file:
 * Within a word, bits are numbered LSB first.  Lots of places make
 * this assumption by directly testing bits with (val & (1<<nr)).
 * This can cause confusion for large (> 1 word) bitmaps on a
 * big-endian system because, unlike little endian, the number of each
 * bit depends on the word size.
 *
 * The bitop functions are defined to work on unsigned longs, so the bits
 * end up numbered:
 *   |63..............0|127............64|191...........128|255...........192|
 *
 * We also have special functions which work with an MSB0 encoding.
 * The bits are numbered:
 *   |0..............63|64............127|128...........191|192...........255|
 *
 * The main difference is that bits 0-63 in the bit number field need to be
 * reversed compared to the LSB0 encoded bit fields. This can be achieved by
 * XOR with 0x3f.
 *
 */

#ifndef _S390_BITOPS_H
#define _S390_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/typecheck.h>
#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/atomic_ops.h>
#include <asm/barrier.h>

#define __BITOPS_WORDS(bits) (((bits) + BITS_PER_LONG - 1) / BITS_PER_LONG)

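/*
 * Return the address of the unsigned long containing bit nr: the in-word
 * bit offset (nr & (BITS_PER_LONG - 1)) is masked off and the remainder is
 * converted to a byte offset, i.e. (nr / BITS_PER_LONG) * sizeof(long).
 */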
static inline unsigned long *
__bitops_word(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;

	addr = (unsigned long)ptr + ((nr ^ (nr & (BITS_PER_LONG - 1))) >> 3);
	return (unsigned long *)addr;
}

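/*
 * Return the address of the byte containing bit nr.  s390 is big-endian,
 * so the byte index within each 8-byte word has to be mirrored; XOR-ing
 * nr with BITS_PER_LONG - 8 before the shift does exactly that (bit 0
 * lives in byte 7 of the first word, bit 63 in byte 0).
 */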
static inline unsigned char *
__bitops_byte(unsigned long nr, volatile unsigned long *ptr)
{
	return ((unsigned char *)ptr) + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
}

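/*
 * Atomic bit operations: on machines providing the zEC12 features
 * (CONFIG_HAVE_MARCH_ZEC12_FEATURES) the immediate instructions OI, NI and
 * XI perform an interlocked update, so a compile-time-constant bit can be
 * set, cleared or flipped directly in the byte that contains it.  In all
 * other cases the containing word is updated with an interlocked
 * load-and-or/and/xor via the __atomic64_* helpers.
 */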
static __always_inline void arch_set_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask;

#ifdef CONFIG_HAVE_MARCH_ZEC12_FEATURES
	if (__builtin_constant_p(nr)) {
		unsigned char *caddr = __bitops_byte(nr, ptr);

		asm volatile(
			"oi	%0,%b1\n"
			: "+Q" (*caddr)
			: "i" (1 << (nr & 7))
			: "cc", "memory");
		return;
	}
#endif
	mask = 1UL << (nr & (BITS_PER_LONG - 1));
	__atomic64_or(mask, (long *)addr);
}

static __always_inline void arch_clear_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask;

#ifdef CONFIG_HAVE_MARCH_ZEC12_FEATURES
	if (__builtin_constant_p(nr)) {
		unsigned char *caddr = __bitops_byte(nr, ptr);

		asm volatile(
			"ni	%0,%b1\n"
			: "+Q" (*caddr)
			: "i" (~(1 << (nr & 7)))
			: "cc", "memory");
		return;
	}
#endif
	mask = ~(1UL << (nr & (BITS_PER_LONG - 1)));
	__atomic64_and(mask, (long *)addr);
}

static __always_inline void arch_change_bit(unsigned long nr,
					    volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask;

#ifdef CONFIG_HAVE_MARCH_ZEC12_FEATURES
	if (__builtin_constant_p(nr)) {
		unsigned char *caddr = __bitops_byte(nr, ptr);

		asm volatile(
			"xi	%0,%b1\n"
			: "+Q" (*caddr)
			: "i" (1 << (nr & 7))
			: "cc", "memory");
		return;
	}
#endif
	mask = 1UL << (nr & (BITS_PER_LONG - 1));
	__atomic64_xor(mask, (long *)addr);
}

static inline bool arch_test_and_set_bit(unsigned long nr,
					 volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long old, mask;

	mask = 1UL << (nr & (BITS_PER_LONG - 1));
	old = __atomic64_or_barrier(mask, (long *)addr);
	return (old & mask) != 0;
}

static inline bool arch_test_and_clear_bit(unsigned long nr,
					   volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long old, mask;

	mask = ~(1UL << (nr & (BITS_PER_LONG - 1)));
	old = __atomic64_and_barrier(mask, (long *)addr);
	return (old & ~mask) != 0;
}

static inline bool arch_test_and_change_bit(unsigned long nr,
					    volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long old, mask;

	mask = 1UL << (nr & (BITS_PER_LONG - 1));
	old = __atomic64_xor_barrier(mask, (long *)addr);
	return (old & mask) != 0;
}

static inline void arch___set_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned char *addr = __bitops_byte(nr, ptr);

	*addr |= 1 << (nr & 7);
}

static inline void arch___clear_bit(unsigned long nr,
				    volatile unsigned long *ptr)
{
	unsigned char *addr = __bitops_byte(nr, ptr);

	*addr &= ~(1 << (nr & 7));
}

static inline void arch___change_bit(unsigned long nr,
				     volatile unsigned long *ptr)
{
	unsigned char *addr = __bitops_byte(nr, ptr);

	*addr ^= 1 << (nr & 7);
}

static inline bool arch___test_and_set_bit(unsigned long nr,
					   volatile unsigned long *ptr)
{
	unsigned char *addr = __bitops_byte(nr, ptr);
	unsigned char ch;

	ch = *addr;
	*addr |= 1 << (nr & 7);
	return (ch >> (nr & 7)) & 1;
}

static inline bool arch___test_and_clear_bit(unsigned long nr,
					     volatile unsigned long *ptr)
{
	unsigned char *addr = __bitops_byte(nr, ptr);
	unsigned char ch;

	ch = *addr;
	*addr &= ~(1 << (nr & 7));
	return (ch >> (nr & 7)) & 1;
}

static inline bool arch___test_and_change_bit(unsigned long nr,
					      volatile unsigned long *ptr)
{
	unsigned char *addr = __bitops_byte(nr, ptr);
	unsigned char ch;

	ch = *addr;
	*addr ^= 1 << (nr & 7);
	return (ch >> (nr & 7)) & 1;
}

static inline bool arch_test_bit(unsigned long nr,
				 const volatile unsigned long *ptr)
{
	const volatile unsigned char *addr;

	addr = ((const volatile unsigned char *)ptr);
	addr += (nr ^ (BITS_PER_LONG - 8)) >> 3;
	return (*addr >> (nr & 7)) & 1;
}

static inline bool arch_test_and_set_bit_lock(unsigned long nr,
					      volatile unsigned long *ptr)
{
	if (arch_test_bit(nr, ptr))
		return 1;
	return arch_test_and_set_bit(nr, ptr);
}

static inline void arch_clear_bit_unlock(unsigned long nr,
					 volatile unsigned long *ptr)
{
	smp_mb__before_atomic();
	arch_clear_bit(nr, ptr);
}

static inline void arch___clear_bit_unlock(unsigned long nr,
					   volatile unsigned long *ptr)
{
	smp_mb();
	arch___clear_bit(nr, ptr);
}

#include <asm-generic/bitops-instrumented.h>
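
/*
 * Minimal usage sketch (illustration only, not part of the kernel API):
 * the instrumented wrappers included above provide the usual set_bit(),
 * test_and_clear_bit(), ... interfaces that callers reach through
 * <linux/bitops.h>.  The function name below is hypothetical.
 */
static inline bool __bitops_usage_sketch(void)
{
	unsigned long map[4] = { 0 };		/* a 256-bit bitmap, all clear */

	set_bit(131, map);			/* atomically set bit 131 (word 2, bit 3) */
	return test_and_clear_bit(131, map);	/* read it back and clear it again */
}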

/*
 * Functions which use MSB0 bit numbering.
 * The bits are numbered:
 *   |0..............63|64............127|128...........191|192...........255|
 */
unsigned long find_first_bit_inv(const unsigned long *addr, unsigned long size);
unsigned long find_next_bit_inv(const unsigned long *addr, unsigned long size,
				unsigned long offset);

#define for_each_set_bit_inv(bit, addr, size)				\
	for ((bit) = find_first_bit_inv((addr), (size));		\
	     (bit) < (size);						\
	     (bit) = find_next_bit_inv((addr), (size), (bit) + 1))
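
/*
 * Minimal usage sketch (illustration only): walk the bits that are set in
 * an MSB0-numbered bitmap, most significant bit of the first word first.
 * The function name is hypothetical and not part of this header.
 */
static inline unsigned long __count_set_bits_inv_sketch(const unsigned long *map,
							 unsigned long bits)
{
	unsigned long nr, set = 0;

	for_each_set_bit_inv(nr, map, bits)
		set++;
	return set;
}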

static inline void set_bit_inv(unsigned long nr, volatile unsigned long *ptr)
{
	set_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}

static inline void clear_bit_inv(unsigned long nr, volatile unsigned long *ptr)
{
	clear_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}

static inline bool test_and_clear_bit_inv(unsigned long nr,
					  volatile unsigned long *ptr)
{
	return test_and_clear_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}

static inline void __set_bit_inv(unsigned long nr, volatile unsigned long *ptr)
{
	__set_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}

static inline void __clear_bit_inv(unsigned long nr, volatile unsigned long *ptr)
{
	__clear_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}

static inline bool test_bit_inv(unsigned long nr,
				const volatile unsigned long *ptr)
{
	return test_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}
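
/*
 * Minimal sketch (illustration only, hypothetical function name): with MSB0
 * numbering, bit 0 is the most significant bit of the first word, so an
 * MSB0 bit number maps to its LSB0 counterpart by XOR with 0x3f, as noted
 * in the header comment above.
 */
static inline bool __msb0_lsb0_equivalence_sketch(unsigned long *map)
{
	set_bit_inv(0, map);				/* MSB0 bit 0 ...    */
	return test_bit(BITS_PER_LONG - 1, map);	/* ... is LSB0 bit 63 */
}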

#ifdef CONFIG_HAVE_MARCH_Z9_109_FEATURES

/**
 * __flogr - find leftmost one
 * @word: The word to search
 *
 * Returns the bit number of the most significant bit set,
 * where the most significant bit has bit number 0.
 * If no bit is set this function returns 64.
 */
static inline unsigned char __flogr(unsigned long word)
{
	if (__builtin_constant_p(word)) {
		unsigned long bit = 0;

		if (!word)
			return 64;
		if (!(word & 0xffffffff00000000UL)) {
			word <<= 32;
			bit += 32;
		}
		if (!(word & 0xffff000000000000UL)) {
			word <<= 16;
			bit += 16;
		}
		if (!(word & 0xff00000000000000UL)) {
			word <<= 8;
			bit += 8;
		}
		if (!(word & 0xf000000000000000UL)) {
			word <<= 4;
			bit += 4;
		}
		if (!(word & 0xc000000000000000UL)) {
			word <<= 2;
			bit += 2;
		}
		if (!(word & 0x8000000000000000UL)) {
			word <<= 1;
			bit += 1;
		}
		return bit;
	} else {
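		/*
		 * FLOGR operates on an even/odd register pair, hence the
		 * explicit register variables "4" and "5".  The even
		 * register receives the MSB0 position of the leftmost one
		 * bit (64 if the operand is zero); the odd register is
		 * also written by the instruction, which is why it is
		 * declared as an otherwise unused output.
		 */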
		register unsigned long bit asm("4") = word;
		register unsigned long out asm("5");

		asm volatile(
			"       flogr   %[bit],%[bit]\n"
			: [bit] "+d" (bit), [out] "=d" (out) : : "cc");
		return bit;
	}
}

/**
 * __ffs - find first bit in word.
 * @word: The word to search
 *
 * Undefined if no bit exists, so code should check against 0 first.
 */
static inline unsigned long __ffs(unsigned long word)
{
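	/*
	 * -word & word isolates the least significant set bit; __flogr()
	 * returns its MSB0 position and the XOR with BITS_PER_LONG - 1
	 * converts that back to an LSB0 bit number.
	 */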
	return __flogr(-word & word) ^ (BITS_PER_LONG - 1);
}

/**
 * ffs - find first bit set
 * @word: the word to search
 *
 * This is defined the same way as the libc and
 * compiler builtin ffs routines (man ffs).
 */
static inline int ffs(int word)
{
	unsigned long mask = 2 * BITS_PER_LONG - 1;
	unsigned int val = (unsigned int)word;

	return (1 + (__flogr(-val & val) ^ (BITS_PER_LONG - 1))) & mask;
}

/**
 * __fls - find last (most-significant) set bit in a long word
 * @word: the word to search
 *
 * Undefined if no set bit exists, so code should check against 0 first.
 */
static inline unsigned long __fls(unsigned long word)
{
	return __flogr(word) ^ (BITS_PER_LONG - 1);
}

/**
 * fls64 - find last set bit in a 64-bit word
 * @word: the word to search
 *
 * This is defined in a similar way as the libc and compiler builtin
 * ffsll, but returns the position of the most significant set bit.
 *
 * fls64(value) returns 0 if value is 0 or the position of the last
 * set bit if value is nonzero. The last (most significant) bit is
 * at position 64.
 */
static inline int fls64(unsigned long word)
{
	unsigned long mask = 2 * BITS_PER_LONG - 1;

	return (1 + (__flogr(word) ^ (BITS_PER_LONG - 1))) & mask;
}

/**
 * fls - find last (most-significant) bit set
 * @word: the word to search
 *
 * This is defined the same way as ffs.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */
static inline int fls(unsigned int word)
{
	return fls64(word);
}

#else /* CONFIG_HAVE_MARCH_Z9_109_FEATURES */

#include <asm-generic/bitops/__ffs.h>
#include <asm-generic/bitops/ffs.h>
#include <asm-generic/bitops/__fls.h>
#include <asm-generic/bitops/fls.h>
#include <asm-generic/bitops/fls64.h>

#endif /* CONFIG_HAVE_MARCH_Z9_109_FEATURES */

#include <asm-generic/bitops/ffz.h>
#include <asm-generic/bitops/find.h>
#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic-setbit.h>

#endif /* _S390_BITOPS_H */