/*
 *    Copyright IBM Corp. 1999,2013
 *
 *    Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>,
 *
 * The description below was taken in large parts from the powerpc
 * bitops header file:
 * Within a word, bits are numbered LSB first.  Lots of places make
 * this assumption by directly testing bits with (val & (1<<nr)).
 * This can cause confusion for large (> 1 word) bitmaps on a
 * big-endian system because, unlike little endian, the number of each
 * bit depends on the word size.
 *
 * The bitop functions are defined to work on unsigned longs, so the bits
 * end up numbered:
 *   |63..............0|127............64|191...........128|255...........192|
 *
 * There are a few little-endian macros used mostly for filesystem
 * bitmaps; these work on similar bit array layouts, but byte-oriented:
 *   |7...0|15...8|23...16|31...24|39...32|47...40|55...48|63...56|
 *
 * The main difference is that bits 3-5 in the bit number field need to be
 * reversed compared to the big-endian bit fields. This can be achieved by
 * XOR with 0x38.
 *
 * We also have special functions which work with an MSB0 encoding.
 * The bits are numbered:
 *   |0..............63|64............127|128...........191|192...........255|
 *
 * The main difference is that bits 0-5 in the bit number field need to be
 * reversed compared to the LSB0 encoded bit fields. This can be achieved by
 * XOR with 0x3f.
 */
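
/*
 * Example (illustration only, not part of the header): the least
 * significant bit of the first bitmap word is
 *
 *	LSB0 bit number  0,
 *	LE   bit number 56	(0 ^ 0x38; on this big-endian machine the
 *				 lsb lives in byte 7 of the word), and
 *	MSB0 bit number 63	(0 ^ 0x3f).
 */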

#ifndef _S390_BITOPS_H
#define _S390_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/typecheck.h>
#include <linux/compiler.h>
#include <asm/atomic_ops.h>
#include <asm/barrier.h>

#define __BITOPS_WORDS(bits) (((bits) + BITS_PER_LONG - 1) / BITS_PER_LONG)

/*
 * Return the address of the bitmap word containing bit "nr":
 * nr ^ (nr & (BITS_PER_LONG - 1)) rounds nr down to a multiple of
 * BITS_PER_LONG, and the ">> 3" converts that bit offset to bytes.
 */
static inline unsigned long *
__bitops_word(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;

	addr = (unsigned long)ptr + ((nr ^ (nr & (BITS_PER_LONG - 1))) >> 3);
	return (unsigned long *)addr;
}

/*
 * Return the address of the byte containing bit "nr"; the XOR with
 * BITS_PER_LONG - 8 inverts the byte index within the word to account
 * for the big-endian byte order.
 */
static inline unsigned char *
__bitops_byte(unsigned long nr, volatile unsigned long *ptr)
{
	return ((unsigned char *)ptr) + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
}
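
/*
 * Worked example (illustration only): for nr = 70 on this 64-bit machine
 *
 *	__bitops_word: (70 ^ (70 & 63)) >> 3 = (70 ^ 6) >> 3 = 8,
 *	i.e. the second bitmap word (byte offset 8);
 *
 *	__bitops_byte: (70 ^ 56) >> 3 = 126 >> 3 = 15,
 *	i.e. the least significant byte of the second word, which is
 *	where bit 70 (= bit 6 of word 1) lives on big-endian.
 */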

/*
 * If the bit number is a compile-time constant, the bit can be changed
 * with a single OI/NI/XI instruction and an immediate mask. This path
 * is only enabled for CONFIG_HAVE_MARCH_ZEC12_FEATURES, where these
 * byte updates are interlocked and therefore atomic; otherwise the
 * interlocked 64-bit operations from asm/atomic_ops.h are used.
 */
static inline void set_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask;

#ifdef CONFIG_HAVE_MARCH_ZEC12_FEATURES
	if (__builtin_constant_p(nr)) {
		unsigned char *caddr = __bitops_byte(nr, ptr);

		/* OR IMMEDIATE on the single byte containing the bit */
		asm volatile(
			"oi	%0,%b1\n"
			: "+Q" (*caddr)
			: "i" (1 << (nr & 7))
			: "cc", "memory");
		return;
	}
#endif
	mask = 1UL << (nr & (BITS_PER_LONG - 1));
	__atomic64_or(mask, addr);
}

static inline void clear_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask;

#ifdef CONFIG_HAVE_MARCH_ZEC12_FEATURES
	if (__builtin_constant_p(nr)) {
		unsigned char *caddr = __bitops_byte(nr, ptr);

		/* AND IMMEDIATE with the inverted mask clears the bit */
		asm volatile(
			"ni	%0,%b1\n"
			: "+Q" (*caddr)
			: "i" (~(1 << (nr & 7)))
			: "cc", "memory");
		return;
	}
#endif
	mask = ~(1UL << (nr & (BITS_PER_LONG - 1)));
	__atomic64_and(mask, addr);
}

static inline void change_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask;

#ifdef CONFIG_HAVE_MARCH_ZEC12_FEATURES
	if (__builtin_constant_p(nr)) {
		unsigned char *caddr = __bitops_byte(nr, ptr);

		/* EXCLUSIVE OR IMMEDIATE flips the bit */
		asm volatile(
			"xi	%0,%b1\n"
			: "+Q" (*caddr)
			: "i" (1 << (nr & 7))
			: "cc", "memory");
		return;
	}
#endif
	mask = 1UL << (nr & (BITS_PER_LONG - 1));
	__atomic64_xor(mask, addr);
}
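
/*
 * Usage sketch (illustration only; "flags" and "nr" are hypothetical):
 *
 *	static unsigned long flags[__BITOPS_WORDS(256)];
 *
 *	set_bit(7, flags);	// constant nr: single "oi" on zEC12+
 *	clear_bit(nr, flags);	// runtime nr: interlocked 64-bit AND
 *	change_bit(7, flags);	// constant nr: single "xi" on zEC12+
 */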

/*
 * The test_and_* variants use the _barrier forms of the atomic
 * operations and thus provide the full memory barrier these
 * operations are required to imply.
 */
static inline int
test_and_set_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long old, mask;

	mask = 1UL << (nr & (BITS_PER_LONG - 1));
	old = __atomic64_or_barrier(mask, addr);
	return (old & mask) != 0;
}

static inline int
test_and_clear_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long old, mask;

	mask = ~(1UL << (nr & (BITS_PER_LONG - 1)));
	old = __atomic64_and_barrier(mask, addr);
	return (old & ~mask) != 0;
}

static inline int
test_and_change_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long old, mask;

	mask = 1UL << (nr & (BITS_PER_LONG - 1));
	old = __atomic64_xor_barrier(mask, addr);
	return (old & mask) != 0;
}
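
/*
 * Example (illustration only; "slot" and "bitmap" are hypothetical):
 * the returned old value makes these suitable for claiming a resource:
 *
 *	if (!test_and_set_bit(slot, bitmap))
 *		;	// bit was clear: this caller owns the slot
 *	else
 *		;	// bit was already set: somebody else was faster
 */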

/*
 * The non-atomic __*_bit variants operate on a single byte and may only
 * be used if the bitmap cannot be modified concurrently.
 */
static inline void __set_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned char *addr = __bitops_byte(nr, ptr);

	*addr |= 1 << (nr & 7);
}

static inline void
__clear_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned char *addr = __bitops_byte(nr, ptr);

	*addr &= ~(1 << (nr & 7));
}

static inline void __change_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned char *addr = __bitops_byte(nr, ptr);

	*addr ^= 1 << (nr & 7);
}

static inline int
__test_and_set_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned char *addr = __bitops_byte(nr, ptr);
	unsigned char ch;

	ch = *addr;
	*addr |= 1 << (nr & 7);
	return (ch >> (nr & 7)) & 1;
}

static inline int
__test_and_clear_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned char *addr = __bitops_byte(nr, ptr);
	unsigned char ch;

	ch = *addr;
	*addr &= ~(1 << (nr & 7));
	return (ch >> (nr & 7)) & 1;
}

static inline int
__test_and_change_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned char *addr = __bitops_byte(nr, ptr);
	unsigned char ch;

	ch = *addr;
	*addr ^= 1 << (nr & 7);
	return (ch >> (nr & 7)) & 1;
}

static inline int test_bit(unsigned long nr, const volatile unsigned long *ptr)
{
	const volatile unsigned char *addr;

	addr = ((const volatile unsigned char *)ptr);
	addr += (nr ^ (BITS_PER_LONG - 8)) >> 3;
	return (*addr >> (nr & 7)) & 1;
}
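
/*
 * Example (illustration only): for nr = 0, test_bit() reads byte
 * (0 ^ 56) >> 3 = 7, the least significant byte of the first word on
 * this big-endian machine, and tests bit 0 within it.
 */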

static inline int test_and_set_bit_lock(unsigned long nr,
					volatile unsigned long *ptr)
{
	/* Avoid the expensive atomic operation if the bit is already set. */
	if (test_bit(nr, ptr))
		return 1;
	return test_and_set_bit(nr, ptr);
}

static inline void clear_bit_unlock(unsigned long nr,
				    volatile unsigned long *ptr)
{
	smp_mb__before_atomic();
	clear_bit(nr, ptr);
}

static inline void __clear_bit_unlock(unsigned long nr,
				      volatile unsigned long *ptr)
{
	smp_mb();
	__clear_bit(nr, ptr);
}
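
/*
 * Usage sketch (illustration only; LOCK_BIT and lockp are hypothetical):
 * the lock bitops can implement a simple bit spinlock:
 *
 *	while (test_and_set_bit_lock(LOCK_BIT, lockp))
 *		cpu_relax();		// already set: spin until released
 *	... critical section ...
 *	clear_bit_unlock(LOCK_BIT, lockp);
 */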

/*
 * Functions which use MSB0 bit numbering.
 * The bits are numbered:
 *   |0..............63|64............127|128...........191|192...........255|
 */
unsigned long find_first_bit_inv(const unsigned long *addr, unsigned long size);
unsigned long find_next_bit_inv(const unsigned long *addr, unsigned long size,
				unsigned long offset);

static inline void set_bit_inv(unsigned long nr, volatile unsigned long *ptr)
{
	return set_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}

static inline void clear_bit_inv(unsigned long nr, volatile unsigned long *ptr)
{
	return clear_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}

static inline void __set_bit_inv(unsigned long nr, volatile unsigned long *ptr)
{
	return __set_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}

static inline void __clear_bit_inv(unsigned long nr, volatile unsigned long *ptr)
{
	return __clear_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}

static inline int test_bit_inv(unsigned long nr,
			       const volatile unsigned long *ptr)
{
	return test_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}
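
/*
 * Example (illustration only; "map" is a hypothetical bitmap): MSB0
 * bit 0 is the most significant bit of the first word (0 ^ 63 = 63):
 *
 *	set_bit_inv(0, map);	// map[0] == 0x8000000000000000UL
 *	set_bit_inv(64, map);	// map[1] == 0x8000000000000000UL
 */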

#ifdef CONFIG_HAVE_MARCH_Z9_109_FEATURES

/**
 * __flogr - find leftmost one
 * @word: The word to search
 *
 * Returns the bit number of the most significant bit set,
 * where the most significant bit has bit number 0.
 * If no bit is set this function returns 64.
 */
static inline unsigned char __flogr(unsigned long word)
{
	if (__builtin_constant_p(word)) {
		unsigned long bit = 0;

		if (!word)
			return 64;
		if (!(word & 0xffffffff00000000UL)) {
			word <<= 32;
			bit += 32;
		}
		if (!(word & 0xffff000000000000UL)) {
			word <<= 16;
			bit += 16;
		}
		if (!(word & 0xff00000000000000UL)) {
			word <<= 8;
			bit += 8;
		}
		if (!(word & 0xf000000000000000UL)) {
			word <<= 4;
			bit += 4;
		}
		if (!(word & 0xc000000000000000UL)) {
			word <<= 2;
			bit += 2;
		}
		if (!(word & 0x8000000000000000UL)) {
			word <<= 1;
			bit += 1;
		}
		return bit;
	} else {
		/*
		 * flogr writes to an even/odd register pair; "out" claims
		 * the odd register (r5) that the instruction also sets.
		 */
		register unsigned long bit asm("4") = word;
		register unsigned long out asm("5");

		asm volatile(
			"       flogr   %[bit],%[bit]\n"
			: [bit] "+d" (bit), [out] "=d" (out) : : "cc");
		return bit;
	}
}
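
/*
 * Example values (illustration only):
 *
 *	__flogr(0)			= 64	(no bit set)
 *	__flogr(1)			= 63	(only the lsb set)
 *	__flogr(0x8000000000000000UL)	= 0	(only the msb set)
 */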

/**
 * __ffs - find first bit in word.
 * @word: The word to search
 *
 * Undefined if no bit exists, so code should check against 0 first.
 */
static inline unsigned long __ffs(unsigned long word)
{
	return __flogr(-word & word) ^ (BITS_PER_LONG - 1);
}
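
/*
 * Example (illustration only): -word & word isolates the lowest set bit:
 * for word = 0x50 it yields 0x10, __flogr(0x10) = 59, and 59 ^ 63 = 4,
 * the LSB0 number of that bit.
 */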

/**
 * ffs - find first bit set
 * @word: the word to search
 *
 * This is defined the same way as the libc and
 * compiler builtin ffs routines (man ffs).
 */
static inline int ffs(int word)
{
	unsigned long mask = 2 * BITS_PER_LONG - 1;
	unsigned int val = (unsigned int)word;

	return (1 + (__flogr(-val & val) ^ (BITS_PER_LONG - 1))) & mask;
}
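
/*
 * Example (illustration only): the final mask folds the "no bit set"
 * case back to 0:
 *
 *	ffs(0): __flogr(0) = 64, 64 ^ 63 = 127, (1 + 127) & 127 = 0
 *	ffs(1): __flogr(1) = 63, 63 ^ 63 = 0,   (1 + 0) & 127   = 1
 */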

/**
 * __fls - find last (most-significant) set bit in a long word
 * @word: the word to search
 *
 * Undefined if no set bit exists, so code should check against 0 first.
 */
static inline unsigned long __fls(unsigned long word)
{
	return __flogr(word) ^ (BITS_PER_LONG - 1);
}

/**
 * fls64 - find last set bit in a 64-bit word
 * @word: the word to search
 *
 * This is defined in a similar way as the libc and compiler builtin
 * ffsll, but returns the position of the most significant set bit.
 *
 * fls64(value) returns 0 if value is 0 or the position of the last
 * set bit if value is nonzero. The last (most significant) bit is
 * at position 64.
 */
static inline int fls64(unsigned long word)
{
	unsigned long mask = 2 * BITS_PER_LONG - 1;

	return (1 + (__flogr(word) ^ (BITS_PER_LONG - 1))) & mask;
}

/**
 * fls - find last (most-significant) bit set
 * @word: the word to search
 *
 * This is defined the same way as ffs.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */
static inline int fls(int word)
{
	return fls64((unsigned int)word);
}
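
/*
 * Example values (illustration only):
 *
 *	fls64(0) = 0,	fls64(1) = 1,	fls64(1UL << 63) = 64
 *	__fls(1) = 0,	__fls(1UL << 63) = 63
 */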

#else /* CONFIG_HAVE_MARCH_Z9_109_FEATURES */

#include <asm-generic/bitops/__ffs.h>
#include <asm-generic/bitops/ffs.h>
#include <asm-generic/bitops/__fls.h>
#include <asm-generic/bitops/fls.h>
#include <asm-generic/bitops/fls64.h>

#endif /* CONFIG_HAVE_MARCH_Z9_109_FEATURES */

#include <asm-generic/bitops/ffz.h>
#include <asm-generic/bitops/find.h>
#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic-setbit.h>

#endif /* _S390_BITOPS_H */