xref: /openbmc/linux/arch/s390/include/asm/bitops.h (revision 293d5b43)
1 /*
2  *    Copyright IBM Corp. 1999,2013
3  *
4  *    Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>,
5  *
6  * The description below was taken in large parts from the powerpc
7  * bitops header file:
8  * Within a word, bits are numbered LSB first.  Lots of places make
9  * this assumption by directly testing bits with (val & (1<<nr)).
10  * This can cause confusion for large (> 1 word) bitmaps on a
11  * big-endian system because, unlike little endian, the number of each
12  * bit depends on the word size.
13  *
14  * The bitop functions are defined to work on unsigned longs, so the bits
15  * end up numbered:
16  *   |63..............0|127............64|191...........128|255...........192|
17  *
18  * There are a few little-endian macros used mostly for filesystem
19  * bitmaps, these work on similar bit array layouts, but byte-oriented:
20  *   |7...0|15...8|23...16|31...24|39...32|47...40|55...48|63...56|
21  *
22  * The main difference is that bits 3-5 in the bit number field need to be
23  * reversed compared to the big-endian bit fields. This can be achieved by
24  * XOR with 0x38.
25  *
26  * We also have special functions which work with an MSB0 encoding.
27  * The bits are numbered:
28  *   |0..............63|64............127|128...........191|192...........255|
29  *
30  * The main difference is that bits 0-5 in the bit number field need to be
31  * reversed compared to the LSB0 encoded bit fields. This can be achieved by
32  * XOR with 0x3f.
33  *
34  */
35 
36 #ifndef _S390_BITOPS_H
37 #define _S390_BITOPS_H
38 
39 #ifndef _LINUX_BITOPS_H
40 #error only <linux/bitops.h> can be included directly
41 #endif
42 
43 #include <linux/typecheck.h>
44 #include <linux/compiler.h>
45 #include <asm/barrier.h>
46 
/* "No barrier": just terminates the inline-assembly instruction string. */
#define __BITOPS_NO_BARRIER	"\n"

#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES

/*
 * z196 and newer: interlocked-access facility instructions
 * (load-and-or/and/xor, 64-bit forms) perform the update atomically
 * and return the old value in a single instruction.
 */
#define __BITOPS_OR		"laog"
#define __BITOPS_AND		"lang"
#define __BITOPS_XOR		"laxg"
/* bcr 14,0: fast serialization, used where full barrier semantics are needed */
#define __BITOPS_BARRIER	"bcr	14,0\n"
55 
/*
 * Atomically apply __op_string (one of __BITOPS_OR/AND/XOR) with __val
 * to the unsigned long at __addr and yield the previous value.
 * __barrier is emitted after the instruction: __BITOPS_BARRIER for the
 * ordered (test_and_*) variants, __BITOPS_NO_BARRIER otherwise.
 * Not a loop on this machine level - the interlocked instruction does
 * the whole read-modify-write; the name is kept for the fallback path.
 */
#define __BITOPS_LOOP(__addr, __val, __op_string, __barrier)	\
({								\
	unsigned long __old;					\
								\
	typecheck(unsigned long *, (__addr));			\
	asm volatile(						\
		__op_string "	%0,%2,%1\n"			\
		__barrier					\
		: "=d" (__old),	"+Q" (*(__addr))		\
		: "d" (__val)					\
		: "cc", "memory");				\
	__old;							\
})
69 
#else /* CONFIG_HAVE_MARCH_Z196_FEATURES */

/*
 * Pre-z196: plain 64-bit register operations; atomicity is provided by
 * the compare-and-swap (csg) retry loop in __BITOPS_LOOP instead.
 */
#define __BITOPS_OR		"ogr"
#define __BITOPS_AND		"ngr"
#define __BITOPS_XOR		"xgr"
/* csg already serializes, so no extra barrier instruction is needed */
#define __BITOPS_BARRIER	"\n"
76 
/*
 * Compare-and-swap fallback: load the old value, apply __op_string into
 * a scratch register, then csg it back; retry (jl 0b) until no other
 * CPU changed the word in between. Yields the previous value.
 * __barrier is accepted for interface parity with the z196 variant but
 * is unused here (csg itself serializes).
 */
#define __BITOPS_LOOP(__addr, __val, __op_string, __barrier)	\
({								\
	unsigned long __old, __new;				\
								\
	typecheck(unsigned long *, (__addr));			\
	asm volatile(						\
		"	lg	%0,%2\n"			\
		"0:	lgr	%1,%0\n"			\
		__op_string "	%1,%3\n"			\
		"	csg	%0,%1,%2\n"			\
		"	jl	0b"				\
		: "=&d" (__old), "=&d" (__new), "+Q" (*(__addr))\
		: "d" (__val)					\
		: "cc", "memory");				\
	__old;							\
})
93 
94 #endif /* CONFIG_HAVE_MARCH_Z196_FEATURES */
95 
/* Number of unsigned longs needed to hold a bitmap of the given bit count. */
#define __BITOPS_WORDS(bits) (((bits) + BITS_PER_LONG - 1) / BITS_PER_LONG)
97 
98 static inline unsigned long *
99 __bitops_word(unsigned long nr, volatile unsigned long *ptr)
100 {
101 	unsigned long addr;
102 
103 	addr = (unsigned long)ptr + ((nr ^ (nr & (BITS_PER_LONG - 1))) >> 3);
104 	return (unsigned long *)addr;
105 }
106 
107 static inline unsigned char *
108 __bitops_byte(unsigned long nr, volatile unsigned long *ptr)
109 {
110 	return ((unsigned char *)ptr) + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
111 }
112 
/*
 * set_bit - atomically set bit nr in the bitmap at ptr.
 * No memory barrier is implied (standard Linux set_bit semantics).
 */
static inline void set_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask;

#ifdef CONFIG_HAVE_MARCH_ZEC12_FEATURES
	/*
	 * For a compile-time constant bit number use a single "oi" on the
	 * containing byte; presumably relies on oi being an interlocked
	 * update on zEC12+ (interlocked-access facility 2) - the "i"
	 * constraint requires the constant mask, hence the
	 * __builtin_constant_p guard.
	 */
	if (__builtin_constant_p(nr)) {
		unsigned char *caddr = __bitops_byte(nr, ptr);

		asm volatile(
			"oi	%0,%b1\n"
			: "+Q" (*caddr)
			: "i" (1 << (nr & 7))
			: "cc", "memory");
		return;
	}
#endif
	/* Generic path: atomic OR of the bit into the containing word. */
	mask = 1UL << (nr & (BITS_PER_LONG - 1));
	__BITOPS_LOOP(addr, mask, __BITOPS_OR, __BITOPS_NO_BARRIER);
}
133 
/*
 * clear_bit - atomically clear bit nr in the bitmap at ptr.
 * No memory barrier is implied (standard Linux clear_bit semantics).
 */
static inline void clear_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask;

#ifdef CONFIG_HAVE_MARCH_ZEC12_FEATURES
	/* Constant bit number: single "ni" (AND immediate) on the byte. */
	if (__builtin_constant_p(nr)) {
		unsigned char *caddr = __bitops_byte(nr, ptr);

		asm volatile(
			"ni	%0,%b1\n"
			: "+Q" (*caddr)
			: "i" (~(1 << (nr & 7)))
			: "cc", "memory");
		return;
	}
#endif
	/* Generic path: atomic AND with the inverted bit mask. */
	mask = ~(1UL << (nr & (BITS_PER_LONG - 1)));
	__BITOPS_LOOP(addr, mask, __BITOPS_AND, __BITOPS_NO_BARRIER);
}
154 
/*
 * change_bit - atomically toggle bit nr in the bitmap at ptr.
 * No memory barrier is implied (standard Linux change_bit semantics).
 */
static inline void change_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask;

#ifdef CONFIG_HAVE_MARCH_ZEC12_FEATURES
	/* Constant bit number: single "xi" (XOR immediate) on the byte. */
	if (__builtin_constant_p(nr)) {
		unsigned char *caddr = __bitops_byte(nr, ptr);

		asm volatile(
			"xi	%0,%b1\n"
			: "+Q" (*caddr)
			: "i" (1 << (nr & 7))
			: "cc", "memory");
		return;
	}
#endif
	/* Generic path: atomic XOR of the bit into the containing word. */
	mask = 1UL << (nr & (BITS_PER_LONG - 1));
	__BITOPS_LOOP(addr, mask, __BITOPS_XOR, __BITOPS_NO_BARRIER);
}
175 
176 static inline int
177 test_and_set_bit(unsigned long nr, volatile unsigned long *ptr)
178 {
179 	unsigned long *addr = __bitops_word(nr, ptr);
180 	unsigned long old, mask;
181 
182 	mask = 1UL << (nr & (BITS_PER_LONG - 1));
183 	old = __BITOPS_LOOP(addr, mask, __BITOPS_OR, __BITOPS_BARRIER);
184 	return (old & mask) != 0;
185 }
186 
187 static inline int
188 test_and_clear_bit(unsigned long nr, volatile unsigned long *ptr)
189 {
190 	unsigned long *addr = __bitops_word(nr, ptr);
191 	unsigned long old, mask;
192 
193 	mask = ~(1UL << (nr & (BITS_PER_LONG - 1)));
194 	old = __BITOPS_LOOP(addr, mask, __BITOPS_AND, __BITOPS_BARRIER);
195 	return (old & ~mask) != 0;
196 }
197 
198 static inline int
199 test_and_change_bit(unsigned long nr, volatile unsigned long *ptr)
200 {
201 	unsigned long *addr = __bitops_word(nr, ptr);
202 	unsigned long old, mask;
203 
204 	mask = 1UL << (nr & (BITS_PER_LONG - 1));
205 	old = __BITOPS_LOOP(addr, mask, __BITOPS_XOR, __BITOPS_BARRIER);
206 	return (old & mask) != 0;
207 }
208 
209 static inline void __set_bit(unsigned long nr, volatile unsigned long *ptr)
210 {
211 	unsigned char *addr = __bitops_byte(nr, ptr);
212 
213 	*addr |= 1 << (nr & 7);
214 }
215 
216 static inline void
217 __clear_bit(unsigned long nr, volatile unsigned long *ptr)
218 {
219 	unsigned char *addr = __bitops_byte(nr, ptr);
220 
221 	*addr &= ~(1 << (nr & 7));
222 }
223 
224 static inline void __change_bit(unsigned long nr, volatile unsigned long *ptr)
225 {
226 	unsigned char *addr = __bitops_byte(nr, ptr);
227 
228 	*addr ^= 1 << (nr & 7);
229 }
230 
/*
 * __test_and_set_bit - non-atomically set bit nr and return its old value.
 * Callers must serialize against other writers of the same word.
 */
static inline int
__test_and_set_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned char *byte = __bitops_byte(nr, ptr);
	unsigned char mask = 1 << (nr & 7);
	int old = (*byte & mask) != 0;

	*byte |= mask;
	return old;
}
241 
/*
 * __test_and_clear_bit - non-atomically clear bit nr and return its old
 * value. Callers must serialize against other writers of the same word.
 */
static inline int
__test_and_clear_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned char *byte = __bitops_byte(nr, ptr);
	unsigned char mask = 1 << (nr & 7);
	int old = (*byte & mask) != 0;

	*byte &= ~mask;
	return old;
}
252 
/*
 * __test_and_change_bit - non-atomically toggle bit nr and return its old
 * value. Callers must serialize against other writers of the same word.
 */
static inline int
__test_and_change_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned char *byte = __bitops_byte(nr, ptr);
	unsigned char mask = 1 << (nr & 7);
	int old = (*byte & mask) != 0;

	*byte ^= mask;
	return old;
}
263 
264 static inline int test_bit(unsigned long nr, const volatile unsigned long *ptr)
265 {
266 	const volatile unsigned char *addr;
267 
268 	addr = ((const volatile unsigned char *)ptr);
269 	addr += (nr ^ (BITS_PER_LONG - 8)) >> 3;
270 	return (*addr >> (nr & 7)) & 1;
271 }
272 
/*
 * test_and_set_bit_lock - acquire-style test_and_set_bit.
 * Peeks at the bit first to skip the serialized atomic operation when
 * the lock bit is already set.
 */
static inline int test_and_set_bit_lock(unsigned long nr,
					volatile unsigned long *ptr)
{
	return test_bit(nr, ptr) ? 1 : test_and_set_bit(nr, ptr);
}
280 
/*
 * clear_bit_unlock - release-style clear_bit.
 * The barrier orders all stores in the critical section before the
 * atomic clear that releases the lock bit.
 */
static inline void clear_bit_unlock(unsigned long nr,
				    volatile unsigned long *ptr)
{
	smp_mb__before_atomic();
	clear_bit(nr, ptr);
}
287 
/*
 * __clear_bit_unlock - release-style clear using the non-atomic
 * __clear_bit after a full barrier.
 * NOTE(review): safe only when no other bits in the same word can be
 * modified concurrently - confirm against the generic
 * __clear_bit_unlock contract.
 */
static inline void __clear_bit_unlock(unsigned long nr,
				      volatile unsigned long *ptr)
{
	smp_mb();
	__clear_bit(nr, ptr);
}
294 
295 /*
296  * Functions which use MSB0 bit numbering.
297  * The bits are numbered:
298  *   |0..............63|64............127|128...........191|192...........255|
299  */
300 unsigned long find_first_bit_inv(const unsigned long *addr, unsigned long size);
301 unsigned long find_next_bit_inv(const unsigned long *addr, unsigned long size,
302 				unsigned long offset);
303 
304 static inline void set_bit_inv(unsigned long nr, volatile unsigned long *ptr)
305 {
306 	return set_bit(nr ^ (BITS_PER_LONG - 1), ptr);
307 }
308 
309 static inline void clear_bit_inv(unsigned long nr, volatile unsigned long *ptr)
310 {
311 	return clear_bit(nr ^ (BITS_PER_LONG - 1), ptr);
312 }
313 
314 static inline void __set_bit_inv(unsigned long nr, volatile unsigned long *ptr)
315 {
316 	return __set_bit(nr ^ (BITS_PER_LONG - 1), ptr);
317 }
318 
319 static inline void __clear_bit_inv(unsigned long nr, volatile unsigned long *ptr)
320 {
321 	return __clear_bit(nr ^ (BITS_PER_LONG - 1), ptr);
322 }
323 
324 static inline int test_bit_inv(unsigned long nr,
325 			       const volatile unsigned long *ptr)
326 {
327 	return test_bit(nr ^ (BITS_PER_LONG - 1), ptr);
328 }
329 
330 #ifdef CONFIG_HAVE_MARCH_Z9_109_FEATURES
331 
/**
 * __flogr - find leftmost one
 * @word: The word to search
 *
 * Returns the bit number of the most significant bit set,
 * where the most significant bit has bit number 0.
 * If no bit is set this function returns 64.
 */
static inline unsigned char __flogr(unsigned long word)
{
	if (__builtin_constant_p(word)) {
		/*
		 * Constant input: fold the answer at compile time with a
		 * binary search over halves, so no flogr instruction is
		 * emitted at all.
		 */
		unsigned long bit = 0;

		if (!word)
			return 64;
		if (!(word & 0xffffffff00000000UL)) {
			word <<= 32;
			bit += 32;
		}
		if (!(word & 0xffff000000000000UL)) {
			word <<= 16;
			bit += 16;
		}
		if (!(word & 0xff00000000000000UL)) {
			word <<= 8;
			bit += 8;
		}
		if (!(word & 0xf000000000000000UL)) {
			word <<= 4;
			bit += 4;
		}
		if (!(word & 0xc000000000000000UL)) {
			word <<= 2;
			bit += 2;
		}
		if (!(word & 0x8000000000000000UL)) {
			word <<= 1;
			bit += 1;
		}
		return bit;
	} else {
		/*
		 * flogr writes its result into an even/odd register pair;
		 * pin the operands to r4/r5 and declare "out" (r5) so the
		 * compiler knows the odd register is clobbered too.
		 */
		register unsigned long bit asm("4") = word;
		register unsigned long out asm("5");

		asm volatile(
			"       flogr   %[bit],%[bit]\n"
			: [bit] "+d" (bit), [out] "=d" (out) : : "cc");
		return bit;
	}
}
382 
383 /**
384  * __ffs - find first bit in word.
385  * @word: The word to search
386  *
387  * Undefined if no bit exists, so code should check against 0 first.
388  */
389 static inline unsigned long __ffs(unsigned long word)
390 {
391 	return __flogr(-word & word) ^ (BITS_PER_LONG - 1);
392 }
393 
394 /**
395  * ffs - find first bit set
396  * @word: the word to search
397  *
398  * This is defined the same way as the libc and
399  * compiler builtin ffs routines (man ffs).
400  */
401 static inline int ffs(int word)
402 {
403 	unsigned long mask = 2 * BITS_PER_LONG - 1;
404 	unsigned int val = (unsigned int)word;
405 
406 	return (1 + (__flogr(-val & val) ^ (BITS_PER_LONG - 1))) & mask;
407 }
408 
409 /**
410  * __fls - find last (most-significant) set bit in a long word
411  * @word: the word to search
412  *
413  * Undefined if no set bit exists, so code should check against 0 first.
414  */
415 static inline unsigned long __fls(unsigned long word)
416 {
417 	return __flogr(word) ^ (BITS_PER_LONG - 1);
418 }
419 
420 /**
421  * fls64 - find last set bit in a 64-bit word
422  * @word: the word to search
423  *
424  * This is defined in a similar way as the libc and compiler builtin
425  * ffsll, but returns the position of the most significant set bit.
426  *
427  * fls64(value) returns 0 if value is 0 or the position of the last
428  * set bit if value is nonzero. The last (most significant) bit is
429  * at position 64.
430  */
431 static inline int fls64(unsigned long word)
432 {
433 	unsigned long mask = 2 * BITS_PER_LONG - 1;
434 
435 	return (1 + (__flogr(word) ^ (BITS_PER_LONG - 1))) & mask;
436 }
437 
/**
 * fls - find last (most-significant) bit set
 * @word: the word to search
 *
 * This is defined the same way as ffs.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */
static inline int fls(int word)
{
	/* Zero-extend so sign bits do not leak into the 64-bit search. */
	unsigned int val = (unsigned int)word;

	return fls64(val);
}
449 
450 #else /* CONFIG_HAVE_MARCH_Z9_109_FEATURES */
451 
452 #include <asm-generic/bitops/__ffs.h>
453 #include <asm-generic/bitops/ffs.h>
454 #include <asm-generic/bitops/__fls.h>
455 #include <asm-generic/bitops/fls.h>
456 #include <asm-generic/bitops/fls64.h>
457 
458 #endif /* CONFIG_HAVE_MARCH_Z9_109_FEATURES */
459 
460 #include <asm-generic/bitops/ffz.h>
461 #include <asm-generic/bitops/find.h>
462 #include <asm-generic/bitops/hweight.h>
463 #include <asm-generic/bitops/sched.h>
464 #include <asm-generic/bitops/le.h>
465 #include <asm-generic/bitops/ext2-atomic-setbit.h>
466 
467 #endif /* _S390_BITOPS_H */
468