/*
 * Bitmap Module
 *
 * Copyright (C) 2010 Corentin Chary <corentin.chary@gmail.com>
 *
 * Mostly inspired by (stolen from) linux/bitmap.h and linux/bitops.h
 *
 * This work is licensed under the terms of the GNU LGPL, version 2.1 or later.
 * See the COPYING.LIB file in the top-level directory.
 */

#ifndef BITMAP_H
#define BITMAP_H


#include "qemu/bitops.h"

/*
 * The available bitmap operations and their rough meaning in the
 * case that the bitmap is a single unsigned long are thus:
 *
 * Note that nbits should always be a compile-time evaluable constant.
 * Otherwise many inlines will generate horrible code.
 *
 * bitmap_zero(dst, nbits)                      *dst = 0UL
 * bitmap_fill(dst, nbits)                      *dst = ~0UL
 * bitmap_copy(dst, src, nbits)                 *dst = *src
 * bitmap_and(dst, src1, src2, nbits)           *dst = *src1 & *src2
 * bitmap_or(dst, src1, src2, nbits)            *dst = *src1 | *src2
 * bitmap_xor(dst, src1, src2, nbits)           *dst = *src1 ^ *src2
 * bitmap_andnot(dst, src1, src2, nbits)        *dst = *src1 & ~(*src2)
 * bitmap_complement(dst, src, nbits)           *dst = ~(*src)
 * bitmap_equal(src1, src2, nbits)              Are *src1 and *src2 equal?
 * bitmap_intersects(src1, src2, nbits)         Do *src1 and *src2 overlap?
 * bitmap_empty(src, nbits)                     Are all bits zero in *src?
 * bitmap_full(src, nbits)                      Are all bits set in *src?
 * bitmap_set(dst, pos, nbits)                  Set specified bit area
 * bitmap_set_atomic(dst, pos, nbits)           Set specified bit area with atomic ops
 * bitmap_clear(dst, pos, nbits)                Clear specified bit area
 * bitmap_test_and_clear_atomic(dst, pos, nbits)    Test and clear area
 * bitmap_find_next_zero_area(buf, len, pos, n, mask)  Find bit free area
 * bitmap_to_le(dst, src, nbits)      Convert bitmap to little endian
 * bitmap_from_le(dst, src, nbits)    Convert bitmap from little endian
 * bitmap_copy_with_src_offset(dst, src, offset, nbits)
 *                                    *dst = *src (with an offset into src)
 * bitmap_copy_with_dst_offset(dst, src, offset, nbits)
 *                                    *dst = *src (with an offset into dst)
 */
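
/*
 * Illustrative sketch (not part of the API): declare a fixed-size bitmap
 * and combine it with the helpers above.  'dirty', 'mask' and the sizes
 * are made-up names for the example.
 *
 *     DECLARE_BITMAP(dirty, 256);
 *     DECLARE_BITMAP(mask, 256);
 *
 *     bitmap_zero(dirty, 256);
 *     bitmap_fill(mask, 256);
 *     bitmap_set(dirty, 8, 16);                    // mark bits [8, 24)
 *     if (bitmap_intersects(dirty, mask, 256)) {
 *         // at least one dirty bit is also set in mask
 *     }
 */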

/*
 * Also the following operations apply to bitmaps.
 *
 * set_bit(bit, addr)               *addr |= bit
 * clear_bit(bit, addr)             *addr &= ~bit
 * change_bit(bit, addr)            *addr ^= bit
 * test_bit(bit, addr)              Is bit set in *addr?
 * test_and_set_bit(bit, addr)      Set bit and return old value
 * test_and_clear_bit(bit, addr)    Clear bit and return old value
 * test_and_change_bit(bit, addr)   Change bit and return old value
 * find_first_zero_bit(addr, nbits) Position first zero bit in *addr
 * find_first_bit(addr, nbits)      Position first set bit in *addr
 * find_next_zero_bit(addr, nbits, bit) Position next zero bit in *addr >= bit
 * find_next_bit(addr, nbits, bit)  Position next set bit in *addr >= bit
 */
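
/*
 * Illustrative sketch (hypothetical names): the single-bit helpers from
 * qemu/bitops.h operate on the same unsigned long array storage.
 *
 *     DECLARE_BITMAP(flags, 64);
 *
 *     bitmap_zero(flags, 64);
 *     set_bit(3, flags);
 *     if (test_bit(3, flags)) {
 *         clear_bit(3, flags);
 *     }
 */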

#define BITMAP_FIRST_WORD_MASK(start) (~0UL << ((start) & (BITS_PER_LONG - 1)))
#define BITMAP_LAST_WORD_MASK(nbits) (~0UL >> (-(nbits) & (BITS_PER_LONG - 1)))
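/*
 * Roughly: BITMAP_FIRST_WORD_MASK(start) keeps the bits at and above 'start'
 * within start's word, and BITMAP_LAST_WORD_MASK(nbits) keeps the bits of the
 * final word that an nbits-sized bitmap actually covers.  For example, with
 * 64-bit longs, BITMAP_LAST_WORD_MASK(68) is 0xf (68 = 64 + 4 bits).
 */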

#define DECLARE_BITMAP(name,bits)                  \
        unsigned long name[BITS_TO_LONGS(bits)]

/*
 * This is for use with the bit32 versions of set_bit() etc.;
 * we don't currently support the full range of bitmap operations
 * on bitmaps backed by an array of uint32_t.
 */
#define DECLARE_BITMAP32(name, bits)            \
        uint32_t name[BITS_TO_U32S(bits)]

/* True when the whole bitmap fits in a single long (inline fast path). */
#define small_nbits(nbits)                      \
        ((nbits) <= BITS_PER_LONG)

int slow_bitmap_empty(const unsigned long *bitmap, long bits);
int slow_bitmap_full(const unsigned long *bitmap, long bits);
int slow_bitmap_equal(const unsigned long *bitmap1,
                      const unsigned long *bitmap2, long bits);
void slow_bitmap_complement(unsigned long *dst, const unsigned long *src,
                            long bits);
int slow_bitmap_and(unsigned long *dst, const unsigned long *bitmap1,
                    const unsigned long *bitmap2, long bits);
void slow_bitmap_or(unsigned long *dst, const unsigned long *bitmap1,
                    const unsigned long *bitmap2, long bits);
void slow_bitmap_xor(unsigned long *dst, const unsigned long *bitmap1,
                     const unsigned long *bitmap2, long bits);
int slow_bitmap_andnot(unsigned long *dst, const unsigned long *bitmap1,
                       const unsigned long *bitmap2, long bits);
int slow_bitmap_intersects(const unsigned long *bitmap1,
                           const unsigned long *bitmap2, long bits);
long slow_bitmap_count_one(const unsigned long *bitmap, long nbits);

static inline unsigned long *bitmap_try_new(long nbits)
{
    long nelem = BITS_TO_LONGS(nbits);
    return g_try_new0(unsigned long, nelem);
}

static inline unsigned long *bitmap_new(long nbits)
{
    long nelem = BITS_TO_LONGS(nbits);
    return g_new0(unsigned long, nelem);
}
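
/*
 * Heap-allocated bitmaps: a minimal usage sketch (names are made up).
 * bitmap_try_new() uses g_try_new0() and so returns NULL when allocation
 * fails, while bitmap_new() uses g_new0() and aborts on failure.
 *
 *     unsigned long *map = bitmap_new(nbits);
 *     bitmap_set(map, 0, nbits);
 *     ...
 *     g_free(map);
 */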

static inline void bitmap_zero(unsigned long *dst, long nbits)
{
    if (small_nbits(nbits)) {
        *dst = 0UL;
    } else {
        long len = BITS_TO_LONGS(nbits) * sizeof(unsigned long);
        memset(dst, 0, len);
    }
}

static inline void bitmap_fill(unsigned long *dst, long nbits)
{
    size_t nlongs = BITS_TO_LONGS(nbits);
    if (!small_nbits(nbits)) {
        long len = (nlongs - 1) * sizeof(unsigned long);
        memset(dst, 0xff, len);
    }
    /* Only the bits covered by nbits are set in the last word. */
    dst[nlongs - 1] = BITMAP_LAST_WORD_MASK(nbits);
}

static inline void bitmap_copy(unsigned long *dst, const unsigned long *src,
                               long nbits)
{
    if (small_nbits(nbits)) {
        *dst = *src;
    } else {
        long len = BITS_TO_LONGS(nbits) * sizeof(unsigned long);
        memcpy(dst, src, len);
    }
}

static inline int bitmap_and(unsigned long *dst, const unsigned long *src1,
                             const unsigned long *src2, long nbits)
{
    if (small_nbits(nbits)) {
        return (*dst = *src1 & *src2) != 0;
    }
    return slow_bitmap_and(dst, src1, src2, nbits);
}

static inline void bitmap_or(unsigned long *dst, const unsigned long *src1,
                             const unsigned long *src2, long nbits)
{
    if (small_nbits(nbits)) {
        *dst = *src1 | *src2;
    } else {
        slow_bitmap_or(dst, src1, src2, nbits);
    }
}

static inline void bitmap_xor(unsigned long *dst, const unsigned long *src1,
                              const unsigned long *src2, long nbits)
{
    if (small_nbits(nbits)) {
        *dst = *src1 ^ *src2;
    } else {
        slow_bitmap_xor(dst, src1, src2, nbits);
    }
}

static inline int bitmap_andnot(unsigned long *dst, const unsigned long *src1,
                                const unsigned long *src2, long nbits)
{
    if (small_nbits(nbits)) {
        return (*dst = *src1 & ~(*src2)) != 0;
    }
    return slow_bitmap_andnot(dst, src1, src2, nbits);
}

static inline void bitmap_complement(unsigned long *dst,
                                     const unsigned long *src,
                                     long nbits)
{
    if (small_nbits(nbits)) {
        *dst = ~(*src) & BITMAP_LAST_WORD_MASK(nbits);
    } else {
        slow_bitmap_complement(dst, src, nbits);
    }
}

static inline int bitmap_equal(const unsigned long *src1,
                               const unsigned long *src2, long nbits)
{
    if (small_nbits(nbits)) {
        return ! ((*src1 ^ *src2) & BITMAP_LAST_WORD_MASK(nbits));
    } else {
        return slow_bitmap_equal(src1, src2, nbits);
    }
}

static inline int bitmap_empty(const unsigned long *src, long nbits)
{
    if (small_nbits(nbits)) {
        return ! (*src & BITMAP_LAST_WORD_MASK(nbits));
    } else {
        return slow_bitmap_empty(src, nbits);
    }
}

static inline int bitmap_full(const unsigned long *src, long nbits)
{
    if (small_nbits(nbits)) {
        return ! (~(*src) & BITMAP_LAST_WORD_MASK(nbits));
    } else {
        return slow_bitmap_full(src, nbits);
    }
}

static inline int bitmap_intersects(const unsigned long *src1,
                                    const unsigned long *src2, long nbits)
{
    if (small_nbits(nbits)) {
        return ((*src1 & *src2) & BITMAP_LAST_WORD_MASK(nbits)) != 0;
    } else {
        return slow_bitmap_intersects(src1, src2, nbits);
    }
}

static inline long bitmap_count_one(const unsigned long *bitmap, long nbits)
{
    if (unlikely(!nbits)) {
        return 0;
    }

    if (small_nbits(nbits)) {
        return ctpopl(*bitmap & BITMAP_LAST_WORD_MASK(nbits));
    } else {
        return slow_bitmap_count_one(bitmap, nbits);
    }
}

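/*
 * Count the set bits in the range [offset, offset + nbits) of 'bitmap'.
 * The count starts from the word-aligned position below 'offset' and the
 * set bits in the leading "redundant" area are subtracted out, so 'offset'
 * does not have to be word aligned.
 */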
static inline long bitmap_count_one_with_offset(const unsigned long *bitmap,
                                                long offset, long nbits)
{
    long aligned_offset = QEMU_ALIGN_DOWN(offset, BITS_PER_LONG);
    long redundant_bits = offset - aligned_offset;
    long bits_to_count = nbits + redundant_bits;
    const unsigned long *bitmap_start = bitmap +
                                        aligned_offset / BITS_PER_LONG;

    return bitmap_count_one(bitmap_start, bits_to_count) -
           bitmap_count_one(bitmap_start, redundant_bits);
}

void bitmap_set(unsigned long *map, long i, long len);
void bitmap_set_atomic(unsigned long *map, long i, long len);
void bitmap_clear(unsigned long *map, long start, long nr);
bool bitmap_test_and_clear_atomic(unsigned long *map, long start, long nr);
bool bitmap_test_and_clear(unsigned long *map, long start, long nr);
void bitmap_copy_and_clear_atomic(unsigned long *dst, unsigned long *src,
                                  long nr);
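/*
 * Search 'map' (of 'size' bits), starting at 'start', for a run of 'nr'
 * clear bits whose first bit is aligned according to 'align_mask', and
 * return its offset.  Modelled on the Linux helper of the same name; see
 * util/bitmap.c for the exact return convention when no area is found.
 */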
unsigned long bitmap_find_next_zero_area(unsigned long *map,
                                         unsigned long size,
                                         unsigned long start,
                                         unsigned long nr,
                                         unsigned long align_mask);

static inline unsigned long *bitmap_zero_extend(unsigned long *old,
                                                long old_nbits, long new_nbits)
{
    long new_nelem = BITS_TO_LONGS(new_nbits);
    unsigned long *ptr = g_renew(unsigned long, old, new_nelem);
    bitmap_clear(ptr, old_nbits, new_nbits - old_nbits);
    return ptr;
}
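
/*
 * Example (hypothetical sizes): grow a bitmap allocated with bitmap_new();
 * the old contents are preserved and the newly added bits read as zero.
 *
 *     map = bitmap_zero_extend(map, old_nbits, new_nbits);
 */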

void bitmap_to_le(unsigned long *dst, const unsigned long *src,
                  long nbits);
void bitmap_from_le(unsigned long *dst, const unsigned long *src,
                    long nbits);

void bitmap_copy_with_src_offset(unsigned long *dst, const unsigned long *src,
                                 unsigned long offset, unsigned long nbits);
void bitmap_copy_with_dst_offset(unsigned long *dst, const unsigned long *src,
                                 unsigned long shift, unsigned long nbits);

#endif /* BITMAP_H */