/*
 * lib/bitmap.c
 * Helper functions for bitmap.h.
 *
 * This source code is licensed under the GNU General Public License,
 * Version 2. See the file COPYING for more details.
 */
#include <linux/export.h>
#include <linux/thread_info.h>
#include <linux/ctype.h>
#include <linux/errno.h>
#include <linux/bitmap.h>
#include <linux/bitops.h>
#include <linux/bug.h>
#include <linux/kernel.h>
#include <linux/string.h>
#include <linux/uaccess.h>

#include <asm/page.h>

/*
 * bitmaps provide an array of bits, implemented using an
 * array of unsigned longs. The number of valid bits in a
 * given bitmap does _not_ need to be an exact multiple of
 * BITS_PER_LONG.
 *
 * The possible unused bits in the last, partially used word
 * of a bitmap are 'don't care'. The implementation makes
 * no particular effort to keep them zero. It ensures that
 * their value will not affect the results of any operation.
 * The bitmap operations that return Boolean (bitmap_empty,
 * for example) or scalar (bitmap_weight, for example) results
 * carefully filter out these unused bits from impacting their
 * results.
 *
 * These operations actually hold to a slightly stronger rule:
 * if you don't input any bitmaps to these ops that have some
 * unused bits set, then they won't output any set unused bits
 * in output bitmaps.
 *
 * The byte ordering of bitmaps is more natural on little
 * endian architectures. See the big-endian headers
 * include/asm-ppc64/bitops.h and include/asm-s390/bitops.h
 * for the best explanations of this ordering.
 */

int __bitmap_equal(const unsigned long *bitmap1,
		const unsigned long *bitmap2, unsigned int bits)
{
	unsigned int k, lim = bits/BITS_PER_LONG;
	for (k = 0; k < lim; ++k)
		if (bitmap1[k] != bitmap2[k])
			return 0;

	if (bits % BITS_PER_LONG)
		if ((bitmap1[k] ^ bitmap2[k]) & BITMAP_LAST_WORD_MASK(bits))
			return 0;

	return 1;
}
EXPORT_SYMBOL(__bitmap_equal);

void __bitmap_complement(unsigned long *dst, const unsigned long *src, unsigned int bits)
{
	unsigned int k, lim = bits/BITS_PER_LONG;
	for (k = 0; k < lim; ++k)
		dst[k] = ~src[k];

	if (bits % BITS_PER_LONG)
		dst[k] = ~src[k];
}
EXPORT_SYMBOL(__bitmap_complement);

/**
 * __bitmap_shift_right - logical right shift of the bits in a bitmap
 * @dst : destination bitmap
 * @src : source bitmap
 * @shift : shift by this many bits
 * @nbits : bitmap size, in bits
 *
 * Shifting right (dividing) means moving bits in the MS -> LS bit
 * direction. Zeros are fed into the vacated MS positions and the
 * LS bits shifted off the bottom are lost.
 */
void __bitmap_shift_right(unsigned long *dst, const unsigned long *src,
			unsigned shift, unsigned nbits)
{
	unsigned k, lim = BITS_TO_LONGS(nbits);
	unsigned off = shift/BITS_PER_LONG, rem = shift % BITS_PER_LONG;
	unsigned long mask = BITMAP_LAST_WORD_MASK(nbits);
	for (k = 0; off + k < lim; ++k) {
		unsigned long upper, lower;

		/*
		 * If shift is not word aligned, take lower rem bits of
		 * word above and make them the top rem bits of result.
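		 *
		 * As an illustrative aside (not from the original text):
		 * with BITS_PER_LONG == 64 and rem == 4, the low 4 bits of
		 * src[off + k + 1] are shifted up by 60 so they become the
		 * top 4 bits of dst[k], while src[off + k] >> 4 supplies
		 * the remaining 60 bits.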
		 */
		if (!rem || off + k + 1 >= lim)
			upper = 0;
		else {
			upper = src[off + k + 1];
			if (off + k + 1 == lim - 1)
				upper &= mask;
			upper <<= (BITS_PER_LONG - rem);
		}
		lower = src[off + k];
		if (off + k == lim - 1)
			lower &= mask;
		lower >>= rem;
		dst[k] = lower | upper;
	}
	if (off)
		memset(&dst[lim - off], 0, off*sizeof(unsigned long));
}
EXPORT_SYMBOL(__bitmap_shift_right);


/**
 * __bitmap_shift_left - logical left shift of the bits in a bitmap
 * @dst : destination bitmap
 * @src : source bitmap
 * @shift : shift by this many bits
 * @nbits : bitmap size, in bits
 *
 * Shifting left (multiplying) means moving bits in the LS -> MS
 * direction. Zeros are fed into the vacated LS bit positions
 * and those MS bits shifted off the top are lost.
 */

void __bitmap_shift_left(unsigned long *dst, const unsigned long *src,
			unsigned int shift, unsigned int nbits)
{
	int k;
	unsigned int lim = BITS_TO_LONGS(nbits);
	unsigned int off = shift/BITS_PER_LONG, rem = shift % BITS_PER_LONG;
	for (k = lim - off - 1; k >= 0; --k) {
		unsigned long upper, lower;

		/*
		 * If shift is not word aligned, take upper rem bits of
		 * word below and make them the bottom rem bits of result.
		 */
		if (rem && k > 0)
			lower = src[k - 1] >> (BITS_PER_LONG - rem);
		else
			lower = 0;
		upper = src[k] << rem;
		dst[k + off] = lower | upper;
	}
	if (off)
		memset(dst, 0, off*sizeof(unsigned long));
}
EXPORT_SYMBOL(__bitmap_shift_left);

int __bitmap_and(unsigned long *dst, const unsigned long *bitmap1,
				const unsigned long *bitmap2, unsigned int bits)
{
	unsigned int k;
	unsigned int lim = bits/BITS_PER_LONG;
	unsigned long result = 0;

	for (k = 0; k < lim; k++)
		result |= (dst[k] = bitmap1[k] & bitmap2[k]);
	if (bits % BITS_PER_LONG)
		result |= (dst[k] = bitmap1[k] & bitmap2[k] &
			   BITMAP_LAST_WORD_MASK(bits));
	return result != 0;
}
EXPORT_SYMBOL(__bitmap_and);

void __bitmap_or(unsigned long *dst, const unsigned long *bitmap1,
				const unsigned long *bitmap2, unsigned int bits)
{
	unsigned int k;
	unsigned int nr = BITS_TO_LONGS(bits);

	for (k = 0; k < nr; k++)
		dst[k] = bitmap1[k] | bitmap2[k];
}
EXPORT_SYMBOL(__bitmap_or);

void __bitmap_xor(unsigned long *dst, const unsigned long *bitmap1,
				const unsigned long *bitmap2, unsigned int bits)
{
	unsigned int k;
	unsigned int nr = BITS_TO_LONGS(bits);

	for (k = 0; k < nr; k++)
		dst[k] = bitmap1[k] ^ bitmap2[k];
}
EXPORT_SYMBOL(__bitmap_xor);

int __bitmap_andnot(unsigned long *dst, const unsigned long *bitmap1,
				const unsigned long *bitmap2, unsigned int bits)
{
	unsigned int k;
	unsigned int lim = bits/BITS_PER_LONG;
	unsigned long result = 0;

	for (k = 0; k < lim; k++)
		result |= (dst[k] = bitmap1[k] & ~bitmap2[k]);
	if (bits % BITS_PER_LONG)
		result |= (dst[k] = bitmap1[k] & ~bitmap2[k] &
			   BITMAP_LAST_WORD_MASK(bits));
	return result != 0;
}
EXPORT_SYMBOL(__bitmap_andnot);

int __bitmap_intersects(const unsigned long *bitmap1,
			const unsigned long *bitmap2, unsigned int bits)
{
	unsigned int k, lim = bits/BITS_PER_LONG;
	for (k = 0; k < lim; ++k)
		if (bitmap1[k] & bitmap2[k])
			return 1;

	if (bits % BITS_PER_LONG)
		if ((bitmap1[k] & bitmap2[k]) & BITMAP_LAST_WORD_MASK(bits))
			return 1;

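	/* no common set bits were found in any word */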
	return 0;
}
EXPORT_SYMBOL(__bitmap_intersects);

int __bitmap_subset(const unsigned long *bitmap1,
		const unsigned long *bitmap2, unsigned int bits)
{
	unsigned int k, lim = bits/BITS_PER_LONG;
	for (k = 0; k < lim; ++k)
		if (bitmap1[k] & ~bitmap2[k])
			return 0;

	if (bits % BITS_PER_LONG)
		if ((bitmap1[k] & ~bitmap2[k]) & BITMAP_LAST_WORD_MASK(bits))
			return 0;
	return 1;
}
EXPORT_SYMBOL(__bitmap_subset);

int __bitmap_weight(const unsigned long *bitmap, unsigned int bits)
{
	unsigned int k, lim = bits/BITS_PER_LONG;
	int w = 0;

	for (k = 0; k < lim; k++)
		w += hweight_long(bitmap[k]);

	if (bits % BITS_PER_LONG)
		w += hweight_long(bitmap[k] & BITMAP_LAST_WORD_MASK(bits));

	return w;
}
EXPORT_SYMBOL(__bitmap_weight);

void __bitmap_set(unsigned long *map, unsigned int start, int len)
{
	unsigned long *p = map + BIT_WORD(start);
	const unsigned int size = start + len;
	int bits_to_set = BITS_PER_LONG - (start % BITS_PER_LONG);
	unsigned long mask_to_set = BITMAP_FIRST_WORD_MASK(start);

	while (len - bits_to_set >= 0) {
		*p |= mask_to_set;
		len -= bits_to_set;
		bits_to_set = BITS_PER_LONG;
		mask_to_set = ~0UL;
		p++;
	}
	if (len) {
		mask_to_set &= BITMAP_LAST_WORD_MASK(size);
		*p |= mask_to_set;
	}
}
EXPORT_SYMBOL(__bitmap_set);

void __bitmap_clear(unsigned long *map, unsigned int start, int len)
{
	unsigned long *p = map + BIT_WORD(start);
	const unsigned int size = start + len;
	int bits_to_clear = BITS_PER_LONG - (start % BITS_PER_LONG);
	unsigned long mask_to_clear = BITMAP_FIRST_WORD_MASK(start);

	while (len - bits_to_clear >= 0) {
		*p &= ~mask_to_clear;
		len -= bits_to_clear;
		bits_to_clear = BITS_PER_LONG;
		mask_to_clear = ~0UL;
		p++;
	}
	if (len) {
		mask_to_clear &= BITMAP_LAST_WORD_MASK(size);
		*p &= ~mask_to_clear;
	}
}
EXPORT_SYMBOL(__bitmap_clear);

/**
 * bitmap_find_next_zero_area_off - find a contiguous aligned zero area
 * @map: The address to base the search on
 * @size: The bitmap size in bits
 * @start: The bitnumber to start searching at
 * @nr: The number of zeroed bits we're looking for
 * @align_mask: Alignment mask for zero area
 * @align_offset: Alignment offset for zero area.
 *
 * The @align_mask should be one less than a power of 2; the effect is that
 * the bit offset of all zero areas this function finds plus @align_offset
 * is a multiple of that power of 2.
 */
unsigned long bitmap_find_next_zero_area_off(unsigned long *map,
					     unsigned long size,
					     unsigned long start,
					     unsigned int nr,
					     unsigned long align_mask,
					     unsigned long align_offset)
{
	unsigned long index, end, i;
again:
	index = find_next_zero_bit(map, size, start);

	/* Align allocation */
	index = __ALIGN_MASK(index + align_offset, align_mask) - align_offset;

	end = index + nr;
	if (end > size)
		return end;
	i = find_next_bit(map, end, index);
	if (i < end) {
		start = i + 1;
		goto again;
	}
	return index;
}
EXPORT_SYMBOL(bitmap_find_next_zero_area_off);

/*
 * Bitmap printing & parsing functions: first version by Nadia Yvette Chambers,
 * second version by Paul Jackson, third by Joe Korty.
 */

#define CHUNKSZ				32
#define nbits_to_hold_value(val)	fls(val)
#define BASEDEC 10		/* fancier cpuset lists input in decimal */

/**
 * __bitmap_parse - convert an ASCII hex string into a bitmap.
 * @buf: pointer to buffer containing string.
 * @buflen: buffer size in bytes. If string is smaller than this
 *    then it must be terminated with a \0.
 * @is_user: location of buffer, 0 indicates kernel space
 * @maskp: pointer to bitmap array that will contain result.
 * @nmaskbits: size of bitmap, in bits.
 *
 * Commas group hex digits into chunks. Each chunk defines exactly 32
 * bits of the resultant bitmask. No chunk may specify a value larger
 * than 32 bits (%-EOVERFLOW), and if a chunk specifies a smaller value
 * then leading 0-bits are prepended. %-EINVAL is returned for illegal
 * characters and for grouping errors such as "1,,5", ",44", "," and "".
 * Leading and trailing whitespace is accepted, but not embedded whitespace.
 */
int __bitmap_parse(const char *buf, unsigned int buflen,
		int is_user, unsigned long *maskp,
		int nmaskbits)
{
	int c, old_c, totaldigits, ndigits, nchunks, nbits;
	u32 chunk;
	const char __user __force *ubuf = (const char __user __force *)buf;

	bitmap_zero(maskp, nmaskbits);

	nchunks = nbits = totaldigits = c = 0;
	do {
		chunk = 0;
		ndigits = totaldigits;

		/* Get the next chunk of the bitmap */
		while (buflen) {
			old_c = c;
			if (is_user) {
				if (__get_user(c, ubuf++))
					return -EFAULT;
			}
			else
				c = *buf++;
			buflen--;
			if (isspace(c))
				continue;

			/*
			 * If the last character was a space and the current
			 * character isn't '\0', we've got embedded whitespace.
			 * This is a no-no, so throw an error.
			 */
			if (totaldigits && c && isspace(old_c))
				return -EINVAL;

			/* A '\0' or a ',' signals the end of the chunk */
			if (c == '\0' || c == ',')
				break;

			if (!isxdigit(c))
				return -EINVAL;

			/*
			 * Make sure there are at least 4 free bits in 'chunk'.
			 * If not, this hexdigit will overflow 'chunk', so
			 * throw an error.
			 */
			if (chunk & ~((1UL << (CHUNKSZ - 4)) - 1))
				return -EOVERFLOW;

			chunk = (chunk << 4) | hex_to_bin(c);
			totaldigits++;
		}
		if (ndigits == totaldigits)
			return -EINVAL;
		if (nchunks == 0 && chunk == 0)
			continue;

		__bitmap_shift_left(maskp, maskp, CHUNKSZ, nmaskbits);
		*maskp |= chunk;
		nchunks++;
		nbits += (nchunks == 1) ? nbits_to_hold_value(chunk) : CHUNKSZ;
		if (nbits > nmaskbits)
			return -EOVERFLOW;
	} while (buflen && c == ',');

	return 0;
}
EXPORT_SYMBOL(__bitmap_parse);

/**
 * bitmap_parse_user - convert an ASCII hex string in a user buffer into a bitmap
 *
 * @ubuf: pointer to user buffer containing string.
 * @ulen: buffer size in bytes. If string is smaller than this
 *    then it must be terminated with a \0.
 * @maskp: pointer to bitmap array that will contain result.
 * @nmaskbits: size of bitmap, in bits.
 *
 * Wrapper for __bitmap_parse(), providing it with user buffer.
 *
 * We cannot have this as an inline function in bitmap.h because it needs
 * linux/uaccess.h to get the access_ok() declaration and this causes
 * cyclic dependencies.
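 *
 * As an illustrative example (not from the original text), the
 * comma-grouped hex format accepted here means a string such as
 * "3,00000001" yields a mask with bits 0, 32 and 33 set, provided
 * @nmaskbits is at least 34.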
 */
int bitmap_parse_user(const char __user *ubuf,
			unsigned int ulen, unsigned long *maskp,
			int nmaskbits)
{
	if (!access_ok(VERIFY_READ, ubuf, ulen))
		return -EFAULT;
	return __bitmap_parse((const char __force *)ubuf,
				ulen, 1, maskp, nmaskbits);
}
EXPORT_SYMBOL(bitmap_parse_user);

/**
 * bitmap_print_to_pagebuf - convert bitmap to list or hex format ASCII string
 * @list: indicates whether the bitmap must be printed in list format
 * @buf: page aligned buffer into which string is placed
 * @maskp: pointer to bitmap to convert
 * @nmaskbits: size of bitmap, in bits
 *
 * Output format is a comma-separated list of decimal numbers and
 * ranges if list is specified or hex digits grouped into comma-separated
 * sets of 8 digits/set. Returns the number of characters written to buf.
 *
 * It is assumed that @buf is a pointer into a PAGE_SIZE area and that
 * sufficient storage remains at @buf to accommodate the
 * bitmap_print_to_pagebuf() output.
 */
int bitmap_print_to_pagebuf(bool list, char *buf, const unsigned long *maskp,
			    int nmaskbits)
{
	ptrdiff_t len = PTR_ALIGN(buf + PAGE_SIZE - 1, PAGE_SIZE) - buf;
	int n = 0;

	if (len > 1)
		n = list ? scnprintf(buf, len, "%*pbl\n", nmaskbits, maskp) :
			   scnprintf(buf, len, "%*pb\n", nmaskbits, maskp);
	return n;
}
EXPORT_SYMBOL(bitmap_print_to_pagebuf);

/**
 * __bitmap_parselist - convert list format ASCII string to bitmap
 * @buf: read nul-terminated user string from this buffer
 * @buflen: buffer size in bytes. If string is smaller than this
 *    then it must be terminated with a \0.
 * @is_user: location of buffer, 0 indicates kernel space
 * @maskp: write resulting mask here
 * @nmaskbits: number of bits in mask to be written
 *
 * Input format is a comma-separated list of decimal numbers and
 * ranges. Consecutively set bits are shown as two hyphen-separated
 * decimal numbers, the smallest and largest bit numbers set in
 * the range.
 * Optionally each range can be postfixed to denote that only parts of it
 * should be set. The range will be divided into groups of a specific size,
 * and from each group only the given number of bits will be used.
 * Syntax: range:used_size/group_size
 * Example: 0-1023:2/256 ==> 0,1,256,257,512,513,768,769
 *
 * Returns: 0 on success, -errno on invalid input strings. Error values:
 *
 *   - ``-EINVAL``: second number in range smaller than first
 *   - ``-EINVAL``: invalid character in string
 *   - ``-ERANGE``: bit number specified too large for mask
 */
static int __bitmap_parselist(const char *buf, unsigned int buflen,
		int is_user, unsigned long *maskp,
		int nmaskbits)
{
	unsigned int a, b, old_a, old_b;
	unsigned int group_size, used_size, off;
	int c, old_c, totaldigits, ndigits;
	const char __user __force *ubuf = (const char __user __force *)buf;
	int at_start, in_range, in_partial_range;

	totaldigits = c = 0;
	old_a = old_b = 0;
	group_size = used_size = 0;
	bitmap_zero(maskp, nmaskbits);
	do {
		at_start = 1;
		in_range = 0;
		in_partial_range = 0;
		a = b = 0;
		ndigits = totaldigits;

		/* Get the next cpu# or a range of cpu#'s */
		while (buflen) {
			old_c = c;
			if (is_user) {
				if (__get_user(c, ubuf++))
					return -EFAULT;
			} else
				c = *buf++;
			buflen--;
			if (isspace(c))
				continue;

			/* A '\0' or a ',' signals the end of a cpu# or range */
			if (c == '\0' || c == ',')
				break;
			/*
			 * Whitespace between digits is not allowed, but
			 * leading or trailing whitespace is fine. When old_c
			 * is whitespace and totaldigits == ndigits, the
			 * whitespace is leading. Trailing whitespace never
			 * reaches this point: c would have been ',' or '\0'
			 * and the break above would have left the loop.
			 */
			if ((totaldigits != ndigits) && isspace(old_c))
				return -EINVAL;

			if (c == '/') {
				used_size = a;
				at_start = 1;
				in_range = 0;
				a = b = 0;
				continue;
			}

			if (c == ':') {
				old_a = a;
				old_b = b;
				at_start = 1;
				in_range = 0;
				in_partial_range = 1;
				a = b = 0;
				continue;
			}

			if (c == '-') {
				if (at_start || in_range)
					return -EINVAL;
				b = 0;
				in_range = 1;
				at_start = 1;
				continue;
			}

			if (!isdigit(c))
				return -EINVAL;

			b = b * 10 + (c - '0');
			if (!in_range)
				a = b;
			at_start = 0;
			totaldigits++;
		}
		if (ndigits == totaldigits)
			continue;
		if (in_partial_range) {
			group_size = a;
			a = old_a;
			b = old_b;
			old_a = old_b = 0;
		} else {
			used_size = group_size = b - a + 1;
		}
		/* if no digit is after '-', it's wrong */
		if (at_start && in_range)
			return -EINVAL;
		if (!(a <= b) || !(used_size <= group_size))
			return -EINVAL;
		if (b >= nmaskbits)
			return -ERANGE;
		while (a <= b) {
			off = min(b - a + 1, used_size);
			bitmap_set(maskp, a, off);
			a += group_size;
		}
	} while (buflen && c == ',');
	return 0;
}

int bitmap_parselist(const char *bp, unsigned long *maskp, int nmaskbits)
{
	char *nl = strchrnul(bp, '\n');
	int len = nl - bp;

	return __bitmap_parselist(bp, len, 0, maskp, nmaskbits);
}
EXPORT_SYMBOL(bitmap_parselist);


/**
 * bitmap_parselist_user()
 *
 * @ubuf: pointer to user buffer containing string.
 * @ulen: buffer size in bytes. If string is smaller than this
 *    then it must be terminated with a \0.
 * @maskp: pointer to bitmap array that will contain result.
 * @nmaskbits: size of bitmap, in bits.
 *
 * Wrapper for bitmap_parselist(), providing it with user buffer.
 *
 * We cannot have this as an inline function in bitmap.h because it needs
 * linux/uaccess.h to get the access_ok() declaration and this causes
 * cyclic dependencies.
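 *
 * As an illustrative example (not from the original text), the list
 * string "0-3,8" sets bits 0, 1, 2, 3 and 8 in the resulting mask.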
 */
int bitmap_parselist_user(const char __user *ubuf,
			unsigned int ulen, unsigned long *maskp,
			int nmaskbits)
{
	if (!access_ok(VERIFY_READ, ubuf, ulen))
		return -EFAULT;
	return __bitmap_parselist((const char __force *)ubuf,
					ulen, 1, maskp, nmaskbits);
}
EXPORT_SYMBOL(bitmap_parselist_user);


/**
 * bitmap_pos_to_ord - find ordinal of set bit at given position in bitmap
 * @buf: pointer to a bitmap
 * @pos: a bit position in @buf (0 <= @pos < @nbits)
 * @nbits: number of valid bit positions in @buf
 *
 * Map the bit at position @pos in @buf (of length @nbits) to the
 * ordinal of which set bit it is. If it is not set or if @pos
 * is not a valid bit position, map to -1.
 *
 * If, for example, just bits 4 through 7 are set in @buf, then @pos
 * values 4 through 7 will get mapped to 0 through 3, respectively,
 * and other @pos values will get mapped to -1. When @pos value 7
 * gets mapped to (returns) @ord value 3 in this example, that means
 * that bit 7 is the 3rd (starting with 0th) set bit in @buf.
 *
 * The bit positions 0 through @nbits-1 are valid positions in @buf.
 */
static int bitmap_pos_to_ord(const unsigned long *buf, unsigned int pos, unsigned int nbits)
{
	if (pos >= nbits || !test_bit(pos, buf))
		return -1;

	return __bitmap_weight(buf, pos);
}

/**
 * bitmap_ord_to_pos - find position of n-th set bit in bitmap
 * @buf: pointer to bitmap
 * @ord: ordinal bit position (n-th set bit, n >= 0)
 * @nbits: number of valid bit positions in @buf
 *
 * Map the ordinal offset of bit @ord in @buf to its position in @buf.
 * Value of @ord should be in range 0 <= @ord < weight(buf). If @ord
 * >= weight(buf), returns @nbits.
 *
 * If, for example, just bits 4 through 7 are set in @buf, then @ord
 * values 0 through 3 will get mapped to 4 through 7, respectively,
 * and all other @ord values return @nbits. When @ord value 3
 * gets mapped to (returns) @pos value 7 in this example, that means
 * that the 3rd set bit (starting with 0th) is at position 7 in @buf.
 *
 * The bit positions 0 through @nbits-1 are valid positions in @buf.
 */
unsigned int bitmap_ord_to_pos(const unsigned long *buf, unsigned int ord, unsigned int nbits)
{
	unsigned int pos;

	for (pos = find_first_bit(buf, nbits);
	     pos < nbits && ord;
	     pos = find_next_bit(buf, nbits, pos + 1))
		ord--;

	return pos;
}

/**
 * bitmap_remap - Apply map defined by a pair of bitmaps to another bitmap
 * @dst: remapped result
 * @src: subset to be remapped
 * @old: defines domain of map
 * @new: defines range of map
 * @nbits: number of bits in each of these bitmaps
 *
 * Let @old and @new define a mapping of bit positions, such that
 * whatever position is held by the n-th set bit in @old is mapped
 * to the n-th set bit in @new. In the more general case, allowing
 * for the possibility that the weight 'w' of @new is less than the
 * weight of @old, map the position of the n-th set bit in @old to
 * the position of the m-th set bit in @new, where m == n % w.
 *
 * If either of the @old and @new bitmaps is empty, or if @src and
 * @dst point to the same location, then this routine copies @src
 * to @dst.
 *
 * The positions of unset bits in @old are mapped to themselves
 * (the identity map).
 *
 * Apply the above specified mapping to @src, placing the result in
 * @dst, clearing any bits previously set in @dst.
 *
 * For example, let's say that @old has bits 4 through 7 set, and
 * @new has bits 12 through 15 set. This defines the mapping of bit
 * position 4 to 12, 5 to 13, 6 to 14 and 7 to 15, and of all other
 * bit positions unchanged. So if say @src comes into this routine
 * with bits 1, 5 and 7 set, then @dst should leave with bits 1,
 * 13 and 15 set.
 */
void bitmap_remap(unsigned long *dst, const unsigned long *src,
		const unsigned long *old, const unsigned long *new,
		unsigned int nbits)
{
	unsigned int oldbit, w;

	if (dst == src)		/* following doesn't handle inplace remaps */
		return;
	bitmap_zero(dst, nbits);

	w = bitmap_weight(new, nbits);
	for_each_set_bit(oldbit, src, nbits) {
		int n = bitmap_pos_to_ord(old, oldbit, nbits);

		if (n < 0 || w == 0)
			set_bit(oldbit, dst);	/* identity map */
		else
			set_bit(bitmap_ord_to_pos(new, n % w, nbits), dst);
	}
}
EXPORT_SYMBOL(bitmap_remap);

/**
 * bitmap_bitremap - Apply map defined by a pair of bitmaps to a single bit
 * @oldbit: bit position to be mapped
 * @old: defines domain of map
 * @new: defines range of map
 * @bits: number of bits in each of these bitmaps
 *
 * Let @old and @new define a mapping of bit positions, such that
 * whatever position is held by the n-th set bit in @old is mapped
 * to the n-th set bit in @new. In the more general case, allowing
 * for the possibility that the weight 'w' of @new is less than the
 * weight of @old, map the position of the n-th set bit in @old to
 * the position of the m-th set bit in @new, where m == n % w.
 *
 * The positions of unset bits in @old are mapped to themselves
 * (the identity map).
 *
 * Apply the above specified mapping to bit position @oldbit, returning
 * the new bit position.
 *
 * For example, let's say that @old has bits 4 through 7 set, and
 * @new has bits 12 through 15 set. This defines the mapping of bit
 * position 4 to 12, 5 to 13, 6 to 14 and 7 to 15, and of all other
 * bit positions unchanged. So if say @oldbit is 5, then this routine
 * returns 13.
 */
int bitmap_bitremap(int oldbit, const unsigned long *old,
				const unsigned long *new, int bits)
{
	int w = bitmap_weight(new, bits);
	int n = bitmap_pos_to_ord(old, oldbit, bits);
	if (n < 0 || w == 0)
		return oldbit;
	else
		return bitmap_ord_to_pos(new, n % w, bits);
}
EXPORT_SYMBOL(bitmap_bitremap);

/**
 * bitmap_onto - translate one bitmap relative to another
 * @dst: resulting translated bitmap
 * @orig: original untranslated bitmap
 * @relmap: bitmap relative to which translated
 * @bits: number of bits in each of these bitmaps
 *
 * Set the n-th bit of @dst iff there exists some m such that the
 * n-th bit of @relmap is set, the m-th bit of @orig is set, and
 * the n-th bit of @relmap is also the m-th _set_ bit of @relmap.
 * (If you understood the previous sentence the first time you
 * read it, you're overqualified for your current job.)
 *
 * In other words, @orig is mapped onto (surjectively) @dst,
 * using the map { <n, m> | the n-th bit of @relmap is the
 * m-th set bit of @relmap }.
 *
 * Any set bits in @orig above bit number W, where W is the
 * weight of (number of set bits in) @relmap, are mapped nowhere.
 * In particular, if for all bits m set in @orig, m >= W, then
 * @dst will end up empty. In situations where the possibility
 * of such an empty result is not desired, one way to avoid it is
 * to use the bitmap_fold() operator, below, to first fold the
 * @orig bitmap over itself so that all its set bits x are in the
 * range 0 <= x < W. The bitmap_fold() operator does this by
 * setting the bit (m % W) in @dst, for each bit (m) set in @orig.
 *
 * Example [1] for bitmap_onto():
 *  Let's say @relmap has bits 30-39 set, and @orig has bits
 *  1, 3, 5, 7, 9 and 11 set. Then on return from this routine,
 *  @dst will have bits 31, 33, 35, 37 and 39 set.
 *
 *  When bit 0 is set in @orig, it means turn on the bit in
 *  @dst corresponding to whatever is the first bit (if any)
 *  that is turned on in @relmap. Since bit 0 was off in the
 *  above example, we leave off that bit (bit 30) in @dst.
 *
 *  When bit 1 is set in @orig (as in the above example), it
 *  means turn on the bit in @dst corresponding to whatever
 *  is the second bit that is turned on in @relmap. The second
 *  bit in @relmap that was turned on in the above example was
 *  bit 31, so we turned on bit 31 in @dst.
 *
 *  Similarly, we turned on bits 33, 35, 37 and 39 in @dst,
 *  because they were the 4th, 6th, 8th and 10th set bits
 *  set in @relmap, and the 4th, 6th, 8th and 10th bits of
 *  @orig (i.e. bits 3, 5, 7 and 9) were also set.
 *
 *  When bit 11 is set in @orig, it means turn on the bit in
 *  @dst corresponding to whatever is the twelfth bit that is
 *  turned on in @relmap. In the above example, there were
 *  only ten bits turned on in @relmap (30..39), so the fact
 *  that bit 11 was set in @orig had no effect on @dst.
 *
 * Example [2] for bitmap_fold() + bitmap_onto():
 *  Let's say @relmap has these ten bits set::
 *
 *	40 41 42 43 45 48 53 61 74 95
 *
 *  (for the curious, that's 40 plus the first ten terms of the
 *  Fibonacci sequence.)
 *
 *  Further, let's say we use the following code, invoking
 *  bitmap_fold() then bitmap_onto(), as suggested above to
 *  avoid the possibility of an empty @dst result::
 *
 *	unsigned long *tmp;	// a temporary bitmap's bits
 *
 *	bitmap_fold(tmp, orig, bitmap_weight(relmap, bits), bits);
 *	bitmap_onto(dst, tmp, relmap, bits);
 *
 *  Then this table shows what various values of @dst would be, for
 *  various @orig's. I list the zero-based positions of each set bit.
 *  The tmp column shows the intermediate result, as computed by
 *  using bitmap_fold() to fold the @orig bitmap modulo ten
 *  (the weight of @relmap):
 *
 *      =============== ============== =================
 *      @orig           tmp            @dst
 *      0               0              40
 *      1               1              41
 *      9               9              95
 *      10              0              40 [#f1]_
 *      1 3 5 7         1 3 5 7        41 43 48 61
 *      0 1 2 3 4       0 1 2 3 4      40 41 42 43 45
 *      0 9 18 27       0 9 8 7        40 61 74 95
 *      0 10 20 30      0              40
 *      0 11 22 33      0 1 2 3        40 41 42 43
 *      0 12 24 36      0 2 4 6        40 42 45 53
 *      78 102 211      1 2 8          41 42 74 [#f1]_
 *      =============== ============== =================
 *
 * .. [#f1]
 *
 *     For these marked lines, if we hadn't first done bitmap_fold()
 *     into tmp, then the @dst result would have been empty.
 *
 * If either of @orig or @relmap is empty (no set bits), then @dst
 * will be returned empty.
 *
 * If (as explained above) the only set bits in @orig are in positions
 * m where m >= W (where W is the weight of @relmap), then @dst will
 * once again be returned empty.
 *
 * All bits in @dst not set by the above rule are cleared.
 */
void bitmap_onto(unsigned long *dst, const unsigned long *orig,
			const unsigned long *relmap, unsigned int bits)
{
	unsigned int n, m;	/* same meaning as in above comment */

	if (dst == orig)	/* following doesn't handle inplace mappings */
		return;
	bitmap_zero(dst, bits);

	/*
	 * The following code is a more efficient, but less
	 * obvious, equivalent to the loop:
	 *	for (m = 0; m < bitmap_weight(relmap, bits); m++) {
	 *		n = bitmap_ord_to_pos(orig, m, bits);
	 *		if (test_bit(m, orig))
	 *			set_bit(n, dst);
	 *	}
	 */

	m = 0;
	for_each_set_bit(n, relmap, bits) {
		/* m == bitmap_pos_to_ord(relmap, n, bits) */
		if (test_bit(m, orig))
			set_bit(n, dst);
		m++;
	}
}
EXPORT_SYMBOL(bitmap_onto);

/**
 * bitmap_fold - fold larger bitmap into smaller, modulo specified size
 * @dst: resulting smaller bitmap
 * @orig: original larger bitmap
 * @sz: specified size
 * @nbits: number of bits in each of these bitmaps
 *
 * For each bit oldbit in @orig, set bit oldbit mod @sz in @dst.
 * Clear all other bits in @dst. See further the comment and
 * Example [2] for bitmap_onto() for why and how to use this.
 */
void bitmap_fold(unsigned long *dst, const unsigned long *orig,
			unsigned int sz, unsigned int nbits)
{
	unsigned int oldbit;

	if (dst == orig)	/* following doesn't handle inplace mappings */
		return;
	bitmap_zero(dst, nbits);

	for_each_set_bit(oldbit, orig, nbits)
		set_bit(oldbit % sz, dst);
}
EXPORT_SYMBOL(bitmap_fold);

/*
 * Common code for bitmap_*_region() routines.
 *	bitmap: array of unsigned longs corresponding to the bitmap
 *	pos: the beginning of the region
 *	order: region size (log base 2 of number of bits)
 *	reg_op: operation(s) to perform on that region of bitmap
 *
 * Can set, verify and/or release a region of bits in a bitmap,
 * depending on which combination of REG_OP_* flag bits is set.
 *
 * A region of a bitmap is a sequence of bits in the bitmap, of
 * some size '1 << order' (a power of two), aligned to that same
 * '1 << order' power of two.
 *
 * Returns 1 if REG_OP_ISFREE succeeds (region is all zero bits).
 * Returns 0 in all other cases and for all other reg_op values.
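 *
 * As an illustrative example (not from the original text), with
 * order == 2 a region spans 1 << 2 == 4 bits, so pos == 8 names the
 * aligned region covering bits 8..11.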
 */

enum {
	REG_OP_ISFREE,		/* true if region is all zero bits */
	REG_OP_ALLOC,		/* set all bits in region */
	REG_OP_RELEASE,		/* clear all bits in region */
};

static int __reg_op(unsigned long *bitmap, unsigned int pos, int order, int reg_op)
{
	int nbits_reg;		/* number of bits in region */
	int index;		/* index first long of region in bitmap */
	int offset;		/* bit offset region in bitmap[index] */
	int nlongs_reg;		/* num longs spanned by region in bitmap */
	int nbitsinlong;	/* num bits of region in each spanned long */
	unsigned long mask;	/* bitmask for one long of region */
	int i;			/* scans bitmap by longs */
	int ret = 0;		/* return value */

	/*
	 * Either nlongs_reg == 1 (for small orders that fit in one long)
	 * or (offset == 0 && mask == ~0UL) (for larger multiword orders.)
	 */
	nbits_reg = 1 << order;
	index = pos / BITS_PER_LONG;
	offset = pos - (index * BITS_PER_LONG);
	nlongs_reg = BITS_TO_LONGS(nbits_reg);
	nbitsinlong = min(nbits_reg, BITS_PER_LONG);

	/*
	 * Can't do "mask = (1UL << nbitsinlong) - 1", as that
	 * overflows if nbitsinlong == BITS_PER_LONG.
	 */
	mask = (1UL << (nbitsinlong - 1));
	mask += mask - 1;
	mask <<= offset;

	switch (reg_op) {
	case REG_OP_ISFREE:
		for (i = 0; i < nlongs_reg; i++) {
			if (bitmap[index + i] & mask)
				goto done;
		}
		ret = 1;	/* all bits in region free (zero) */
		break;

	case REG_OP_ALLOC:
		for (i = 0; i < nlongs_reg; i++)
			bitmap[index + i] |= mask;
		break;

	case REG_OP_RELEASE:
		for (i = 0; i < nlongs_reg; i++)
			bitmap[index + i] &= ~mask;
		break;
	}
done:
	return ret;
}

/**
 * bitmap_find_free_region - find a contiguous aligned mem region
 * @bitmap: array of unsigned longs corresponding to the bitmap
 * @bits: number of bits in the bitmap
 * @order: region size (log base 2 of number of bits) to find
 *
 * Find a region of free (zero) bits in a @bitmap of @bits bits and
 * allocate them (set them to one). Only consider regions of length
 * a power (@order) of two, aligned to that power of two, which
 * makes the search algorithm much faster.
 *
 * Return the bit offset in bitmap of the allocated region,
 * or -errno on failure.
 */
int bitmap_find_free_region(unsigned long *bitmap, unsigned int bits, int order)
{
	unsigned int pos, end;		/* scans bitmap by regions of size order */

	for (pos = 0 ; (end = pos + (1U << order)) <= bits; pos = end) {
		if (!__reg_op(bitmap, pos, order, REG_OP_ISFREE))
			continue;
		__reg_op(bitmap, pos, order, REG_OP_ALLOC);
		return pos;
	}
	return -ENOMEM;
}
EXPORT_SYMBOL(bitmap_find_free_region);

/**
 * bitmap_release_region - release allocated bitmap region
 * @bitmap: array of unsigned longs corresponding to the bitmap
 * @pos: beginning of bit region to release
 * @order: region size (log base 2 of number of bits) to release
 *
 * This is the complement to bitmap_find_free_region() and releases
 * the found region (by clearing it in the bitmap).
 *
 * No return value.
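 *
 * A typical pairing (illustrative sketch, not from the original text)::
 *
 *	pos = bitmap_find_free_region(bitmap, bits, order);
 *	if (pos >= 0)
 *		bitmap_release_region(bitmap, pos, order);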
 */
void bitmap_release_region(unsigned long *bitmap, unsigned int pos, int order)
{
	__reg_op(bitmap, pos, order, REG_OP_RELEASE);
}
EXPORT_SYMBOL(bitmap_release_region);

/**
 * bitmap_allocate_region - allocate bitmap region
 * @bitmap: array of unsigned longs corresponding to the bitmap
 * @pos: beginning of bit region to allocate
 * @order: region size (log base 2 of number of bits) to allocate
 *
 * Allocate (set bits in) a specified region of a bitmap.
 *
 * Return 0 on success, or %-EBUSY if specified region wasn't
 * free (not all bits were zero).
 */
int bitmap_allocate_region(unsigned long *bitmap, unsigned int pos, int order)
{
	if (!__reg_op(bitmap, pos, order, REG_OP_ISFREE))
		return -EBUSY;
	return __reg_op(bitmap, pos, order, REG_OP_ALLOC);
}
EXPORT_SYMBOL(bitmap_allocate_region);

/**
 * bitmap_from_u32array - copy the contents of a u32 array of bits to bitmap
 * @bitmap: array of unsigned longs, the destination bitmap, non NULL
 * @nbits: number of bits in @bitmap
 * @buf: array of u32 (in host byte order), the source bitmap, non NULL
 * @nwords: number of u32 words in @buf
 *
 * Copy min(nbits, 32*nwords) bits from @buf to @bitmap; the remaining
 * bits between 32*@nwords and @nbits in @bitmap (if any) are cleared. In
 * the last word of @bitmap, the bits beyond @nbits (if any) are kept
 * unchanged.
 *
 * Return the number of bits effectively copied.
 */
unsigned int
bitmap_from_u32array(unsigned long *bitmap, unsigned int nbits,
		     const u32 *buf, unsigned int nwords)
{
	unsigned int dst_idx, src_idx;

	for (src_idx = dst_idx = 0; dst_idx < BITS_TO_LONGS(nbits); ++dst_idx) {
		unsigned long part = 0;

		if (src_idx < nwords)
			part = buf[src_idx++];

#if BITS_PER_LONG == 64
		if (src_idx < nwords)
			part |= ((unsigned long) buf[src_idx++]) << 32;
#endif

		if (dst_idx < nbits/BITS_PER_LONG)
			bitmap[dst_idx] = part;
		else {
			unsigned long mask = BITMAP_LAST_WORD_MASK(nbits);

			bitmap[dst_idx] = (bitmap[dst_idx] & ~mask)
				| (part & mask);
		}
	}

	return min_t(unsigned int, nbits, 32*nwords);
}
EXPORT_SYMBOL(bitmap_from_u32array);

/**
 * bitmap_to_u32array - copy the contents of bitmap to a u32 array of bits
 * @buf: array of u32 (in host byte order), the dest bitmap, non NULL
 * @nwords: number of u32 words in @buf
 * @bitmap: array of unsigned longs, the source bitmap, non NULL
 * @nbits: number of bits in @bitmap
 *
 * Copy min(nbits, 32*nwords) bits from @bitmap to @buf. Remaining
 * bits after @nbits in @buf (if any) are cleared.
 *
 * Return the number of bits effectively copied.
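 *
 * As an illustrative example (not from the original text), copying a
 * 40-bit bitmap into a 2-word @buf copies min(40, 64) == 40 bits:
 * @buf[0] receives bits 0..31, the low 8 bits of @buf[1] receive bits
 * 32..39, and the remaining bits of @buf[1] are cleared.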
 */
unsigned int
bitmap_to_u32array(u32 *buf, unsigned int nwords,
		   const unsigned long *bitmap, unsigned int nbits)
{
	unsigned int dst_idx = 0, src_idx = 0;

	while (dst_idx < nwords) {
		unsigned long part = 0;

		if (src_idx < BITS_TO_LONGS(nbits)) {
			part = bitmap[src_idx];
			if (src_idx >= nbits/BITS_PER_LONG)
				part &= BITMAP_LAST_WORD_MASK(nbits);
			src_idx++;
		}

		buf[dst_idx++] = part & 0xffffffffUL;

#if BITS_PER_LONG == 64
		if (dst_idx < nwords) {
			part >>= 32;
			buf[dst_idx++] = part & 0xffffffffUL;
		}
#endif
	}

	return min_t(unsigned int, nbits, 32*nwords);
}
EXPORT_SYMBOL(bitmap_to_u32array);

/**
 * bitmap_copy_le - copy a bitmap, putting the bits into little-endian order.
 * @dst: destination buffer
 * @src: bitmap to copy
 * @nbits: number of bits in the bitmap
 *
 * Require nbits % BITS_PER_LONG == 0.
 */
#ifdef __BIG_ENDIAN
void bitmap_copy_le(unsigned long *dst, const unsigned long *src, unsigned int nbits)
{
	unsigned int i;

	for (i = 0; i < nbits/BITS_PER_LONG; i++) {
		if (BITS_PER_LONG == 64)
			dst[i] = cpu_to_le64(src[i]);
		else
			dst[i] = cpu_to_le32(src[i]);
	}
}
EXPORT_SYMBOL(bitmap_copy_le);
#endif