/*
 * multiorder.c: Multi-order radix tree entry testing
 * Copyright (c) 2016 Intel Corporation
 * Author: Ross Zwisler <ross.zwisler@linux.intel.com>
 * Author: Matthew Wilcox <matthew.r.wilcox@intel.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
 * more details.
 */
#include <linux/radix-tree.h>
#include <linux/slab.h>
#include <linux/errno.h>

#include "test.h"

#define for_each_index(i, base, order) \
	for (i = base; i < base + (1 << order); i++)

static void __multiorder_tag_test(int index, int order)
{
	RADIX_TREE(tree, GFP_KERNEL);
	int base, err, i;

	/* our canonical entry */
	base = index & ~((1 << order) - 1);

	printv(2, "Multiorder tag test with index %d, canonical entry %d\n",
			index, base);

	err = item_insert_order(&tree, index, order);
	assert(!err);

	/*
	 * Verify we get collisions for covered indices.  We try and fail to
	 * insert an exceptional entry so we don't leak memory via
	 * item_insert_order().
	 */
	for_each_index(i, base, order) {
		err = __radix_tree_insert(&tree, i, order,
				(void *)(0xA0 | RADIX_TREE_EXCEPTIONAL_ENTRY));
		assert(err == -EEXIST);
	}

	for_each_index(i, base, order) {
		assert(!radix_tree_tag_get(&tree, i, 0));
		assert(!radix_tree_tag_get(&tree, i, 1));
	}

	assert(radix_tree_tag_set(&tree, index, 0));

	for_each_index(i, base, order) {
		assert(radix_tree_tag_get(&tree, i, 0));
		assert(!radix_tree_tag_get(&tree, i, 1));
	}

	assert(tag_tagged_items(&tree, NULL, 0, ~0UL, 10, 0, 1) == 1);
	assert(radix_tree_tag_clear(&tree, index, 0));

	for_each_index(i, base, order) {
		assert(!radix_tree_tag_get(&tree, i, 0));
		assert(radix_tree_tag_get(&tree, i, 1));
	}

	assert(radix_tree_tag_clear(&tree, index, 1));

	assert(!radix_tree_tagged(&tree, 0));
	assert(!radix_tree_tagged(&tree, 1));

	item_kill_tree(&tree);
}

static void __multiorder_tag_test2(unsigned order, unsigned long index2)
{
	RADIX_TREE(tree, GFP_KERNEL);
	unsigned long index = (1 << order);
	index2 += index;

	assert(item_insert_order(&tree, 0, order) == 0);
	assert(item_insert(&tree, index2) == 0);

	assert(radix_tree_tag_set(&tree, 0, 0));
	assert(radix_tree_tag_set(&tree, index2, 0));

	assert(tag_tagged_items(&tree, NULL, 0, ~0UL, 10, 0, 1) == 2);

	item_kill_tree(&tree);
}
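
/*
 * Illustrative sketch (an addition for exposition, not called by any of
 * the tests): the invariant the tag tests above rely on is that a tag set
 * through any index covered by a multi-order entry becomes visible through
 * every index the entry covers.
 */
static inline void sketch_tag_visibility(struct radix_tree_root *root,
		int index, int order, unsigned int tag)
{
	int base = index & ~((1 << order) - 1);
	int i;

	/* Setting the tag through one covered index... */
	assert(radix_tree_tag_set(root, index, tag));

	/* ...makes it visible through all of them. */
	for_each_index(i, base, order)
		assert(radix_tree_tag_get(root, i, tag));
}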

static void multiorder_tag_tests(void)
{
	int i, j;

	/* test multi-order entry for indices 0-7 with no sibling pointers */
	__multiorder_tag_test(0, 3);
	__multiorder_tag_test(5, 3);

	/* test multi-order entry for indices 8-15 with no sibling pointers */
	__multiorder_tag_test(8, 3);
	__multiorder_tag_test(15, 3);

	/*
	 * Our order 5 entry covers indices 0-31 in a tree with height=2.
	 * This is broken up as follows:
	 * 0-7:		canonical entry
	 * 8-15:	sibling 1
	 * 16-23:	sibling 2
	 * 24-31:	sibling 3
	 */
	__multiorder_tag_test(0, 5);
	__multiorder_tag_test(29, 5);

	/* same test, but with indices 32-63 */
	__multiorder_tag_test(32, 5);
	__multiorder_tag_test(44, 5);

	/*
	 * Our order 8 entry covers indices 0-255 in a tree with height=3.
	 * This is broken up as follows:
	 * 0-63:	canonical entry
	 * 64-127:	sibling 1
	 * 128-191:	sibling 2
	 * 192-255:	sibling 3
	 */
	__multiorder_tag_test(0, 8);
	__multiorder_tag_test(190, 8);

	/* same test, but with indices 256-511 */
	__multiorder_tag_test(256, 8);
	__multiorder_tag_test(300, 8);

	__multiorder_tag_test(0x12345678UL, 8);

	for (i = 1; i < 10; i++)
		for (j = 0; j < (10 << i); j++)
			__multiorder_tag_test2(i, j);
}

static void multiorder_check(unsigned long index, int order)
{
	unsigned long i;
	unsigned long min = index & ~((1UL << order) - 1);
	unsigned long max = min + (1UL << order);
	void **slot;
	struct item *item2 = item_create(min, order);
	RADIX_TREE(tree, GFP_KERNEL);

	printv(2, "Multiorder index %ld, order %d\n", index, order);

	assert(item_insert_order(&tree, index, order) == 0);

	for (i = min; i < max; i++) {
		struct item *item = item_lookup(&tree, i);
		assert(item != 0);
		assert(item->index == index);
	}
	for (i = 0; i < min; i++)
		item_check_absent(&tree, i);
	for (i = max; i < 2*max; i++)
		item_check_absent(&tree, i);
	for (i = min; i < max; i++)
		assert(radix_tree_insert(&tree, i, item2) == -EEXIST);

	slot = radix_tree_lookup_slot(&tree, index);
	free(*slot);
	radix_tree_replace_slot(&tree, slot, item2);
	for (i = min; i < max; i++) {
		struct item *item = item_lookup(&tree, i);
		assert(item != 0);
		assert(item->index == min);
	}

	assert(item_delete(&tree, min) != 0);

	for (i = 0; i < 2*max; i++)
		item_check_absent(&tree, i);
}

static void multiorder_shrink(unsigned long index, int order)
{
	unsigned long i;
	unsigned long max = 1 << order;
	RADIX_TREE(tree, GFP_KERNEL);
	struct radix_tree_node *node;

	printv(2, "Multiorder shrink index %ld, order %d\n", index, order);

	assert(item_insert_order(&tree, 0, order) == 0);

	node = tree.rnode;

	assert(item_insert(&tree, index) == 0);
	assert(node != tree.rnode);

	assert(item_delete(&tree, index) != 0);
	assert(node == tree.rnode);

	for (i = 0; i < max; i++) {
		struct item *item = item_lookup(&tree, i);
		assert(item != 0);
		assert(item->index == 0);
	}
	for (i = max; i < 2*max; i++)
		item_check_absent(&tree, i);

	if (!item_delete(&tree, 0)) {
		printv(2, "failed to delete index %ld (order %d)\n", index, order);
		abort();
	}

	for (i = 0; i < 2*max; i++)
		item_check_absent(&tree, i);
}

static void multiorder_insert_bug(void)
{
	RADIX_TREE(tree, GFP_KERNEL);

	item_insert(&tree, 0);
	radix_tree_tag_set(&tree, 0, 0);
	item_insert_order(&tree, 3 << 6, 6);

	item_kill_tree(&tree);
}
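
/*
 * In the iteration tests below, (index[i] | ((1 << order[i]) - 1)) is the
 * last index covered by entry i, so the inner scan finds the first entry
 * whose range ends at or after the start index j.  As a minimal, hedged
 * usage sketch (illustrative only, not called by the tests), iterating a
 * tree holding multi-order entries visits each entry once, not once per
 * covered index:
 */
static inline unsigned long sketch_count_entries(struct radix_tree_root *root)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned long n = 0;

	/* Each multi-order entry contributes a single iteration step. */
	radix_tree_for_each_slot(slot, root, &iter, 0)
		n++;
	return n;
}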

void multiorder_iteration(void)
{
	RADIX_TREE(tree, GFP_KERNEL);
	struct radix_tree_iter iter;
	void **slot;
	int i, j, err;

	printv(1, "Multiorder iteration test\n");

#define NUM_ENTRIES 11
	int index[NUM_ENTRIES] = {0, 2, 4, 8, 16, 32, 34, 36, 64, 72, 128};
	int order[NUM_ENTRIES] = {1, 1, 2, 3, 4, 1, 0, 1, 3, 0, 7};

	for (i = 0; i < NUM_ENTRIES; i++) {
		err = item_insert_order(&tree, index[i], order[i]);
		assert(!err);
	}

	for (j = 0; j < 256; j++) {
		for (i = 0; i < NUM_ENTRIES; i++)
			if (j <= (index[i] | ((1 << order[i]) - 1)))
				break;

		radix_tree_for_each_slot(slot, &tree, &iter, j) {
			int height = order[i] / RADIX_TREE_MAP_SHIFT;
			int shift = height * RADIX_TREE_MAP_SHIFT;
			unsigned long mask = (1UL << order[i]) - 1;
			struct item *item = *slot;

			assert((iter.index | mask) == (index[i] | mask));
			assert(iter.shift == shift);
			assert(!radix_tree_is_internal_node(item));
			assert((item->index | mask) == (index[i] | mask));
			assert(item->order == order[i]);
			i++;
		}
	}

	item_kill_tree(&tree);
}

void multiorder_tagged_iteration(void)
{
	RADIX_TREE(tree, GFP_KERNEL);
	struct radix_tree_iter iter;
	void **slot;
	int i, j;

	printv(1, "Multiorder tagged iteration test\n");

#define MT_NUM_ENTRIES 9
	int index[MT_NUM_ENTRIES] = {0, 2, 4, 16, 32, 40, 64, 72, 128};
	int order[MT_NUM_ENTRIES] = {1, 0, 2, 4, 3, 1, 3, 0, 7};

#define TAG_ENTRIES 7
	int tag_index[TAG_ENTRIES] = {0, 4, 16, 40, 64, 72, 128};

	for (i = 0; i < MT_NUM_ENTRIES; i++)
		assert(!item_insert_order(&tree, index[i], order[i]));

	assert(!radix_tree_tagged(&tree, 1));

	for (i = 0; i < TAG_ENTRIES; i++)
		assert(radix_tree_tag_set(&tree, tag_index[i], 1));

	for (j = 0; j < 256; j++) {
		int k;

		for (i = 0; i < TAG_ENTRIES; i++) {
			for (k = i; index[k] < tag_index[i]; k++)
				;
			if (j <= (index[k] | ((1 << order[k]) - 1)))
				break;
		}

		radix_tree_for_each_tagged(slot, &tree, &iter, j, 1) {
			unsigned long mask;
			struct item *item = *slot;
			for (k = i; index[k] < tag_index[i]; k++)
				;
			mask = (1UL << order[k]) - 1;

			assert((iter.index | mask) == (tag_index[i] | mask));
			assert(!radix_tree_is_internal_node(item));
			assert((item->index | mask) == (tag_index[i] | mask));
			assert(item->order == order[k]);
			i++;
		}
	}

	assert(tag_tagged_items(&tree, NULL, 0, ~0UL, TAG_ENTRIES, 1, 2) ==
			TAG_ENTRIES);

	for (j = 0; j < 256; j++) {
		int mask, k;

		for (i = 0; i < TAG_ENTRIES; i++) {
			for (k = i; index[k] < tag_index[i]; k++)
				;
			if (j <= (index[k] | ((1 << order[k]) - 1)))
				break;
		}

		radix_tree_for_each_tagged(slot, &tree, &iter, j, 2) {
			struct item *item = *slot;
			for (k = i; index[k] < tag_index[i]; k++)
				;
			mask = (1 << order[k]) - 1;

			assert((iter.index | mask) == (tag_index[i] | mask));
			assert(!radix_tree_is_internal_node(item));
			assert((item->index | mask) == (tag_index[i] | mask));
			assert(item->order == order[k]);
			i++;
		}
	}

	assert(tag_tagged_items(&tree, NULL, 1, ~0UL, MT_NUM_ENTRIES * 2, 1, 0)
			== TAG_ENTRIES);
	i = 0;
	radix_tree_for_each_tagged(slot, &tree, &iter, 0, 0) {
		assert(iter.index == tag_index[i]);
		i++;
	}

	item_kill_tree(&tree);
}
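
/*
 * Hedged sketch of the join idiom the tests below exercise (illustrative
 * only, not called by the tests; the entry values are arbitrary
 * exceptional entries in the style of this file): radix_tree_join()
 * replaces whatever occupies the covered range with one larger
 * multi-order entry, which is then visible through every covered index.
 */
static inline void sketch_join(struct radix_tree_root *root)
{
	/* An order-0 entry at index 5... */
	radix_tree_insert(root, 5, (void *)0x12UL);

	/* ...absorbed by an order-3 entry covering indices 0-7. */
	assert(radix_tree_join(root, 5, 3, (void *)0x16UL) == 0);
	assert(radix_tree_lookup(root, 0) == (void *)0x16UL);
	assert(radix_tree_lookup(root, 7) == (void *)0x16UL);
}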

/*
 * Basic join checks: make sure we can't find an entry in the tree after
 * a larger entry has replaced it.
 */
static void multiorder_join1(unsigned long index,
				unsigned order1, unsigned order2)
{
	unsigned long loc;
	void *item, *item2 = item_create(index + 1, order1);
	RADIX_TREE(tree, GFP_KERNEL);

	item_insert_order(&tree, index, order2);
	item = radix_tree_lookup(&tree, index);
	radix_tree_join(&tree, index + 1, order1, item2);
	loc = find_item(&tree, item);
	if (loc == -1)
		free(item);
	item = radix_tree_lookup(&tree, index + 1);
	assert(item == item2);
	item_kill_tree(&tree);
}

/*
 * Check that the accounting of exceptional entries is handled correctly
 * by joining an exceptional entry to a normal pointer.
 */
static void multiorder_join2(unsigned order1, unsigned order2)
{
	RADIX_TREE(tree, GFP_KERNEL);
	struct radix_tree_node *node;
	void *item1 = item_create(0, order1);
	void *item2;

	item_insert_order(&tree, 0, order2);
	radix_tree_insert(&tree, 1 << order2, (void *)0x12UL);
	item2 = __radix_tree_lookup(&tree, 1 << order2, &node, NULL);
	assert(item2 == (void *)0x12UL);
	assert(node->exceptional == 1);

	item2 = radix_tree_lookup(&tree, 0);
	free(item2);

	radix_tree_join(&tree, 0, order1, item1);
	item2 = __radix_tree_lookup(&tree, 1 << order2, &node, NULL);
	assert(item2 == item1);
	assert(node->exceptional == 0);
	item_kill_tree(&tree);
}

/*
 * This test revealed an accounting bug for exceptional entries at one point.
 * Nodes were being freed back into the pool with an elevated exception count
 * by radix_tree_join() and then radix_tree_split() was failing to zero the
 * count of exceptional entries.
 */
static void multiorder_join3(unsigned int order)
{
	RADIX_TREE(tree, GFP_KERNEL);
	struct radix_tree_node *node;
	void **slot;
	struct radix_tree_iter iter;
	unsigned long i;

	for (i = 0; i < (1 << order); i++) {
		radix_tree_insert(&tree, i, (void *)0x12UL);
	}

	radix_tree_join(&tree, 0, order, (void *)0x16UL);
	rcu_barrier();

	radix_tree_split(&tree, 0, 0);

	radix_tree_for_each_slot(slot, &tree, &iter, 0) {
		radix_tree_iter_replace(&tree, &iter, slot, (void *)0x12UL);
	}

	__radix_tree_lookup(&tree, 0, &node, NULL);
	assert(node->exceptional == node->count);

	item_kill_tree(&tree);
}

static void multiorder_join(void)
{
	int i, j, idx;

	for (idx = 0; idx < 1024; idx = idx * 2 + 3) {
		for (i = 1; i < 15; i++) {
			for (j = 0; j < i; j++) {
				multiorder_join1(idx, i, j);
			}
		}
	}

	for (i = 1; i < 15; i++) {
		for (j = 0; j < i; j++) {
			multiorder_join2(i, j);
		}
	}

	for (i = 3; i < 10; i++) {
		multiorder_join3(i);
	}
}
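
/*
 * The split protocol the tests below exercise, as a hedged sketch
 * (illustrative only, not called by the tests; it assumes the caller has
 * already inserted an order-old_order entry at index 0): preload enough
 * nodes, split the old entry into order-new_order pieces in place, then
 * install a real entry in each resulting slot while iterating.  Until the
 * replacement loop finishes, lookups keep seeing the old entry through
 * every covered index.
 */
static inline void sketch_split(struct radix_tree_root *root,
		unsigned old_order, unsigned new_order)
{
	struct radix_tree_iter iter;
	void **slot;

	/* Reserve the nodes the split may need up front. */
	radix_tree_split_preload(old_order, new_order, GFP_KERNEL);
	radix_tree_split(root, 0, new_order);

	/* Replace every slot of the split entry with a new entry. */
	radix_tree_for_each_slot(slot, root, &iter, 0)
		radix_tree_iter_replace(root, &iter, slot, (void *)0x16UL);
	radix_tree_preload_end();
}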

static void check_mem(unsigned old_order, unsigned new_order, unsigned alloc)
{
	struct radix_tree_preload *rtp = &radix_tree_preloads;
	if (rtp->nr != 0)
		printv(2, "split(%u %u) remaining %u\n", old_order, new_order,
				rtp->nr);
	/*
	 * Can't check for equality here as some nodes may have been
	 * RCU-freed while we ran.  But we should never finish with more
	 * nodes allocated since they should have all been preloaded.
	 */
	if (nr_allocated > alloc)
		printv(2, "split(%u %u) allocated %u %u\n", old_order, new_order,
				alloc, nr_allocated);
}

static void __multiorder_split(int old_order, int new_order)
{
	RADIX_TREE(tree, GFP_ATOMIC);
	void **slot;
	struct radix_tree_iter iter;
	unsigned alloc;
	struct item *item;

	radix_tree_preload(GFP_KERNEL);
	assert(item_insert_order(&tree, 0, old_order) == 0);
	radix_tree_preload_end();

	/* Wipe out the preloaded cache or it'll confuse check_mem() */
	radix_tree_cpu_dead(0);

	item = radix_tree_tag_set(&tree, 0, 2);

	radix_tree_split_preload(old_order, new_order, GFP_KERNEL);
	alloc = nr_allocated;
	radix_tree_split(&tree, 0, new_order);
	check_mem(old_order, new_order, alloc);
	radix_tree_for_each_slot(slot, &tree, &iter, 0) {
		radix_tree_iter_replace(&tree, &iter, slot,
					item_create(iter.index, new_order));
	}
	radix_tree_preload_end();

	item_kill_tree(&tree);
	free(item);
}

static void __multiorder_split2(int old_order, int new_order)
{
	RADIX_TREE(tree, GFP_KERNEL);
	void **slot;
	struct radix_tree_iter iter;
	struct radix_tree_node *node;
	void *item;

	__radix_tree_insert(&tree, 0, old_order, (void *)0x12);

	item = __radix_tree_lookup(&tree, 0, &node, NULL);
	assert(item == (void *)0x12);
	assert(node->exceptional > 0);

	radix_tree_split(&tree, 0, new_order);
	radix_tree_for_each_slot(slot, &tree, &iter, 0) {
		radix_tree_iter_replace(&tree, &iter, slot,
					item_create(iter.index, new_order));
	}

	item = __radix_tree_lookup(&tree, 0, &node, NULL);
	assert(item != (void *)0x12);
	assert(node->exceptional == 0);

	item_kill_tree(&tree);
}

static void __multiorder_split3(int old_order, int new_order)
{
	RADIX_TREE(tree, GFP_KERNEL);
	void **slot;
	struct radix_tree_iter iter;
	struct radix_tree_node *node;
	void *item;

	__radix_tree_insert(&tree, 0, old_order, (void *)0x12);

	item = __radix_tree_lookup(&tree, 0, &node, NULL);
	assert(item == (void *)0x12);
	assert(node->exceptional > 0);

	radix_tree_split(&tree, 0, new_order);
	radix_tree_for_each_slot(slot, &tree, &iter, 0) {
		radix_tree_iter_replace(&tree, &iter, slot, (void *)0x16);
	}

	item = __radix_tree_lookup(&tree, 0, &node, NULL);
	assert(item == (void *)0x16);
	assert(node->exceptional > 0);

	item_kill_tree(&tree);

	__radix_tree_insert(&tree, 0, old_order, (void *)0x12);

	item = __radix_tree_lookup(&tree, 0, &node, NULL);
	assert(item == (void *)0x12);
	assert(node->exceptional > 0);

	radix_tree_split(&tree, 0, new_order);
	radix_tree_for_each_slot(slot, &tree, &iter, 0) {
		if (iter.index == (1 << new_order))
			radix_tree_iter_replace(&tree, &iter, slot,
						(void *)0x16);
		else
			radix_tree_iter_replace(&tree, &iter, slot, NULL);
	}

	item = __radix_tree_lookup(&tree, 1 << new_order, &node, NULL);
	assert(item == (void *)0x16);
	assert(node->count == node->exceptional);
	do {
		node = node->parent;
		if (!node)
			break;
		assert(node->count == 1);
		assert(node->exceptional == 0);
	} while (1);

	item_kill_tree(&tree);
}
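
/*
 * Notes on the split coverage below (a reading of the loops, not new
 * behaviour): old orders 3-10 are split into every smaller new order, so
 * both the sibling-entry case, where the split stays within one node, and
 * the multi-level case, where new nodes must be allocated, are exercised.
 * As these tests check it, node->count counts all occupied slots in a
 * node, while node->exceptional counts the slots holding exceptional
 * entries, sibling slots included, which is why the asserts above and in
 * multiorder_account() can compare the two directly.
 */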

static void multiorder_split(void)
{
	int i, j;

	for (i = 3; i < 11; i++)
		for (j = 0; j < i; j++) {
			__multiorder_split(i, j);
			__multiorder_split2(i, j);
			__multiorder_split3(i, j);
		}
}

static void multiorder_account(void)
{
	RADIX_TREE(tree, GFP_KERNEL);
	struct radix_tree_node *node;
	void **slot;

	item_insert_order(&tree, 0, 5);

	__radix_tree_insert(&tree, 1 << 5, 5, (void *)0x12);
	__radix_tree_lookup(&tree, 0, &node, NULL);
	assert(node->count == node->exceptional * 2);
	radix_tree_delete(&tree, 1 << 5);
	assert(node->exceptional == 0);

	__radix_tree_insert(&tree, 1 << 5, 5, (void *)0x12);
	__radix_tree_lookup(&tree, 1 << 5, &node, &slot);
	assert(node->count == node->exceptional * 2);
	__radix_tree_replace(&tree, node, slot, NULL, NULL);
	assert(node->exceptional == 0);

	item_kill_tree(&tree);
}

void multiorder_checks(void)
{
	int i;

	for (i = 0; i < 20; i++) {
		multiorder_check(200, i);
		multiorder_check(0, i);
		multiorder_check((1UL << i) + 1, i);
	}

	for (i = 0; i < 15; i++)
		multiorder_shrink((1UL << (i + RADIX_TREE_MAP_SHIFT)), i);

	multiorder_insert_bug();
	multiorder_tag_tests();
	multiorder_iteration();
	multiorder_tagged_iteration();
	multiorder_join();
	multiorder_split();
	multiorder_account();

	radix_tree_cpu_dead(0);
}

int __weak main(void)
{
	radix_tree_init();
	multiorder_checks();
	return 0;
}