1 /* 2 * Copyright (C) 2001 Momchil Velikov 3 * Portions Copyright (C) 2001 Christoph Hellwig 4 * Copyright (C) 2005 SGI, Christoph Lameter 5 * Copyright (C) 2006 Nick Piggin 6 * Copyright (C) 2012 Konstantin Khlebnikov 7 * 8 * This program is free software; you can redistribute it and/or 9 * modify it under the terms of the GNU General Public License as 10 * published by the Free Software Foundation; either version 2, or (at 11 * your option) any later version. 12 * 13 * This program is distributed in the hope that it will be useful, but 14 * WITHOUT ANY WARRANTY; without even the implied warranty of 15 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 16 * General Public License for more details. 17 * 18 * You should have received a copy of the GNU General Public License 19 * along with this program; if not, write to the Free Software 20 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. 21 */ 22 23 #include <linux/errno.h> 24 #include <linux/init.h> 25 #include <linux/kernel.h> 26 #include <linux/export.h> 27 #include <linux/radix-tree.h> 28 #include <linux/percpu.h> 29 #include <linux/slab.h> 30 #include <linux/notifier.h> 31 #include <linux/cpu.h> 32 #include <linux/string.h> 33 #include <linux/bitops.h> 34 #include <linux/rcupdate.h> 35 #include <linux/hardirq.h> /* in_interrupt() */ 36 37 38 /* 39 * The height_to_maxindex array needs to be one deeper than the maximum 40 * path as height 0 holds only 1 entry. 41 */ 42 static unsigned long height_to_maxindex[RADIX_TREE_MAX_PATH + 1] __read_mostly; 43 44 /* 45 * Radix tree node cache. 46 */ 47 static struct kmem_cache *radix_tree_node_cachep; 48 49 /* 50 * The radix tree is variable-height, so an insert operation not only has 51 * to build the branch to its corresponding item, it also has to build the 52 * branch to existing items if the size has to be increased (by 53 * radix_tree_extend). 54 * 55 * The worst case is a zero height tree with just a single item at index 0, 56 * and then inserting an item at index ULONG_MAX. This requires 2 new branches 57 * of RADIX_TREE_MAX_PATH size to be created, with only the root node shared. 58 * Hence: 59 */ 60 #define RADIX_TREE_PRELOAD_SIZE (RADIX_TREE_MAX_PATH * 2 - 1) 61 62 /* 63 * Per-cpu pool of preloaded nodes 64 */ 65 struct radix_tree_preload { 66 int nr; 67 struct radix_tree_node *nodes[RADIX_TREE_PRELOAD_SIZE]; 68 }; 69 static DEFINE_PER_CPU(struct radix_tree_preload, radix_tree_preloads) = { 0, }; 70 71 static inline void *ptr_to_indirect(void *ptr) 72 { 73 return (void *)((unsigned long)ptr | RADIX_TREE_INDIRECT_PTR); 74 } 75 76 static inline void *indirect_to_ptr(void *ptr) 77 { 78 return (void *)((unsigned long)ptr & ~RADIX_TREE_INDIRECT_PTR); 79 } 80 81 static inline gfp_t root_gfp_mask(struct radix_tree_root *root) 82 { 83 return root->gfp_mask & __GFP_BITS_MASK; 84 } 85 86 static inline void tag_set(struct radix_tree_node *node, unsigned int tag, 87 int offset) 88 { 89 __set_bit(offset, node->tags[tag]); 90 } 91 92 static inline void tag_clear(struct radix_tree_node *node, unsigned int tag, 93 int offset) 94 { 95 __clear_bit(offset, node->tags[tag]); 96 } 97 98 static inline int tag_get(struct radix_tree_node *node, unsigned int tag, 99 int offset) 100 { 101 return test_bit(offset, node->tags[tag]); 102 } 103 104 static inline void root_tag_set(struct radix_tree_root *root, unsigned int tag) 105 { 106 root->gfp_mask |= (__force gfp_t)(1 << (tag + __GFP_BITS_SHIFT)); 107 } 108 109 static inline void root_tag_clear(struct radix_tree_root *root, unsigned int tag) 110 { 111 root->gfp_mask &= (__force gfp_t)~(1 << (tag + __GFP_BITS_SHIFT)); 112 } 113 114 static inline void root_tag_clear_all(struct radix_tree_root *root) 115 { 116 root->gfp_mask &= __GFP_BITS_MASK; 117 } 118 119 static inline int root_tag_get(struct radix_tree_root *root, unsigned int tag) 120 { 121 return (__force unsigned)root->gfp_mask & (1 << (tag + __GFP_BITS_SHIFT)); 122 } 123 124 /* 125 * Returns 1 if any slot in the node has this tag set. 126 * Otherwise returns 0. 127 */ 128 static inline int any_tag_set(struct radix_tree_node *node, unsigned int tag) 129 { 130 int idx; 131 for (idx = 0; idx < RADIX_TREE_TAG_LONGS; idx++) { 132 if (node->tags[tag][idx]) 133 return 1; 134 } 135 return 0; 136 } 137 138 /** 139 * radix_tree_find_next_bit - find the next set bit in a memory region 140 * 141 * @addr: The address to base the search on 142 * @size: The bitmap size in bits 143 * @offset: The bitnumber to start searching at 144 * 145 * Unrollable variant of find_next_bit() for constant size arrays. 146 * Tail bits starting from size to roundup(size, BITS_PER_LONG) must be zero. 147 * Returns next bit offset, or size if nothing found. 148 */ 149 static __always_inline unsigned long 150 radix_tree_find_next_bit(const unsigned long *addr, 151 unsigned long size, unsigned long offset) 152 { 153 if (!__builtin_constant_p(size)) 154 return find_next_bit(addr, size, offset); 155 156 if (offset < size) { 157 unsigned long tmp; 158 159 addr += offset / BITS_PER_LONG; 160 tmp = *addr >> (offset % BITS_PER_LONG); 161 if (tmp) 162 return __ffs(tmp) + offset; 163 offset = (offset + BITS_PER_LONG) & ~(BITS_PER_LONG - 1); 164 while (offset < size) { 165 tmp = *++addr; 166 if (tmp) 167 return __ffs(tmp) + offset; 168 offset += BITS_PER_LONG; 169 } 170 } 171 return size; 172 } 173 174 /* 175 * This assumes that the caller has performed appropriate preallocation, and 176 * that the caller has pinned this thread of control to the current CPU. 177 */ 178 static struct radix_tree_node * 179 radix_tree_node_alloc(struct radix_tree_root *root) 180 { 181 struct radix_tree_node *ret = NULL; 182 gfp_t gfp_mask = root_gfp_mask(root); 183 184 /* 185 * Preload code isn't irq safe and it doesn't make sence to use 186 * preloading in the interrupt anyway as all the allocations have to 187 * be atomic. So just do normal allocation when in interrupt. 188 */ 189 if (!(gfp_mask & __GFP_WAIT) && !in_interrupt()) { 190 struct radix_tree_preload *rtp; 191 192 /* 193 * Provided the caller has preloaded here, we will always 194 * succeed in getting a node here (and never reach 195 * kmem_cache_alloc) 196 */ 197 rtp = &__get_cpu_var(radix_tree_preloads); 198 if (rtp->nr) { 199 ret = rtp->nodes[rtp->nr - 1]; 200 rtp->nodes[rtp->nr - 1] = NULL; 201 rtp->nr--; 202 } 203 } 204 if (ret == NULL) 205 ret = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask); 206 207 BUG_ON(radix_tree_is_indirect_ptr(ret)); 208 return ret; 209 } 210 211 static void radix_tree_node_rcu_free(struct rcu_head *head) 212 { 213 struct radix_tree_node *node = 214 container_of(head, struct radix_tree_node, rcu_head); 215 int i; 216 217 /* 218 * must only free zeroed nodes into the slab. radix_tree_shrink 219 * can leave us with a non-NULL entry in the first slot, so clear 220 * that here to make sure. 221 */ 222 for (i = 0; i < RADIX_TREE_MAX_TAGS; i++) 223 tag_clear(node, i, 0); 224 225 node->slots[0] = NULL; 226 node->count = 0; 227 228 kmem_cache_free(radix_tree_node_cachep, node); 229 } 230 231 static inline void 232 radix_tree_node_free(struct radix_tree_node *node) 233 { 234 call_rcu(&node->rcu_head, radix_tree_node_rcu_free); 235 } 236 237 /* 238 * Load up this CPU's radix_tree_node buffer with sufficient objects to 239 * ensure that the addition of a single element in the tree cannot fail. On 240 * success, return zero, with preemption disabled. On error, return -ENOMEM 241 * with preemption not disabled. 242 * 243 * To make use of this facility, the radix tree must be initialised without 244 * __GFP_WAIT being passed to INIT_RADIX_TREE(). 245 */ 246 static int __radix_tree_preload(gfp_t gfp_mask) 247 { 248 struct radix_tree_preload *rtp; 249 struct radix_tree_node *node; 250 int ret = -ENOMEM; 251 252 preempt_disable(); 253 rtp = &__get_cpu_var(radix_tree_preloads); 254 while (rtp->nr < ARRAY_SIZE(rtp->nodes)) { 255 preempt_enable(); 256 node = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask); 257 if (node == NULL) 258 goto out; 259 preempt_disable(); 260 rtp = &__get_cpu_var(radix_tree_preloads); 261 if (rtp->nr < ARRAY_SIZE(rtp->nodes)) 262 rtp->nodes[rtp->nr++] = node; 263 else 264 kmem_cache_free(radix_tree_node_cachep, node); 265 } 266 ret = 0; 267 out: 268 return ret; 269 } 270 271 /* 272 * Load up this CPU's radix_tree_node buffer with sufficient objects to 273 * ensure that the addition of a single element in the tree cannot fail. On 274 * success, return zero, with preemption disabled. On error, return -ENOMEM 275 * with preemption not disabled. 276 * 277 * To make use of this facility, the radix tree must be initialised without 278 * __GFP_WAIT being passed to INIT_RADIX_TREE(). 279 */ 280 int radix_tree_preload(gfp_t gfp_mask) 281 { 282 /* Warn on non-sensical use... */ 283 WARN_ON_ONCE(!(gfp_mask & __GFP_WAIT)); 284 return __radix_tree_preload(gfp_mask); 285 } 286 EXPORT_SYMBOL(radix_tree_preload); 287 288 /* 289 * The same as above function, except we don't guarantee preloading happens. 290 * We do it, if we decide it helps. On success, return zero with preemption 291 * disabled. On error, return -ENOMEM with preemption not disabled. 292 */ 293 int radix_tree_maybe_preload(gfp_t gfp_mask) 294 { 295 if (gfp_mask & __GFP_WAIT) 296 return __radix_tree_preload(gfp_mask); 297 /* Preloading doesn't help anything with this gfp mask, skip it */ 298 preempt_disable(); 299 return 0; 300 } 301 EXPORT_SYMBOL(radix_tree_maybe_preload); 302 303 /* 304 * Return the maximum key which can be store into a 305 * radix tree with height HEIGHT. 306 */ 307 static inline unsigned long radix_tree_maxindex(unsigned int height) 308 { 309 return height_to_maxindex[height]; 310 } 311 312 /* 313 * Extend a radix tree so it can store key @index. 314 */ 315 static int radix_tree_extend(struct radix_tree_root *root, unsigned long index) 316 { 317 struct radix_tree_node *node; 318 struct radix_tree_node *slot; 319 unsigned int height; 320 int tag; 321 322 /* Figure out what the height should be. */ 323 height = root->height + 1; 324 while (index > radix_tree_maxindex(height)) 325 height++; 326 327 if (root->rnode == NULL) { 328 root->height = height; 329 goto out; 330 } 331 332 do { 333 unsigned int newheight; 334 if (!(node = radix_tree_node_alloc(root))) 335 return -ENOMEM; 336 337 /* Propagate the aggregated tag info into the new root */ 338 for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++) { 339 if (root_tag_get(root, tag)) 340 tag_set(node, tag, 0); 341 } 342 343 /* Increase the height. */ 344 newheight = root->height+1; 345 BUG_ON(newheight & ~RADIX_TREE_HEIGHT_MASK); 346 node->path = newheight; 347 node->count = 1; 348 node->parent = NULL; 349 slot = root->rnode; 350 if (newheight > 1) { 351 slot = indirect_to_ptr(slot); 352 slot->parent = node; 353 } 354 node->slots[0] = slot; 355 node = ptr_to_indirect(node); 356 rcu_assign_pointer(root->rnode, node); 357 root->height = newheight; 358 } while (height > root->height); 359 out: 360 return 0; 361 } 362 363 /** 364 * __radix_tree_create - create a slot in a radix tree 365 * @root: radix tree root 366 * @index: index key 367 * @nodep: returns node 368 * @slotp: returns slot 369 * 370 * Create, if necessary, and return the node and slot for an item 371 * at position @index in the radix tree @root. 372 * 373 * Until there is more than one item in the tree, no nodes are 374 * allocated and @root->rnode is used as a direct slot instead of 375 * pointing to a node, in which case *@nodep will be NULL. 376 * 377 * Returns -ENOMEM, or 0 for success. 378 */ 379 int __radix_tree_create(struct radix_tree_root *root, unsigned long index, 380 struct radix_tree_node **nodep, void ***slotp) 381 { 382 struct radix_tree_node *node = NULL, *slot; 383 unsigned int height, shift, offset; 384 int error; 385 386 /* Make sure the tree is high enough. */ 387 if (index > radix_tree_maxindex(root->height)) { 388 error = radix_tree_extend(root, index); 389 if (error) 390 return error; 391 } 392 393 slot = indirect_to_ptr(root->rnode); 394 395 height = root->height; 396 shift = (height-1) * RADIX_TREE_MAP_SHIFT; 397 398 offset = 0; /* uninitialised var warning */ 399 while (height > 0) { 400 if (slot == NULL) { 401 /* Have to add a child node. */ 402 if (!(slot = radix_tree_node_alloc(root))) 403 return -ENOMEM; 404 slot->path = height; 405 slot->parent = node; 406 if (node) { 407 rcu_assign_pointer(node->slots[offset], slot); 408 node->count++; 409 slot->path |= offset << RADIX_TREE_HEIGHT_SHIFT; 410 } else 411 rcu_assign_pointer(root->rnode, ptr_to_indirect(slot)); 412 } 413 414 /* Go a level down */ 415 offset = (index >> shift) & RADIX_TREE_MAP_MASK; 416 node = slot; 417 slot = node->slots[offset]; 418 shift -= RADIX_TREE_MAP_SHIFT; 419 height--; 420 } 421 422 if (nodep) 423 *nodep = node; 424 if (slotp) 425 *slotp = node ? node->slots + offset : (void **)&root->rnode; 426 return 0; 427 } 428 429 /** 430 * radix_tree_insert - insert into a radix tree 431 * @root: radix tree root 432 * @index: index key 433 * @item: item to insert 434 * 435 * Insert an item into the radix tree at position @index. 436 */ 437 int radix_tree_insert(struct radix_tree_root *root, 438 unsigned long index, void *item) 439 { 440 struct radix_tree_node *node; 441 void **slot; 442 int error; 443 444 BUG_ON(radix_tree_is_indirect_ptr(item)); 445 446 error = __radix_tree_create(root, index, &node, &slot); 447 if (error) 448 return error; 449 if (*slot != NULL) 450 return -EEXIST; 451 rcu_assign_pointer(*slot, item); 452 453 if (node) { 454 node->count++; 455 BUG_ON(tag_get(node, 0, index & RADIX_TREE_MAP_MASK)); 456 BUG_ON(tag_get(node, 1, index & RADIX_TREE_MAP_MASK)); 457 } else { 458 BUG_ON(root_tag_get(root, 0)); 459 BUG_ON(root_tag_get(root, 1)); 460 } 461 462 return 0; 463 } 464 EXPORT_SYMBOL(radix_tree_insert); 465 466 /** 467 * __radix_tree_lookup - lookup an item in a radix tree 468 * @root: radix tree root 469 * @index: index key 470 * @nodep: returns node 471 * @slotp: returns slot 472 * 473 * Lookup and return the item at position @index in the radix 474 * tree @root. 475 * 476 * Until there is more than one item in the tree, no nodes are 477 * allocated and @root->rnode is used as a direct slot instead of 478 * pointing to a node, in which case *@nodep will be NULL. 479 */ 480 void *__radix_tree_lookup(struct radix_tree_root *root, unsigned long index, 481 struct radix_tree_node **nodep, void ***slotp) 482 { 483 struct radix_tree_node *node, *parent; 484 unsigned int height, shift; 485 void **slot; 486 487 node = rcu_dereference_raw(root->rnode); 488 if (node == NULL) 489 return NULL; 490 491 if (!radix_tree_is_indirect_ptr(node)) { 492 if (index > 0) 493 return NULL; 494 495 if (nodep) 496 *nodep = NULL; 497 if (slotp) 498 *slotp = (void **)&root->rnode; 499 return node; 500 } 501 node = indirect_to_ptr(node); 502 503 height = node->path & RADIX_TREE_HEIGHT_MASK; 504 if (index > radix_tree_maxindex(height)) 505 return NULL; 506 507 shift = (height-1) * RADIX_TREE_MAP_SHIFT; 508 509 do { 510 parent = node; 511 slot = node->slots + ((index >> shift) & RADIX_TREE_MAP_MASK); 512 node = rcu_dereference_raw(*slot); 513 if (node == NULL) 514 return NULL; 515 516 shift -= RADIX_TREE_MAP_SHIFT; 517 height--; 518 } while (height > 0); 519 520 if (nodep) 521 *nodep = parent; 522 if (slotp) 523 *slotp = slot; 524 return node; 525 } 526 527 /** 528 * radix_tree_lookup_slot - lookup a slot in a radix tree 529 * @root: radix tree root 530 * @index: index key 531 * 532 * Returns: the slot corresponding to the position @index in the 533 * radix tree @root. This is useful for update-if-exists operations. 534 * 535 * This function can be called under rcu_read_lock iff the slot is not 536 * modified by radix_tree_replace_slot, otherwise it must be called 537 * exclusive from other writers. Any dereference of the slot must be done 538 * using radix_tree_deref_slot. 539 */ 540 void **radix_tree_lookup_slot(struct radix_tree_root *root, unsigned long index) 541 { 542 void **slot; 543 544 if (!__radix_tree_lookup(root, index, NULL, &slot)) 545 return NULL; 546 return slot; 547 } 548 EXPORT_SYMBOL(radix_tree_lookup_slot); 549 550 /** 551 * radix_tree_lookup - perform lookup operation on a radix tree 552 * @root: radix tree root 553 * @index: index key 554 * 555 * Lookup the item at the position @index in the radix tree @root. 556 * 557 * This function can be called under rcu_read_lock, however the caller 558 * must manage lifetimes of leaf nodes (eg. RCU may also be used to free 559 * them safely). No RCU barriers are required to access or modify the 560 * returned item, however. 561 */ 562 void *radix_tree_lookup(struct radix_tree_root *root, unsigned long index) 563 { 564 return __radix_tree_lookup(root, index, NULL, NULL); 565 } 566 EXPORT_SYMBOL(radix_tree_lookup); 567 568 /** 569 * radix_tree_tag_set - set a tag on a radix tree node 570 * @root: radix tree root 571 * @index: index key 572 * @tag: tag index 573 * 574 * Set the search tag (which must be < RADIX_TREE_MAX_TAGS) 575 * corresponding to @index in the radix tree. From 576 * the root all the way down to the leaf node. 577 * 578 * Returns the address of the tagged item. Setting a tag on a not-present 579 * item is a bug. 580 */ 581 void *radix_tree_tag_set(struct radix_tree_root *root, 582 unsigned long index, unsigned int tag) 583 { 584 unsigned int height, shift; 585 struct radix_tree_node *slot; 586 587 height = root->height; 588 BUG_ON(index > radix_tree_maxindex(height)); 589 590 slot = indirect_to_ptr(root->rnode); 591 shift = (height - 1) * RADIX_TREE_MAP_SHIFT; 592 593 while (height > 0) { 594 int offset; 595 596 offset = (index >> shift) & RADIX_TREE_MAP_MASK; 597 if (!tag_get(slot, tag, offset)) 598 tag_set(slot, tag, offset); 599 slot = slot->slots[offset]; 600 BUG_ON(slot == NULL); 601 shift -= RADIX_TREE_MAP_SHIFT; 602 height--; 603 } 604 605 /* set the root's tag bit */ 606 if (slot && !root_tag_get(root, tag)) 607 root_tag_set(root, tag); 608 609 return slot; 610 } 611 EXPORT_SYMBOL(radix_tree_tag_set); 612 613 /** 614 * radix_tree_tag_clear - clear a tag on a radix tree node 615 * @root: radix tree root 616 * @index: index key 617 * @tag: tag index 618 * 619 * Clear the search tag (which must be < RADIX_TREE_MAX_TAGS) 620 * corresponding to @index in the radix tree. If 621 * this causes the leaf node to have no tags set then clear the tag in the 622 * next-to-leaf node, etc. 623 * 624 * Returns the address of the tagged item on success, else NULL. ie: 625 * has the same return value and semantics as radix_tree_lookup(). 626 */ 627 void *radix_tree_tag_clear(struct radix_tree_root *root, 628 unsigned long index, unsigned int tag) 629 { 630 struct radix_tree_node *node = NULL; 631 struct radix_tree_node *slot = NULL; 632 unsigned int height, shift; 633 int uninitialized_var(offset); 634 635 height = root->height; 636 if (index > radix_tree_maxindex(height)) 637 goto out; 638 639 shift = height * RADIX_TREE_MAP_SHIFT; 640 slot = indirect_to_ptr(root->rnode); 641 642 while (shift) { 643 if (slot == NULL) 644 goto out; 645 646 shift -= RADIX_TREE_MAP_SHIFT; 647 offset = (index >> shift) & RADIX_TREE_MAP_MASK; 648 node = slot; 649 slot = slot->slots[offset]; 650 } 651 652 if (slot == NULL) 653 goto out; 654 655 while (node) { 656 if (!tag_get(node, tag, offset)) 657 goto out; 658 tag_clear(node, tag, offset); 659 if (any_tag_set(node, tag)) 660 goto out; 661 662 index >>= RADIX_TREE_MAP_SHIFT; 663 offset = index & RADIX_TREE_MAP_MASK; 664 node = node->parent; 665 } 666 667 /* clear the root's tag bit */ 668 if (root_tag_get(root, tag)) 669 root_tag_clear(root, tag); 670 671 out: 672 return slot; 673 } 674 EXPORT_SYMBOL(radix_tree_tag_clear); 675 676 /** 677 * radix_tree_tag_get - get a tag on a radix tree node 678 * @root: radix tree root 679 * @index: index key 680 * @tag: tag index (< RADIX_TREE_MAX_TAGS) 681 * 682 * Return values: 683 * 684 * 0: tag not present or not set 685 * 1: tag set 686 * 687 * Note that the return value of this function may not be relied on, even if 688 * the RCU lock is held, unless tag modification and node deletion are excluded 689 * from concurrency. 690 */ 691 int radix_tree_tag_get(struct radix_tree_root *root, 692 unsigned long index, unsigned int tag) 693 { 694 unsigned int height, shift; 695 struct radix_tree_node *node; 696 697 /* check the root's tag bit */ 698 if (!root_tag_get(root, tag)) 699 return 0; 700 701 node = rcu_dereference_raw(root->rnode); 702 if (node == NULL) 703 return 0; 704 705 if (!radix_tree_is_indirect_ptr(node)) 706 return (index == 0); 707 node = indirect_to_ptr(node); 708 709 height = node->path & RADIX_TREE_HEIGHT_MASK; 710 if (index > radix_tree_maxindex(height)) 711 return 0; 712 713 shift = (height - 1) * RADIX_TREE_MAP_SHIFT; 714 715 for ( ; ; ) { 716 int offset; 717 718 if (node == NULL) 719 return 0; 720 721 offset = (index >> shift) & RADIX_TREE_MAP_MASK; 722 if (!tag_get(node, tag, offset)) 723 return 0; 724 if (height == 1) 725 return 1; 726 node = rcu_dereference_raw(node->slots[offset]); 727 shift -= RADIX_TREE_MAP_SHIFT; 728 height--; 729 } 730 } 731 EXPORT_SYMBOL(radix_tree_tag_get); 732 733 /** 734 * radix_tree_next_chunk - find next chunk of slots for iteration 735 * 736 * @root: radix tree root 737 * @iter: iterator state 738 * @flags: RADIX_TREE_ITER_* flags and tag index 739 * Returns: pointer to chunk first slot, or NULL if iteration is over 740 */ 741 void **radix_tree_next_chunk(struct radix_tree_root *root, 742 struct radix_tree_iter *iter, unsigned flags) 743 { 744 unsigned shift, tag = flags & RADIX_TREE_ITER_TAG_MASK; 745 struct radix_tree_node *rnode, *node; 746 unsigned long index, offset, height; 747 748 if ((flags & RADIX_TREE_ITER_TAGGED) && !root_tag_get(root, tag)) 749 return NULL; 750 751 /* 752 * Catch next_index overflow after ~0UL. iter->index never overflows 753 * during iterating; it can be zero only at the beginning. 754 * And we cannot overflow iter->next_index in a single step, 755 * because RADIX_TREE_MAP_SHIFT < BITS_PER_LONG. 756 * 757 * This condition also used by radix_tree_next_slot() to stop 758 * contiguous iterating, and forbid swithing to the next chunk. 759 */ 760 index = iter->next_index; 761 if (!index && iter->index) 762 return NULL; 763 764 rnode = rcu_dereference_raw(root->rnode); 765 if (radix_tree_is_indirect_ptr(rnode)) { 766 rnode = indirect_to_ptr(rnode); 767 } else if (rnode && !index) { 768 /* Single-slot tree */ 769 iter->index = 0; 770 iter->next_index = 1; 771 iter->tags = 1; 772 return (void **)&root->rnode; 773 } else 774 return NULL; 775 776 restart: 777 height = rnode->path & RADIX_TREE_HEIGHT_MASK; 778 shift = (height - 1) * RADIX_TREE_MAP_SHIFT; 779 offset = index >> shift; 780 781 /* Index outside of the tree */ 782 if (offset >= RADIX_TREE_MAP_SIZE) 783 return NULL; 784 785 node = rnode; 786 while (1) { 787 if ((flags & RADIX_TREE_ITER_TAGGED) ? 788 !test_bit(offset, node->tags[tag]) : 789 !node->slots[offset]) { 790 /* Hole detected */ 791 if (flags & RADIX_TREE_ITER_CONTIG) 792 return NULL; 793 794 if (flags & RADIX_TREE_ITER_TAGGED) 795 offset = radix_tree_find_next_bit( 796 node->tags[tag], 797 RADIX_TREE_MAP_SIZE, 798 offset + 1); 799 else 800 while (++offset < RADIX_TREE_MAP_SIZE) { 801 if (node->slots[offset]) 802 break; 803 } 804 index &= ~((RADIX_TREE_MAP_SIZE << shift) - 1); 805 index += offset << shift; 806 /* Overflow after ~0UL */ 807 if (!index) 808 return NULL; 809 if (offset == RADIX_TREE_MAP_SIZE) 810 goto restart; 811 } 812 813 /* This is leaf-node */ 814 if (!shift) 815 break; 816 817 node = rcu_dereference_raw(node->slots[offset]); 818 if (node == NULL) 819 goto restart; 820 shift -= RADIX_TREE_MAP_SHIFT; 821 offset = (index >> shift) & RADIX_TREE_MAP_MASK; 822 } 823 824 /* Update the iterator state */ 825 iter->index = index; 826 iter->next_index = (index | RADIX_TREE_MAP_MASK) + 1; 827 828 /* Construct iter->tags bit-mask from node->tags[tag] array */ 829 if (flags & RADIX_TREE_ITER_TAGGED) { 830 unsigned tag_long, tag_bit; 831 832 tag_long = offset / BITS_PER_LONG; 833 tag_bit = offset % BITS_PER_LONG; 834 iter->tags = node->tags[tag][tag_long] >> tag_bit; 835 /* This never happens if RADIX_TREE_TAG_LONGS == 1 */ 836 if (tag_long < RADIX_TREE_TAG_LONGS - 1) { 837 /* Pick tags from next element */ 838 if (tag_bit) 839 iter->tags |= node->tags[tag][tag_long + 1] << 840 (BITS_PER_LONG - tag_bit); 841 /* Clip chunk size, here only BITS_PER_LONG tags */ 842 iter->next_index = index + BITS_PER_LONG; 843 } 844 } 845 846 return node->slots + offset; 847 } 848 EXPORT_SYMBOL(radix_tree_next_chunk); 849 850 /** 851 * radix_tree_range_tag_if_tagged - for each item in given range set given 852 * tag if item has another tag set 853 * @root: radix tree root 854 * @first_indexp: pointer to a starting index of a range to scan 855 * @last_index: last index of a range to scan 856 * @nr_to_tag: maximum number items to tag 857 * @iftag: tag index to test 858 * @settag: tag index to set if tested tag is set 859 * 860 * This function scans range of radix tree from first_index to last_index 861 * (inclusive). For each item in the range if iftag is set, the function sets 862 * also settag. The function stops either after tagging nr_to_tag items or 863 * after reaching last_index. 864 * 865 * The tags must be set from the leaf level only and propagated back up the 866 * path to the root. We must do this so that we resolve the full path before 867 * setting any tags on intermediate nodes. If we set tags as we descend, then 868 * we can get to the leaf node and find that the index that has the iftag 869 * set is outside the range we are scanning. This reults in dangling tags and 870 * can lead to problems with later tag operations (e.g. livelocks on lookups). 871 * 872 * The function returns number of leaves where the tag was set and sets 873 * *first_indexp to the first unscanned index. 874 * WARNING! *first_indexp can wrap if last_index is ULONG_MAX. Caller must 875 * be prepared to handle that. 876 */ 877 unsigned long radix_tree_range_tag_if_tagged(struct radix_tree_root *root, 878 unsigned long *first_indexp, unsigned long last_index, 879 unsigned long nr_to_tag, 880 unsigned int iftag, unsigned int settag) 881 { 882 unsigned int height = root->height; 883 struct radix_tree_node *node = NULL; 884 struct radix_tree_node *slot; 885 unsigned int shift; 886 unsigned long tagged = 0; 887 unsigned long index = *first_indexp; 888 889 last_index = min(last_index, radix_tree_maxindex(height)); 890 if (index > last_index) 891 return 0; 892 if (!nr_to_tag) 893 return 0; 894 if (!root_tag_get(root, iftag)) { 895 *first_indexp = last_index + 1; 896 return 0; 897 } 898 if (height == 0) { 899 *first_indexp = last_index + 1; 900 root_tag_set(root, settag); 901 return 1; 902 } 903 904 shift = (height - 1) * RADIX_TREE_MAP_SHIFT; 905 slot = indirect_to_ptr(root->rnode); 906 907 for (;;) { 908 unsigned long upindex; 909 int offset; 910 911 offset = (index >> shift) & RADIX_TREE_MAP_MASK; 912 if (!slot->slots[offset]) 913 goto next; 914 if (!tag_get(slot, iftag, offset)) 915 goto next; 916 if (shift) { 917 /* Go down one level */ 918 shift -= RADIX_TREE_MAP_SHIFT; 919 node = slot; 920 slot = slot->slots[offset]; 921 continue; 922 } 923 924 /* tag the leaf */ 925 tagged++; 926 tag_set(slot, settag, offset); 927 928 /* walk back up the path tagging interior nodes */ 929 upindex = index; 930 while (node) { 931 upindex >>= RADIX_TREE_MAP_SHIFT; 932 offset = upindex & RADIX_TREE_MAP_MASK; 933 934 /* stop if we find a node with the tag already set */ 935 if (tag_get(node, settag, offset)) 936 break; 937 tag_set(node, settag, offset); 938 node = node->parent; 939 } 940 941 /* 942 * Small optimization: now clear that node pointer. 943 * Since all of this slot's ancestors now have the tag set 944 * from setting it above, we have no further need to walk 945 * back up the tree setting tags, until we update slot to 946 * point to another radix_tree_node. 947 */ 948 node = NULL; 949 950 next: 951 /* Go to next item at level determined by 'shift' */ 952 index = ((index >> shift) + 1) << shift; 953 /* Overflow can happen when last_index is ~0UL... */ 954 if (index > last_index || !index) 955 break; 956 if (tagged >= nr_to_tag) 957 break; 958 while (((index >> shift) & RADIX_TREE_MAP_MASK) == 0) { 959 /* 960 * We've fully scanned this node. Go up. Because 961 * last_index is guaranteed to be in the tree, what 962 * we do below cannot wander astray. 963 */ 964 slot = slot->parent; 965 shift += RADIX_TREE_MAP_SHIFT; 966 } 967 } 968 /* 969 * We need not to tag the root tag if there is no tag which is set with 970 * settag within the range from *first_indexp to last_index. 971 */ 972 if (tagged > 0) 973 root_tag_set(root, settag); 974 *first_indexp = index; 975 976 return tagged; 977 } 978 EXPORT_SYMBOL(radix_tree_range_tag_if_tagged); 979 980 /** 981 * radix_tree_gang_lookup - perform multiple lookup on a radix tree 982 * @root: radix tree root 983 * @results: where the results of the lookup are placed 984 * @first_index: start the lookup from this key 985 * @max_items: place up to this many items at *results 986 * 987 * Performs an index-ascending scan of the tree for present items. Places 988 * them at *@results and returns the number of items which were placed at 989 * *@results. 990 * 991 * The implementation is naive. 992 * 993 * Like radix_tree_lookup, radix_tree_gang_lookup may be called under 994 * rcu_read_lock. In this case, rather than the returned results being 995 * an atomic snapshot of the tree at a single point in time, the semantics 996 * of an RCU protected gang lookup are as though multiple radix_tree_lookups 997 * have been issued in individual locks, and results stored in 'results'. 998 */ 999 unsigned int 1000 radix_tree_gang_lookup(struct radix_tree_root *root, void **results, 1001 unsigned long first_index, unsigned int max_items) 1002 { 1003 struct radix_tree_iter iter; 1004 void **slot; 1005 unsigned int ret = 0; 1006 1007 if (unlikely(!max_items)) 1008 return 0; 1009 1010 radix_tree_for_each_slot(slot, root, &iter, first_index) { 1011 results[ret] = indirect_to_ptr(rcu_dereference_raw(*slot)); 1012 if (!results[ret]) 1013 continue; 1014 if (++ret == max_items) 1015 break; 1016 } 1017 1018 return ret; 1019 } 1020 EXPORT_SYMBOL(radix_tree_gang_lookup); 1021 1022 /** 1023 * radix_tree_gang_lookup_slot - perform multiple slot lookup on radix tree 1024 * @root: radix tree root 1025 * @results: where the results of the lookup are placed 1026 * @indices: where their indices should be placed (but usually NULL) 1027 * @first_index: start the lookup from this key 1028 * @max_items: place up to this many items at *results 1029 * 1030 * Performs an index-ascending scan of the tree for present items. Places 1031 * their slots at *@results and returns the number of items which were 1032 * placed at *@results. 1033 * 1034 * The implementation is naive. 1035 * 1036 * Like radix_tree_gang_lookup as far as RCU and locking goes. Slots must 1037 * be dereferenced with radix_tree_deref_slot, and if using only RCU 1038 * protection, radix_tree_deref_slot may fail requiring a retry. 1039 */ 1040 unsigned int 1041 radix_tree_gang_lookup_slot(struct radix_tree_root *root, 1042 void ***results, unsigned long *indices, 1043 unsigned long first_index, unsigned int max_items) 1044 { 1045 struct radix_tree_iter iter; 1046 void **slot; 1047 unsigned int ret = 0; 1048 1049 if (unlikely(!max_items)) 1050 return 0; 1051 1052 radix_tree_for_each_slot(slot, root, &iter, first_index) { 1053 results[ret] = slot; 1054 if (indices) 1055 indices[ret] = iter.index; 1056 if (++ret == max_items) 1057 break; 1058 } 1059 1060 return ret; 1061 } 1062 EXPORT_SYMBOL(radix_tree_gang_lookup_slot); 1063 1064 /** 1065 * radix_tree_gang_lookup_tag - perform multiple lookup on a radix tree 1066 * based on a tag 1067 * @root: radix tree root 1068 * @results: where the results of the lookup are placed 1069 * @first_index: start the lookup from this key 1070 * @max_items: place up to this many items at *results 1071 * @tag: the tag index (< RADIX_TREE_MAX_TAGS) 1072 * 1073 * Performs an index-ascending scan of the tree for present items which 1074 * have the tag indexed by @tag set. Places the items at *@results and 1075 * returns the number of items which were placed at *@results. 1076 */ 1077 unsigned int 1078 radix_tree_gang_lookup_tag(struct radix_tree_root *root, void **results, 1079 unsigned long first_index, unsigned int max_items, 1080 unsigned int tag) 1081 { 1082 struct radix_tree_iter iter; 1083 void **slot; 1084 unsigned int ret = 0; 1085 1086 if (unlikely(!max_items)) 1087 return 0; 1088 1089 radix_tree_for_each_tagged(slot, root, &iter, first_index, tag) { 1090 results[ret] = indirect_to_ptr(rcu_dereference_raw(*slot)); 1091 if (!results[ret]) 1092 continue; 1093 if (++ret == max_items) 1094 break; 1095 } 1096 1097 return ret; 1098 } 1099 EXPORT_SYMBOL(radix_tree_gang_lookup_tag); 1100 1101 /** 1102 * radix_tree_gang_lookup_tag_slot - perform multiple slot lookup on a 1103 * radix tree based on a tag 1104 * @root: radix tree root 1105 * @results: where the results of the lookup are placed 1106 * @first_index: start the lookup from this key 1107 * @max_items: place up to this many items at *results 1108 * @tag: the tag index (< RADIX_TREE_MAX_TAGS) 1109 * 1110 * Performs an index-ascending scan of the tree for present items which 1111 * have the tag indexed by @tag set. Places the slots at *@results and 1112 * returns the number of slots which were placed at *@results. 1113 */ 1114 unsigned int 1115 radix_tree_gang_lookup_tag_slot(struct radix_tree_root *root, void ***results, 1116 unsigned long first_index, unsigned int max_items, 1117 unsigned int tag) 1118 { 1119 struct radix_tree_iter iter; 1120 void **slot; 1121 unsigned int ret = 0; 1122 1123 if (unlikely(!max_items)) 1124 return 0; 1125 1126 radix_tree_for_each_tagged(slot, root, &iter, first_index, tag) { 1127 results[ret] = slot; 1128 if (++ret == max_items) 1129 break; 1130 } 1131 1132 return ret; 1133 } 1134 EXPORT_SYMBOL(radix_tree_gang_lookup_tag_slot); 1135 1136 #if defined(CONFIG_SHMEM) && defined(CONFIG_SWAP) 1137 #include <linux/sched.h> /* for cond_resched() */ 1138 1139 /* 1140 * This linear search is at present only useful to shmem_unuse_inode(). 1141 */ 1142 static unsigned long __locate(struct radix_tree_node *slot, void *item, 1143 unsigned long index, unsigned long *found_index) 1144 { 1145 unsigned int shift, height; 1146 unsigned long i; 1147 1148 height = slot->path & RADIX_TREE_HEIGHT_MASK; 1149 shift = (height-1) * RADIX_TREE_MAP_SHIFT; 1150 1151 for ( ; height > 1; height--) { 1152 i = (index >> shift) & RADIX_TREE_MAP_MASK; 1153 for (;;) { 1154 if (slot->slots[i] != NULL) 1155 break; 1156 index &= ~((1UL << shift) - 1); 1157 index += 1UL << shift; 1158 if (index == 0) 1159 goto out; /* 32-bit wraparound */ 1160 i++; 1161 if (i == RADIX_TREE_MAP_SIZE) 1162 goto out; 1163 } 1164 1165 shift -= RADIX_TREE_MAP_SHIFT; 1166 slot = rcu_dereference_raw(slot->slots[i]); 1167 if (slot == NULL) 1168 goto out; 1169 } 1170 1171 /* Bottom level: check items */ 1172 for (i = 0; i < RADIX_TREE_MAP_SIZE; i++) { 1173 if (slot->slots[i] == item) { 1174 *found_index = index + i; 1175 index = 0; 1176 goto out; 1177 } 1178 } 1179 index += RADIX_TREE_MAP_SIZE; 1180 out: 1181 return index; 1182 } 1183 1184 /** 1185 * radix_tree_locate_item - search through radix tree for item 1186 * @root: radix tree root 1187 * @item: item to be found 1188 * 1189 * Returns index where item was found, or -1 if not found. 1190 * Caller must hold no lock (since this time-consuming function needs 1191 * to be preemptible), and must check afterwards if item is still there. 1192 */ 1193 unsigned long radix_tree_locate_item(struct radix_tree_root *root, void *item) 1194 { 1195 struct radix_tree_node *node; 1196 unsigned long max_index; 1197 unsigned long cur_index = 0; 1198 unsigned long found_index = -1; 1199 1200 do { 1201 rcu_read_lock(); 1202 node = rcu_dereference_raw(root->rnode); 1203 if (!radix_tree_is_indirect_ptr(node)) { 1204 rcu_read_unlock(); 1205 if (node == item) 1206 found_index = 0; 1207 break; 1208 } 1209 1210 node = indirect_to_ptr(node); 1211 max_index = radix_tree_maxindex(node->path & 1212 RADIX_TREE_HEIGHT_MASK); 1213 if (cur_index > max_index) { 1214 rcu_read_unlock(); 1215 break; 1216 } 1217 1218 cur_index = __locate(node, item, cur_index, &found_index); 1219 rcu_read_unlock(); 1220 cond_resched(); 1221 } while (cur_index != 0 && cur_index <= max_index); 1222 1223 return found_index; 1224 } 1225 #else 1226 unsigned long radix_tree_locate_item(struct radix_tree_root *root, void *item) 1227 { 1228 return -1; 1229 } 1230 #endif /* CONFIG_SHMEM && CONFIG_SWAP */ 1231 1232 /** 1233 * radix_tree_shrink - shrink height of a radix tree to minimal 1234 * @root radix tree root 1235 */ 1236 static inline void radix_tree_shrink(struct radix_tree_root *root) 1237 { 1238 /* try to shrink tree height */ 1239 while (root->height > 0) { 1240 struct radix_tree_node *to_free = root->rnode; 1241 struct radix_tree_node *slot; 1242 1243 BUG_ON(!radix_tree_is_indirect_ptr(to_free)); 1244 to_free = indirect_to_ptr(to_free); 1245 1246 /* 1247 * The candidate node has more than one child, or its child 1248 * is not at the leftmost slot, we cannot shrink. 1249 */ 1250 if (to_free->count != 1) 1251 break; 1252 if (!to_free->slots[0]) 1253 break; 1254 1255 /* 1256 * We don't need rcu_assign_pointer(), since we are simply 1257 * moving the node from one part of the tree to another: if it 1258 * was safe to dereference the old pointer to it 1259 * (to_free->slots[0]), it will be safe to dereference the new 1260 * one (root->rnode) as far as dependent read barriers go. 1261 */ 1262 slot = to_free->slots[0]; 1263 if (root->height > 1) { 1264 slot->parent = NULL; 1265 slot = ptr_to_indirect(slot); 1266 } 1267 root->rnode = slot; 1268 root->height--; 1269 1270 /* 1271 * We have a dilemma here. The node's slot[0] must not be 1272 * NULLed in case there are concurrent lookups expecting to 1273 * find the item. However if this was a bottom-level node, 1274 * then it may be subject to the slot pointer being visible 1275 * to callers dereferencing it. If item corresponding to 1276 * slot[0] is subsequently deleted, these callers would expect 1277 * their slot to become empty sooner or later. 1278 * 1279 * For example, lockless pagecache will look up a slot, deref 1280 * the page pointer, and if the page is 0 refcount it means it 1281 * was concurrently deleted from pagecache so try the deref 1282 * again. Fortunately there is already a requirement for logic 1283 * to retry the entire slot lookup -- the indirect pointer 1284 * problem (replacing direct root node with an indirect pointer 1285 * also results in a stale slot). So tag the slot as indirect 1286 * to force callers to retry. 1287 */ 1288 if (root->height == 0) 1289 *((unsigned long *)&to_free->slots[0]) |= 1290 RADIX_TREE_INDIRECT_PTR; 1291 1292 radix_tree_node_free(to_free); 1293 } 1294 } 1295 1296 /** 1297 * __radix_tree_delete_node - try to free node after clearing a slot 1298 * @root: radix tree root 1299 * @index: index key 1300 * @node: node containing @index 1301 * 1302 * After clearing the slot at @index in @node from radix tree 1303 * rooted at @root, call this function to attempt freeing the 1304 * node and shrinking the tree. 1305 * 1306 * Returns %true if @node was freed, %false otherwise. 1307 */ 1308 bool __radix_tree_delete_node(struct radix_tree_root *root, 1309 struct radix_tree_node *node) 1310 { 1311 bool deleted = false; 1312 1313 do { 1314 struct radix_tree_node *parent; 1315 1316 if (node->count) { 1317 if (node == indirect_to_ptr(root->rnode)) { 1318 radix_tree_shrink(root); 1319 if (root->height == 0) 1320 deleted = true; 1321 } 1322 return deleted; 1323 } 1324 1325 parent = node->parent; 1326 if (parent) { 1327 unsigned int offset; 1328 1329 offset = node->path >> RADIX_TREE_HEIGHT_SHIFT; 1330 parent->slots[offset] = NULL; 1331 parent->count--; 1332 } else { 1333 root_tag_clear_all(root); 1334 root->height = 0; 1335 root->rnode = NULL; 1336 } 1337 1338 radix_tree_node_free(node); 1339 deleted = true; 1340 1341 node = parent; 1342 } while (node); 1343 1344 return deleted; 1345 } 1346 1347 /** 1348 * radix_tree_delete_item - delete an item from a radix tree 1349 * @root: radix tree root 1350 * @index: index key 1351 * @item: expected item 1352 * 1353 * Remove @item at @index from the radix tree rooted at @root. 1354 * 1355 * Returns the address of the deleted item, or NULL if it was not present 1356 * or the entry at the given @index was not @item. 1357 */ 1358 void *radix_tree_delete_item(struct radix_tree_root *root, 1359 unsigned long index, void *item) 1360 { 1361 struct radix_tree_node *node; 1362 unsigned int offset; 1363 void **slot; 1364 void *entry; 1365 int tag; 1366 1367 entry = __radix_tree_lookup(root, index, &node, &slot); 1368 if (!entry) 1369 return NULL; 1370 1371 if (item && entry != item) 1372 return NULL; 1373 1374 if (!node) { 1375 root_tag_clear_all(root); 1376 root->rnode = NULL; 1377 return entry; 1378 } 1379 1380 offset = index & RADIX_TREE_MAP_MASK; 1381 1382 /* 1383 * Clear all tags associated with the item to be deleted. 1384 * This way of doing it would be inefficient, but seldom is any set. 1385 */ 1386 for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++) { 1387 if (tag_get(node, tag, offset)) 1388 radix_tree_tag_clear(root, index, tag); 1389 } 1390 1391 node->slots[offset] = NULL; 1392 node->count--; 1393 1394 __radix_tree_delete_node(root, node); 1395 1396 return entry; 1397 } 1398 EXPORT_SYMBOL(radix_tree_delete_item); 1399 1400 /** 1401 * radix_tree_delete - delete an item from a radix tree 1402 * @root: radix tree root 1403 * @index: index key 1404 * 1405 * Remove the item at @index from the radix tree rooted at @root. 1406 * 1407 * Returns the address of the deleted item, or NULL if it was not present. 1408 */ 1409 void *radix_tree_delete(struct radix_tree_root *root, unsigned long index) 1410 { 1411 return radix_tree_delete_item(root, index, NULL); 1412 } 1413 EXPORT_SYMBOL(radix_tree_delete); 1414 1415 /** 1416 * radix_tree_tagged - test whether any items in the tree are tagged 1417 * @root: radix tree root 1418 * @tag: tag to test 1419 */ 1420 int radix_tree_tagged(struct radix_tree_root *root, unsigned int tag) 1421 { 1422 return root_tag_get(root, tag); 1423 } 1424 EXPORT_SYMBOL(radix_tree_tagged); 1425 1426 static void 1427 radix_tree_node_ctor(void *arg) 1428 { 1429 struct radix_tree_node *node = arg; 1430 1431 memset(node, 0, sizeof(*node)); 1432 INIT_LIST_HEAD(&node->private_list); 1433 } 1434 1435 static __init unsigned long __maxindex(unsigned int height) 1436 { 1437 unsigned int width = height * RADIX_TREE_MAP_SHIFT; 1438 int shift = RADIX_TREE_INDEX_BITS - width; 1439 1440 if (shift < 0) 1441 return ~0UL; 1442 if (shift >= BITS_PER_LONG) 1443 return 0UL; 1444 return ~0UL >> shift; 1445 } 1446 1447 static __init void radix_tree_init_maxindex(void) 1448 { 1449 unsigned int i; 1450 1451 for (i = 0; i < ARRAY_SIZE(height_to_maxindex); i++) 1452 height_to_maxindex[i] = __maxindex(i); 1453 } 1454 1455 static int radix_tree_callback(struct notifier_block *nfb, 1456 unsigned long action, 1457 void *hcpu) 1458 { 1459 int cpu = (long)hcpu; 1460 struct radix_tree_preload *rtp; 1461 1462 /* Free per-cpu pool of perloaded nodes */ 1463 if (action == CPU_DEAD || action == CPU_DEAD_FROZEN) { 1464 rtp = &per_cpu(radix_tree_preloads, cpu); 1465 while (rtp->nr) { 1466 kmem_cache_free(radix_tree_node_cachep, 1467 rtp->nodes[rtp->nr-1]); 1468 rtp->nodes[rtp->nr-1] = NULL; 1469 rtp->nr--; 1470 } 1471 } 1472 return NOTIFY_OK; 1473 } 1474 1475 void __init radix_tree_init(void) 1476 { 1477 radix_tree_node_cachep = kmem_cache_create("radix_tree_node", 1478 sizeof(struct radix_tree_node), 0, 1479 SLAB_PANIC | SLAB_RECLAIM_ACCOUNT, 1480 radix_tree_node_ctor); 1481 radix_tree_init_maxindex(); 1482 hotcpu_notifier(radix_tree_callback, 0); 1483 } 1484