/*
 * Copyright (C) 2001 Momchil Velikov
 * Portions Copyright (C) 2001 Christoph Hellwig
 * Copyright (C) 2005 SGI, Christoph Lameter
 * Copyright (C) 2006 Nick Piggin
 * Copyright (C) 2012 Konstantin Khlebnikov
 * Copyright (C) 2016 Intel, Matthew Wilcox
 * Copyright (C) 2016 Intel, Ross Zwisler
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2, or (at
 * your option) any later version.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

#include <linux/errno.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/export.h>
#include <linux/radix-tree.h>
#include <linux/percpu.h>
#include <linux/slab.h>
#include <linux/kmemleak.h>
#include <linux/notifier.h>
#include <linux/cpu.h>
#include <linux/string.h>
#include <linux/bitops.h>
#include <linux/rcupdate.h>
#include <linux/preempt.h>		/* in_interrupt() */


/* Number of nodes in a fully populated tree of given height */
static unsigned long height_to_maxnodes[RADIX_TREE_MAX_PATH + 1] __read_mostly;

/*
 * Radix tree node cache.
 */
static struct kmem_cache *radix_tree_node_cachep;

/*
 * The radix tree is variable-height, so an insert operation not only has
 * to build the branch to its corresponding item, it also has to build the
 * branch to existing items if the size has to be increased (by
 * radix_tree_extend).
 *
 * The worst case is a zero-height tree with just a single item at index 0,
 * and then inserting an item at index ULONG_MAX. This requires 2 new branches
 * of RADIX_TREE_MAX_PATH size to be created, with only the root node shared.
 * Hence:
 */
#define RADIX_TREE_PRELOAD_SIZE (RADIX_TREE_MAP_SIZE * 0 + RADIX_TREE_MAX_PATH * 2 - 1)
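
/*
 * A worked example, assuming the common configuration of 64-bit longs and
 * RADIX_TREE_MAP_SHIFT == 6: RADIX_TREE_MAX_PATH = DIV_ROUND_UP(64, 6) = 11,
 * so the two branches cost 2 * 11 - 1 = 21 preloaded nodes.
 */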

/*
 * Per-cpu pool of preloaded nodes
 */
struct radix_tree_preload {
	unsigned nr;
	/* nodes->private_data points to next preallocated node */
	struct radix_tree_node *nodes;
};
static DEFINE_PER_CPU(struct radix_tree_preload, radix_tree_preloads) = { 0, };

static inline void *node_to_entry(void *ptr)
{
	return (void *)((unsigned long)ptr | RADIX_TREE_INTERNAL_NODE);
}

#define RADIX_TREE_RETRY	node_to_entry(NULL)

#ifdef CONFIG_RADIX_TREE_MULTIORDER
/* Sibling slots point directly to another slot in the same node */
static inline bool is_sibling_entry(struct radix_tree_node *parent, void *node)
{
	void **ptr = node;
	return (parent->slots <= ptr) &&
			(ptr < parent->slots + RADIX_TREE_MAP_SIZE);
}
#else
static inline bool is_sibling_entry(struct radix_tree_node *parent, void *node)
{
	return false;
}
#endif

static inline unsigned long get_slot_offset(struct radix_tree_node *parent,
						 void **slot)
{
	return slot - parent->slots;
}

static unsigned int radix_tree_descend(struct radix_tree_node *parent,
			struct radix_tree_node **nodep, unsigned long index)
{
	unsigned int offset = (index >> parent->shift) & RADIX_TREE_MAP_MASK;
	void **entry = rcu_dereference_raw(parent->slots[offset]);

#ifdef CONFIG_RADIX_TREE_MULTIORDER
	if (radix_tree_is_internal_node(entry)) {
		unsigned long siboff = get_slot_offset(parent, entry);
		if (siboff < RADIX_TREE_MAP_SIZE) {
			offset = siboff;
			entry = rcu_dereference_raw(parent->slots[offset]);
		}
	}
#endif

	*nodep = (void *)entry;
	return offset;
}

static inline gfp_t root_gfp_mask(struct radix_tree_root *root)
{
	return root->gfp_mask & __GFP_BITS_MASK;
}

static inline void tag_set(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	__set_bit(offset, node->tags[tag]);
}

static inline void tag_clear(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	__clear_bit(offset, node->tags[tag]);
}

static inline int tag_get(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	return test_bit(offset, node->tags[tag]);
}

static inline void root_tag_set(struct radix_tree_root *root, unsigned int tag)
{
	root->gfp_mask |= (__force gfp_t)(1 << (tag + __GFP_BITS_SHIFT));
}

static inline void root_tag_clear(struct radix_tree_root *root, unsigned tag)
{
	root->gfp_mask &= (__force gfp_t)~(1 << (tag + __GFP_BITS_SHIFT));
}

static inline void root_tag_clear_all(struct radix_tree_root *root)
{
	root->gfp_mask &= __GFP_BITS_MASK;
}

static inline int root_tag_get(struct radix_tree_root *root, unsigned int tag)
{
	return (__force int)root->gfp_mask & (1 << (tag + __GFP_BITS_SHIFT));
}

static inline unsigned root_tags_get(struct radix_tree_root *root)
{
	return (__force unsigned)root->gfp_mask >> __GFP_BITS_SHIFT;
}

/*
 * Returns 1 if any slot in the node has this tag set.
 * Otherwise returns 0.
 */
static inline int any_tag_set(struct radix_tree_node *node, unsigned int tag)
{
	unsigned idx;
	for (idx = 0; idx < RADIX_TREE_TAG_LONGS; idx++) {
		if (node->tags[tag][idx])
			return 1;
	}
	return 0;
}

/**
 * radix_tree_find_next_bit - find the next set bit in a memory region
 *
 * @addr: The address to base the search on
 * @size: The bitmap size in bits
 * @offset: The bit number to start searching at
 *
 * Unrollable variant of find_next_bit() for constant-size arrays.
 * Tail bits, from @size up to roundup(size, BITS_PER_LONG), must be zero.
 * Returns the next bit offset, or @size if nothing is found.
 */
static __always_inline unsigned long
radix_tree_find_next_bit(const unsigned long *addr,
			 unsigned long size, unsigned long offset)
{
	if (!__builtin_constant_p(size))
		return find_next_bit(addr, size, offset);

	if (offset < size) {
		unsigned long tmp;

		addr += offset / BITS_PER_LONG;
		tmp = *addr >> (offset % BITS_PER_LONG);
		if (tmp)
			return __ffs(tmp) + offset;
		offset = (offset + BITS_PER_LONG) & ~(BITS_PER_LONG - 1);
		while (offset < size) {
			tmp = *++addr;
			if (tmp)
				return __ffs(tmp) + offset;
			offset += BITS_PER_LONG;
		}
	}
	return size;
}

#ifndef __KERNEL__
static void dump_node(struct radix_tree_node *node, unsigned long index)
{
	unsigned long i;

	pr_debug("radix node: %p offset %d tags %lx %lx %lx shift %d count %d parent %p\n",
		node, node->offset,
		node->tags[0][0], node->tags[1][0], node->tags[2][0],
		node->shift, node->count, node->parent);

	for (i = 0; i < RADIX_TREE_MAP_SIZE; i++) {
		unsigned long first = index | (i << node->shift);
		unsigned long last = first | ((1UL << node->shift) - 1);
		void *entry = node->slots[i];
		if (!entry)
			continue;
		if (is_sibling_entry(node, entry)) {
			pr_debug("radix sblng %p offset %ld val %p indices %ld-%ld\n",
					entry, i,
					*(void **)entry_to_node(entry),
					first, last);
		} else if (!radix_tree_is_internal_node(entry)) {
			pr_debug("radix entry %p offset %ld indices %ld-%ld\n",
					entry, i, first, last);
		} else {
			dump_node(entry_to_node(entry), first);
		}
	}
}

/* For debug */
static void radix_tree_dump(struct radix_tree_root *root)
{
	pr_debug("radix root: %p rnode %p tags %x\n",
			root, root->rnode,
			root->gfp_mask >> __GFP_BITS_SHIFT);
	if (!radix_tree_is_internal_node(root->rnode))
		return;
	dump_node(entry_to_node(root->rnode), 0);
}
#endif

/*
 * This assumes that the caller has performed appropriate preallocation, and
 * that the caller has pinned this thread of control to the current CPU.
 */
static struct radix_tree_node *
radix_tree_node_alloc(struct radix_tree_root *root)
{
	struct radix_tree_node *ret = NULL;
	gfp_t gfp_mask = root_gfp_mask(root);

	/*
	 * Preload code isn't irq safe and it doesn't make sense to use
	 * preloading during an interrupt anyway as all the allocations have
	 * to be atomic. So just do normal allocation when in interrupt.
	 */
	if (!gfpflags_allow_blocking(gfp_mask) && !in_interrupt()) {
		struct radix_tree_preload *rtp;

		/*
		 * Even if the caller has preloaded, try to allocate from the
		 * cache first for the new node to get accounted.
		 */
		ret = kmem_cache_alloc(radix_tree_node_cachep,
				       gfp_mask | __GFP_ACCOUNT | __GFP_NOWARN);
		if (ret)
			goto out;

		/*
		 * Provided the caller has preloaded here, we will always
		 * succeed in getting a node here (and never reach
		 * kmem_cache_alloc)
		 */
		rtp = this_cpu_ptr(&radix_tree_preloads);
		if (rtp->nr) {
			ret = rtp->nodes;
			rtp->nodes = ret->private_data;
			ret->private_data = NULL;
			rtp->nr--;
		}
		/*
		 * Update the allocation stack trace as this is more useful
		 * for debugging.
		 */
		kmemleak_update_trace(ret);
		goto out;
	}
	ret = kmem_cache_alloc(radix_tree_node_cachep,
			       gfp_mask | __GFP_ACCOUNT);
out:
	BUG_ON(radix_tree_is_internal_node(ret));
	return ret;
}

static void radix_tree_node_rcu_free(struct rcu_head *head)
{
	struct radix_tree_node *node =
			container_of(head, struct radix_tree_node, rcu_head);
	int i;

	/*
	 * must only free zeroed nodes into the slab. radix_tree_shrink
	 * can leave us with a non-NULL entry in the first slot, so clear
	 * that here to make sure.
	 */
	for (i = 0; i < RADIX_TREE_MAX_TAGS; i++)
		tag_clear(node, i, 0);

	node->slots[0] = NULL;
	node->count = 0;

	kmem_cache_free(radix_tree_node_cachep, node);
}

static inline void
radix_tree_node_free(struct radix_tree_node *node)
{
	call_rcu(&node->rcu_head, radix_tree_node_rcu_free);
}

/*
 * Load up this CPU's radix_tree_node buffer with sufficient objects to
 * ensure that the addition of a single element in the tree cannot fail.  On
 * success, return zero, with preemption disabled.  On error, return -ENOMEM
 * with preemption not disabled.
 *
 * To make use of this facility, the radix tree must be initialised without
 * __GFP_DIRECT_RECLAIM being passed to INIT_RADIX_TREE().
 */
static int __radix_tree_preload(gfp_t gfp_mask, int nr)
{
	struct radix_tree_preload *rtp;
	struct radix_tree_node *node;
	int ret = -ENOMEM;

	preempt_disable();
	rtp = this_cpu_ptr(&radix_tree_preloads);
	while (rtp->nr < nr) {
		preempt_enable();
		node = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);
		if (node == NULL)
			goto out;
		preempt_disable();
		rtp = this_cpu_ptr(&radix_tree_preloads);
		if (rtp->nr < nr) {
			node->private_data = rtp->nodes;
			rtp->nodes = node;
			rtp->nr++;
		} else {
			kmem_cache_free(radix_tree_node_cachep, node);
		}
	}
	ret = 0;
out:
	return ret;
}

/*
 * Load up this CPU's radix_tree_node buffer with sufficient objects to
 * ensure that the addition of a single element in the tree cannot fail.  On
 * success, return zero, with preemption disabled.  On error, return -ENOMEM
 * with preemption not disabled.
 *
 * To make use of this facility, the radix tree must be initialised without
 * __GFP_DIRECT_RECLAIM being passed to INIT_RADIX_TREE().
 */
int radix_tree_preload(gfp_t gfp_mask)
{
	/* Warn on nonsensical use... */
	WARN_ON_ONCE(!gfpflags_allow_blocking(gfp_mask));
	return __radix_tree_preload(gfp_mask, RADIX_TREE_PRELOAD_SIZE);
}
EXPORT_SYMBOL(radix_tree_preload);
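
/*
 * A minimal usage sketch (the tree, lock, and function names below are
 * hypothetical, not part of this file).  The tree's gfp mask must not
 * allow blocking, so that allocations made under the lock fall back to
 * the preloaded per-cpu pool:
 *
 *	static RADIX_TREE(my_tree, GFP_ATOMIC);
 *	static DEFINE_SPINLOCK(my_lock);
 *
 *	int my_store(unsigned long index, void *item)
 *	{
 *		int err = radix_tree_preload(GFP_KERNEL);
 *
 *		if (err)
 *			return err;
 *		spin_lock(&my_lock);
 *		err = radix_tree_insert(&my_tree, index, item);
 *		spin_unlock(&my_lock);
 *		radix_tree_preload_end();
 *		return err;
 *	}
 *
 * radix_tree_preload_end() re-enables the preemption that a successful
 * radix_tree_preload() left disabled.
 */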

/*
 * The same as the function above, except we don't guarantee preloading
 * happens.  We do it if we decide it helps.  On success, return zero with
 * preemption disabled.  On error, return -ENOMEM with preemption not
 * disabled.
 */
int radix_tree_maybe_preload(gfp_t gfp_mask)
{
	if (gfpflags_allow_blocking(gfp_mask))
		return __radix_tree_preload(gfp_mask, RADIX_TREE_PRELOAD_SIZE);
	/* Preloading doesn't help anything with this gfp mask, skip it */
	preempt_disable();
	return 0;
}
EXPORT_SYMBOL(radix_tree_maybe_preload);

/*
 * The same as the function above, but preloads the number of nodes required
 * to insert (1 << order) contiguous, naturally aligned elements.
 */
int radix_tree_maybe_preload_order(gfp_t gfp_mask, int order)
{
	unsigned long nr_subtrees;
	int nr_nodes, subtree_height;

	/* Preloading doesn't help anything with this gfp mask, skip it */
	if (!gfpflags_allow_blocking(gfp_mask)) {
		preempt_disable();
		return 0;
	}

	/*
	 * Calculate the number and height of fully populated subtrees it
	 * takes to store (1 << order) elements.
	 */
	nr_subtrees = 1 << order;
	for (subtree_height = 0; nr_subtrees > RADIX_TREE_MAP_SIZE;
			subtree_height++)
		nr_subtrees >>= RADIX_TREE_MAP_SHIFT;

	/*
	 * The worst case is a zero-height tree with a single item at index 0
	 * and then inserting items starting at ULONG_MAX - (1 << order).
	 *
	 * This requires RADIX_TREE_MAX_PATH nodes to build the branch from
	 * the root to the 0-index item.
	 */
	nr_nodes = RADIX_TREE_MAX_PATH;

	/* Plus the branch to the fully populated subtrees. */
	nr_nodes += RADIX_TREE_MAX_PATH - subtree_height;

	/* Root node is shared. */
	nr_nodes--;

	/* Plus the nodes required to build the subtrees. */
	nr_nodes += nr_subtrees * height_to_maxnodes[subtree_height];

	return __radix_tree_preload(gfp_mask, nr_nodes);
}
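
/*
 * A worked example, assuming 64-bit longs and RADIX_TREE_MAP_SHIFT == 6
 * (so RADIX_TREE_MAP_SIZE == 64 and RADIX_TREE_MAX_PATH == 11): for
 * order == 9, nr_subtrees starts at 512 and one round of the loop leaves
 * nr_subtrees == 8 with subtree_height == 1.  nr_nodes then becomes
 * 11 + (11 - 1) - 1 + 8 * height_to_maxnodes[1] = 11 + 10 - 1 + 8 = 28.
 */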

/*
 * The maximum index which can be stored in a radix tree
 */
static inline unsigned long shift_maxindex(unsigned int shift)
{
	return (RADIX_TREE_MAP_SIZE << shift) - 1;
}

static inline unsigned long node_maxindex(struct radix_tree_node *node)
{
	return shift_maxindex(node->shift);
}
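
/*
 * For example, with RADIX_TREE_MAP_SIZE == 64 (an assumption that holds
 * when RADIX_TREE_MAP_SHIFT == 6): a node with shift 0 covers indices
 * 0-63, a node with shift 6 covers indices 0-4095, and each further level
 * multiplies the covered range by 64.
 */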

static unsigned radix_tree_load_root(struct radix_tree_root *root,
		struct radix_tree_node **nodep, unsigned long *maxindex)
{
	struct radix_tree_node *node = rcu_dereference_raw(root->rnode);

	*nodep = node;

	if (likely(radix_tree_is_internal_node(node))) {
		node = entry_to_node(node);
		*maxindex = node_maxindex(node);
		return node->shift + RADIX_TREE_MAP_SHIFT;
	}

	*maxindex = 0;
	return 0;
}

/*
 *	Extend a radix tree so it can store key @index.
 */
static int radix_tree_extend(struct radix_tree_root *root,
				unsigned long index, unsigned int shift)
{
	struct radix_tree_node *slot;
	unsigned int maxshift;
	int tag;

	/* Figure out what the shift should be.  */
	maxshift = shift;
	while (index > shift_maxindex(maxshift))
		maxshift += RADIX_TREE_MAP_SHIFT;

	slot = root->rnode;
	if (!slot)
		goto out;

	do {
		struct radix_tree_node *node = radix_tree_node_alloc(root);

		if (!node)
			return -ENOMEM;

		/* Propagate the aggregated tag info into the new root */
		for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++) {
			if (root_tag_get(root, tag))
				tag_set(node, tag, 0);
		}

		BUG_ON(shift > BITS_PER_LONG);
		node->shift = shift;
		node->offset = 0;
		node->count = 1;
		node->parent = NULL;
		if (radix_tree_is_internal_node(slot))
			entry_to_node(slot)->parent = node;
		node->slots[0] = slot;
		slot = node_to_entry(node);
		rcu_assign_pointer(root->rnode, slot);
		shift += RADIX_TREE_MAP_SHIFT;
	} while (shift <= maxshift);
out:
	return maxshift + RADIX_TREE_MAP_SHIFT;
}

/**
 *	__radix_tree_create	-	create a slot in a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *	@order:		index occupies 2^order aligned slots
 *	@nodep:		returns node
 *	@slotp:		returns slot
 *
 *	Create, if necessary, and return the node and slot for an item
 *	at position @index in the radix tree @root.
 *
 *	Until there is more than one item in the tree, no nodes are
 *	allocated and @root->rnode is used as a direct slot instead of
 *	pointing to a node, in which case *@nodep will be NULL.
 *
 *	Returns -ENOMEM, or 0 for success.
 */
int __radix_tree_create(struct radix_tree_root *root, unsigned long index,
			unsigned order, struct radix_tree_node **nodep,
			void ***slotp)
{
	struct radix_tree_node *node = NULL, *child;
	void **slot = (void **)&root->rnode;
	unsigned long maxindex;
	unsigned int shift, offset = 0;
	unsigned long max = index | ((1UL << order) - 1);

	shift = radix_tree_load_root(root, &child, &maxindex);

	/* Make sure the tree is high enough.  */
	if (max > maxindex) {
		int error = radix_tree_extend(root, max, shift);
		if (error < 0)
			return error;
		shift = error;
		child = root->rnode;
		if (order == shift)
			shift += RADIX_TREE_MAP_SHIFT;
	}

	while (shift > order) {
		shift -= RADIX_TREE_MAP_SHIFT;
		if (child == NULL) {
			/* Have to add a child node.  */
			child = radix_tree_node_alloc(root);
			if (!child)
				return -ENOMEM;
			child->shift = shift;
			child->offset = offset;
			child->parent = node;
			rcu_assign_pointer(*slot, node_to_entry(child));
			if (node)
				node->count++;
		} else if (!radix_tree_is_internal_node(child))
			break;

		/* Go a level down */
		node = entry_to_node(child);
		offset = radix_tree_descend(node, &child, index);
		slot = &node->slots[offset];
	}

#ifdef CONFIG_RADIX_TREE_MULTIORDER
	/* Insert pointers to the canonical entry */
	if (order > shift) {
		unsigned i, n = 1 << (order - shift);
		offset = offset & ~(n - 1);
		slot = &node->slots[offset];
		child = node_to_entry(slot);
		for (i = 0; i < n; i++) {
			if (slot[i])
				return -EEXIST;
		}

		for (i = 1; i < n; i++) {
			rcu_assign_pointer(slot[i], child);
			node->count++;
		}
	}
#endif

	if (nodep)
		*nodep = node;
	if (slotp)
		*slotp = slot;
	return 0;
}

/**
 *	__radix_tree_insert    -    insert into a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *	@order:		key covers the 2^order indices around index
 *	@item:		item to insert
 *
 *	Insert an item into the radix tree at position @index.
 */
int __radix_tree_insert(struct radix_tree_root *root, unsigned long index,
			unsigned order, void *item)
{
	struct radix_tree_node *node;
	void **slot;
	int error;

	BUG_ON(radix_tree_is_internal_node(item));

	error = __radix_tree_create(root, index, order, &node, &slot);
	if (error)
		return error;
	if (*slot != NULL)
		return -EEXIST;
	rcu_assign_pointer(*slot, item);

	if (node) {
		unsigned offset = get_slot_offset(node, slot);
		node->count++;
		BUG_ON(tag_get(node, 0, offset));
		BUG_ON(tag_get(node, 1, offset));
		BUG_ON(tag_get(node, 2, offset));
	} else {
		BUG_ON(root_tags_get(root));
	}

	return 0;
}
EXPORT_SYMBOL(__radix_tree_insert);

/**
 *	__radix_tree_lookup	-	lookup an item in a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *	@nodep:		returns node
 *	@slotp:		returns slot
 *
 *	Lookup and return the item at position @index in the radix
 *	tree @root.
 *
 *	Until there is more than one item in the tree, no nodes are
 *	allocated and @root->rnode is used as a direct slot instead of
 *	pointing to a node, in which case *@nodep will be NULL.
 */
void *__radix_tree_lookup(struct radix_tree_root *root, unsigned long index,
			  struct radix_tree_node **nodep, void ***slotp)
{
	struct radix_tree_node *node, *parent;
	unsigned long maxindex;
	void **slot;

 restart:
	parent = NULL;
	slot = (void **)&root->rnode;
	radix_tree_load_root(root, &node, &maxindex);
	if (index > maxindex)
		return NULL;

	while (radix_tree_is_internal_node(node)) {
		unsigned offset;

		if (node == RADIX_TREE_RETRY)
			goto restart;
		parent = entry_to_node(node);
		offset = radix_tree_descend(parent, &node, index);
		slot = parent->slots + offset;
	}

	if (nodep)
		*nodep = parent;
	if (slotp)
		*slotp = slot;
	return node;
}

/**
 *	radix_tree_lookup_slot    -    lookup a slot in a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *
 *	Returns:  the slot corresponding to the position @index in the
 *	radix tree @root. This is useful for update-if-exists operations.
 *
 *	This function can be called under rcu_read_lock iff the slot is not
 *	modified by radix_tree_replace_slot, otherwise it must be called
 *	exclusively of other writers. Any dereference of the slot must be done
 *	using radix_tree_deref_slot.
 */
void **radix_tree_lookup_slot(struct radix_tree_root *root, unsigned long index)
{
	void **slot;

	if (!__radix_tree_lookup(root, index, NULL, &slot))
		return NULL;
	return slot;
}
EXPORT_SYMBOL(radix_tree_lookup_slot);
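
/*
 * A sketch of an update-if-exists operation built on this function (the
 * tree, lock, and item names are hypothetical).  The lock serialises us
 * against other writers, so a plain replacement of the slot is safe here:
 *
 *	spin_lock(&my_lock);
 *	slot = radix_tree_lookup_slot(&my_tree, index);
 *	if (slot)
 *		radix_tree_replace_slot(slot, new_item);
 *	spin_unlock(&my_lock);
 */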

/**
 *	radix_tree_lookup    -    perform lookup operation on a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *
 *	Lookup the item at the position @index in the radix tree @root.
 *
 *	This function can be called under rcu_read_lock, however the caller
 *	must manage lifetimes of leaf nodes (e.g. RCU may also be used to free
 *	them safely). No RCU barriers are required to access or modify the
 *	returned item, however.
 */
void *radix_tree_lookup(struct radix_tree_root *root, unsigned long index)
{
	return __radix_tree_lookup(root, index, NULL, NULL);
}
EXPORT_SYMBOL(radix_tree_lookup);
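
/*
 * A sketch of an RCU-protected lookup (hypothetical names).  The caller
 * must ensure the returned item stays valid while it is used, e.g. by
 * also freeing items via RCU:
 *
 *	rcu_read_lock();
 *	item = radix_tree_lookup(&my_tree, index);
 *	if (item)
 *		my_process(item);
 *	rcu_read_unlock();
 */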

/**
 *	radix_tree_tag_set - set a tag on a radix tree node
 *	@root:		radix tree root
 *	@index:		index key
 *	@tag:		tag index
 *
 *	Set the search tag (which must be < RADIX_TREE_MAX_TAGS)
 *	corresponding to @index in the radix tree, from the root
 *	all the way down to the leaf node.
 *
 *	Returns the address of the tagged item.  Setting a tag on a
 *	not-present item is a bug.
 */
void *radix_tree_tag_set(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	struct radix_tree_node *node, *parent;
	unsigned long maxindex;

	radix_tree_load_root(root, &node, &maxindex);
	BUG_ON(index > maxindex);

	while (radix_tree_is_internal_node(node)) {
		unsigned offset;

		parent = entry_to_node(node);
		offset = radix_tree_descend(parent, &node, index);
		BUG_ON(!node);

		if (!tag_get(parent, tag, offset))
			tag_set(parent, tag, offset);
	}

	/* set the root's tag bit */
	if (!root_tag_get(root, tag))
		root_tag_set(root, tag);

	return node;
}
EXPORT_SYMBOL(radix_tree_tag_set);

static void node_tag_clear(struct radix_tree_root *root,
				struct radix_tree_node *node,
				unsigned int tag, unsigned int offset)
{
	while (node) {
		if (!tag_get(node, tag, offset))
			return;
		tag_clear(node, tag, offset);
		if (any_tag_set(node, tag))
			return;

		offset = node->offset;
		node = node->parent;
	}

	/* clear the root's tag bit */
	if (root_tag_get(root, tag))
		root_tag_clear(root, tag);
}

/**
 *	radix_tree_tag_clear - clear a tag on a radix tree node
 *	@root:		radix tree root
 *	@index:		index key
 *	@tag:		tag index
 *
 *	Clear the search tag (which must be < RADIX_TREE_MAX_TAGS)
 *	corresponding to @index in the radix tree.  If this causes
 *	the leaf node to have no tags set then clear the tag in the
 *	next-to-leaf node, etc.
 *
 *	Returns the address of the tagged item on success, else NULL; i.e.
 *	it has the same return value and semantics as radix_tree_lookup().
 */
void *radix_tree_tag_clear(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	struct radix_tree_node *node, *parent;
	unsigned long maxindex;
	int uninitialized_var(offset);

	radix_tree_load_root(root, &node, &maxindex);
	if (index > maxindex)
		return NULL;

	parent = NULL;

	while (radix_tree_is_internal_node(node)) {
		parent = entry_to_node(node);
		offset = radix_tree_descend(parent, &node, index);
	}

	if (node)
		node_tag_clear(root, parent, tag, offset);

	return node;
}
EXPORT_SYMBOL(radix_tree_tag_clear);

/**
 * radix_tree_tag_get - get a tag on a radix tree node
 * @root:		radix tree root
 * @index:		index key
 * @tag:		tag index (< RADIX_TREE_MAX_TAGS)
 *
 * Return values:
 *
 *  0: tag not present or not set
 *  1: tag set
 *
 * Note that the return value of this function may not be relied on, even if
 * the RCU lock is held, unless tag modification and node deletion are excluded
 * from concurrency.
 */
int radix_tree_tag_get(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	struct radix_tree_node *node, *parent;
	unsigned long maxindex;

	if (!root_tag_get(root, tag))
		return 0;

	radix_tree_load_root(root, &node, &maxindex);
	if (index > maxindex)
		return 0;
	if (node == NULL)
		return 0;

	while (radix_tree_is_internal_node(node)) {
		unsigned offset;

		parent = entry_to_node(node);
		offset = radix_tree_descend(parent, &node, index);

		if (!node)
			return 0;
		if (!tag_get(parent, tag, offset))
			return 0;
		if (node == RADIX_TREE_RETRY)
			break;
	}

	return 1;
}
EXPORT_SYMBOL(radix_tree_tag_get);
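
/*
 * A sketch of how the tag operations combine (hypothetical names; tag 0
 * is chosen arbitrarily).  Tag updates must be serialised against other
 * tree modifications by the caller:
 *
 *	spin_lock(&my_lock);
 *	radix_tree_tag_set(&my_tree, index, 0);
 *	spin_unlock(&my_lock);
 *
 *	...
 *
 *	if (radix_tree_tag_get(&my_tree, index, 0)) {
 *		spin_lock(&my_lock);
 *		radix_tree_tag_clear(&my_tree, index, 0);
 *		spin_unlock(&my_lock);
 *	}
 */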

static inline void __set_iter_shift(struct radix_tree_iter *iter,
					unsigned int shift)
{
#ifdef CONFIG_RADIX_TREE_MULTIORDER
	iter->shift = shift;
#endif
}

/**
 * radix_tree_next_chunk - find next chunk of slots for iteration
 *
 * @root:	radix tree root
 * @iter:	iterator state
 * @flags:	RADIX_TREE_ITER_* flags and tag index
 * Returns:	pointer to chunk first slot, or NULL if iteration is over
 */
void **radix_tree_next_chunk(struct radix_tree_root *root,
			     struct radix_tree_iter *iter, unsigned flags)
{
	unsigned tag = flags & RADIX_TREE_ITER_TAG_MASK;
	struct radix_tree_node *node, *child;
	unsigned long index, offset, maxindex;

	if ((flags & RADIX_TREE_ITER_TAGGED) && !root_tag_get(root, tag))
		return NULL;

	/*
	 * Catch next_index overflow after ~0UL. iter->index never overflows
	 * during iteration; it can be zero only at the beginning.
	 * And we cannot overflow iter->next_index in a single step,
	 * because RADIX_TREE_MAP_SHIFT < BITS_PER_LONG.
	 *
	 * This condition is also used by radix_tree_next_slot() to stop
	 * contiguous iterating, and to forbid switching to the next chunk.
	 */
	index = iter->next_index;
	if (!index && iter->index)
		return NULL;

 restart:
	radix_tree_load_root(root, &child, &maxindex);
	if (index > maxindex)
		return NULL;
	if (!child)
		return NULL;

	if (!radix_tree_is_internal_node(child)) {
		/* Single-slot tree */
		iter->index = index;
		iter->next_index = maxindex + 1;
		iter->tags = 1;
		__set_iter_shift(iter, 0);
		return (void **)&root->rnode;
	}

	do {
		node = entry_to_node(child);
		offset = radix_tree_descend(node, &child, index);

		if ((flags & RADIX_TREE_ITER_TAGGED) ?
				!tag_get(node, tag, offset) : !child) {
			/* Hole detected */
			if (flags & RADIX_TREE_ITER_CONTIG)
				return NULL;

			if (flags & RADIX_TREE_ITER_TAGGED)
				offset = radix_tree_find_next_bit(
						node->tags[tag],
						RADIX_TREE_MAP_SIZE,
						offset + 1);
			else
				while (++offset < RADIX_TREE_MAP_SIZE) {
					void *slot = node->slots[offset];
					if (is_sibling_entry(node, slot))
						continue;
					if (slot)
						break;
				}
			index &= ~node_maxindex(node);
			index += offset << node->shift;
			/* Overflow after ~0UL */
			if (!index)
				return NULL;
			if (offset == RADIX_TREE_MAP_SIZE)
				goto restart;
			child = rcu_dereference_raw(node->slots[offset]);
		}

		if ((child == NULL) || (child == RADIX_TREE_RETRY))
			goto restart;
	} while (radix_tree_is_internal_node(child));

	/* Update the iterator state */
	iter->index = (index & ~node_maxindex(node)) | (offset << node->shift);
	iter->next_index = (index | node_maxindex(node)) + 1;
	__set_iter_shift(iter, node->shift);

	/* Construct iter->tags bit-mask from node->tags[tag] array */
	if (flags & RADIX_TREE_ITER_TAGGED) {
		unsigned tag_long, tag_bit;

		tag_long = offset / BITS_PER_LONG;
		tag_bit  = offset % BITS_PER_LONG;
		iter->tags = node->tags[tag][tag_long] >> tag_bit;
		/* This never happens if RADIX_TREE_TAG_LONGS == 1 */
		if (tag_long < RADIX_TREE_TAG_LONGS - 1) {
			/* Pick tags from next element */
			if (tag_bit)
				iter->tags |= node->tags[tag][tag_long + 1] <<
						(BITS_PER_LONG - tag_bit);
			/* Clip chunk size, here only BITS_PER_LONG tags */
			iter->next_index = index + BITS_PER_LONG;
		}
	}

	return node->slots + offset;
}
EXPORT_SYMBOL(radix_tree_next_chunk);
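
/*
 * This function is normally reached through the iteration macros in
 * <linux/radix-tree.h> rather than called directly.  A sketch of walking
 * every present entry from index 0 under RCU (hypothetical names):
 *
 *	struct radix_tree_iter iter;
 *	void **slot;
 *
 *	rcu_read_lock();
 *	radix_tree_for_each_slot(slot, &my_tree, &iter, 0) {
 *		void *item = radix_tree_deref_slot(slot);
 *
 *		if (radix_tree_deref_retry(item)) {
 *			slot = radix_tree_iter_retry(&iter);
 *			continue;
 *		}
 *		my_process(iter.index, item);
 *	}
 *	rcu_read_unlock();
 */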

/**
 * radix_tree_range_tag_if_tagged - for each item in given range set given
 *				   tag if item has another tag set
 * @root:		radix tree root
 * @first_indexp:	pointer to a starting index of a range to scan
 * @last_index:		last index of a range to scan
 * @nr_to_tag:		maximum number of items to tag
 * @iftag:		tag index to test
 * @settag:		tag index to set if tested tag is set
 *
 * This function scans the range of the radix tree from first_index to
 * last_index (inclusive).  For each item in the range, if iftag is set,
 * the function also sets settag.  The function stops either after tagging
 * nr_to_tag items or after reaching last_index.
 *
 * The tags must be set from the leaf level only and propagated back up the
 * path to the root. We must do this so that we resolve the full path before
 * setting any tags on intermediate nodes. If we set tags as we descend, then
 * we can get to the leaf node and find that the index that has the iftag
 * set is outside the range we are scanning. This results in dangling tags and
 * can lead to problems with later tag operations (e.g. livelocks on lookups).
 *
 * The function returns the number of leaves where the tag was set and sets
 * *first_indexp to the first unscanned index.
 * WARNING! *first_indexp can wrap if last_index is ULONG_MAX. Caller must
 * be prepared to handle that.
 */
unsigned long radix_tree_range_tag_if_tagged(struct radix_tree_root *root,
		unsigned long *first_indexp, unsigned long last_index,
		unsigned long nr_to_tag,
		unsigned int iftag, unsigned int settag)
{
	struct radix_tree_node *parent, *node, *child;
	unsigned long maxindex;
	unsigned long tagged = 0;
	unsigned long index = *first_indexp;

	radix_tree_load_root(root, &child, &maxindex);
	last_index = min(last_index, maxindex);
	if (index > last_index)
		return 0;
	if (!nr_to_tag)
		return 0;
	if (!root_tag_get(root, iftag)) {
		*first_indexp = last_index + 1;
		return 0;
	}
	if (!radix_tree_is_internal_node(child)) {
		*first_indexp = last_index + 1;
		root_tag_set(root, settag);
		return 1;
	}

	node = entry_to_node(child);

	for (;;) {
		unsigned offset = radix_tree_descend(node, &child, index);
		if (!child)
			goto next;
		if (!tag_get(node, iftag, offset))
			goto next;
		/* Sibling slots never have tags set on them */
		if (radix_tree_is_internal_node(child)) {
			node = entry_to_node(child);
			continue;
		}

		/* tag the leaf */
		tagged++;
		tag_set(node, settag, offset);

		/* walk back up the path tagging interior nodes */
		parent = node;
		for (;;) {
			offset = parent->offset;
			parent = parent->parent;
			if (!parent)
				break;
			/* stop if we find a node with the tag already set */
			if (tag_get(parent, settag, offset))
				break;
			tag_set(parent, settag, offset);
		}
 next:
		/* Go to next entry in node */
		index = ((index >> node->shift) + 1) << node->shift;
		/* Overflow can happen when last_index is ~0UL... */
		if (index > last_index || !index)
			break;
		offset = (index >> node->shift) & RADIX_TREE_MAP_MASK;
		while (offset == 0) {
			/*
			 * We've fully scanned this node. Go up. Because
			 * last_index is guaranteed to be in the tree, what
			 * we do below cannot wander astray.
			 */
			node = node->parent;
			offset = (index >> node->shift) & RADIX_TREE_MAP_MASK;
		}
		if (is_sibling_entry(node, node->slots[offset]))
			goto next;
		if (tagged >= nr_to_tag)
			break;
	}
	/*
	 * There is no need to set the root tag if no leaf within the range
	 * from *first_indexp to last_index was tagged with settag.
	 */
	if (tagged > 0)
		root_tag_set(root, settag);
	*first_indexp = index;

	return tagged;
}
EXPORT_SYMBOL(radix_tree_range_tag_if_tagged);
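
/*
 * A sketch of batched use, in the style of write-back tagging
 * (hypothetical names; tags 0 and 1 are chosen arbitrarily): retag at
 * most 64 entries per call and drop the lock between batches, stopping
 * when a batch comes back short or the index wraps at ~0UL:
 *
 *	unsigned long index = start, tagged;
 *
 *	do {
 *		spin_lock(&my_lock);
 *		tagged = radix_tree_range_tag_if_tagged(&my_tree, &index,
 *							end, 64, 0, 1);
 *		spin_unlock(&my_lock);
 *		cond_resched();
 *	} while (tagged >= 64 && index);
 */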

/**
 *	radix_tree_gang_lookup - perform multiple lookup on a radix tree
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *
 *	Performs an index-ascending scan of the tree for present items.  Places
 *	them at *@results and returns the number of items which were placed at
 *	*@results.
 *
 *	The implementation is naive.
 *
 *	Like radix_tree_lookup, radix_tree_gang_lookup may be called under
 *	rcu_read_lock. In this case, rather than the returned results being
 *	an atomic snapshot of the tree at a single point in time, the
 *	semantics of an RCU protected gang lookup are as though multiple
 *	radix_tree_lookups have been issued under individual locks, with the
 *	results stored in 'results'.
 */
unsigned int
radix_tree_gang_lookup(struct radix_tree_root *root, void **results,
			unsigned long first_index, unsigned int max_items)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_slot(slot, root, &iter, first_index) {
		results[ret] = rcu_dereference_raw(*slot);
		if (!results[ret])
			continue;
		if (radix_tree_is_internal_node(results[ret])) {
			slot = radix_tree_iter_retry(&iter);
			continue;
		}
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup);
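
/*
 * A sketch of a batched lookup under RCU (hypothetical names).  Only the
 * items are returned, so a caller that wants to resume the scan typically
 * derives the next start index from the items themselves, much as the
 * page cache does with page->index:
 *
 *	void *batch[16];
 *	unsigned int nr;
 *
 *	rcu_read_lock();
 *	nr = radix_tree_gang_lookup(&my_tree, batch, first, 16);
 *	rcu_read_unlock();
 */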

/**
 *	radix_tree_gang_lookup_slot - perform multiple slot lookup on radix tree
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@indices:	where their indices should be placed (but usually NULL)
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *
 *	Performs an index-ascending scan of the tree for present items.  Places
 *	their slots at *@results and returns the number of items which were
 *	placed at *@results.
 *
 *	The implementation is naive.
 *
 *	Like radix_tree_gang_lookup as far as RCU and locking goes. Slots must
 *	be dereferenced with radix_tree_deref_slot, and if using only RCU
 *	protection, radix_tree_deref_slot may fail requiring a retry.
 */
unsigned int
radix_tree_gang_lookup_slot(struct radix_tree_root *root,
			void ***results, unsigned long *indices,
			unsigned long first_index, unsigned int max_items)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_slot(slot, root, &iter, first_index) {
		results[ret] = slot;
		if (indices)
			indices[ret] = iter.index;
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_slot);

/**
 *	radix_tree_gang_lookup_tag - perform multiple lookup on a radix tree
 *	                             based on a tag
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *	@tag:		the tag index (< RADIX_TREE_MAX_TAGS)
 *
 *	Performs an index-ascending scan of the tree for present items which
 *	have the tag indexed by @tag set.  Places the items at *@results and
 *	returns the number of items which were placed at *@results.
 */
unsigned int
radix_tree_gang_lookup_tag(struct radix_tree_root *root, void **results,
		unsigned long first_index, unsigned int max_items,
		unsigned int tag)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_tagged(slot, root, &iter, first_index, tag) {
		results[ret] = rcu_dereference_raw(*slot);
		if (!results[ret])
			continue;
		if (radix_tree_is_internal_node(results[ret])) {
			slot = radix_tree_iter_retry(&iter);
			continue;
		}
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_tag);

/**
 *	radix_tree_gang_lookup_tag_slot - perform multiple slot lookup on a
 *					  radix tree based on a tag
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *	@tag:		the tag index (< RADIX_TREE_MAX_TAGS)
 *
 *	Performs an index-ascending scan of the tree for present items which
 *	have the tag indexed by @tag set.  Places the slots at *@results and
 *	returns the number of slots which were placed at *@results.
 */
unsigned int
radix_tree_gang_lookup_tag_slot(struct radix_tree_root *root, void ***results,
		unsigned long first_index, unsigned int max_items,
		unsigned int tag)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_tagged(slot, root, &iter, first_index, tag) {
		results[ret] = slot;
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_tag_slot);

#if defined(CONFIG_SHMEM) && defined(CONFIG_SWAP)
#include <linux/sched.h> /* for cond_resched() */

struct locate_info {
	unsigned long found_index;
	bool stop;
};

/*
 * This linear search is at present only useful to shmem_unuse_inode().
 */
static unsigned long __locate(struct radix_tree_node *slot, void *item,
			      unsigned long index, struct locate_info *info)
{
	unsigned long i;

	do {
		unsigned int shift = slot->shift;

		for (i = (index >> shift) & RADIX_TREE_MAP_MASK;
		     i < RADIX_TREE_MAP_SIZE;
		     i++, index += (1UL << shift)) {
			struct radix_tree_node *node =
					rcu_dereference_raw(slot->slots[i]);
			if (node == RADIX_TREE_RETRY)
				goto out;
			if (!radix_tree_is_internal_node(node)) {
				if (node == item) {
					info->found_index = index;
					info->stop = true;
					goto out;
				}
				continue;
			}
			node = entry_to_node(node);
			if (is_sibling_entry(slot, node))
				continue;
			slot = node;
			break;
		}
	} while (i < RADIX_TREE_MAP_SIZE);

out:
	if ((index == 0) && (i == RADIX_TREE_MAP_SIZE))
		info->stop = true;
	return index;
}

/**
 *	radix_tree_locate_item - search through radix tree for item
 *	@root:		radix tree root
 *	@item:		item to be found
 *
 *	Returns index where item was found, or -1 if not found.
 *	Caller must hold no lock (since this time-consuming function needs
 *	to be preemptible), and must check afterwards if item is still there.
 */
unsigned long radix_tree_locate_item(struct radix_tree_root *root, void *item)
{
	struct radix_tree_node *node;
	unsigned long max_index;
	unsigned long cur_index = 0;
	struct locate_info info = {
		.found_index = -1,
		.stop = false,
	};

	do {
		rcu_read_lock();
		node = rcu_dereference_raw(root->rnode);
		if (!radix_tree_is_internal_node(node)) {
			rcu_read_unlock();
			if (node == item)
				info.found_index = 0;
			break;
		}

		node = entry_to_node(node);

		max_index = node_maxindex(node);
		if (cur_index > max_index) {
			rcu_read_unlock();
			break;
		}

		cur_index = __locate(node, item, cur_index, &info);
		rcu_read_unlock();
		cond_resched();
	} while (!info.stop && cur_index <= max_index);

	return info.found_index;
}
#else
unsigned long radix_tree_locate_item(struct radix_tree_root *root, void *item)
{
	return -1;
}
#endif /* CONFIG_SHMEM && CONFIG_SWAP */

/**
 *	radix_tree_shrink    -    shrink radix tree to minimum height
 *	@root:		radix tree root
 */
static inline bool radix_tree_shrink(struct radix_tree_root *root)
{
	bool shrunk = false;

	for (;;) {
		struct radix_tree_node *node = root->rnode;
		struct radix_tree_node *child;

		if (!radix_tree_is_internal_node(node))
			break;
		node = entry_to_node(node);

		/*
		 * If the candidate node has more than one child, or its
		 * child is not at the leftmost slot, or the child is a
		 * multiorder entry, we cannot shrink.
		 */
		if (node->count != 1)
			break;
		child = node->slots[0];
		if (!child)
			break;
		if (!radix_tree_is_internal_node(child) && node->shift)
			break;

		if (radix_tree_is_internal_node(child))
			entry_to_node(child)->parent = NULL;

		/*
		 * We don't need rcu_assign_pointer(), since we are simply
		 * moving the node from one part of the tree to another: if it
		 * was safe to dereference the old pointer to it
		 * (node->slots[0]), it will be safe to dereference the new
		 * one (root->rnode) as far as dependent read barriers go.
		 */
		root->rnode = child;

		/*
		 * We have a dilemma here. The node's slot[0] must not be
		 * NULLed in case there are concurrent lookups expecting to
		 * find the item. However if this was a bottom-level node,
		 * then it may be subject to the slot pointer being visible
		 * to callers dereferencing it. If the item corresponding to
		 * slot[0] is subsequently deleted, these callers would expect
		 * their slot to become empty sooner or later.
		 *
		 * For example, lockless pagecache will look up a slot, deref
		 * the page pointer, and if the page has 0 refcount it means it
		 * was concurrently deleted from pagecache so try the deref
		 * again. Fortunately there is already a requirement for logic
		 * to retry the entire slot lookup -- the indirect pointer
		 * problem (replacing the direct root node with an indirect
		 * pointer also results in a stale slot). So tag the slot as
		 * indirect to force callers to retry.
		 */
		if (!radix_tree_is_internal_node(child))
			node->slots[0] = RADIX_TREE_RETRY;

		radix_tree_node_free(node);
		shrunk = true;
	}

	return shrunk;
}

/**
 *	__radix_tree_delete_node    -    try to free node after clearing a slot
 *	@root:		radix tree root
 *	@node:		node containing @index
 *
 *	After clearing the slot at @index in @node from radix tree
 *	rooted at @root, call this function to attempt freeing the
 *	node and shrinking the tree.
 *
 *	Returns %true if @node was freed, %false otherwise.
 */
bool __radix_tree_delete_node(struct radix_tree_root *root,
			      struct radix_tree_node *node)
{
	bool deleted = false;

	do {
		struct radix_tree_node *parent;

		if (node->count) {
			if (node == entry_to_node(root->rnode))
				deleted |= radix_tree_shrink(root);
			return deleted;
		}

		parent = node->parent;
		if (parent) {
			parent->slots[node->offset] = NULL;
			parent->count--;
		} else {
			root_tag_clear_all(root);
			root->rnode = NULL;
		}

		radix_tree_node_free(node);
		deleted = true;

		node = parent;
	} while (node);

	return deleted;
}

static inline void delete_sibling_entries(struct radix_tree_node *node,
					void *ptr, unsigned offset)
{
#ifdef CONFIG_RADIX_TREE_MULTIORDER
	int i;
	for (i = 1; offset + i < RADIX_TREE_MAP_SIZE; i++) {
		if (node->slots[offset + i] != ptr)
			break;
		node->slots[offset + i] = NULL;
		node->count--;
	}
#endif
}

/**
 *	radix_tree_delete_item    -    delete an item from a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *	@item:		expected item
 *
 *	Remove @item at @index from the radix tree rooted at @root.
 *
 *	Returns the address of the deleted item, or NULL if it was not present
 *	or the entry at the given @index was not @item.
 */
void *radix_tree_delete_item(struct radix_tree_root *root,
			     unsigned long index, void *item)
{
	struct radix_tree_node *node;
	unsigned int offset;
	void **slot;
	void *entry;
	int tag;

	entry = __radix_tree_lookup(root, index, &node, &slot);
	if (!entry)
		return NULL;

	if (item && entry != item)
		return NULL;

	if (!node) {
		root_tag_clear_all(root);
		root->rnode = NULL;
		return entry;
	}

	offset = get_slot_offset(node, slot);

	/* Clear all tags associated with the item to be deleted.  */
	for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++)
		node_tag_clear(root, node, tag, offset);

	delete_sibling_entries(node, node_to_entry(slot), offset);
	node->slots[offset] = NULL;
	node->count--;

	__radix_tree_delete_node(root, node);

	return entry;
}
EXPORT_SYMBOL(radix_tree_delete_item);

/**
 *	radix_tree_delete    -    delete an item from a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *
 *	Remove the item at @index from the radix tree rooted at @root.
 *
 *	Returns the address of the deleted item, or NULL if it was not present.
 */
void *radix_tree_delete(struct radix_tree_root *root, unsigned long index)
{
	return radix_tree_delete_item(root, index, NULL);
}
EXPORT_SYMBOL(radix_tree_delete);
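
/*
 * A deletion sketch (hypothetical names; assumes struct my_item embeds a
 * struct rcu_head named "rcu").  Freeing through RCU keeps concurrent
 * lockless lookups, which may still hold a pointer to the item, safe:
 *
 *	struct my_item *item;
 *
 *	spin_lock(&my_lock);
 *	item = radix_tree_delete(&my_tree, index);
 *	spin_unlock(&my_lock);
 *	if (item)
 *		kfree_rcu(item, rcu);
 */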

struct radix_tree_node *radix_tree_replace_clear_tags(
			struct radix_tree_root *root,
			unsigned long index, void *entry)
{
	struct radix_tree_node *node;
	void **slot;

	__radix_tree_lookup(root, index, &node, &slot);

	if (node) {
		unsigned int tag, offset = get_slot_offset(node, slot);
		for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++)
			node_tag_clear(root, node, tag, offset);
	} else {
		/* Clear root node tags */
		root->gfp_mask &= __GFP_BITS_MASK;
	}

	radix_tree_replace_slot(slot, entry);
	return node;
}

/**
 *	radix_tree_tagged - test whether any items in the tree are tagged
 *	@root:		radix tree root
 *	@tag:		tag to test
 */
int radix_tree_tagged(struct radix_tree_root *root, unsigned int tag)
{
	return root_tag_get(root, tag);
}
EXPORT_SYMBOL(radix_tree_tagged);

static void
radix_tree_node_ctor(void *arg)
{
	struct radix_tree_node *node = arg;

	memset(node, 0, sizeof(*node));
	INIT_LIST_HEAD(&node->private_list);
}

static __init unsigned long __maxindex(unsigned int height)
{
	unsigned int width = height * RADIX_TREE_MAP_SHIFT;
	int shift = RADIX_TREE_INDEX_BITS - width;

	if (shift < 0)
		return ~0UL;
	if (shift >= BITS_PER_LONG)
		return 0UL;
	return ~0UL >> shift;
}

static __init void radix_tree_init_maxnodes(void)
{
	unsigned long height_to_maxindex[RADIX_TREE_MAX_PATH + 1];
	unsigned int i, j;

	for (i = 0; i < ARRAY_SIZE(height_to_maxindex); i++)
		height_to_maxindex[i] = __maxindex(i);
	for (i = 0; i < ARRAY_SIZE(height_to_maxnodes); i++) {
		for (j = i; j > 0; j--)
			height_to_maxnodes[i] += height_to_maxindex[j - 1] + 1;
	}
}

static int radix_tree_callback(struct notifier_block *nfb,
				unsigned long action, void *hcpu)
{
	int cpu = (long)hcpu;
	struct radix_tree_preload *rtp;
	struct radix_tree_node *node;

	/* Free per-cpu pool of preloaded nodes */
	if (action == CPU_DEAD || action == CPU_DEAD_FROZEN) {
		rtp = &per_cpu(radix_tree_preloads, cpu);
		while (rtp->nr) {
			node = rtp->nodes;
			rtp->nodes = node->private_data;
			kmem_cache_free(radix_tree_node_cachep, node);
			rtp->nr--;
		}
	}
	return NOTIFY_OK;
}

void __init radix_tree_init(void)
{
	radix_tree_node_cachep = kmem_cache_create("radix_tree_node",
			sizeof(struct radix_tree_node), 0,
			SLAB_PANIC | SLAB_RECLAIM_ACCOUNT,
			radix_tree_node_ctor);
	radix_tree_init_maxnodes();
	hotcpu_notifier(radix_tree_callback, 0);
}