/*
 * Copyright (C) 2001 Momchil Velikov
 * Portions Copyright (C) 2001 Christoph Hellwig
 * Copyright (C) 2005 SGI, Christoph Lameter
 * Copyright (C) 2006 Nick Piggin
 * Copyright (C) 2012 Konstantin Khlebnikov
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2, or (at
 * your option) any later version.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

#include <linux/errno.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/export.h>
#include <linux/radix-tree.h>
#include <linux/percpu.h>
#include <linux/slab.h>
#include <linux/notifier.h>
#include <linux/cpu.h>
#include <linux/string.h>
#include <linux/bitops.h>
#include <linux/rcupdate.h>


#ifdef __KERNEL__
#define RADIX_TREE_MAP_SHIFT	(CONFIG_BASE_SMALL ? 4 : 6)
#else
#define RADIX_TREE_MAP_SHIFT	3	/* For more stressful testing */
#endif

#define RADIX_TREE_MAP_SIZE	(1UL << RADIX_TREE_MAP_SHIFT)
#define RADIX_TREE_MAP_MASK	(RADIX_TREE_MAP_SIZE-1)

#define RADIX_TREE_TAG_LONGS	\
	((RADIX_TREE_MAP_SIZE + BITS_PER_LONG - 1) / BITS_PER_LONG)

struct radix_tree_node {
	unsigned int	height;		/* Height from the bottom */
	unsigned int	count;
	union {
		struct radix_tree_node *parent;	/* Used when ascending tree */
		struct rcu_head	rcu_head;	/* Used when freeing node */
	};
	void __rcu	*slots[RADIX_TREE_MAP_SIZE];
	unsigned long	tags[RADIX_TREE_MAX_TAGS][RADIX_TREE_TAG_LONGS];
};

#define RADIX_TREE_INDEX_BITS  (8 /* CHAR_BIT */ * sizeof(unsigned long))
#define RADIX_TREE_MAX_PATH (DIV_ROUND_UP(RADIX_TREE_INDEX_BITS, \
					  RADIX_TREE_MAP_SHIFT))
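
/*
 * Worked example (editor's sketch, not from the original source): with
 * RADIX_TREE_MAP_SHIFT == 6 on a 64-bit kernel, each node fans out to
 * RADIX_TREE_MAP_SIZE == 64 slots and RADIX_TREE_MAX_PATH ==
 * DIV_ROUND_UP(64, 6) == 11.  A lookup peels the index apart six bits
 * at a time, most significant chunk first; for index 0x3edc in a tree
 * of height 3:
 *
 *	(0x3edc >> 12) & RADIX_TREE_MAP_MASK == 0x03	(level 1)
 *	(0x3edc >>  6) & RADIX_TREE_MAP_MASK == 0x3b	(level 2)
 *	 0x3edc        & RADIX_TREE_MAP_MASK == 0x1c	(level 3, leaf)
 */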

/*
 * The height_to_maxindex array needs to be one deeper than the maximum
 * path as height 0 holds only 1 entry.
 */
static unsigned long height_to_maxindex[RADIX_TREE_MAX_PATH + 1] __read_mostly;

/*
 * Radix tree node cache.
 */
static struct kmem_cache *radix_tree_node_cachep;

/*
 * Per-cpu pool of preloaded nodes
 */
struct radix_tree_preload {
	int nr;
	struct radix_tree_node *nodes[RADIX_TREE_MAX_PATH];
};
static DEFINE_PER_CPU(struct radix_tree_preload, radix_tree_preloads) = { 0, };

static inline void *ptr_to_indirect(void *ptr)
{
	return (void *)((unsigned long)ptr | RADIX_TREE_INDIRECT_PTR);
}

static inline void *indirect_to_ptr(void *ptr)
{
	return (void *)((unsigned long)ptr & ~RADIX_TREE_INDIRECT_PTR);
}

static inline gfp_t root_gfp_mask(struct radix_tree_root *root)
{
	return root->gfp_mask & __GFP_BITS_MASK;
}

static inline void tag_set(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	__set_bit(offset, node->tags[tag]);
}

static inline void tag_clear(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	__clear_bit(offset, node->tags[tag]);
}

static inline int tag_get(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	return test_bit(offset, node->tags[tag]);
}

static inline void root_tag_set(struct radix_tree_root *root, unsigned int tag)
{
	root->gfp_mask |= (__force gfp_t)(1 << (tag + __GFP_BITS_SHIFT));
}

static inline void root_tag_clear(struct radix_tree_root *root, unsigned int tag)
{
	root->gfp_mask &= (__force gfp_t)~(1 << (tag + __GFP_BITS_SHIFT));
}

static inline void root_tag_clear_all(struct radix_tree_root *root)
{
	root->gfp_mask &= __GFP_BITS_MASK;
}

static inline int root_tag_get(struct radix_tree_root *root, unsigned int tag)
{
	return (__force unsigned)root->gfp_mask & (1 << (tag + __GFP_BITS_SHIFT));
}

/*
 * Returns 1 if any slot in the node has this tag set.
 * Otherwise returns 0.
 */
static inline int any_tag_set(struct radix_tree_node *node, unsigned int tag)
{
	int idx;
	for (idx = 0; idx < RADIX_TREE_TAG_LONGS; idx++) {
		if (node->tags[tag][idx])
			return 1;
	}
	return 0;
}

/**
 * radix_tree_find_next_bit - find the next set bit in a memory region
 *
 * @addr: The address to base the search on
 * @size: The bitmap size in bits
 * @offset: The bitnumber to start searching at
 *
 * Unrollable variant of find_next_bit() for constant size arrays.
 * Tail bits starting from size to roundup(size, BITS_PER_LONG) must be zero.
 * Returns next bit offset, or size if nothing found.
 */
static __always_inline unsigned long
radix_tree_find_next_bit(const unsigned long *addr,
			 unsigned long size, unsigned long offset)
{
	if (!__builtin_constant_p(size))
		return find_next_bit(addr, size, offset);

	if (offset < size) {
		unsigned long tmp;

		addr += offset / BITS_PER_LONG;
		tmp = *addr >> (offset % BITS_PER_LONG);
		if (tmp)
			return __ffs(tmp) + offset;
		offset = (offset + BITS_PER_LONG) & ~(BITS_PER_LONG - 1);
		while (offset < size) {
			tmp = *++addr;
			if (tmp)
				return __ffs(tmp) + offset;
			offset += BITS_PER_LONG;
		}
	}
	return size;
}

/*
 * This assumes that the caller has performed appropriate preallocation, and
 * that the caller has pinned this thread of control to the current CPU.
 */
static struct radix_tree_node *
radix_tree_node_alloc(struct radix_tree_root *root)
{
	struct radix_tree_node *ret = NULL;
	gfp_t gfp_mask = root_gfp_mask(root);

	if (!(gfp_mask & __GFP_WAIT)) {
		struct radix_tree_preload *rtp;

		/*
		 * Provided the caller has preloaded here, we will always
		 * succeed in getting a node here (and never reach
		 * kmem_cache_alloc)
		 */
		rtp = &__get_cpu_var(radix_tree_preloads);
		if (rtp->nr) {
			ret = rtp->nodes[rtp->nr - 1];
			rtp->nodes[rtp->nr - 1] = NULL;
			rtp->nr--;
		}
	}
	if (ret == NULL)
		ret = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);

	BUG_ON(radix_tree_is_indirect_ptr(ret));
	return ret;
}

static void radix_tree_node_rcu_free(struct rcu_head *head)
{
	struct radix_tree_node *node =
			container_of(head, struct radix_tree_node, rcu_head);
	int i;

	/*
	 * must only free zeroed nodes into the slab. radix_tree_shrink
	 * can leave us with a non-NULL entry in the first slot, so clear
	 * that here to make sure.
	 */
	for (i = 0; i < RADIX_TREE_MAX_TAGS; i++)
		tag_clear(node, i, 0);

	node->slots[0] = NULL;
	node->count = 0;

	kmem_cache_free(radix_tree_node_cachep, node);
}

static inline void
radix_tree_node_free(struct radix_tree_node *node)
{
	call_rcu(&node->rcu_head, radix_tree_node_rcu_free);
}

/*
 * Load up this CPU's radix_tree_node buffer with sufficient objects to
 * ensure that the addition of a single element in the tree cannot fail.  On
 * success, return zero, with preemption disabled.  On error, return -ENOMEM
 * with preemption not disabled.
 *
 * To make use of this facility, the radix tree must be initialised without
 * __GFP_WAIT being passed to INIT_RADIX_TREE().
 */
int radix_tree_preload(gfp_t gfp_mask)
{
	struct radix_tree_preload *rtp;
	struct radix_tree_node *node;
	int ret = -ENOMEM;

	preempt_disable();
	rtp = &__get_cpu_var(radix_tree_preloads);
	while (rtp->nr < ARRAY_SIZE(rtp->nodes)) {
		preempt_enable();
		node = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);
		if (node == NULL)
			goto out;
		preempt_disable();
		rtp = &__get_cpu_var(radix_tree_preloads);
		if (rtp->nr < ARRAY_SIZE(rtp->nodes))
			rtp->nodes[rtp->nr++] = node;
		else
			kmem_cache_free(radix_tree_node_cachep, node);
	}
	ret = 0;
out:
	return ret;
}
EXPORT_SYMBOL(radix_tree_preload);
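
/*
 * Example (editor's sketch, not from the original file): the usual
 * preload-then-insert pattern.  The lock and the caller are
 * hypothetical; the point is that radix_tree_preload() runs in
 * sleepable context while the insert itself runs with preemption
 * disabled under the caller's own lock.  radix_tree_preload_end()
 * re-enables preemption.
 *
 *	static int my_cache_insert(struct radix_tree_root *root,
 *				   unsigned long index, void *item)
 *	{
 *		int err;
 *
 *		err = radix_tree_preload(GFP_KERNEL);
 *		if (err)
 *			return err;
 *		spin_lock(&my_cache_lock);
 *		err = radix_tree_insert(root, index, item);
 *		spin_unlock(&my_cache_lock);
 *		radix_tree_preload_end();
 *		return err;
 *	}
 */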

/*
 *	Return the maximum key which can be stored into a
 *	radix tree with height HEIGHT.
 */
static inline unsigned long radix_tree_maxindex(unsigned int height)
{
	return height_to_maxindex[height];
}

/*
 *	Extend a radix tree so it can store key @index.
 */
static int radix_tree_extend(struct radix_tree_root *root, unsigned long index)
{
	struct radix_tree_node *node;
	struct radix_tree_node *slot;
	unsigned int height;
	int tag;

	/* Figure out what the height should be.  */
	height = root->height + 1;
	while (index > radix_tree_maxindex(height))
		height++;

	if (root->rnode == NULL) {
		root->height = height;
		goto out;
	}

	do {
		unsigned int newheight;
		if (!(node = radix_tree_node_alloc(root)))
			return -ENOMEM;

		/* Propagate the aggregated tag info into the new root */
		for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++) {
			if (root_tag_get(root, tag))
				tag_set(node, tag, 0);
		}

		/* Increase the height.  */
		newheight = root->height+1;
		node->height = newheight;
		node->count = 1;
		node->parent = NULL;
		slot = root->rnode;
		if (newheight > 1) {
			slot = indirect_to_ptr(slot);
			slot->parent = node;
		}
		node->slots[0] = slot;
		node = ptr_to_indirect(node);
		rcu_assign_pointer(root->rnode, node);
		root->height = newheight;
	} while (height > root->height);
out:
	return 0;
}

/**
 *	radix_tree_insert    -    insert into a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *	@item:		item to insert
 *
 *	Insert an item into the radix tree at position @index.
 */
int radix_tree_insert(struct radix_tree_root *root,
			unsigned long index, void *item)
{
	struct radix_tree_node *node = NULL, *slot;
	unsigned int height, shift;
	int offset;
	int error;

	BUG_ON(radix_tree_is_indirect_ptr(item));

	/* Make sure the tree is high enough.  */
	if (index > radix_tree_maxindex(root->height)) {
		error = radix_tree_extend(root, index);
		if (error)
			return error;
	}

	slot = indirect_to_ptr(root->rnode);

	height = root->height;
	shift = (height-1) * RADIX_TREE_MAP_SHIFT;

	offset = 0;			/* uninitialised var warning */
	while (height > 0) {
		if (slot == NULL) {
			/* Have to add a child node.  */
			if (!(slot = radix_tree_node_alloc(root)))
				return -ENOMEM;
			slot->height = height;
			slot->parent = node;
			if (node) {
				rcu_assign_pointer(node->slots[offset], slot);
				node->count++;
			} else
				rcu_assign_pointer(root->rnode, ptr_to_indirect(slot));
		}

		/* Go a level down */
		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		node = slot;
		slot = node->slots[offset];
		shift -= RADIX_TREE_MAP_SHIFT;
		height--;
	}

	if (slot != NULL)
		return -EEXIST;

	if (node) {
		node->count++;
		rcu_assign_pointer(node->slots[offset], item);
		BUG_ON(tag_get(node, 0, offset));
		BUG_ON(tag_get(node, 1, offset));
	} else {
		rcu_assign_pointer(root->rnode, item);
		BUG_ON(root_tag_get(root, 0));
		BUG_ON(root_tag_get(root, 1));
	}

	return 0;
}
EXPORT_SYMBOL(radix_tree_insert);

/*
 * is_slot == 1 : search for the slot.
 * is_slot == 0 : search for the node.
 */
static void *radix_tree_lookup_element(struct radix_tree_root *root,
				unsigned long index, int is_slot)
{
	unsigned int height, shift;
	struct radix_tree_node *node, **slot;

	node = rcu_dereference_raw(root->rnode);
	if (node == NULL)
		return NULL;

	if (!radix_tree_is_indirect_ptr(node)) {
		if (index > 0)
			return NULL;
		return is_slot ? (void *)&root->rnode : node;
	}
	node = indirect_to_ptr(node);

	height = node->height;
	if (index > radix_tree_maxindex(height))
		return NULL;

	shift = (height-1) * RADIX_TREE_MAP_SHIFT;

	do {
		slot = (struct radix_tree_node **)
			(node->slots + ((index>>shift) & RADIX_TREE_MAP_MASK));
		node = rcu_dereference_raw(*slot);
		if (node == NULL)
			return NULL;

		shift -= RADIX_TREE_MAP_SHIFT;
		height--;
	} while (height > 0);

	return is_slot ? (void *)slot : indirect_to_ptr(node);
}

/**
 *	radix_tree_lookup_slot    -    lookup a slot in a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *
 *	Returns:  the slot corresponding to the position @index in the
 *	radix tree @root. This is useful for update-if-exists operations.
 *
 *	This function can be called under rcu_read_lock iff the slot is not
 *	modified by radix_tree_replace_slot, otherwise it must be called
 *	exclusive from other writers. Any dereference of the slot must be done
 *	using radix_tree_deref_slot.
 */
void **radix_tree_lookup_slot(struct radix_tree_root *root, unsigned long index)
{
	return (void **)radix_tree_lookup_element(root, index, 1);
}
EXPORT_SYMBOL(radix_tree_lookup_slot);
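
/*
 * Example (editor's sketch; the function name is hypothetical):
 * update-if-exists via radix_tree_lookup_slot().  The caller is
 * assumed to hold the tree lock, which is what makes
 * radix_tree_replace_slot() safe here.
 *
 *	static int my_replace(struct radix_tree_root *root,
 *			      unsigned long index, void *new_item)
 *	{
 *		void **slot;
 *
 *		slot = radix_tree_lookup_slot(root, index);
 *		if (!slot)
 *			return -ENOENT;
 *		radix_tree_replace_slot(slot, new_item);
 *		return 0;
 *	}
 */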

/**
 *	radix_tree_lookup    -    perform lookup operation on a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *
 *	Lookup the item at the position @index in the radix tree @root.
 *
 *	This function can be called under rcu_read_lock, however the caller
 *	must manage lifetimes of leaf nodes (eg. RCU may also be used to free
 *	them safely). No RCU barriers are required to access or modify the
 *	returned item, however.
 */
void *radix_tree_lookup(struct radix_tree_root *root, unsigned long index)
{
	return radix_tree_lookup_element(root, index, 0);
}
EXPORT_SYMBOL(radix_tree_lookup);
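
/*
 * Example (editor's sketch, assuming the stored items are themselves
 * freed via RCU; 'use' stands in for the caller's real work): a
 * lockless read-side lookup.
 *
 *	rcu_read_lock();
 *	item = radix_tree_lookup(&my_tree, index);
 *	if (item)
 *		use(item);
 *	rcu_read_unlock();
 */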

/**
 *	radix_tree_tag_set - set a tag on a radix tree node
 *	@root:		radix tree root
 *	@index:		index key
 *	@tag:		tag index
 *
 *	Set the search tag (which must be < RADIX_TREE_MAX_TAGS)
 *	corresponding to @index in the radix tree, from
 *	the root all the way down to the leaf node.
 *
 *	Returns the address of the tagged item.  Setting a tag on a not-present
 *	item is a bug.
 */
void *radix_tree_tag_set(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	unsigned int height, shift;
	struct radix_tree_node *slot;

	height = root->height;
	BUG_ON(index > radix_tree_maxindex(height));

	slot = indirect_to_ptr(root->rnode);
	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;

	while (height > 0) {
		int offset;

		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		if (!tag_get(slot, tag, offset))
			tag_set(slot, tag, offset);
		slot = slot->slots[offset];
		BUG_ON(slot == NULL);
		shift -= RADIX_TREE_MAP_SHIFT;
		height--;
	}

	/* set the root's tag bit */
	if (slot && !root_tag_get(root, tag))
		root_tag_set(root, tag);

	return slot;
}
EXPORT_SYMBOL(radix_tree_tag_set);

/**
 *	radix_tree_tag_clear - clear a tag on a radix tree node
 *	@root:		radix tree root
 *	@index:		index key
 *	@tag:		tag index
 *
 *	Clear the search tag (which must be < RADIX_TREE_MAX_TAGS)
 *	corresponding to @index in the radix tree.  If
 *	this causes the leaf node to have no tags set then clear the tag in the
 *	next-to-leaf node, etc.
 *
 *	Returns the address of the tagged item on success, else NULL.  i.e. it
 *	has the same return value and semantics as radix_tree_lookup().
 */
void *radix_tree_tag_clear(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	struct radix_tree_node *node = NULL;
	struct radix_tree_node *slot = NULL;
	unsigned int height, shift;
	int uninitialized_var(offset);

	height = root->height;
	if (index > radix_tree_maxindex(height))
		goto out;

	shift = height * RADIX_TREE_MAP_SHIFT;
	slot = indirect_to_ptr(root->rnode);

	while (shift) {
		if (slot == NULL)
			goto out;

		shift -= RADIX_TREE_MAP_SHIFT;
		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		node = slot;
		slot = slot->slots[offset];
	}

	if (slot == NULL)
		goto out;

	while (node) {
		if (!tag_get(node, tag, offset))
			goto out;
		tag_clear(node, tag, offset);
		if (any_tag_set(node, tag))
			goto out;

		index >>= RADIX_TREE_MAP_SHIFT;
		offset = index & RADIX_TREE_MAP_MASK;
		node = node->parent;
	}

	/* clear the root's tag bit */
	if (root_tag_get(root, tag))
		root_tag_clear(root, tag);

out:
	return slot;
}
EXPORT_SYMBOL(radix_tree_tag_clear);

/**
 * radix_tree_tag_get - get a tag on a radix tree node
 * @root:		radix tree root
 * @index:		index key
 * @tag:		tag index (< RADIX_TREE_MAX_TAGS)
 *
 * Return values:
 *
 *  0: tag not present or not set
 *  1: tag set
 *
 * Note that the return value of this function may not be relied on, even if
 * the RCU lock is held, unless tag modification and node deletion are excluded
 * from concurrency.
 */
int radix_tree_tag_get(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	unsigned int height, shift;
	struct radix_tree_node *node;

	/* check the root's tag bit */
	if (!root_tag_get(root, tag))
		return 0;

	node = rcu_dereference_raw(root->rnode);
	if (node == NULL)
		return 0;

	if (!radix_tree_is_indirect_ptr(node))
		return (index == 0);
	node = indirect_to_ptr(node);

	height = node->height;
	if (index > radix_tree_maxindex(height))
		return 0;

	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;

	for ( ; ; ) {
		int offset;

		if (node == NULL)
			return 0;

		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		if (!tag_get(node, tag, offset))
			return 0;
		if (height == 1)
			return 1;
		node = rcu_dereference_raw(node->slots[offset]);
		shift -= RADIX_TREE_MAP_SHIFT;
		height--;
	}
}
EXPORT_SYMBOL(radix_tree_tag_get);
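
/*
 * Example (editor's sketch; MY_TAG_DIRTY is a hypothetical tag index,
 * and the set/clear steps assume the caller holds the tree lock):
 * basic tag manipulation around an item already in the tree.
 *
 *	#define MY_TAG_DIRTY	0
 *
 *	radix_tree_tag_set(&my_tree, index, MY_TAG_DIRTY);
 *	...
 *	if (radix_tree_tag_get(&my_tree, index, MY_TAG_DIRTY))
 *		radix_tree_tag_clear(&my_tree, index, MY_TAG_DIRTY);
 */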

/**
 * radix_tree_next_chunk - find next chunk of slots for iteration
 *
 * @root:	radix tree root
 * @iter:	iterator state
 * @flags:	RADIX_TREE_ITER_* flags and tag index
 * Returns:	pointer to chunk first slot, or NULL if iteration is over
 */
void **radix_tree_next_chunk(struct radix_tree_root *root,
			     struct radix_tree_iter *iter, unsigned flags)
{
	unsigned shift, tag = flags & RADIX_TREE_ITER_TAG_MASK;
	struct radix_tree_node *rnode, *node;
	unsigned long index, offset;

	if ((flags & RADIX_TREE_ITER_TAGGED) && !root_tag_get(root, tag))
		return NULL;

	/*
	 * Catch next_index overflow after ~0UL. iter->index never overflows
	 * during iteration; it can be zero only at the beginning.
	 * And we cannot overflow iter->next_index in a single step,
	 * because RADIX_TREE_MAP_SHIFT < BITS_PER_LONG.
	 */
	index = iter->next_index;
	if (!index && iter->index)
		return NULL;

	rnode = rcu_dereference_raw(root->rnode);
	if (radix_tree_is_indirect_ptr(rnode)) {
		rnode = indirect_to_ptr(rnode);
	} else if (rnode && !index) {
		/* Single-slot tree */
		iter->index = 0;
		iter->next_index = 1;
		iter->tags = 1;
		return (void **)&root->rnode;
	} else
		return NULL;

restart:
	shift = (rnode->height - 1) * RADIX_TREE_MAP_SHIFT;
	offset = index >> shift;

	/* Index outside of the tree */
	if (offset >= RADIX_TREE_MAP_SIZE)
		return NULL;

	node = rnode;
	while (1) {
		if ((flags & RADIX_TREE_ITER_TAGGED) ?
				!test_bit(offset, node->tags[tag]) :
				!node->slots[offset]) {
			/* Hole detected */
			if (flags & RADIX_TREE_ITER_CONTIG)
				return NULL;

			if (flags & RADIX_TREE_ITER_TAGGED)
				offset = radix_tree_find_next_bit(
						node->tags[tag],
						RADIX_TREE_MAP_SIZE,
						offset + 1);
			else
				while (++offset < RADIX_TREE_MAP_SIZE) {
					if (node->slots[offset])
						break;
				}
			index &= ~((RADIX_TREE_MAP_SIZE << shift) - 1);
			index += offset << shift;
			/* Overflow after ~0UL */
			if (!index)
				return NULL;
			if (offset == RADIX_TREE_MAP_SIZE)
				goto restart;
		}

		/* This is a leaf node */
		if (!shift)
			break;

		node = rcu_dereference_raw(node->slots[offset]);
		if (node == NULL)
			goto restart;
		shift -= RADIX_TREE_MAP_SHIFT;
		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
	}

	/* Update the iterator state */
	iter->index = index;
	iter->next_index = (index | RADIX_TREE_MAP_MASK) + 1;

	/* Construct iter->tags bit-mask from node->tags[tag] array */
	if (flags & RADIX_TREE_ITER_TAGGED) {
		unsigned tag_long, tag_bit;

		tag_long = offset / BITS_PER_LONG;
		tag_bit  = offset % BITS_PER_LONG;
		iter->tags = node->tags[tag][tag_long] >> tag_bit;
		/* This never happens if RADIX_TREE_TAG_LONGS == 1 */
		if (tag_long < RADIX_TREE_TAG_LONGS - 1) {
			/* Pick tags from next element */
			if (tag_bit)
				iter->tags |= node->tags[tag][tag_long + 1] <<
						(BITS_PER_LONG - tag_bit);
			/* Clip chunk size, here only BITS_PER_LONG tags */
			iter->next_index = index + BITS_PER_LONG;
		}
	}

	return node->slots + offset;
}
EXPORT_SYMBOL(radix_tree_next_chunk);
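
/*
 * Example (editor's sketch): radix_tree_next_chunk() is normally used
 * through the iterator macros in <linux/radix-tree.h>.  Walking every
 * present item under RCU, retrying on a moved slot; 'process' stands
 * in for the caller's real work.
 *
 *	struct radix_tree_iter iter;
 *	void **slot;
 *	void *item;
 *
 *	rcu_read_lock();
 * restart:
 *	radix_tree_for_each_slot(slot, &my_tree, &iter, 0) {
 *		item = radix_tree_deref_slot(slot);
 *		if (unlikely(radix_tree_deref_retry(item)))
 *			goto restart;
 *		if (!item)
 *			continue;
 *		process(item, iter.index);
 *	}
 *	rcu_read_unlock();
 */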

/**
 * radix_tree_range_tag_if_tagged - for each item in given range set given
 *				   tag if item has another tag set
 * @root:		radix tree root
 * @first_indexp:	pointer to a starting index of a range to scan
 * @last_index:		last index of a range to scan
 * @nr_to_tag:		maximum number of items to tag
 * @iftag:		tag index to test
 * @settag:		tag index to set if tested tag is set
 *
 * This function scans the range of the radix tree from first_index to
 * last_index (inclusive).  For each item in the range, if iftag is set, the
 * function also sets settag. The function stops either after tagging nr_to_tag
 * items or after reaching last_index.
 *
 * The tags must be set from the leaf level only and propagated back up the
 * path to the root. We must do this so that we resolve the full path before
 * setting any tags on intermediate nodes. If we set tags as we descend, then
 * we can get to the leaf node and find that the index that has the iftag
 * set is outside the range we are scanning. This results in dangling tags and
 * can lead to problems with later tag operations (e.g. livelocks on lookups).
 *
 * The function returns the number of leaves where the tag was set and sets
 * *first_indexp to the first unscanned index.
 * WARNING! *first_indexp can wrap if last_index is ULONG_MAX. Caller must
 * be prepared to handle that.
 */
unsigned long radix_tree_range_tag_if_tagged(struct radix_tree_root *root,
		unsigned long *first_indexp, unsigned long last_index,
		unsigned long nr_to_tag,
		unsigned int iftag, unsigned int settag)
{
	unsigned int height = root->height;
	struct radix_tree_node *node = NULL;
	struct radix_tree_node *slot;
	unsigned int shift;
	unsigned long tagged = 0;
	unsigned long index = *first_indexp;

	last_index = min(last_index, radix_tree_maxindex(height));
	if (index > last_index)
		return 0;
	if (!nr_to_tag)
		return 0;
	if (!root_tag_get(root, iftag)) {
		*first_indexp = last_index + 1;
		return 0;
	}
	if (height == 0) {
		*first_indexp = last_index + 1;
		root_tag_set(root, settag);
		return 1;
	}

	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;
	slot = indirect_to_ptr(root->rnode);

	for (;;) {
		unsigned long upindex;
		int offset;

		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		if (!slot->slots[offset])
			goto next;
		if (!tag_get(slot, iftag, offset))
			goto next;
		if (shift) {
			/* Go down one level */
			shift -= RADIX_TREE_MAP_SHIFT;
			node = slot;
			slot = slot->slots[offset];
			continue;
		}

		/* tag the leaf */
		tagged++;
		tag_set(slot, settag, offset);

		/* walk back up the path tagging interior nodes */
		upindex = index;
		while (node) {
			upindex >>= RADIX_TREE_MAP_SHIFT;
			offset = upindex & RADIX_TREE_MAP_MASK;

			/* stop if we find a node with the tag already set */
			if (tag_get(node, settag, offset))
				break;
			tag_set(node, settag, offset);
			node = node->parent;
		}

		/*
		 * Small optimization: now clear that node pointer.
		 * Since all of this slot's ancestors now have the tag set
		 * from setting it above, we have no further need to walk
		 * back up the tree setting tags, until we update slot to
		 * point to another radix_tree_node.
		 */
		node = NULL;

next:
		/* Go to next item at level determined by 'shift' */
		index = ((index >> shift) + 1) << shift;
		/* Overflow can happen when last_index is ~0UL... */
		if (index > last_index || !index)
			break;
		if (tagged >= nr_to_tag)
			break;
		while (((index >> shift) & RADIX_TREE_MAP_MASK) == 0) {
			/*
			 * We've fully scanned this node. Go up. Because
			 * last_index is guaranteed to be in the tree, what
			 * we do below cannot wander astray.
			 */
			slot = slot->parent;
			shift += RADIX_TREE_MAP_SHIFT;
		}
	}
	/*
	 * There is no need to set the root's settag bit unless the tag was
	 * actually set on some leaf within the range from *first_indexp to
	 * last_index.
	 */
	if (tagged > 0)
		root_tag_set(root, settag);
	*first_indexp = index;

	return tagged;
}
EXPORT_SYMBOL(radix_tree_range_tag_if_tagged);
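
/*
 * Example (editor's sketch, loosely modelled on how writeback promotes
 * dirty pages; the MY_TAG_* indices are hypothetical): promote up to
 * 1024 DIRTY items in [start, end] to TOWRITE in one pass.
 *
 *	unsigned long start = 0, end = ~0UL;
 *	unsigned long tagged;
 *
 *	tagged = radix_tree_range_tag_if_tagged(&my_tree, &start, end,
 *						1024, MY_TAG_DIRTY,
 *						MY_TAG_TOWRITE);
 *
 * On return, 'start' holds the first unscanned index, so the call can
 * simply be repeated to continue the scan.
 */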


/**
 *	radix_tree_next_hole    -    find the next hole (not-present entry)
 *	@root:		tree root
 *	@index:		index key
 *	@max_scan:	maximum range to search
 *
 *	Search the set [index, min(index+max_scan-1, MAX_INDEX)] for the lowest
 *	indexed hole.
 *
 *	Returns: the index of the hole if found, otherwise returns an index
 *	outside of the set specified (in which case 'return - index >= max_scan'
 *	will be true). In rare cases of index wrap-around, 0 will be returned.
 *
 *	radix_tree_next_hole may be called under rcu_read_lock. However, like
 *	radix_tree_gang_lookup, this will not atomically search a snapshot of
 *	the tree at a single point in time. For example, if a hole is created
 *	at index 5, then subsequently a hole is created at index 10,
 *	radix_tree_next_hole covering both indexes may return 10 if called
 *	under rcu_read_lock.
 */
unsigned long radix_tree_next_hole(struct radix_tree_root *root,
				unsigned long index, unsigned long max_scan)
{
	unsigned long i;

	for (i = 0; i < max_scan; i++) {
		if (!radix_tree_lookup(root, index))
			break;
		index++;
		if (index == 0)
			break;
	}

	return index;
}
EXPORT_SYMBOL(radix_tree_next_hole);

/**
 *	radix_tree_prev_hole    -    find the prev hole (not-present entry)
 *	@root:		tree root
 *	@index:		index key
 *	@max_scan:	maximum range to search
 *
 *	Search backwards in the range [max(index-max_scan+1, 0), index]
 *	for the first hole.
 *
 *	Returns: the index of the hole if found, otherwise returns an index
 *	outside of the set specified (in which case 'index - return >= max_scan'
 *	will be true). In rare cases of wrap-around, ULONG_MAX will be returned.
 *
 *	radix_tree_prev_hole may be called under rcu_read_lock. However, like
 *	radix_tree_gang_lookup, this will not atomically search a snapshot of
 *	the tree at a single point in time. For example, if a hole is created
 *	at index 10, then subsequently a hole is created at index 5,
 *	radix_tree_prev_hole covering both indexes may return 5 if called under
 *	rcu_read_lock.
 */
unsigned long radix_tree_prev_hole(struct radix_tree_root *root,
				   unsigned long index, unsigned long max_scan)
{
	unsigned long i;

	for (i = 0; i < max_scan; i++) {
		if (!radix_tree_lookup(root, index))
			break;
		index--;
		if (index == ULONG_MAX)
			break;
	}

	return index;
}
EXPORT_SYMBOL(radix_tree_prev_hole);

/**
 *	radix_tree_gang_lookup - perform multiple lookup on a radix tree
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *
 *	Performs an index-ascending scan of the tree for present items.  Places
 *	them at *@results and returns the number of items which were placed at
 *	*@results.
 *
 *	The implementation is naive.
 *
 *	Like radix_tree_lookup, radix_tree_gang_lookup may be called under
 *	rcu_read_lock. In this case, rather than the returned results being
 *	an atomic snapshot of the tree at a single point in time, the semantics
 *	of an RCU protected gang lookup are as though multiple radix_tree_lookups
 *	have been issued in individual locks, and results stored in 'results'.
 */
unsigned int
radix_tree_gang_lookup(struct radix_tree_root *root, void **results,
			unsigned long first_index, unsigned int max_items)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_slot(slot, root, &iter, first_index) {
		results[ret] = indirect_to_ptr(rcu_dereference_raw(*slot));
		if (!results[ret])
			continue;
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup);
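
/*
 * Example (editor's sketch; 'struct my_item' is hypothetical and is
 * assumed to record its own index): visiting every item in batches of
 * sixteen.
 *
 *	struct my_item *items[16];
 *	unsigned long first = 0;
 *	unsigned int i, nr;
 *
 *	do {
 *		nr = radix_tree_gang_lookup(&my_tree, (void **)items,
 *					    first, 16);
 *		for (i = 0; i < nr; i++) {
 *			process(items[i]);
 *			first = items[i]->index + 1;
 *		}
 *	} while (nr == 16);
 */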

/**
 *	radix_tree_gang_lookup_slot - perform multiple slot lookup on radix tree
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@indices:	where their indices should be placed (but usually NULL)
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *
 *	Performs an index-ascending scan of the tree for present items.  Places
 *	their slots at *@results and returns the number of items which were
 *	placed at *@results.
 *
 *	The implementation is naive.
 *
 *	Like radix_tree_gang_lookup as far as RCU and locking goes. Slots must
 *	be dereferenced with radix_tree_deref_slot, and if using only RCU
 *	protection, radix_tree_deref_slot may fail requiring a retry.
 */
unsigned int
radix_tree_gang_lookup_slot(struct radix_tree_root *root,
			void ***results, unsigned long *indices,
			unsigned long first_index, unsigned int max_items)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_slot(slot, root, &iter, first_index) {
		results[ret] = slot;
		if (indices)
			indices[ret] = iter.index;
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_slot);

/**
 *	radix_tree_gang_lookup_tag - perform multiple lookup on a radix tree
 *	                             based on a tag
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *	@tag:		the tag index (< RADIX_TREE_MAX_TAGS)
 *
 *	Performs an index-ascending scan of the tree for present items which
 *	have the tag indexed by @tag set.  Places the items at *@results and
 *	returns the number of items which were placed at *@results.
 */
unsigned int
radix_tree_gang_lookup_tag(struct radix_tree_root *root, void **results,
		unsigned long first_index, unsigned int max_items,
		unsigned int tag)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_tagged(slot, root, &iter, first_index, tag) {
		results[ret] = indirect_to_ptr(rcu_dereference_raw(*slot));
		if (!results[ret])
			continue;
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_tag);

/**
 *	radix_tree_gang_lookup_tag_slot - perform multiple slot lookup on a
 *					  radix tree based on a tag
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *	@tag:		the tag index (< RADIX_TREE_MAX_TAGS)
 *
 *	Performs an index-ascending scan of the tree for present items which
 *	have the tag indexed by @tag set.  Places the slots at *@results and
 *	returns the number of slots which were placed at *@results.
 */
unsigned int
radix_tree_gang_lookup_tag_slot(struct radix_tree_root *root, void ***results,
		unsigned long first_index, unsigned int max_items,
		unsigned int tag)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_tagged(slot, root, &iter, first_index, tag) {
		results[ret] = slot;
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_tag_slot);

#if defined(CONFIG_SHMEM) && defined(CONFIG_SWAP)
#include <linux/sched.h> /* for cond_resched() */

/*
 * This linear search is at present only useful to shmem_unuse_inode().
 */
static unsigned long __locate(struct radix_tree_node *slot, void *item,
			      unsigned long index, unsigned long *found_index)
{
	unsigned int shift, height;
	unsigned long i;

	height = slot->height;
	shift = (height-1) * RADIX_TREE_MAP_SHIFT;

	for ( ; height > 1; height--) {
		i = (index >> shift) & RADIX_TREE_MAP_MASK;
		for (;;) {
			if (slot->slots[i] != NULL)
				break;
			index &= ~((1UL << shift) - 1);
			index += 1UL << shift;
			if (index == 0)
				goto out;	/* 32-bit wraparound */
			i++;
			if (i == RADIX_TREE_MAP_SIZE)
				goto out;
		}

		shift -= RADIX_TREE_MAP_SHIFT;
		slot = rcu_dereference_raw(slot->slots[i]);
		if (slot == NULL)
			goto out;
	}

	/* Bottom level: check items */
	for (i = 0; i < RADIX_TREE_MAP_SIZE; i++) {
		if (slot->slots[i] == item) {
			*found_index = index + i;
			index = 0;
			goto out;
		}
	}
	index += RADIX_TREE_MAP_SIZE;
out:
	return index;
}

/**
 *	radix_tree_locate_item - search through radix tree for item
 *	@root:		radix tree root
 *	@item:		item to be found
 *
 *	Returns index where item was found, or -1 if not found.
 *	Caller must hold no lock (since this time-consuming function needs
 *	to be preemptible), and must check afterwards if item is still there.
 */
unsigned long radix_tree_locate_item(struct radix_tree_root *root, void *item)
{
	struct radix_tree_node *node;
	unsigned long max_index;
	unsigned long cur_index = 0;
	unsigned long found_index = -1;

	do {
		rcu_read_lock();
		node = rcu_dereference_raw(root->rnode);
		if (!radix_tree_is_indirect_ptr(node)) {
			rcu_read_unlock();
			if (node == item)
				found_index = 0;
			break;
		}

		node = indirect_to_ptr(node);
		max_index = radix_tree_maxindex(node->height);
		if (cur_index > max_index)
			break;

		cur_index = __locate(node, item, cur_index, &found_index);
		rcu_read_unlock();
		cond_resched();
	} while (cur_index != 0 && cur_index <= max_index);

	return found_index;
}
#else
unsigned long radix_tree_locate_item(struct radix_tree_root *root, void *item)
{
	return -1;
}
#endif /* CONFIG_SHMEM && CONFIG_SWAP */

/**
 *	radix_tree_shrink    -    shrink height of a radix tree to minimal
 *	@root:		radix tree root
 */
static inline void radix_tree_shrink(struct radix_tree_root *root)
{
	/* try to shrink tree height */
	while (root->height > 0) {
		struct radix_tree_node *to_free = root->rnode;
		struct radix_tree_node *slot;

		BUG_ON(!radix_tree_is_indirect_ptr(to_free));
		to_free = indirect_to_ptr(to_free);

		/*
		 * If the candidate node has more than one child, or its child
		 * is not at the leftmost slot, we cannot shrink.
		 */
		if (to_free->count != 1)
			break;
		if (!to_free->slots[0])
			break;

		/*
		 * We don't need rcu_assign_pointer(), since we are simply
		 * moving the node from one part of the tree to another: if it
		 * was safe to dereference the old pointer to it
		 * (to_free->slots[0]), it will be safe to dereference the new
		 * one (root->rnode) as far as dependent read barriers go.
		 */
		slot = to_free->slots[0];
		if (root->height > 1) {
			slot->parent = NULL;
			slot = ptr_to_indirect(slot);
		}
		root->rnode = slot;
		root->height--;

		/*
		 * We have a dilemma here. The node's slot[0] must not be
		 * NULLed in case there are concurrent lookups expecting to
		 * find the item. However if this was a bottom-level node,
		 * then it may be subject to the slot pointer being visible
		 * to callers dereferencing it. If item corresponding to
		 * slot[0] is subsequently deleted, these callers would expect
		 * their slot to become empty sooner or later.
		 *
		 * For example, lockless pagecache will look up a slot, deref
		 * the page pointer, and if the page is 0 refcount it means it
		 * was concurrently deleted from pagecache so try the deref
		 * again. Fortunately there is already a requirement for logic
		 * to retry the entire slot lookup -- the indirect pointer
		 * problem (replacing direct root node with an indirect pointer
		 * also results in a stale slot). So tag the slot as indirect
		 * to force callers to retry.
		 */
		if (root->height == 0)
			*((unsigned long *)&to_free->slots[0]) |=
						RADIX_TREE_INDIRECT_PTR;

		radix_tree_node_free(to_free);
	}
}

/**
 *	radix_tree_delete    -    delete an item from a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *
 *	Remove the item at @index from the radix tree rooted at @root.
 *
 *	Returns the address of the deleted item, or NULL if it was not present.
 */
void *radix_tree_delete(struct radix_tree_root *root, unsigned long index)
{
	struct radix_tree_node *node = NULL;
	struct radix_tree_node *slot = NULL;
	struct radix_tree_node *to_free;
	unsigned int height, shift;
	int tag;
	int uninitialized_var(offset);

	height = root->height;
	if (index > radix_tree_maxindex(height))
		goto out;

	slot = root->rnode;
	if (height == 0) {
		root_tag_clear_all(root);
		root->rnode = NULL;
		goto out;
	}
	slot = indirect_to_ptr(slot);
	shift = height * RADIX_TREE_MAP_SHIFT;

	do {
		if (slot == NULL)
			goto out;

		shift -= RADIX_TREE_MAP_SHIFT;
		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		node = slot;
		slot = slot->slots[offset];
	} while (shift);

	if (slot == NULL)
		goto out;

	/*
	 * Clear all tags associated with the item to be deleted.
	 * This way of doing it would be inefficient, but seldom is any set.
	 */
	for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++) {
		if (tag_get(node, tag, offset))
			radix_tree_tag_clear(root, index, tag);
	}

	to_free = NULL;
	/* Now free the nodes we do not need anymore */
	while (node) {
		node->slots[offset] = NULL;
		node->count--;
		/*
		 * Queue the node for deferred freeing after the
		 * last reference to it disappears (set NULL, above).
		 */
		if (to_free)
			radix_tree_node_free(to_free);

		if (node->count) {
			if (node == indirect_to_ptr(root->rnode))
				radix_tree_shrink(root);
			goto out;
		}

		/* Node with zero slots in use so free it */
		to_free = node;

		index >>= RADIX_TREE_MAP_SHIFT;
		offset = index & RADIX_TREE_MAP_MASK;
		node = node->parent;
	}

	root_tag_clear_all(root);
	root->height = 0;
	root->rnode = NULL;
	if (to_free)
		radix_tree_node_free(to_free);

out:
	return slot;
}
EXPORT_SYMBOL(radix_tree_delete);
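
/*
 * Example (editor's sketch): full item lifecycle against a statically
 * initialised tree.  Locking around the update calls is elided; a real
 * caller serialises writers.
 *
 *	static RADIX_TREE(my_tree, GFP_ATOMIC);
 *
 *	err = radix_tree_insert(&my_tree, 42, item);
 *	...
 *	item = radix_tree_lookup(&my_tree, 42);
 *	...
 *	item = radix_tree_delete(&my_tree, 42);
 */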

/**
 *	radix_tree_tagged - test whether any items in the tree are tagged
 *	@root:		radix tree root
 *	@tag:		tag to test
 */
int radix_tree_tagged(struct radix_tree_root *root, unsigned int tag)
{
	return root_tag_get(root, tag);
}
EXPORT_SYMBOL(radix_tree_tagged);

static void
radix_tree_node_ctor(void *node)
{
	memset(node, 0, sizeof(struct radix_tree_node));
}

static __init unsigned long __maxindex(unsigned int height)
{
	unsigned int width = height * RADIX_TREE_MAP_SHIFT;
	int shift = RADIX_TREE_INDEX_BITS - width;

	if (shift < 0)
		return ~0UL;
	if (shift >= BITS_PER_LONG)
		return 0UL;
	return ~0UL >> shift;
}
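
/*
 * Editor's note: with RADIX_TREE_MAP_SHIFT == 6 on a 64-bit kernel the
 * above computes, for example:
 *
 *	__maxindex(0)  == 0	(shift 64 >= BITS_PER_LONG)
 *	__maxindex(1)  == 0x3f
 *	__maxindex(2)  == 0xfff
 *	__maxindex(11) == ~0UL	(width 66, so shift < 0)
 */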

static __init void radix_tree_init_maxindex(void)
{
	unsigned int i;

	for (i = 0; i < ARRAY_SIZE(height_to_maxindex); i++)
		height_to_maxindex[i] = __maxindex(i);
}

static int radix_tree_callback(struct notifier_block *nfb,
			       unsigned long action,
			       void *hcpu)
{
	int cpu = (long)hcpu;
	struct radix_tree_preload *rtp;

	/* Free per-cpu pool of preloaded nodes */
	if (action == CPU_DEAD || action == CPU_DEAD_FROZEN) {
		rtp = &per_cpu(radix_tree_preloads, cpu);
		while (rtp->nr) {
			kmem_cache_free(radix_tree_node_cachep,
					rtp->nodes[rtp->nr-1]);
			rtp->nodes[rtp->nr-1] = NULL;
			rtp->nr--;
		}
	}
	return NOTIFY_OK;
}

void __init radix_tree_init(void)
{
	radix_tree_node_cachep = kmem_cache_create("radix_tree_node",
			sizeof(struct radix_tree_node), 0,
			SLAB_PANIC | SLAB_RECLAIM_ACCOUNT,
			radix_tree_node_ctor);
	radix_tree_init_maxindex();
	hotcpu_notifier(radix_tree_callback, 0);
}