xref: /openbmc/linux/lib/test_maple_tree.c (revision cabdf74e6b319c989eb8e812f1854291ae0af1c0)
1 // SPDX-License-Identifier: GPL-2.0+
2 /*
3  * test_maple_tree.c: Test the maple tree API
4  * Copyright (c) 2018-2022 Oracle Corporation
5  * Author: Liam R. Howlett <Liam.Howlett@Oracle.com>
6  *
7  * Any tests that only require the interface of the tree.
8  */
9 
10 #include <linux/maple_tree.h>
11 #include <linux/module.h>
12 
13 #define MTREE_ALLOC_MAX 0x2000000000000Ul
14 #define CONFIG_MAPLE_SEARCH
15 #define MAPLE_32BIT (MAPLE_NODE_SLOTS > 31)
16 
17 #ifndef CONFIG_DEBUG_MAPLE_TREE
18 #define mt_dump(mt, fmt)		do {} while (0)
19 #define mt_validate(mt)			do {} while (0)
20 #define mt_cache_shrink()		do {} while (0)
21 #define mas_dump(mas)			do {} while (0)
22 #define mas_wr_dump(mas)		do {} while (0)
23 atomic_t maple_tree_tests_run;
24 atomic_t maple_tree_tests_passed;
25 #undef MT_BUG_ON
26 
27 #define MT_BUG_ON(__tree, __x) do {					\
28 	atomic_inc(&maple_tree_tests_run);				\
29 	if (__x) {							\
30 		pr_info("BUG at %s:%d (%u)\n",				\
31 		__func__, __LINE__, __x);				\
32 		pr_info("Pass: %u Run:%u\n",				\
33 			atomic_read(&maple_tree_tests_passed),		\
34 			atomic_read(&maple_tree_tests_run));		\
35 	} else {							\
36 		atomic_inc(&maple_tree_tests_passed);			\
37 	}								\
38 } while (0)
39 #endif
40 
41 /* #define BENCH_SLOT_STORE */
42 /* #define BENCH_NODE_STORE */
43 /* #define BENCH_AWALK */
44 /* #define BENCH_WALK */
45 /* #define BENCH_MT_FOR_EACH */
46 /* #define BENCH_FORK */
47 
48 #ifdef __KERNEL__
49 #define mt_set_non_kernel(x)		do {} while (0)
50 #define mt_zero_nr_tallocated(x)	do {} while (0)
51 #else
52 #define cond_resched()			do {} while (0)
53 #endif
54 static int __init mtree_insert_index(struct maple_tree *mt,
55 				     unsigned long index, gfp_t gfp)
56 {
57 	return mtree_insert(mt, index, xa_mk_value(index & LONG_MAX), gfp);
58 }
59 
60 static void __init mtree_erase_index(struct maple_tree *mt, unsigned long index)
61 {
62 	MT_BUG_ON(mt, mtree_erase(mt, index) != xa_mk_value(index & LONG_MAX));
63 	MT_BUG_ON(mt, mtree_load(mt, index) != NULL);
64 }
65 
66 static int __init mtree_test_insert(struct maple_tree *mt, unsigned long index,
67 				void *ptr)
68 {
69 	return mtree_insert(mt, index, ptr, GFP_KERNEL);
70 }
71 
72 static int __init mtree_test_store_range(struct maple_tree *mt,
73 			unsigned long start, unsigned long end, void *ptr)
74 {
75 	return mtree_store_range(mt, start, end, ptr, GFP_KERNEL);
76 }
77 
78 static int __init mtree_test_store(struct maple_tree *mt, unsigned long start,
79 				void *ptr)
80 {
81 	return mtree_test_store_range(mt, start, start, ptr);
82 }
83 
84 static int __init mtree_test_insert_range(struct maple_tree *mt,
85 			unsigned long start, unsigned long end, void *ptr)
86 {
87 	return mtree_insert_range(mt, start, end, ptr, GFP_KERNEL);
88 }
89 
90 static void __init *mtree_test_load(struct maple_tree *mt, unsigned long index)
91 {
92 	return mtree_load(mt, index);
93 }
94 
95 static void __init *mtree_test_erase(struct maple_tree *mt, unsigned long index)
96 {
97 	return mtree_erase(mt, index);
98 }
99 
100 #if defined(CONFIG_64BIT)
101 static noinline void __init check_mtree_alloc_range(struct maple_tree *mt,
102 		unsigned long start, unsigned long end, unsigned long size,
103 		unsigned long expected, int eret, void *ptr)
104 {
105 
106 	unsigned long result = expected + 1;
107 	int ret;
108 
109 	ret = mtree_alloc_range(mt, &result, ptr, size, start, end,
110 			GFP_KERNEL);
111 	MT_BUG_ON(mt, ret != eret);
112 	if (ret)
113 		return;
114 
115 	MT_BUG_ON(mt, result != expected);
116 }
117 
118 static noinline void __init check_mtree_alloc_rrange(struct maple_tree *mt,
119 		unsigned long start, unsigned long end, unsigned long size,
120 		unsigned long expected, int eret, void *ptr)
121 {
122 
123 	unsigned long result = expected + 1;
124 	int ret;
125 
126 	ret = mtree_alloc_rrange(mt, &result, ptr, size, start, end,
127 			GFP_KERNEL);
128 	MT_BUG_ON(mt, ret != eret);
129 	if (ret)
130 		return;
131 
132 	MT_BUG_ON(mt, result != expected);
133 }
134 #endif
135 
136 static noinline void __init check_load(struct maple_tree *mt,
137 				       unsigned long index, void *ptr)
138 {
139 	void *ret = mtree_test_load(mt, index);
140 
141 	if (ret != ptr)
142 		pr_err("Load %lu returned %p expect %p\n", index, ret, ptr);
143 	MT_BUG_ON(mt, ret != ptr);
144 }
145 
146 static noinline void __init check_store_range(struct maple_tree *mt,
147 		unsigned long start, unsigned long end, void *ptr, int expected)
148 {
149 	int ret = -EINVAL;
150 	unsigned long i;
151 
152 	ret = mtree_test_store_range(mt, start, end, ptr);
153 	MT_BUG_ON(mt, ret != expected);
154 
155 	if (ret)
156 		return;
157 
158 	for (i = start; i <= end; i++)
159 		check_load(mt, i, ptr);
160 }
161 
162 static noinline void __init check_insert_range(struct maple_tree *mt,
163 		unsigned long start, unsigned long end, void *ptr, int expected)
164 {
165 	int ret = -EINVAL;
166 	unsigned long i;
167 
168 	ret = mtree_test_insert_range(mt, start, end, ptr);
169 	MT_BUG_ON(mt, ret != expected);
170 
171 	if (ret)
172 		return;
173 
174 	for (i = start; i <= end; i++)
175 		check_load(mt, i, ptr);
176 }
177 
178 static noinline void __init check_insert(struct maple_tree *mt,
179 					 unsigned long index, void *ptr)
180 {
181 	int ret = -EINVAL;
182 
183 	ret = mtree_test_insert(mt, index, ptr);
184 	MT_BUG_ON(mt, ret != 0);
185 }
186 
187 static noinline void __init check_dup_insert(struct maple_tree *mt,
188 				      unsigned long index, void *ptr)
189 {
190 	int ret = -EINVAL;
191 
192 	ret = mtree_test_insert(mt, index, ptr);
193 	MT_BUG_ON(mt, ret != -EEXIST);
194 }
195 
196 
197 static noinline void __init check_index_load(struct maple_tree *mt,
198 					     unsigned long index)
199 {
200 	return check_load(mt, index, xa_mk_value(index & LONG_MAX));
201 }
202 
203 static inline __init int not_empty(struct maple_node *node)
204 {
205 	int i;
206 
207 	if (node->parent)
208 		return 1;
209 
210 	for (i = 0; i < ARRAY_SIZE(node->slot); i++)
211 		if (node->slot[i])
212 			return 1;
213 
214 	return 0;
215 }
216 
217 
218 static noinline void __init check_rev_seq(struct maple_tree *mt,
219 					  unsigned long max, bool verbose)
220 {
221 	unsigned long i = max, j;
222 
223 	MT_BUG_ON(mt, !mtree_empty(mt));
224 
225 	mt_zero_nr_tallocated();
226 	while (i) {
227 		MT_BUG_ON(mt, mtree_insert_index(mt, i, GFP_KERNEL));
228 		for (j = i; j <= max; j++)
229 			check_index_load(mt, j);
230 
231 		check_load(mt, i - 1, NULL);
232 		mt_set_in_rcu(mt);
233 		MT_BUG_ON(mt, !mt_height(mt));
234 		mt_clear_in_rcu(mt);
235 		MT_BUG_ON(mt, !mt_height(mt));
236 		i--;
237 	}
238 	check_load(mt, max + 1, NULL);
239 
240 #ifndef __KERNEL__
241 	if (verbose) {
242 		rcu_barrier();
243 		mt_dump(mt, mt_dump_dec);
244 		pr_info(" %s test of 0-%lu %luK in %d active (%d total)\n",
245 			__func__, max, mt_get_alloc_size()/1024, mt_nr_allocated(),
246 			mt_nr_tallocated());
247 	}
248 #endif
249 }
250 
251 static noinline void __init check_seq(struct maple_tree *mt, unsigned long max,
252 		bool verbose)
253 {
254 	unsigned long i, j;
255 
256 	MT_BUG_ON(mt, !mtree_empty(mt));
257 
258 	mt_zero_nr_tallocated();
259 	for (i = 0; i <= max; i++) {
260 		MT_BUG_ON(mt, mtree_insert_index(mt, i, GFP_KERNEL));
261 		for (j = 0; j <= i; j++)
262 			check_index_load(mt, j);
263 
264 		if (i)
265 			MT_BUG_ON(mt, !mt_height(mt));
266 		check_load(mt, i + 1, NULL);
267 	}
268 
269 #ifndef __KERNEL__
270 	if (verbose) {
271 		rcu_barrier();
272 		mt_dump(mt, mt_dump_dec);
273 		pr_info(" seq test of 0-%lu %luK in %d active (%d total)\n",
274 			max, mt_get_alloc_size()/1024, mt_nr_allocated(),
275 			mt_nr_tallocated());
276 	}
277 #endif
278 }
279 
280 static noinline void __init check_lb_not_empty(struct maple_tree *mt)
281 {
282 	unsigned long i, j;
283 	unsigned long huge = 4000UL * 1000 * 1000;
284 
285 
286 	i = huge;
287 	while (i > 4096) {
288 		check_insert(mt, i, (void *) i);
289 		for (j = huge; j >= i; j /= 2) {
290 			check_load(mt, j-1, NULL);
291 			check_load(mt, j, (void *) j);
292 			check_load(mt, j+1, NULL);
293 		}
294 		i /= 2;
295 	}
296 	mtree_destroy(mt);
297 }
298 
/* Require an empty tree, then run the lower-bound split workload. */
static noinline void __init check_lower_bound_split(struct maple_tree *mt)
{
	MT_BUG_ON(mt, !mtree_empty(mt));
	check_lb_not_empty(mt);
}
304 
/*
 * Insert ascending powers-of-two indices from 4096 up to just below
 * @huge — intended as the mirror of check_lb_not_empty().
 */
static noinline void __init check_upper_bound_split(struct maple_tree *mt)
{
	unsigned long i, j;
	unsigned long huge;

	MT_BUG_ON(mt, !mtree_empty(mt));

	if (MAPLE_32BIT)
		huge = 2147483647UL;
	else
		huge = 4000UL * 1000 * 1000;

	i = 4096;
	while (i < huge) {
		check_insert(mt, i, (void *) i);
		/*
		 * NOTE(review): j starts at i, and i < huge here, so this
		 * inner loop body can never execute. Presumably the loads
		 * were meant to walk the already-inserted indices as in
		 * check_lb_not_empty() — confirm intent before changing.
		 */
		for (j = i; j >= huge; j *= 2) {
			check_load(mt, j-1, NULL);
			check_load(mt, j, (void *) j);
			check_load(mt, j+1, NULL);
		}
		i *= 2;
	}
	mtree_destroy(mt);
}
329 
330 static noinline void __init check_mid_split(struct maple_tree *mt)
331 {
332 	unsigned long huge = 8000UL * 1000 * 1000;
333 
334 	check_insert(mt, huge, (void *) huge);
335 	check_insert(mt, 0, xa_mk_value(0));
336 	check_lb_not_empty(mt);
337 }
338 
/*
 * Exercise mas_find_rev(): store ranges [i*10, i*10+5] for i = 0..200 and
 * walk backwards from several starting points, checking both the entries
 * found and the NULL returned once the lower limit is passed.
 */
static noinline void __init check_rev_find(struct maple_tree *mt)
{
	int i, nr_entries = 200;
	void *val;
	MA_STATE(mas, mt, 0, 0);

	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 5,
				  xa_mk_value(i), GFP_KERNEL);

	rcu_read_lock();
	/* Index 1000 lies in entry 100's range [1000, 1005]. */
	mas_set(&mas, 1000);
	val = mas_find_rev(&mas, 1000);
	MT_BUG_ON(mt, val != xa_mk_value(100));
	/* Continuing from the same state finds nothing further. */
	val = mas_find_rev(&mas, 1000);
	MT_BUG_ON(mt, val != NULL);

	/* [996, 999] is a gap between entries 99 and 100: no match. */
	mas_set(&mas, 999);
	val = mas_find_rev(&mas, 997);
	MT_BUG_ON(mt, val != NULL);

	/* Same hit as above, then the next entry down (99). */
	mas_set(&mas, 1000);
	val = mas_find_rev(&mas, 900);
	MT_BUG_ON(mt, val != xa_mk_value(100));
	val = mas_find_rev(&mas, 900);
	MT_BUG_ON(mt, val != xa_mk_value(99));

	/* Walk backwards from 20 through entries 2, 1, 0, then NULL. */
	mas_set(&mas, 20);
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(2));
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(1));
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(0));
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != NULL);
	rcu_read_unlock();
}
377 
/*
 * Iterator tests over a tree of power-of-4 indices (with XA_ZERO_ENTRY
 * stored at 64): mas_find(), mas_for_each(), mt_for_each(), mas_pause()
 * mid-iteration, and finding the last entry via mas_prev().
 */
static noinline void __init check_find(struct maple_tree *mt)
{
	unsigned long val = 0;
	unsigned long count;
	unsigned long max;
	unsigned long top;
	unsigned long last = 0, index = 0;
	void *entry, *entry2;

	MA_STATE(mas, mt, 0, 0);

	/* Insert 0. */
	MT_BUG_ON(mt, mtree_insert_index(mt, val++, GFP_KERNEL));

#if defined(CONFIG_64BIT)
	top = 4398046511104UL;
#else
	top = ULONG_MAX;
#endif

	if (MAPLE_32BIT) {
		count = 15;
	} else {
		count = 20;
	}

	/* Insert at 1, 4, 16, ... (val <<= 2); 64 gets XA_ZERO_ENTRY. */
	for (int i = 0; i <= count; i++) {
		if (val != 64)
			MT_BUG_ON(mt, mtree_insert_index(mt, val, GFP_KERNEL));
		else
			MT_BUG_ON(mt, mtree_insert(mt, val,
				XA_ZERO_ENTRY, GFP_KERNEL));

		val <<= 2;
	}

	/* mas_find() must visit each stored entry in order. */
	val = 0;
	mas_set(&mas, val);
	mas_lock(&mas);
	while ((entry = mas_find(&mas, 268435456)) != NULL) {
		if (val != 64)
			MT_BUG_ON(mt, xa_mk_value(val) != entry);
		else
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);

		val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
	}
	mas_unlock(&mas);

	/* Same walk via the mas_for_each() helper. */
	val = 0;
	mas_set(&mas, val);
	mas_lock(&mas);
	mas_for_each(&mas, entry, ULONG_MAX) {
		if (val != 64)
			MT_BUG_ON(mt, xa_mk_value(val) != entry);
		else
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
		val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
	}
	mas_unlock(&mas);

	/* Test mas_pause */
	val = 0;
	mas_set(&mas, val);
	mas_lock(&mas);
	mas_for_each(&mas, entry, ULONG_MAX) {
		if (val != 64)
			MT_BUG_ON(mt, xa_mk_value(val) != entry);
		else
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
		val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;

		/* Drop and retake the lock mid-walk to force a re-walk. */
		mas_pause(&mas);
		mas_unlock(&mas);
		mas_lock(&mas);
	}
	mas_unlock(&mas);

	/* mt_for_each() skips the XA_ZERO_ENTRY at 64. */
	val = 0;
	max = 300; /* A value big enough to include XA_ZERO_ENTRY at 64. */
	mt_for_each(mt, entry, index, max) {
		MT_BUG_ON(mt, xa_mk_value(val) != entry);
		val <<= 2;
		if (val == 64) /* Skip zero entry. */
			val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
	}

	/* Full-range mt_for_each() including an entry at ULONG_MAX. */
	val = 0;
	max = 0;
	index = 0;
	MT_BUG_ON(mt, mtree_insert_index(mt, ULONG_MAX, GFP_KERNEL));
	mt_for_each(mt, entry, index, ULONG_MAX) {
		if (val == top)
			MT_BUG_ON(mt, entry != xa_mk_value(LONG_MAX));
		else
			MT_BUG_ON(mt, xa_mk_value(val) != entry);

		/* Workaround for 32bit */
		if ((val << 2) < val)
			val = ULONG_MAX;
		else
			val <<= 2;

		if (val == 64) /* Skip zero entry. */
			val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
		max++;
		MT_BUG_ON(mt, max > 25);
	}
	mtree_erase_index(mt, ULONG_MAX);

	/* mt_find() of range [17, 512] hits the entry at 256. */
	mas_reset(&mas);
	index = 17;
	entry = mt_find(mt, &index, 512);
	MT_BUG_ON(mt, xa_mk_value(256) != entry);

	/* No entry exists in [17, 20]. */
	mas_reset(&mas);
	index = 17;
	entry = mt_find(mt, &index, 20);
	MT_BUG_ON(mt, entry != NULL);


	/* Range check.. */
	/* Insert ULONG_MAX */
	MT_BUG_ON(mt, mtree_insert_index(mt, ULONG_MAX, GFP_KERNEL));

	val = 0;
	mas_set(&mas, 0);
	mas_lock(&mas);
	mas_for_each(&mas, entry, ULONG_MAX) {
		if (val == 64)
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
		else if (val == top)
			MT_BUG_ON(mt, entry != xa_mk_value(LONG_MAX));
		else
			MT_BUG_ON(mt, xa_mk_value(val) != entry);

		/* Workaround for 32bit */
		if ((val << 2) < val)
			val = ULONG_MAX;
		else
			val <<= 2;

		/* For zero check. */
		if (!val)
			val = 1;
		mas_pause(&mas);
		mas_unlock(&mas);
		mas_lock(&mas);
	}
	mas_unlock(&mas);

	mas_set(&mas, 1048576);
	mas_lock(&mas);
	entry = mas_find(&mas, 1048576);
	mas_unlock(&mas);
	MT_BUG_ON(mas.tree, entry == NULL);

	/*
	 * Find last value.
	 * 1. get the expected value, leveraging the existence of an end entry
	 * 2. delete end entry
	 * 3. find the last value but searching for ULONG_MAX and then using
	 * prev
	 */
	/* First, get the expected result. */
	mas_lock(&mas);
	mas_reset(&mas);
	mas.index = ULONG_MAX; /* start at max.. */
	entry = mas_find(&mas, ULONG_MAX);
	entry = mas_prev(&mas, 0);
	index = mas.index;
	last = mas.last;

	/* Erase the last entry. */
	mas_reset(&mas);
	mas.index = ULONG_MAX;
	mas.last = ULONG_MAX;
	mas_erase(&mas);

	/* Get the previous value from MAS_START */
	mas_reset(&mas);
	entry2 = mas_prev(&mas, 0);

	/* Check results. */
	MT_BUG_ON(mt, entry != entry2);
	MT_BUG_ON(mt, index != mas.index);
	MT_BUG_ON(mt, last != mas.last);


	/* mas_prev() from MAS_NONE at max must find the same entry. */
	mas.node = MAS_NONE;
	mas.index = ULONG_MAX;
	mas.last = ULONG_MAX;
	entry2 = mas_prev(&mas, 0);
	MT_BUG_ON(mt, entry != entry2);

	/* Nothing precedes index 0. */
	mas_set(&mas, 0);
	MT_BUG_ON(mt, mas_prev(&mas, 0) != NULL);

	mas_unlock(&mas);
	mtree_destroy(mt);
}
594 
/*
 * Iteration counts while growing then shrinking the tree: after each
 * insert/erase of indices 0..255, a full mas_for_each() walk must visit
 * exactly the live entries, in order.
 */
static noinline void __init check_find_2(struct maple_tree *mt)
{
	unsigned long i, j;
	void *entry;

	MA_STATE(mas, mt, 0, 0);
	/* An empty tree iterates zero times. */
	rcu_read_lock();
	mas_for_each(&mas, entry, ULONG_MAX)
		MT_BUG_ON(mt, true);
	rcu_read_unlock();

	/* Grow: after inserting index i, the walk sees i + 1 entries. */
	for (i = 0; i < 256; i++) {
		mtree_insert_index(mt, i, GFP_KERNEL);
		j = 0;
		mas_set(&mas, 0);
		rcu_read_lock();
		mas_for_each(&mas, entry, ULONG_MAX) {
			MT_BUG_ON(mt, entry != xa_mk_value(j));
			j++;
		}
		rcu_read_unlock();
		MT_BUG_ON(mt, j != i + 1);
	}

	/* Shrink: after erasing index i, only i+1..255 remain. */
	for (i = 0; i < 256; i++) {
		mtree_erase_index(mt, i);
		j = i + 1;
		mas_set(&mas, 0);
		rcu_read_lock();
		mas_for_each(&mas, entry, ULONG_MAX) {
			/* Ignore any zero-entry placeholders. */
			if (xa_is_zero(entry))
				continue;

			MT_BUG_ON(mt, entry != xa_mk_value(j));
			j++;
		}
		rcu_read_unlock();
		MT_BUG_ON(mt, j != 256);
	}

	/*MT_BUG_ON(mt, !mtree_empty(mt)); */
}
637 
638 
639 #if defined(CONFIG_64BIT)
/*
 * Reverse allocation: populate the tree with a captured process address
 * map, then verify mas_empty_area_rev() finds each known hole and
 * mtree_alloc_rrange() satisfies (or correctly rejects) each request.
 */
static noinline void __init check_alloc_rev_range(struct maple_tree *mt)
{
	/*
	 * Generated by:
	 * cat /proc/self/maps | awk '{print $1}'|
	 * awk -F "-" '{printf "0x%s, 0x%s, ", $1, $2}'
	 */

	static const unsigned long range[] = {
	/*      Inclusive     , Exclusive. */
		0x565234af2000, 0x565234af4000,
		0x565234af4000, 0x565234af9000,
		0x565234af9000, 0x565234afb000,
		0x565234afc000, 0x565234afd000,
		0x565234afd000, 0x565234afe000,
		0x565235def000, 0x565235e10000,
		0x7f36d4bfd000, 0x7f36d4ee2000,
		0x7f36d4ee2000, 0x7f36d4f04000,
		0x7f36d4f04000, 0x7f36d504c000,
		0x7f36d504c000, 0x7f36d5098000,
		0x7f36d5098000, 0x7f36d5099000,
		0x7f36d5099000, 0x7f36d509d000,
		0x7f36d509d000, 0x7f36d509f000,
		0x7f36d509f000, 0x7f36d50a5000,
		0x7f36d50b9000, 0x7f36d50db000,
		0x7f36d50db000, 0x7f36d50dc000,
		0x7f36d50dc000, 0x7f36d50fa000,
		0x7f36d50fa000, 0x7f36d5102000,
		0x7f36d5102000, 0x7f36d5103000,
		0x7f36d5103000, 0x7f36d5104000,
		0x7f36d5104000, 0x7f36d5105000,
		0x7fff5876b000, 0x7fff5878d000,
		0x7fff5878e000, 0x7fff58791000,
		0x7fff58791000, 0x7fff58793000,
	};

	static const unsigned long holes[] = {
		/*
		 * Note: start of hole is INCLUSIVE
		 *        end of hole is EXCLUSIVE
		 *        (opposite of the above table.)
		 * Start of hole, end of hole,  size of hole (+1)
		 */
		0x565234afb000, 0x565234afc000, 0x1000,
		0x565234afe000, 0x565235def000, 0x12F1000,
		0x565235e10000, 0x7f36d4bfd000, 0x28E49EDED000,
	};

	/*
	 * req_range consists of 5 values.
	 * 1. min index
	 * 2. max index
	 * 3. size
	 * 4. number that should be returned.
	 * 5. return value
	 */
	static const unsigned long req_range[] = {
		0x565234af9000, /* Min */
		0x7fff58791000, /* Max */
		0x1000,         /* Size */
		0x7fff5878d << 12,  /* First rev hole of size 0x1000 */
		0,              /* Return value success. */

		0x0,            /* Min */
		0x565234AF0 << 12,    /* Max */
		0x3000,         /* Size */
		0x565234AEE << 12,  /* max - 3. */
		0,              /* Return value success. */

		0x0,            /* Min */
		-1,             /* Max */
		0x1000,         /* Size */
		562949953421311 << 12,/* First rev hole of size 0x1000 */
		0,              /* Return value success. */

		0x0,            /* Min */
		0x7F36D5109 << 12,    /* Max */
		0x4000,         /* Size */
		0x7F36D5106 << 12,    /* First rev hole of size 0x4000 */
		0,              /* Return value success. */

		/* Ascend test. */
		0x0,
		34148798628 << 12,
		19 << 12,
		34148797418 << 12,
		0x0,

		/* Too big test. */
		0x0,
		18446744073709551615UL,
		562915594369134UL << 12,
		0x0,
		-EBUSY,

		/* Single space test. */
		34148798725 << 12,
		34148798725 << 12,
		1 << 12,
		34148798725 << 12,
		0,
	};

	int i, range_count = ARRAY_SIZE(range);
	int req_range_count = ARRAY_SIZE(req_range);
	unsigned long min = 0;

	MA_STATE(mas, mt, 0, 0);

	/* Cap the usable address space with a zero entry at the top. */
	mtree_store_range(mt, MTREE_ALLOC_MAX, ULONG_MAX, XA_ZERO_ENTRY,
			  GFP_KERNEL);
#define DEBUG_REV_RANGE 0
	for (i = 0; i < range_count; i += 2) {
		/* Inclusive, Inclusive (with the -1) */

#if DEBUG_REV_RANGE
		pr_debug("\t%s: Insert %lu-%lu\n", __func__, range[i] >> 12,
				(range[i + 1] >> 12) - 1);
#endif
		check_insert_range(mt, range[i] >> 12, (range[i + 1] >> 12) - 1,
				xa_mk_value(range[i] >> 12), 0);
		mt_validate(mt);
	}


	/* Each known hole must be found by a reverse empty-area search. */
	mas_lock(&mas);
	for (i = 0; i < ARRAY_SIZE(holes); i += 3) {
#if DEBUG_REV_RANGE
		pr_debug("Search from %lu-%lu for gap %lu should be at %lu\n",
				min, holes[i+1]>>12, holes[i+2]>>12,
				holes[i] >> 12);
#endif
		MT_BUG_ON(mt, mas_empty_area_rev(&mas, min,
					holes[i+1] >> 12,
					holes[i+2] >> 12));
#if DEBUG_REV_RANGE
		pr_debug("Found %lu %lu\n", mas.index, mas.last);
		pr_debug("gap %lu %lu\n", (holes[i] >> 12),
				(holes[i+1] >> 12));
#endif
		MT_BUG_ON(mt, mas.last + 1 != (holes[i+1] >> 12));
		MT_BUG_ON(mt, mas.index != (holes[i+1] >> 12) - (holes[i+2] >> 12));
		min = holes[i+1] >> 12;
		mas_reset(&mas);
	}

	mas_unlock(&mas);
	/* Run each 5-tuple allocation request against the tree. */
	for (i = 0; i < req_range_count; i += 5) {
#if DEBUG_REV_RANGE
		pr_debug("\tReverse request %d between %lu-%lu size %lu, should get %lu\n",
				i, req_range[i] >> 12,
				(req_range[i + 1] >> 12),
				req_range[i+2] >> 12,
				req_range[i+3] >> 12);
#endif
		check_mtree_alloc_rrange(mt,
				req_range[i]   >> 12, /* start */
				req_range[i+1] >> 12, /* end */
				req_range[i+2] >> 12, /* size */
				req_range[i+3] >> 12, /* expected address */
				req_range[i+4],       /* expected return */
				xa_mk_value(req_range[i] >> 12)); /* pointer */
		mt_validate(mt);
	}

	mt_set_non_kernel(1);
	mtree_erase(mt, 34148798727); /* create a deleted range. */
	mtree_erase(mt, 34148798725);
	check_mtree_alloc_rrange(mt, 0, 34359052173, 210253414,
			34148798725, 0, mt);

	mtree_destroy(mt);
}
813 
/*
 * Forward allocation: populate the tree with a captured process address
 * map, then verify mas_empty_area() finds each known hole and
 * mtree_alloc_range() satisfies (or correctly rejects) each request.
 */
static noinline void __init check_alloc_range(struct maple_tree *mt)
{
	/*
	 * Generated by:
	 * cat /proc/self/maps|awk '{print $1}'|
	 * awk -F "-" '{printf "0x%s, 0x%s, ", $1, $2}'
	 */

	static const unsigned long range[] = {
	/*      Inclusive     , Exclusive. */
		0x565234af2000, 0x565234af4000,
		0x565234af4000, 0x565234af9000,
		0x565234af9000, 0x565234afb000,
		0x565234afc000, 0x565234afd000,
		0x565234afd000, 0x565234afe000,
		0x565235def000, 0x565235e10000,
		0x7f36d4bfd000, 0x7f36d4ee2000,
		0x7f36d4ee2000, 0x7f36d4f04000,
		0x7f36d4f04000, 0x7f36d504c000,
		0x7f36d504c000, 0x7f36d5098000,
		0x7f36d5098000, 0x7f36d5099000,
		0x7f36d5099000, 0x7f36d509d000,
		0x7f36d509d000, 0x7f36d509f000,
		0x7f36d509f000, 0x7f36d50a5000,
		0x7f36d50b9000, 0x7f36d50db000,
		0x7f36d50db000, 0x7f36d50dc000,
		0x7f36d50dc000, 0x7f36d50fa000,
		0x7f36d50fa000, 0x7f36d5102000,
		0x7f36d5102000, 0x7f36d5103000,
		0x7f36d5103000, 0x7f36d5104000,
		0x7f36d5104000, 0x7f36d5105000,
		0x7fff5876b000, 0x7fff5878d000,
		0x7fff5878e000, 0x7fff58791000,
		0x7fff58791000, 0x7fff58793000,
	};
	static const unsigned long holes[] = {
		/* Start of hole, end of hole,  size of hole (+1) */
		0x565234afb000, 0x565234afc000, 0x1000,
		0x565234afe000, 0x565235def000, 0x12F1000,
		0x565235e10000, 0x7f36d4bfd000, 0x28E49EDED000,
	};

	/*
	 * req_range consists of 5 values.
	 * 1. min index
	 * 2. max index
	 * 3. size
	 * 4. number that should be returned.
	 * 5. return value
	 */
	static const unsigned long req_range[] = {
		0x565234af9000, /* Min */
		0x7fff58791000, /* Max */
		0x1000,         /* Size */
		0x565234afb000, /* First hole in our data of size 1000. */
		0,              /* Return value success. */

		0x0,            /* Min */
		0x7fff58791000, /* Max */
		0x1F00,         /* Size */
		0x0,            /* First hole in our data of size 2000. */
		0,              /* Return value success. */

		/* Test ascend. */
		34148797436 << 12, /* Min */
		0x7fff587AF000,    /* Max */
		0x3000,         /* Size */
		34148798629 << 12,             /* Expected location */
		0,              /* Return value success. */

		/* Test failing. */
		34148798623 << 12,  /* Min */
		34148798683 << 12,  /* Max */
		0x15000,            /* Size */
		0,             /* Expected location */
		-EBUSY,              /* Return value failed. */

		/* Test filling entire gap. */
		34148798623 << 12,  /* Min */
		0x7fff587AF000,    /* Max */
		0x10000,           /* Size */
		34148798632 << 12,             /* Expected location */
		0,              /* Return value success. */

		/* Test walking off the end of root. */
		0,                  /* Min */
		-1,                 /* Max */
		-1,                 /* Size */
		0,                  /* Expected location */
		-EBUSY,             /* Return value failure. */

		/* Test looking for too large a hole across entire range. */
		0,                  /* Min */
		-1,                 /* Max */
		4503599618982063UL << 12,  /* Size */
		34359052178 << 12,  /* Expected location */
		-EBUSY,             /* Return failure. */

		/* Test a single entry */
		34148798648 << 12,		/* Min */
		34148798648 << 12,		/* Max */
		4096,			/* Size of 1 */
		34148798648 << 12,	/* Location is the same as min/max */
		0,			/* Success */
	};
	int i, range_count = ARRAY_SIZE(range);
	int req_range_count = ARRAY_SIZE(req_range);
	unsigned long min = 0x565234af2000;
	MA_STATE(mas, mt, 0, 0);

	/* Cap the usable address space with a zero entry at the top. */
	mtree_store_range(mt, MTREE_ALLOC_MAX, ULONG_MAX, XA_ZERO_ENTRY,
			  GFP_KERNEL);
	for (i = 0; i < range_count; i += 2) {
#define DEBUG_ALLOC_RANGE 0
#if DEBUG_ALLOC_RANGE
		pr_debug("\tInsert %lu-%lu\n", range[i] >> 12,
			 (range[i + 1] >> 12) - 1);
		mt_dump(mt, mt_dump_hex);
#endif
		check_insert_range(mt, range[i] >> 12, (range[i + 1] >> 12) - 1,
				xa_mk_value(range[i] >> 12), 0);
		mt_validate(mt);
	}



	/* Each known hole must be found by a forward empty-area search. */
	mas_lock(&mas);
	for (i = 0; i < ARRAY_SIZE(holes); i += 3) {

#if DEBUG_ALLOC_RANGE
		pr_debug("\tGet empty %lu-%lu size %lu (%lx-%lx)\n", min >> 12,
			holes[i+1] >> 12, holes[i+2] >> 12,
			min, holes[i+1]);
#endif
		MT_BUG_ON(mt, mas_empty_area(&mas, min >> 12,
					holes[i+1] >> 12,
					holes[i+2] >> 12));
		MT_BUG_ON(mt, mas.index != holes[i] >> 12);
		min = holes[i+1];
		mas_reset(&mas);
	}
	mas_unlock(&mas);
	/* Run each 5-tuple allocation request against the tree. */
	for (i = 0; i < req_range_count; i += 5) {
#if DEBUG_ALLOC_RANGE
		pr_debug("\tTest %d: %lu-%lu size %lu expected %lu (%lu-%lu)\n",
			 i/5, req_range[i]   >> 12, req_range[i + 1]   >> 12,
			 req_range[i + 2]   >> 12, req_range[i + 3]   >> 12,
			 req_range[i], req_range[i+1]);
#endif
		check_mtree_alloc_range(mt,
				req_range[i]   >> 12, /* start */
				req_range[i+1] >> 12, /* end */
				req_range[i+2] >> 12, /* size */
				req_range[i+3] >> 12, /* expected address */
				req_range[i+4],       /* expected return */
				xa_mk_value(req_range[i] >> 12)); /* pointer */
		mt_validate(mt);
#if DEBUG_ALLOC_RANGE
		mt_dump(mt, mt_dump_hex);
#endif
	}

	mtree_destroy(mt);
}
978 #endif
979 
980 static noinline void __init check_ranges(struct maple_tree *mt)
981 {
982 	int i, val, val2;
983 	static const unsigned long r[] = {
984 		10, 15,
985 		20, 25,
986 		17, 22, /* Overlaps previous range. */
987 		9, 1000, /* Huge. */
988 		100, 200,
989 		45, 168,
990 		118, 128,
991 			};
992 
993 	MT_BUG_ON(mt, !mtree_empty(mt));
994 	check_insert_range(mt, r[0], r[1], xa_mk_value(r[0]), 0);
995 	check_insert_range(mt, r[2], r[3], xa_mk_value(r[2]), 0);
996 	check_insert_range(mt, r[4], r[5], xa_mk_value(r[4]), -EEXIST);
997 	MT_BUG_ON(mt, !mt_height(mt));
998 	/* Store */
999 	check_store_range(mt, r[4], r[5], xa_mk_value(r[4]), 0);
1000 	check_store_range(mt, r[6], r[7], xa_mk_value(r[6]), 0);
1001 	check_store_range(mt, r[8], r[9], xa_mk_value(r[8]), 0);
1002 	MT_BUG_ON(mt, !mt_height(mt));
1003 	mtree_destroy(mt);
1004 	MT_BUG_ON(mt, mt_height(mt));
1005 
1006 	check_seq(mt, 50, false);
1007 	mt_set_non_kernel(4);
1008 	check_store_range(mt, 5, 47,  xa_mk_value(47), 0);
1009 	MT_BUG_ON(mt, !mt_height(mt));
1010 	mtree_destroy(mt);
1011 
1012 	/* Create tree of 1-100 */
1013 	check_seq(mt, 100, false);
1014 	/* Store 45-168 */
1015 	mt_set_non_kernel(10);
1016 	check_store_range(mt, r[10], r[11], xa_mk_value(r[10]), 0);
1017 	MT_BUG_ON(mt, !mt_height(mt));
1018 	mtree_destroy(mt);
1019 
1020 	/* Create tree of 1-200 */
1021 	check_seq(mt, 200, false);
1022 	/* Store 45-168 */
1023 	check_store_range(mt, r[10], r[11], xa_mk_value(r[10]), 0);
1024 	MT_BUG_ON(mt, !mt_height(mt));
1025 	mtree_destroy(mt);
1026 
1027 	check_seq(mt, 30, false);
1028 	check_store_range(mt, 6, 18, xa_mk_value(6), 0);
1029 	MT_BUG_ON(mt, !mt_height(mt));
1030 	mtree_destroy(mt);
1031 
1032 	/* Overwrite across multiple levels. */
1033 	/* Create tree of 1-400 */
1034 	check_seq(mt, 400, false);
1035 	mt_set_non_kernel(50);
1036 	/* Store 118-128 */
1037 	check_store_range(mt, r[12], r[13], xa_mk_value(r[12]), 0);
1038 	mt_set_non_kernel(50);
1039 	mtree_test_erase(mt, 140);
1040 	mtree_test_erase(mt, 141);
1041 	mtree_test_erase(mt, 142);
1042 	mtree_test_erase(mt, 143);
1043 	mtree_test_erase(mt, 130);
1044 	mtree_test_erase(mt, 131);
1045 	mtree_test_erase(mt, 132);
1046 	mtree_test_erase(mt, 133);
1047 	mtree_test_erase(mt, 134);
1048 	mtree_test_erase(mt, 135);
1049 	check_load(mt, r[12], xa_mk_value(r[12]));
1050 	check_load(mt, r[13], xa_mk_value(r[12]));
1051 	check_load(mt, r[13] - 1, xa_mk_value(r[12]));
1052 	check_load(mt, r[13] + 1, xa_mk_value(r[13] + 1));
1053 	check_load(mt, 135, NULL);
1054 	check_load(mt, 140, NULL);
1055 	mt_set_non_kernel(0);
1056 	MT_BUG_ON(mt, !mt_height(mt));
1057 	mtree_destroy(mt);
1058 
1059 
1060 
1061 	/* Overwrite multiple levels at the end of the tree (slot 7) */
1062 	mt_set_non_kernel(50);
1063 	check_seq(mt, 400, false);
1064 	check_store_range(mt, 353, 361, xa_mk_value(353), 0);
1065 	check_store_range(mt, 347, 352, xa_mk_value(347), 0);
1066 
1067 	check_load(mt, 346, xa_mk_value(346));
1068 	for (i = 347; i <= 352; i++)
1069 		check_load(mt, i, xa_mk_value(347));
1070 	for (i = 353; i <= 361; i++)
1071 		check_load(mt, i, xa_mk_value(353));
1072 	check_load(mt, 362, xa_mk_value(362));
1073 	mt_set_non_kernel(0);
1074 	MT_BUG_ON(mt, !mt_height(mt));
1075 	mtree_destroy(mt);
1076 
1077 	mt_set_non_kernel(50);
1078 	check_seq(mt, 400, false);
1079 	check_store_range(mt, 352, 364, NULL, 0);
1080 	check_store_range(mt, 351, 363, xa_mk_value(352), 0);
1081 	check_load(mt, 350, xa_mk_value(350));
1082 	check_load(mt, 351, xa_mk_value(352));
1083 	for (i = 352; i <= 363; i++)
1084 		check_load(mt, i, xa_mk_value(352));
1085 	check_load(mt, 364, NULL);
1086 	check_load(mt, 365, xa_mk_value(365));
1087 	mt_set_non_kernel(0);
1088 	MT_BUG_ON(mt, !mt_height(mt));
1089 	mtree_destroy(mt);
1090 
1091 	mt_set_non_kernel(5);
1092 	check_seq(mt, 400, false);
1093 	check_store_range(mt, 352, 364, NULL, 0);
1094 	check_store_range(mt, 351, 364, xa_mk_value(352), 0);
1095 	check_load(mt, 350, xa_mk_value(350));
1096 	check_load(mt, 351, xa_mk_value(352));
1097 	for (i = 352; i <= 364; i++)
1098 		check_load(mt, i, xa_mk_value(352));
1099 	check_load(mt, 365, xa_mk_value(365));
1100 	mt_set_non_kernel(0);
1101 	MT_BUG_ON(mt, !mt_height(mt));
1102 	mtree_destroy(mt);
1103 
1104 
1105 	mt_set_non_kernel(50);
1106 	check_seq(mt, 400, false);
1107 	check_store_range(mt, 362, 367, xa_mk_value(362), 0);
1108 	check_store_range(mt, 353, 361, xa_mk_value(353), 0);
1109 	mt_set_non_kernel(0);
1110 	mt_validate(mt);
1111 	MT_BUG_ON(mt, !mt_height(mt));
1112 	mtree_destroy(mt);
1113 	/*
1114 	 * Interesting cases:
1115 	 * 1. Overwrite the end of a node and end in the first entry of the next
1116 	 * node.
1117 	 * 2. Split a single range
1118 	 * 3. Overwrite the start of a range
1119 	 * 4. Overwrite the end of a range
1120 	 * 5. Overwrite the entire range
1121 	 * 6. Overwrite a range that causes multiple parent nodes to be
1122 	 * combined
1123 	 * 7. Overwrite a range that causes multiple parent nodes and part of
1124 	 * root to be combined
1125 	 * 8. Overwrite the whole tree
1126 	 * 9. Try to overwrite the zero entry of an alloc tree.
1127 	 * 10. Write a range larger than a nodes current pivot
1128 	 */
1129 
1130 	mt_set_non_kernel(50);
1131 	for (i = 0; i <= 500; i++) {
1132 		val = i*5;
1133 		val2 = (i+1)*5;
1134 		check_store_range(mt, val, val2, xa_mk_value(val), 0);
1135 	}
1136 	check_store_range(mt, 2400, 2400, xa_mk_value(2400), 0);
1137 	check_store_range(mt, 2411, 2411, xa_mk_value(2411), 0);
1138 	check_store_range(mt, 2412, 2412, xa_mk_value(2412), 0);
1139 	check_store_range(mt, 2396, 2400, xa_mk_value(4052020), 0);
1140 	check_store_range(mt, 2402, 2402, xa_mk_value(2402), 0);
1141 	mtree_destroy(mt);
1142 	mt_set_non_kernel(0);
1143 
1144 	mt_set_non_kernel(50);
1145 	for (i = 0; i <= 500; i++) {
1146 		val = i*5;
1147 		val2 = (i+1)*5;
1148 		check_store_range(mt, val, val2, xa_mk_value(val), 0);
1149 	}
1150 	check_store_range(mt, 2422, 2422, xa_mk_value(2422), 0);
1151 	check_store_range(mt, 2424, 2424, xa_mk_value(2424), 0);
1152 	check_store_range(mt, 2425, 2425, xa_mk_value(2), 0);
1153 	check_store_range(mt, 2460, 2470, NULL, 0);
1154 	check_store_range(mt, 2435, 2460, xa_mk_value(2435), 0);
1155 	check_store_range(mt, 2461, 2470, xa_mk_value(2461), 0);
1156 	mt_set_non_kernel(0);
1157 	MT_BUG_ON(mt, !mt_height(mt));
1158 	mtree_destroy(mt);
1159 
1160 	/* Check in-place modifications */
1161 	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1162 	/* Append to the start of last range */
1163 	mt_set_non_kernel(50);
1164 	for (i = 0; i <= 500; i++) {
1165 		val = i * 5 + 1;
1166 		val2 = val + 4;
1167 		check_store_range(mt, val, val2, xa_mk_value(val), 0);
1168 	}
1169 
1170 	/* Append to the last range without touching any boundaries */
1171 	for (i = 0; i < 10; i++) {
1172 		val = val2 + 5;
1173 		val2 = val + 4;
1174 		check_store_range(mt, val, val2, xa_mk_value(val), 0);
1175 	}
1176 
1177 	/* Append to the end of last range */
1178 	val = val2;
1179 	for (i = 0; i < 10; i++) {
1180 		val += 5;
1181 		MT_BUG_ON(mt, mtree_test_store_range(mt, val, ULONG_MAX,
1182 						     xa_mk_value(val)) != 0);
1183 	}
1184 
1185 	/* Overwriting the range and over a part of the next range */
1186 	for (i = 10; i < 30; i += 2) {
1187 		val = i * 5 + 1;
1188 		val2 = val + 5;
1189 		check_store_range(mt, val, val2, xa_mk_value(val), 0);
1190 	}
1191 
1192 	/* Overwriting a part of the range and over the next range */
1193 	for (i = 50; i < 70; i += 2) {
1194 		val2 = i * 5;
1195 		val = val2 - 5;
1196 		check_store_range(mt, val, val2, xa_mk_value(val), 0);
1197 	}
1198 
1199 	/*
1200 	 * Expand the range, only partially overwriting the previous and
1201 	 * next ranges
1202 	 */
1203 	for (i = 100; i < 130; i += 3) {
1204 		val = i * 5 - 5;
1205 		val2 = i * 5 + 1;
1206 		check_store_range(mt, val, val2, xa_mk_value(val), 0);
1207 	}
1208 
1209 	/*
1210 	 * Expand the range, only partially overwriting the previous and
1211 	 * next ranges, in RCU mode
1212 	 */
1213 	mt_set_in_rcu(mt);
1214 	for (i = 150; i < 180; i += 3) {
1215 		val = i * 5 - 5;
1216 		val2 = i * 5 + 1;
1217 		check_store_range(mt, val, val2, xa_mk_value(val), 0);
1218 	}
1219 
1220 	MT_BUG_ON(mt, !mt_height(mt));
1221 	mt_validate(mt);
1222 	mt_set_non_kernel(0);
1223 	mtree_destroy(mt);
1224 
1225 	/* Test rebalance gaps */
1226 	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1227 	mt_set_non_kernel(50);
1228 	for (i = 0; i <= 50; i++) {
1229 		val = i*10;
1230 		val2 = (i+1)*10;
1231 		check_store_range(mt, val, val2, xa_mk_value(val), 0);
1232 	}
1233 	check_store_range(mt, 161, 161, xa_mk_value(161), 0);
1234 	check_store_range(mt, 162, 162, xa_mk_value(162), 0);
1235 	check_store_range(mt, 163, 163, xa_mk_value(163), 0);
1236 	check_store_range(mt, 240, 249, NULL, 0);
1237 	mtree_erase(mt, 200);
1238 	mtree_erase(mt, 210);
1239 	mtree_erase(mt, 220);
1240 	mtree_erase(mt, 230);
1241 	mt_set_non_kernel(0);
1242 	MT_BUG_ON(mt, !mt_height(mt));
1243 	mtree_destroy(mt);
1244 
1245 	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1246 	for (i = 0; i <= 500; i++) {
1247 		val = i*10;
1248 		val2 = (i+1)*10;
1249 		check_store_range(mt, val, val2, xa_mk_value(val), 0);
1250 	}
1251 	check_store_range(mt, 4600, 4959, xa_mk_value(1), 0);
1252 	mt_validate(mt);
1253 	MT_BUG_ON(mt, !mt_height(mt));
1254 	mtree_destroy(mt);
1255 
1256 	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1257 	for (i = 0; i <= 500; i++) {
1258 		val = i*10;
1259 		val2 = (i+1)*10;
1260 		check_store_range(mt, val, val2, xa_mk_value(val), 0);
1261 	}
1262 	check_store_range(mt, 4811, 4811, xa_mk_value(4811), 0);
1263 	check_store_range(mt, 4812, 4812, xa_mk_value(4812), 0);
1264 	check_store_range(mt, 4861, 4861, xa_mk_value(4861), 0);
1265 	check_store_range(mt, 4862, 4862, xa_mk_value(4862), 0);
1266 	check_store_range(mt, 4842, 4849, NULL, 0);
1267 	mt_validate(mt);
1268 	MT_BUG_ON(mt, !mt_height(mt));
1269 	mtree_destroy(mt);
1270 
1271 	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1272 	for (i = 0; i <= 1300; i++) {
1273 		val = i*10;
1274 		val2 = (i+1)*10;
1275 		check_store_range(mt, val, val2, xa_mk_value(val), 0);
1276 		MT_BUG_ON(mt, mt_height(mt) >= 4);
1277 	}
1278 	/*  Cause a 3 child split all the way up the tree. */
1279 	for (i = 5; i < 215; i += 10)
1280 		check_store_range(mt, 11450 + i, 11450 + i + 1, NULL, 0);
1281 	for (i = 5; i < 65; i += 10)
1282 		check_store_range(mt, 11770 + i, 11770 + i + 1, NULL, 0);
1283 
1284 	MT_BUG_ON(mt, mt_height(mt) >= 4);
1285 	for (i = 5; i < 45; i += 10)
1286 		check_store_range(mt, 11700 + i, 11700 + i + 1, NULL, 0);
1287 	if (!MAPLE_32BIT)
1288 		MT_BUG_ON(mt, mt_height(mt) < 4);
1289 	mtree_destroy(mt);
1290 
1291 
1292 	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1293 	for (i = 0; i <= 1200; i++) {
1294 		val = i*10;
1295 		val2 = (i+1)*10;
1296 		check_store_range(mt, val, val2, xa_mk_value(val), 0);
1297 		MT_BUG_ON(mt, mt_height(mt) >= 4);
1298 	}
1299 	/* Fill parents and leaves before split. */
1300 	for (i = 5; i < 455; i += 10)
1301 		check_store_range(mt, 7800 + i, 7800 + i + 1, NULL, 0);
1302 
1303 	for (i = 1; i < 16; i++)
1304 		check_store_range(mt, 8185 + i, 8185 + i + 1,
1305 				  xa_mk_value(8185+i), 0);
1306 	MT_BUG_ON(mt, mt_height(mt) >= 4);
1307 	/* triple split across multiple levels. */
1308 	check_store_range(mt, 8184, 8184, xa_mk_value(8184), 0);
1309 	if (!MAPLE_32BIT)
1310 		MT_BUG_ON(mt, mt_height(mt) != 4);
1311 }
1312 
1313 static noinline void __init check_next_entry(struct maple_tree *mt)
1314 {
1315 	void *entry = NULL;
1316 	unsigned long limit = 30, i = 0;
1317 	MA_STATE(mas, mt, i, i);
1318 
1319 	MT_BUG_ON(mt, !mtree_empty(mt));
1320 
1321 	check_seq(mt, limit, false);
1322 	rcu_read_lock();
1323 
1324 	/* Check the first one and get ma_state in the correct state. */
1325 	MT_BUG_ON(mt, mas_walk(&mas) != xa_mk_value(i++));
1326 	for ( ; i <= limit + 1; i++) {
1327 		entry = mas_next(&mas, limit);
1328 		if (i > limit)
1329 			MT_BUG_ON(mt, entry != NULL);
1330 		else
1331 			MT_BUG_ON(mt, xa_mk_value(i) != entry);
1332 	}
1333 	rcu_read_unlock();
1334 	mtree_destroy(mt);
1335 }
1336 
1337 static noinline void __init check_prev_entry(struct maple_tree *mt)
1338 {
1339 	unsigned long index = 16;
1340 	void *value;
1341 	int i;
1342 
1343 	MA_STATE(mas, mt, index, index);
1344 
1345 	MT_BUG_ON(mt, !mtree_empty(mt));
1346 	check_seq(mt, 30, false);
1347 
1348 	rcu_read_lock();
1349 	value = mas_find(&mas, ULONG_MAX);
1350 	MT_BUG_ON(mt, value != xa_mk_value(index));
1351 	value = mas_prev(&mas, 0);
1352 	MT_BUG_ON(mt, value != xa_mk_value(index - 1));
1353 	rcu_read_unlock();
1354 	mtree_destroy(mt);
1355 
1356 	/* Check limits on prev */
1357 	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1358 	mas_lock(&mas);
1359 	for (i = 0; i <= index; i++) {
1360 		mas_set_range(&mas, i*10, i*10+5);
1361 		mas_store_gfp(&mas, xa_mk_value(i), GFP_KERNEL);
1362 	}
1363 
1364 	mas_set(&mas, 20);
1365 	value = mas_walk(&mas);
1366 	MT_BUG_ON(mt, value != xa_mk_value(2));
1367 
1368 	value = mas_prev(&mas, 19);
1369 	MT_BUG_ON(mt, value != NULL);
1370 
1371 	mas_set(&mas, 80);
1372 	value = mas_walk(&mas);
1373 	MT_BUG_ON(mt, value != xa_mk_value(8));
1374 
1375 	value = mas_prev(&mas, 76);
1376 	MT_BUG_ON(mt, value != NULL);
1377 
1378 	mas_unlock(&mas);
1379 }
1380 
/*
 * check_root_expand() - Exercise writes into a root-only (single node) tree
 * and verify the reported index/last span of walks, including entries with
 * low pointer bits set (xa-style tagged values).
 */
static noinline void __init check_root_expand(struct maple_tree *mt)
{
	MA_STATE(mas, mt, 0, 0);
	void *ptr;


	mas_lock(&mas);
	mas_set(&mas, 3);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, mas.index != 0);
	/* An empty tree reports the entire range as one vacant span. */
	MT_BUG_ON(mt, mas.last != ULONG_MAX);

	/* Store an arbitrary kernel pointer at index 1. */
	ptr = &check_prev_entry;
	mas_set(&mas, 1);
	mas_store_gfp(&mas, ptr, GFP_KERNEL);

	mas_set(&mas, 0);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != NULL);

	mas_set(&mas, 1);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != &check_prev_entry);

	mas_set(&mas, 2);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != NULL);
	mas_unlock(&mas);
	mtree_destroy(mt);


	/* Store at index 0 only; check the vacant range above it. */
	mt_init_flags(mt, 0);
	mas_lock(&mas);

	mas_set(&mas, 0);
	ptr = &check_prev_entry;
	mas_store_gfp(&mas, ptr, GFP_KERNEL);

	mas_set(&mas, 5);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, mas.index != 1);
	MT_BUG_ON(mt, mas.last != ULONG_MAX);

	/* A range walk over [0, 100] still finds the entry at index 0. */
	mas_set_range(&mas, 0, 100);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != &check_prev_entry);
	MT_BUG_ON(mt, mas.last != 0);
	mas_unlock(&mas);
	mtree_destroy(mt);

	/* Entry with bit 0 set must round-trip through store/next/prev. */
	mt_init_flags(mt, 0);
	mas_lock(&mas);

	mas_set(&mas, 0);
	ptr = (void *)((unsigned long) check_prev_entry | 1UL);
	mas_store_gfp(&mas, ptr, GFP_KERNEL);
	ptr = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, ptr != NULL);
	/* NOTE(review): '&&' requires both to be wrong to trip -- confirm intent */
	MT_BUG_ON(mt, (mas.index != 1) && (mas.last != ULONG_MAX));

	mas_set(&mas, 1);
	ptr = mas_prev(&mas, 0);
	MT_BUG_ON(mt, (mas.index != 0) && (mas.last != 0));
	MT_BUG_ON(mt, ptr != (void *)((unsigned long) check_prev_entry | 1UL));

	mas_unlock(&mas);

	mtree_destroy(mt);

	/* Same round-trip with bit 1 set instead. */
	mt_init_flags(mt, 0);
	mas_lock(&mas);
	mas_set(&mas, 0);
	ptr = (void *)((unsigned long) check_prev_entry | 2UL);
	mas_store_gfp(&mas, ptr, GFP_KERNEL);
	ptr = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, (mas.index != ULONG_MAX) && (mas.last != ULONG_MAX));

	mas_set(&mas, 1);
	ptr = mas_prev(&mas, 0);
	MT_BUG_ON(mt, (mas.index != 0) && (mas.last != 0));
	MT_BUG_ON(mt, ptr != (void *)((unsigned long) check_prev_entry | 2UL));


	mas_unlock(&mas);
}
1470 
/*
 * check_gap_combining() - Verify that erasing adjacent entries combines
 * their gaps and that mas_empty_area_rev() finds the combined gap, for
 * single-level trees (seq100), multi-level trees (seq2000) and two-level
 * rebalancing (seq400).  The seq100 indices differ between 32 and 64 bit
 * node layouts because node boundaries fall at different indices.
 */
static noinline void __init check_gap_combining(struct maple_tree *mt)
{
	struct maple_enode *mn1, *mn2;
	void *entry;
	unsigned long singletons = 100;
	static const unsigned long *seq100;
	/* Layout per group: indices to erase/check, then min, max, gap size. */
	static const unsigned long seq100_64[] = {
		/* 0-5 */
		74, 75, 76,
		50, 100, 2,

		/* 6-12 */
		44, 45, 46, 43,
		20, 50, 3,

		/* 13-20*/
		80, 81, 82,
		76, 2, 79, 85, 4,
	};

	static const unsigned long seq100_32[] = {
		/* 0-5 */
		61, 62, 63,
		50, 100, 2,

		/* 6-12 */
		31, 32, 33, 30,
		20, 50, 3,

		/* 13-20*/
		80, 81, 82,
		76, 2, 79, 85, 4,
	};

	static const unsigned long seq2000[] = {
		1152, 1151,
		1100, 1200, 2,
	};
	static const unsigned long seq400[] = {
		286, 318,
		256, 260, 266, 270, 275, 280, 290, 398,
		286, 310,
	};

	unsigned long index;

	MA_STATE(mas, mt, 0, 0);

	if (MAPLE_32BIT)
		seq100 = seq100_32;
	else
		seq100 = seq100_64;

	index = seq100[0];
	mas_set(&mas, index);
	MT_BUG_ON(mt, !mtree_empty(mt));
	check_seq(mt, singletons, false); /* create 100 singletons. */

	/* Erase two adjacent singletons to open a gap of 2. */
	mt_set_non_kernel(1);
	mtree_test_erase(mt, seq100[2]);
	check_load(mt, seq100[2], NULL);
	mtree_test_erase(mt, seq100[1]);
	check_load(mt, seq100[1], NULL);

	rcu_read_lock();
	entry = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index));
	mn1 = mas.node;
	mas_next(&mas, ULONG_MAX);
	entry = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index + 4));
	mn2 = mas.node;
	MT_BUG_ON(mt, mn1 == mn2); /* test the test. */

	/*
	 * At this point, there is a gap of 2 at index + 1 between seq100[3] and
	 * seq100[4]. Search for the gap.
	 */
	mt_set_non_kernel(1);
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[3], seq100[4],
					     seq100[5]));
	MT_BUG_ON(mt, mas.index != index + 1);
	rcu_read_unlock();

	/* Erase three adjacent singletons to open a gap of 3. */
	mtree_test_erase(mt, seq100[6]);
	check_load(mt, seq100[6], NULL);
	mtree_test_erase(mt, seq100[7]);
	check_load(mt, seq100[7], NULL);
	mtree_test_erase(mt, seq100[8]);
	index = seq100[9];

	rcu_read_lock();
	mas.index = index;
	mas.last = index;
	mas_reset(&mas);
	entry = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index));
	mn1 = mas.node;
	entry = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index + 4));
	mas_next(&mas, ULONG_MAX); /* go to the next entry. */
	mn2 = mas.node;
	MT_BUG_ON(mt, mn1 == mn2); /* test the next entry is in the next node. */

	/*
	 * At this point, there is a gap of 3 at seq100[6].  Find it by
	 * searching 20 - 50 for size 3.
	 */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[10], seq100[11],
					     seq100[12]));
	MT_BUG_ON(mt, mas.index != seq100[6]);
	rcu_read_unlock();

	/* Overwrite two entries with NULL; their gaps must merge. */
	mt_set_non_kernel(1);
	mtree_store(mt, seq100[13], NULL, GFP_KERNEL);
	check_load(mt, seq100[13], NULL);
	check_load(mt, seq100[14], xa_mk_value(seq100[14]));
	mtree_store(mt, seq100[14], NULL, GFP_KERNEL);
	check_load(mt, seq100[13], NULL);
	check_load(mt, seq100[14], NULL);

	mas_reset(&mas);
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[16], seq100[15],
					     seq100[17]));
	MT_BUG_ON(mt, mas.index != seq100[13]);
	mt_validate(mt);
	rcu_read_unlock();

	/*
	 * *DEPRECATED: no retries anymore* Test retry entry in the start of a
	 * gap.
	 */
	mt_set_non_kernel(2);
	mtree_test_store_range(mt, seq100[18], seq100[14], NULL);
	mtree_test_erase(mt, seq100[15]);
	mas_reset(&mas);
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[16], seq100[19],
					     seq100[20]));
	rcu_read_unlock();
	MT_BUG_ON(mt, mas.index != seq100[18]);
	mt_validate(mt);
	mtree_destroy(mt);

	/* seq 2000 tests are for multi-level tree gaps */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	check_seq(mt, 2000, false);
	mt_set_non_kernel(1);
	mtree_test_erase(mt, seq2000[0]);
	mtree_test_erase(mt, seq2000[1]);

	mt_set_non_kernel(2);
	mas_reset(&mas);
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq2000[2], seq2000[3],
					     seq2000[4]));
	MT_BUG_ON(mt, mas.index != seq2000[1]);
	rcu_read_unlock();
	mt_validate(mt);
	mtree_destroy(mt);

	/* seq 400 tests rebalancing over two levels. */
	mt_set_non_kernel(99);
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	check_seq(mt, 400, false);
	mtree_test_store_range(mt, seq400[0], seq400[1], NULL);
	mt_set_non_kernel(0);
	mtree_destroy(mt);

	/* Repeated overlapping stores ending at the same upper bound. */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	check_seq(mt, 400, false);
	mt_set_non_kernel(50);
	mtree_test_store_range(mt, seq400[2], seq400[9],
			       xa_mk_value(seq400[2]));
	mtree_test_store_range(mt, seq400[3], seq400[9],
			       xa_mk_value(seq400[3]));
	mtree_test_store_range(mt, seq400[4], seq400[9],
			       xa_mk_value(seq400[4]));
	mtree_test_store_range(mt, seq400[5], seq400[9],
			       xa_mk_value(seq400[5]));
	mtree_test_store_range(mt, seq400[0], seq400[9],
			       xa_mk_value(seq400[0]));
	mtree_test_store_range(mt, seq400[6], seq400[9],
			       xa_mk_value(seq400[6]));
	mtree_test_store_range(mt, seq400[7], seq400[9],
			       xa_mk_value(seq400[7]));
	mtree_test_store_range(mt, seq400[8], seq400[9],
			       xa_mk_value(seq400[8]));
	mtree_test_store_range(mt, seq400[10], seq400[11],
			       xa_mk_value(seq400[10]));
	mt_validate(mt);
	mt_set_non_kernel(0);
	mtree_destroy(mt);
}
1668 static noinline void __init check_node_overwrite(struct maple_tree *mt)
1669 {
1670 	int i, max = 4000;
1671 
1672 	for (i = 0; i < max; i++)
1673 		mtree_test_store_range(mt, i*100, i*100 + 50, xa_mk_value(i*100));
1674 
1675 	mtree_test_store_range(mt, 319951, 367950, NULL);
1676 	/*mt_dump(mt, mt_dump_dec); */
1677 	mt_validate(mt);
1678 }
1679 
1680 #if defined(BENCH_SLOT_STORE)
1681 static noinline void __init bench_slot_store(struct maple_tree *mt)
1682 {
1683 	int i, brk = 105, max = 1040, brk_start = 100, count = 20000000;
1684 
1685 	for (i = 0; i < max; i += 10)
1686 		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
1687 
1688 	for (i = 0; i < count; i++) {
1689 		mtree_store_range(mt, brk, brk, NULL, GFP_KERNEL);
1690 		mtree_store_range(mt, brk_start, brk, xa_mk_value(brk),
1691 				  GFP_KERNEL);
1692 	}
1693 }
1694 #endif
1695 
1696 #if defined(BENCH_NODE_STORE)
1697 static noinline void __init bench_node_store(struct maple_tree *mt)
1698 {
1699 	int i, overwrite = 76, max = 240, count = 20000000;
1700 
1701 	for (i = 0; i < max; i += 10)
1702 		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
1703 
1704 	for (i = 0; i < count; i++) {
1705 		mtree_store_range(mt, overwrite,  overwrite + 15,
1706 				  xa_mk_value(overwrite), GFP_KERNEL);
1707 
1708 		overwrite += 5;
1709 		if (overwrite >= 135)
1710 			overwrite = 76;
1711 	}
1712 }
1713 #endif
1714 
1715 #if defined(BENCH_AWALK)
1716 static noinline void __init bench_awalk(struct maple_tree *mt)
1717 {
1718 	int i, max = 2500, count = 50000000;
1719 	MA_STATE(mas, mt, 1470, 1470);
1720 
1721 	for (i = 0; i < max; i += 10)
1722 		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
1723 
1724 	mtree_store_range(mt, 1470, 1475, NULL, GFP_KERNEL);
1725 
1726 	for (i = 0; i < count; i++) {
1727 		mas_empty_area_rev(&mas, 0, 2000, 10);
1728 		mas_reset(&mas);
1729 	}
1730 }
1731 #endif
1732 #if defined(BENCH_WALK)
1733 static noinline void __init bench_walk(struct maple_tree *mt)
1734 {
1735 	int i, max = 2500, count = 550000000;
1736 	MA_STATE(mas, mt, 1470, 1470);
1737 
1738 	for (i = 0; i < max; i += 10)
1739 		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
1740 
1741 	for (i = 0; i < count; i++) {
1742 		mas_walk(&mas);
1743 		mas_reset(&mas);
1744 	}
1745 
1746 }
1747 #endif
1748 
1749 #if defined(BENCH_MT_FOR_EACH)
1750 static noinline void __init bench_mt_for_each(struct maple_tree *mt)
1751 {
1752 	int i, count = 1000000;
1753 	unsigned long max = 2500, index = 0;
1754 	void *entry;
1755 
1756 	for (i = 0; i < max; i += 5)
1757 		mtree_store_range(mt, i, i + 4, xa_mk_value(i), GFP_KERNEL);
1758 
1759 	for (i = 0; i < count; i++) {
1760 		unsigned long j = 0;
1761 
1762 		mt_for_each(mt, entry, index, max) {
1763 			MT_BUG_ON(mt, entry != xa_mk_value(j));
1764 			j += 5;
1765 		}
1766 
1767 		index = 0;
1768 	}
1769 
1770 }
1771 #endif
1772 
/*
 * check_forking - simulate the kernel forking sequence with the tree.
 * Duplicate a populated tree into a fresh one: preallocate nodes via
 * mas_expected_entries() so that mas_store() never allocates while the
 * source tree is iterated under rcu_read_lock().
 */
static noinline void __init check_forking(struct maple_tree *mt)
{

	struct maple_tree newmt;
	int i, nr_entries = 134;
	void *val;
	MA_STATE(mas, mt, 0, 0);
	MA_STATE(newmas, mt, 0, 0);

	/* Populate the source tree with [i*10, i*10 + 5] ranges. */
	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 5,
				  xa_mk_value(i), GFP_KERNEL);

	mt_set_non_kernel(99999);
	mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
	newmas.tree = &newmt;
	mas_reset(&newmas);
	mas_reset(&mas);
	mas_lock(&newmas);
	mas.index = 0;
	mas.last = 0;
	/* Preallocate enough nodes for all entries up front. */
	if (mas_expected_entries(&newmas, nr_entries)) {
		pr_err("OOM!");
		BUG_ON(1);
	}
	rcu_read_lock();
	mas_for_each(&mas, val, ULONG_MAX) {
		newmas.index = mas.index;
		newmas.last = mas.last;
		mas_store(&newmas, val);
	}
	rcu_read_unlock();
	/* mas_destroy() releases any unused preallocated nodes. */
	mas_destroy(&newmas);
	mas_unlock(&newmas);
	mt_validate(&newmt);
	mt_set_non_kernel(0);
	mtree_destroy(&newmt);
}
1812 
/*
 * check_iteration() - Verify that storing into the tree mid-iteration keeps
 * mas_for_each()/mas_find() coherent: overwriting the end of the current
 * entry, the start of the next entry, or the last entry visited must not
 * skip or repeat entries.
 */
static noinline void __init check_iteration(struct maple_tree *mt)
{
	int i, nr_entries = 125;
	void *val;
	MA_STATE(mas, mt, 0, 0);

	/* Entry i fills [i*10, i*10 + 9] -- no gaps between entries. */
	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i * 10, i * 10 + 9,
				  xa_mk_value(i), GFP_KERNEL);

	mt_set_non_kernel(99999);

	i = 0;
	mas_lock(&mas);
	mas_for_each(&mas, val, 925) {
		MT_BUG_ON(mt, mas.index != i * 10);
		MT_BUG_ON(mt, mas.last != i * 10 + 9);
		/* Overwrite end of entry 92 */
		if (i == 92) {
			mas.index = 925;
			mas.last = 929;
			mas_store(&mas, val);
		}
		i++;
	}
	/* Ensure mas_find() gets the next value */
	val = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, val != xa_mk_value(i));

	mas_set(&mas, 0);
	i = 0;
	mas_for_each(&mas, val, 785) {
		MT_BUG_ON(mt, mas.index != i * 10);
		MT_BUG_ON(mt, mas.last != i * 10 + 9);
		/* Overwrite start of entry 78 */
		if (i == 78) {
			mas.index = 780;
			mas.last = 785;
			mas_store(&mas, val);
		} else {
			i++;
		}
	}
	val = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, val != xa_mk_value(i));

	mas_set(&mas, 0);
	i = 0;
	mas_for_each(&mas, val, 765) {
		MT_BUG_ON(mt, mas.index != i * 10);
		MT_BUG_ON(mt, mas.last != i * 10 + 9);
		/* Overwrite end of entry 76 and advance to the end */
		if (i == 76) {
			mas.index = 760;
			mas.last = 765;
			mas_store(&mas, val);
		}
		i++;
	}
	/* Make sure the next find returns the one after 765, 766-769 */
	val = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, val != xa_mk_value(76));
	mas_unlock(&mas);
	mas_destroy(&mas);
	mt_set_non_kernel(0);
}
1879 
/*
 * check_mas_store_gfp() - Copy a populated tree into a fresh tree using
 * mas_store_gfp() (allocating per store), in contrast to check_forking()
 * which preallocates via mas_expected_entries().
 * NOTE(review): mas_store_gfp(GFP_KERNEL) runs with rcu_read_lock() held;
 * in the userspace test harness this appears benign -- confirm for any
 * in-kernel execution.
 */
static noinline void __init check_mas_store_gfp(struct maple_tree *mt)
{

	struct maple_tree newmt;
	int i, nr_entries = 135;
	void *val;
	MA_STATE(mas, mt, 0, 0);
	MA_STATE(newmas, mt, 0, 0);

	/* Populate the source tree with [i*10, i*10 + 5] ranges. */
	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 5,
				  xa_mk_value(i), GFP_KERNEL);

	mt_set_non_kernel(99999);
	mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
	newmas.tree = &newmt;
	rcu_read_lock();
	mas_lock(&newmas);
	mas_reset(&newmas);
	mas_set(&mas, 0);
	mas_for_each(&mas, val, ULONG_MAX) {
		newmas.index = mas.index;
		newmas.last = mas.last;
		mas_store_gfp(&newmas, val, GFP_KERNEL);
	}
	mas_unlock(&newmas);
	rcu_read_unlock();
	mt_validate(&newmt);
	mt_set_non_kernel(0);
	mtree_destroy(&newmt);
}
1911 
1912 #if defined(BENCH_FORK)
/*
 * bench_forking() - Benchmark the check_forking() duplication sequence by
 * repeating the whole copy (preallocate, iterate, store, destroy) nr_fork
 * times.
 */
static noinline void __init bench_forking(struct maple_tree *mt)
{

	struct maple_tree newmt;
	int i, nr_entries = 134, nr_fork = 80000;
	void *val;
	MA_STATE(mas, mt, 0, 0);
	MA_STATE(newmas, mt, 0, 0);

	/* Populate the source tree once; it is copied every iteration. */
	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 5,
				  xa_mk_value(i), GFP_KERNEL);

	for (i = 0; i < nr_fork; i++) {
		mt_set_non_kernel(99999);
		mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
		newmas.tree = &newmt;
		mas_reset(&newmas);
		mas_reset(&mas);
		mas.index = 0;
		mas.last = 0;
		rcu_read_lock();
		mas_lock(&newmas);
		/* Preallocate so mas_store() cannot fail mid-copy. */
		if (mas_expected_entries(&newmas, nr_entries)) {
			printk("OOM!");
			BUG_ON(1);
		}
		mas_for_each(&mas, val, ULONG_MAX) {
			newmas.index = mas.index;
			newmas.last = mas.last;
			mas_store(&newmas, val);
		}
		mas_destroy(&newmas);
		mas_unlock(&newmas);
		rcu_read_unlock();
		mt_validate(&newmt);
		mt_set_non_kernel(0);
		mtree_destroy(&newmt);
	}
}
1953 #endif
1954 
/*
 * next_prev_test() - Exercise mas_next()/mas_prev() stepping: within a
 * node, across node boundaries, across two tree levels, and off either
 * end of the stored data.  Expected indices differ between 32 and 64 bit
 * node layouts, hence the level2_32/level2_64 tables and per-layout
 * nr_entries/last_index values.
 */
static noinline void __init next_prev_test(struct maple_tree *mt)
{
	int i, nr_entries;
	void *val;
	MA_STATE(mas, mt, 0, 0);
	struct maple_enode *mn;
	static const unsigned long *level2;
	/* Table layout: start, limit, then two expected index/last pairs. */
	static const unsigned long level2_64[] = { 707, 1000, 710, 715, 720,
						   725};
	static const unsigned long level2_32[] = { 1747, 2000, 1750, 1755,
						   1760, 1765};
	unsigned long last_index;

	if (MAPLE_32BIT) {
		nr_entries = 500;
		level2 = level2_32;
		last_index = 0x138e;
	} else {
		nr_entries = 200;
		level2 = level2_64;
		last_index = 0x7d6;
	}

	/* Entry i occupies [i*10, i*10 + 5]; [i*10 + 6, i*10 + 9] is empty. */
	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 5,
				  xa_mk_value(i), GFP_KERNEL);

	mas_lock(&mas);
	/* Step forward until the 1000 limit stops the walk. */
	for (i = 0; i <= nr_entries / 2; i++) {
		mas_next(&mas, 1000);
		if (mas_is_none(&mas))
			break;

	}
	mas_reset(&mas);
	mas_set(&mas, 0);
	i = 0;
	mas_for_each(&mas, val, 1000) {
		i++;
	}

	/* Same iteration, but mas_pause() forces a re-walk on each step. */
	mas_reset(&mas);
	mas_set(&mas, 0);
	i = 0;
	mas_for_each(&mas, val, 1000) {
		mas_pause(&mas);
		i++;
	}

	/*
	 * 680 - 685 = 0x61a00001930c
	 * 686 - 689 = NULL;
	 * 690 - 695 = 0x61a00001930c
	 * Check simple next/prev
	 */
	mas_set(&mas, 686);
	val = mas_walk(&mas);
	MT_BUG_ON(mt, val != NULL);

	val = mas_next(&mas, 1000);
	MT_BUG_ON(mt, val != xa_mk_value(690 / 10));
	MT_BUG_ON(mt, mas.index != 690);
	MT_BUG_ON(mt, mas.last != 695);

	val = mas_prev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(680 / 10));
	MT_BUG_ON(mt, mas.index != 680);
	MT_BUG_ON(mt, mas.last != 685);

	val = mas_next(&mas, 1000);
	MT_BUG_ON(mt, val != xa_mk_value(690 / 10));
	MT_BUG_ON(mt, mas.index != 690);
	MT_BUG_ON(mt, mas.last != 695);

	val = mas_next(&mas, 1000);
	MT_BUG_ON(mt, val != xa_mk_value(700 / 10));
	MT_BUG_ON(mt, mas.index != 700);
	MT_BUG_ON(mt, mas.last != 705);

	/* Check across node boundaries of the tree */
	mas_set(&mas, 70);
	val = mas_walk(&mas);
	MT_BUG_ON(mt, val != xa_mk_value(70 / 10));
	MT_BUG_ON(mt, mas.index != 70);
	MT_BUG_ON(mt, mas.last != 75);

	val = mas_next(&mas, 1000);
	MT_BUG_ON(mt, val != xa_mk_value(80 / 10));
	MT_BUG_ON(mt, mas.index != 80);
	MT_BUG_ON(mt, mas.last != 85);

	val = mas_prev(&mas, 70);
	MT_BUG_ON(mt, val != xa_mk_value(70 / 10));
	MT_BUG_ON(mt, mas.index != 70);
	MT_BUG_ON(mt, mas.last != 75);

	/* Check across two levels of the tree */
	mas_reset(&mas);
	mas_set(&mas, level2[0]);
	val = mas_walk(&mas);
	MT_BUG_ON(mt, val != NULL);
	val = mas_next(&mas, level2[1]);
	MT_BUG_ON(mt, val != xa_mk_value(level2[2] / 10));
	MT_BUG_ON(mt, mas.index != level2[2]);
	MT_BUG_ON(mt, mas.last != level2[3]);
	mn = mas.node;

	val = mas_next(&mas, level2[1]);
	MT_BUG_ON(mt, val != xa_mk_value(level2[4] / 10));
	MT_BUG_ON(mt, mas.index != level2[4]);
	MT_BUG_ON(mt, mas.last != level2[5]);
	/* The second step must have moved to a different node. */
	MT_BUG_ON(mt, mn == mas.node);

	val = mas_prev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(level2[2] / 10));
	MT_BUG_ON(mt, mas.index != level2[2]);
	MT_BUG_ON(mt, mas.last != level2[3]);

	/* Check running off the end and back on */
	mas_set(&mas, nr_entries * 10);
	val = mas_walk(&mas);
	MT_BUG_ON(mt, val != xa_mk_value(nr_entries));
	MT_BUG_ON(mt, mas.index != (nr_entries * 10));
	MT_BUG_ON(mt, mas.last != (nr_entries * 10 + 5));

	val = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, val != NULL);
	MT_BUG_ON(mt, mas.index != last_index);
	MT_BUG_ON(mt, mas.last != ULONG_MAX);

	val = mas_prev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(nr_entries));
	MT_BUG_ON(mt, mas.index != (nr_entries * 10));
	MT_BUG_ON(mt, mas.last != (nr_entries * 10 + 5));

	/* Check running off the start and back on */
	mas_reset(&mas);
	mas_set(&mas, 10);
	val = mas_walk(&mas);
	MT_BUG_ON(mt, val != xa_mk_value(1));
	MT_BUG_ON(mt, mas.index != 10);
	MT_BUG_ON(mt, mas.last != 15);

	val = mas_prev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(0));
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, mas.last != 5);

	/* A second prev past the start returns NULL and parks the state. */
	val = mas_prev(&mas, 0);
	MT_BUG_ON(mt, val != NULL);
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, mas.last != 5);
	MT_BUG_ON(mt, mas.node != MAS_NONE);

	/* Erase entry 0, then prev from 10 should report the new gap. */
	mas.index = 0;
	mas.last = 5;
	mas_store(&mas, NULL);
	mas_reset(&mas);
	mas_set(&mas, 10);
	mas_walk(&mas);

	val = mas_prev(&mas, 0);
	MT_BUG_ON(mt, val != NULL);
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, mas.last != 9);
	mas_unlock(&mas);

	mtree_destroy(mt);

	/* Two singletons with a gap: prev limited above the gap finds none. */
	mt_init(mt);
	mtree_store_range(mt, 0, 0, xa_mk_value(0), GFP_KERNEL);
	mtree_store_range(mt, 5, 5, xa_mk_value(5), GFP_KERNEL);
	rcu_read_lock();
	mas_set(&mas, 5);
	val = mas_prev(&mas, 4);
	MT_BUG_ON(mt, val != NULL);
	rcu_read_unlock();
}
2133 
2134 
2135 
2136 /* Test spanning writes that require balancing right sibling or right cousin */
2137 static noinline void __init check_spanning_relatives(struct maple_tree *mt)
2138 {
2139 
2140 	unsigned long i, nr_entries = 1000;
2141 
2142 	for (i = 0; i <= nr_entries; i++)
2143 		mtree_store_range(mt, i*10, i*10 + 5,
2144 				  xa_mk_value(i), GFP_KERNEL);
2145 
2146 
2147 	mtree_store_range(mt, 9365, 9955, NULL, GFP_KERNEL);
2148 }
2149 
/*
 * check_fuzzer() - Replay operation sequences found by fuzzing that
 * previously triggered crashes or corruption.  Each numbered section is a
 * regression test; the comment above it records the failure mode and the
 * fix.  The exact call order and values are the test - do not reorder.
 */
static noinline void __init check_fuzzer(struct maple_tree *mt)
{
	/*
	 * 1. Causes a spanning rebalance of a single root node.
	 * Fixed by setting the correct limit in mast_cp_to_nodes() when the
	 * entire right side is consumed.
	 */
	mtree_test_insert(mt, 88, (void *)0xb1);
	mtree_test_insert(mt, 84, (void *)0xa9);
	mtree_test_insert(mt, 2,  (void *)0x5);
	mtree_test_insert(mt, 4,  (void *)0x9);
	mtree_test_insert(mt, 14, (void *)0x1d);
	mtree_test_insert(mt, 7,  (void *)0xf);
	mtree_test_insert(mt, 12, (void *)0x19);
	mtree_test_insert(mt, 18, (void *)0x25);
	mtree_test_store_range(mt, 8, 18, (void *)0x11);
	mtree_destroy(mt);


	/*
	 * 2. Cause a spanning rebalance of two nodes in root.
	 * Fixed by setting mast->r->max correctly.
	 */
	mt_init_flags(mt, 0);
	mtree_test_store(mt, 87, (void *)0xaf);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_load(mt, 4);
	mtree_test_insert(mt, 4, (void *)0x9);
	mtree_test_store(mt, 8, (void *)0x11);
	mtree_test_store(mt, 44, (void *)0x59);
	mtree_test_store(mt, 68, (void *)0x89);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 43, (void *)0x57);
	mtree_test_insert(mt, 24, (void *)0x31);
	mtree_test_insert(mt, 844, (void *)0x699);
	mtree_test_store(mt, 84, (void *)0xa9);
	mtree_test_store(mt, 4, (void *)0x9);
	mtree_test_erase(mt, 4);
	mtree_test_load(mt, 5);
	mtree_test_erase(mt, 0);
	mtree_destroy(mt);

	/*
	 * 3. Cause a node overflow on copy
	 * Fixed by using the correct check for node size in mas_wr_modify()
	 * Also discovered issue with metadata setting.
	 */
	mt_init_flags(mt, 0);
	mtree_test_store_range(mt, 0, ULONG_MAX, (void *)0x1);
	mtree_test_store(mt, 4, (void *)0x9);
	mtree_test_erase(mt, 5);
	mtree_test_erase(mt, 0);
	mtree_test_erase(mt, 4);
	mtree_test_store(mt, 5, (void *)0xb);
	mtree_test_erase(mt, 5);
	mtree_test_store(mt, 5, (void *)0xb);
	mtree_test_erase(mt, 5);
	mtree_test_erase(mt, 4);
	mtree_test_store(mt, 4, (void *)0x9);
	mtree_test_store(mt, 444, (void *)0x379);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_load(mt, 0);
	mtree_test_store(mt, 5, (void *)0xb);
	mtree_test_erase(mt, 0);
	mtree_destroy(mt);

	/*
	 * 4. spanning store failure due to writing incorrect pivot value at
	 * last slot.
	 * Fixed by setting mast->r->max correctly in mast_cp_to_nodes()
	 *
	 */
	mt_init_flags(mt, 0);
	mtree_test_insert(mt, 261, (void *)0x20b);
	mtree_test_store(mt, 516, (void *)0x409);
	mtree_test_store(mt, 6, (void *)0xd);
	mtree_test_insert(mt, 5, (void *)0xb);
	mtree_test_insert(mt, 1256, (void *)0x9d1);
	mtree_test_store(mt, 4, (void *)0x9);
	mtree_test_erase(mt, 1);
	mtree_test_store(mt, 56, (void *)0x71);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_store(mt, 24, (void *)0x31);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 2263, (void *)0x11af);
	mtree_test_insert(mt, 446, (void *)0x37d);
	mtree_test_store_range(mt, 6, 45, (void *)0xd);
	mtree_test_store_range(mt, 3, 446, (void *)0x7);
	mtree_destroy(mt);

	/*
	 * 5. mas_wr_extend_null() may overflow slots.
	 * Fix by checking against wr_mas->node_end.
	 */
	mt_init_flags(mt, 0);
	mtree_test_store(mt, 48, (void *)0x61);
	mtree_test_store(mt, 3, (void *)0x7);
	mtree_test_load(mt, 0);
	mtree_test_store(mt, 88, (void *)0xb1);
	mtree_test_store(mt, 81, (void *)0xa3);
	mtree_test_insert(mt, 0, (void *)0x1);
	mtree_test_insert(mt, 8, (void *)0x11);
	mtree_test_insert(mt, 4, (void *)0x9);
	mtree_test_insert(mt, 2480, (void *)0x1361);
	mtree_test_insert(mt, ULONG_MAX,
			  (void *)0xffffffffffffffff);
	mtree_test_erase(mt, ULONG_MAX);
	mtree_destroy(mt);

	/*
	 * 6.  When reusing a node with an implied pivot and the node is
	 * shrinking, old data would be left in the implied slot
	 * Fixed by checking the last pivot for the mas->max and clear
	 * accordingly.  This only affected the left-most node as that node is
	 * the only one allowed to end in NULL.
	 */
	mt_init_flags(mt, 0);
	mtree_test_erase(mt, 3);
	mtree_test_insert(mt, 22, (void *)0x2d);
	mtree_test_insert(mt, 15, (void *)0x1f);
	mtree_test_load(mt, 2);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 5, (void *)0xb);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 4, (void *)0x9);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 3);
	mtree_test_insert(mt, 22, (void *)0x2d);
	mtree_test_insert(mt, 15, (void *)0x1f);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 8, (void *)0x11);
	mtree_test_load(mt, 2);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_store(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 5, (void *)0xb);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 4, (void *)0x9);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 3);
	mtree_test_insert(mt, 22, (void *)0x2d);
	mtree_test_insert(mt, 15, (void *)0x1f);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 8, (void *)0x11);
	mtree_test_insert(mt, 12, (void *)0x19);
	mtree_test_erase(mt, 1);
	mtree_test_store_range(mt, 4, 62, (void *)0x9);
	mtree_test_erase(mt, 62);
	mtree_test_store_range(mt, 1, 0, (void *)0x3);
	mtree_test_insert(mt, 11, (void *)0x17);
	mtree_test_insert(mt, 3, (void *)0x7);
	mtree_test_insert(mt, 3, (void *)0x7);
	mtree_test_store(mt, 62, (void *)0x7d);
	mtree_test_erase(mt, 62);
	mtree_test_store_range(mt, 1, 15, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 22, (void *)0x2d);
	mtree_test_insert(mt, 12, (void *)0x19);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 3, (void *)0x7);
	mtree_test_store(mt, 62, (void *)0x7d);
	mtree_test_erase(mt, 62);
	mtree_test_insert(mt, 122, (void *)0xf5);
	mtree_test_store(mt, 3, (void *)0x7);
	mtree_test_insert(mt, 0, (void *)0x1);
	mtree_test_store_range(mt, 0, 1, (void *)0x1);
	mtree_test_insert(mt, 85, (void *)0xab);
	mtree_test_insert(mt, 72, (void *)0x91);
	mtree_test_insert(mt, 81, (void *)0xa3);
	mtree_test_insert(mt, 726, (void *)0x5ad);
	mtree_test_insert(mt, 0, (void *)0x1);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_store(mt, 51, (void *)0x67);
	mtree_test_insert(mt, 611, (void *)0x4c7);
	mtree_test_insert(mt, 485, (void *)0x3cb);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 0, (void *)0x1);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert_range(mt, 26, 1, (void *)0x35);
	mtree_test_load(mt, 1);
	mtree_test_store_range(mt, 1, 22, (void *)0x3);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_load(mt, 53);
	mtree_test_load(mt, 1);
	mtree_test_store_range(mt, 1, 1, (void *)0x3);
	mtree_test_insert(mt, 222, (void *)0x1bd);
	mtree_test_insert(mt, 485, (void *)0x3cb);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_load(mt, 0);
	mtree_test_insert(mt, 21, (void *)0x2b);
	mtree_test_insert(mt, 3, (void *)0x7);
	mtree_test_store(mt, 621, (void *)0x4db);
	mtree_test_insert(mt, 0, (void *)0x1);
	mtree_test_erase(mt, 5);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_store(mt, 62, (void *)0x7d);
	mtree_test_erase(mt, 62);
	mtree_test_store_range(mt, 1, 0, (void *)0x3);
	mtree_test_insert(mt, 22, (void *)0x2d);
	mtree_test_insert(mt, 12, (void *)0x19);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_store_range(mt, 4, 62, (void *)0x9);
	mtree_test_erase(mt, 62);
	mtree_test_erase(mt, 1);
	mtree_test_load(mt, 1);
	mtree_test_store_range(mt, 1, 22, (void *)0x3);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_load(mt, 53);
	mtree_test_load(mt, 1);
	mtree_test_store_range(mt, 1, 1, (void *)0x3);
	mtree_test_insert(mt, 222, (void *)0x1bd);
	mtree_test_insert(mt, 485, (void *)0x3cb);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_load(mt, 0);
	mtree_test_load(mt, 0);
	mtree_destroy(mt);

	/*
	 * 7. Previous fix was incomplete, fix mas_reuse_node() clearing of old
	 * data by overwriting it first - that way metadata is of no concern.
	 */
	mt_init_flags(mt, 0);
	mtree_test_load(mt, 1);
	mtree_test_insert(mt, 102, (void *)0xcd);
	mtree_test_erase(mt, 2);
	mtree_test_erase(mt, 0);
	mtree_test_load(mt, 0);
	mtree_test_insert(mt, 4, (void *)0x9);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 110, (void *)0xdd);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert_range(mt, 5, 0, (void *)0xb);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_store(mt, 112, (void *)0xe1);
	mtree_test_insert(mt, 21, (void *)0x2b);
	mtree_test_store(mt, 1, (void *)0x3);
	mtree_test_insert_range(mt, 110, 2, (void *)0xdd);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_load(mt, 22);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 210, (void *)0x1a5);
	mtree_test_store_range(mt, 0, 2, (void *)0x1);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_erase(mt, 2);
	mtree_test_erase(mt, 22);
	mtree_test_erase(mt, 1);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_load(mt, 112);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 1, (void *)0x3);
	mtree_test_insert_range(mt, 1, 2, (void *)0x3);
	mtree_test_erase(mt, 0);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_erase(mt, 0);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_erase(mt, 2);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert_range(mt, 1, 2, (void *)0x3);
	mtree_test_erase(mt, 0);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_load(mt, 112);
	mtree_test_store_range(mt, 110, 12, (void *)0xdd);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_load(mt, 110);
	mtree_test_insert_range(mt, 4, 71, (void *)0x9);
	mtree_test_load(mt, 2);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_insert_range(mt, 11, 22, (void *)0x17);
	mtree_test_erase(mt, 12);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_load(mt, 22);
	mtree_destroy(mt);


	/*
	 * 8.  When rebalancing or spanning_rebalance(), the max of the new node
	 * may be set incorrectly to the final pivot and not the right max.
	 * Fix by setting the left max to orig right max if the entire node is
	 * consumed.
	 */
	mt_init_flags(mt, 0);
	mtree_test_store(mt, 6, (void *)0xd);
	mtree_test_store(mt, 67, (void *)0x87);
	mtree_test_insert(mt, 15, (void *)0x1f);
	mtree_test_insert(mt, 6716, (void *)0x3479);
	mtree_test_store(mt, 61, (void *)0x7b);
	mtree_test_insert(mt, 13, (void *)0x1b);
	mtree_test_store(mt, 8, (void *)0x11);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_load(mt, 0);
	mtree_test_erase(mt, 67167);
	mtree_test_insert_range(mt, 6, 7167, (void *)0xd);
	mtree_test_insert(mt, 6, (void *)0xd);
	mtree_test_erase(mt, 67);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 667167);
	mtree_test_insert(mt, 6, (void *)0xd);
	mtree_test_store(mt, 67, (void *)0x87);
	mtree_test_insert(mt, 5, (void *)0xb);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 6, (void *)0xd);
	mtree_test_erase(mt, 67);
	mtree_test_insert(mt, 15, (void *)0x1f);
	mtree_test_insert(mt, 67167, (void *)0x20cbf);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_load(mt, 7);
	mtree_test_insert(mt, 16, (void *)0x21);
	mtree_test_insert(mt, 36, (void *)0x49);
	mtree_test_store(mt, 67, (void *)0x87);
	mtree_test_store(mt, 6, (void *)0xd);
	mtree_test_insert(mt, 367, (void *)0x2df);
	mtree_test_insert(mt, 115, (void *)0xe7);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_store_range(mt, 1, 3, (void *)0x3);
	mtree_test_store(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 67167);
	mtree_test_insert_range(mt, 6, 47, (void *)0xd);
	mtree_test_store(mt, 1, (void *)0x3);
	mtree_test_insert_range(mt, 1, 67, (void *)0x3);
	mtree_test_load(mt, 67);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 67167);
	mtree_destroy(mt);

	/*
	 * 9. spanning store to the end of data caused an invalid metadata
	 * length which resulted in a crash eventually.
	 * Fix by checking if there is a value in pivot before incrementing the
	 * metadata end in mab_mas_cp().  To ensure this doesn't happen again,
	 * abstract the two locations this happens into a function called
	 * mas_leaf_set_meta().
	 */
	mt_init_flags(mt, 0);
	mtree_test_insert(mt, 21, (void *)0x2b);
	mtree_test_insert(mt, 12, (void *)0x19);
	mtree_test_insert(mt, 6, (void *)0xd);
	mtree_test_insert(mt, 8, (void *)0x11);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 91, (void *)0xb7);
	mtree_test_insert(mt, 18, (void *)0x25);
	mtree_test_insert(mt, 81, (void *)0xa3);
	mtree_test_store_range(mt, 0, 128, (void *)0x1);
	mtree_test_store(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 8);
	mtree_test_insert(mt, 11, (void *)0x17);
	mtree_test_insert(mt, 8, (void *)0x11);
	mtree_test_insert(mt, 21, (void *)0x2b);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, ULONG_MAX - 10, (void *)0xffffffffffffffeb);
	mtree_test_erase(mt, ULONG_MAX - 10);
	mtree_test_store_range(mt, 0, 281, (void *)0x1);
	mtree_test_erase(mt, 2);
	mtree_test_insert(mt, 1211, (void *)0x977);
	mtree_test_insert(mt, 111, (void *)0xdf);
	mtree_test_insert(mt, 13, (void *)0x1b);
	mtree_test_insert(mt, 211, (void *)0x1a7);
	mtree_test_insert(mt, 11, (void *)0x17);
	mtree_test_insert(mt, 5, (void *)0xb);
	mtree_test_insert(mt, 1218, (void *)0x985);
	mtree_test_insert(mt, 61, (void *)0x7b);
	mtree_test_store(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 121, (void *)0xf3);
	mtree_test_insert(mt, 8, (void *)0x11);
	mtree_test_insert(mt, 21, (void *)0x2b);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, ULONG_MAX - 10, (void *)0xffffffffffffffeb);
	mtree_test_erase(mt, ULONG_MAX - 10);
}
2545 
2546 /* duplicate the tree with a specific gap */
2547 static noinline void __init check_dup_gaps(struct maple_tree *mt,
2548 				    unsigned long nr_entries, bool zero_start,
2549 				    unsigned long gap)
2550 {
2551 	unsigned long i = 0;
2552 	struct maple_tree newmt;
2553 	int ret;
2554 	void *tmp;
2555 	MA_STATE(mas, mt, 0, 0);
2556 	MA_STATE(newmas, &newmt, 0, 0);
2557 
2558 	if (!zero_start)
2559 		i = 1;
2560 
2561 	mt_zero_nr_tallocated();
2562 	for (; i <= nr_entries; i++)
2563 		mtree_store_range(mt, i*10, (i+1)*10 - gap,
2564 				  xa_mk_value(i), GFP_KERNEL);
2565 
2566 	mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
2567 	mt_set_non_kernel(99999);
2568 	mas_lock(&newmas);
2569 	ret = mas_expected_entries(&newmas, nr_entries);
2570 	mt_set_non_kernel(0);
2571 	MT_BUG_ON(mt, ret != 0);
2572 
2573 	rcu_read_lock();
2574 	mas_for_each(&mas, tmp, ULONG_MAX) {
2575 		newmas.index = mas.index;
2576 		newmas.last = mas.last;
2577 		mas_store(&newmas, tmp);
2578 	}
2579 	rcu_read_unlock();
2580 	mas_destroy(&newmas);
2581 	mas_unlock(&newmas);
2582 
2583 	mtree_destroy(&newmt);
2584 }
2585 
2586 /* Duplicate many sizes of trees.  Mainly to test expected entry values */
2587 static noinline void __init check_dup(struct maple_tree *mt)
2588 {
2589 	int i;
2590 	int big_start = 100010;
2591 
2592 	/* Check with a value at zero */
2593 	for (i = 10; i < 1000; i++) {
2594 		mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2595 		check_dup_gaps(mt, i, true, 5);
2596 		mtree_destroy(mt);
2597 		rcu_barrier();
2598 	}
2599 
2600 	cond_resched();
2601 	mt_cache_shrink();
2602 	/* Check with a value at zero, no gap */
2603 	for (i = 1000; i < 2000; i++) {
2604 		mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2605 		check_dup_gaps(mt, i, true, 0);
2606 		mtree_destroy(mt);
2607 		rcu_barrier();
2608 	}
2609 
2610 	cond_resched();
2611 	mt_cache_shrink();
2612 	/* Check with a value at zero and unreasonably large */
2613 	for (i = big_start; i < big_start + 10; i++) {
2614 		mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2615 		check_dup_gaps(mt, i, true, 5);
2616 		mtree_destroy(mt);
2617 		rcu_barrier();
2618 	}
2619 
2620 	cond_resched();
2621 	mt_cache_shrink();
2622 	/* Small to medium size not starting at zero*/
2623 	for (i = 200; i < 1000; i++) {
2624 		mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2625 		check_dup_gaps(mt, i, false, 5);
2626 		mtree_destroy(mt);
2627 		rcu_barrier();
2628 	}
2629 
2630 	cond_resched();
2631 	mt_cache_shrink();
2632 	/* Unreasonably large not starting at zero*/
2633 	for (i = big_start; i < big_start + 10; i++) {
2634 		mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2635 		check_dup_gaps(mt, i, false, 5);
2636 		mtree_destroy(mt);
2637 		rcu_barrier();
2638 		cond_resched();
2639 		mt_cache_shrink();
2640 	}
2641 
2642 	/* Check non-allocation tree not starting at zero */
2643 	for (i = 1500; i < 3000; i++) {
2644 		mt_init_flags(mt, 0);
2645 		check_dup_gaps(mt, i, false, 5);
2646 		mtree_destroy(mt);
2647 		rcu_barrier();
2648 		cond_resched();
2649 		if (i % 2 == 0)
2650 			mt_cache_shrink();
2651 	}
2652 
2653 	mt_cache_shrink();
2654 	/* Check non-allocation tree starting at zero */
2655 	for (i = 200; i < 1000; i++) {
2656 		mt_init_flags(mt, 0);
2657 		check_dup_gaps(mt, i, true, 5);
2658 		mtree_destroy(mt);
2659 		rcu_barrier();
2660 		cond_resched();
2661 	}
2662 
2663 	mt_cache_shrink();
2664 	/* Unreasonably large */
2665 	for (i = big_start + 5; i < big_start + 10; i++) {
2666 		mt_init_flags(mt, 0);
2667 		check_dup_gaps(mt, i, true, 5);
2668 		mtree_destroy(mt);
2669 		rcu_barrier();
2670 		mt_cache_shrink();
2671 		cond_resched();
2672 	}
2673 }
2674 
2675 static noinline void __init check_bnode_min_spanning(struct maple_tree *mt)
2676 {
2677 	int i = 50;
2678 	MA_STATE(mas, mt, 0, 0);
2679 
2680 	mt_set_non_kernel(9999);
2681 	mas_lock(&mas);
2682 	do {
2683 		mas_set_range(&mas, i*10, i*10+9);
2684 		mas_store(&mas, check_bnode_min_spanning);
2685 	} while (i--);
2686 
2687 	mas_set_range(&mas, 240, 509);
2688 	mas_store(&mas, NULL);
2689 	mas_unlock(&mas);
2690 	mas_destroy(&mas);
2691 	mt_set_non_kernel(0);
2692 }
2693 
/*
 * check_empty_area_window() - Exercise mas_empty_area() and
 * mas_empty_area_rev() against a tree with occupied ranges [10, 209] and
 * holes at [0, 9] and [160, 169] (plus everything above 209).
 */
static noinline void __init check_empty_area_window(struct maple_tree *mt)
{
	unsigned long i, nr_entries = 20;
	MA_STATE(mas, mt, 0, 0);

	/* Occupy [10, 209] with 20 entries of 10 indices each. */
	for (i = 1; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 9,
				  xa_mk_value(i), GFP_KERNEL);

	/* Create another hole besides the one at 0 */
	mtree_store_range(mt, 160, 169, NULL, GFP_KERNEL);

	/* Check lower bounds that don't fit */
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 90, 10) != -EBUSY);

	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 6, 90, 5) != -EBUSY);

	/* Check lower bound that does fit */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 90, 5) != 0);
	MT_BUG_ON(mt, mas.index != 5);
	MT_BUG_ON(mt, mas.last != 9);
	rcu_read_unlock();

	/* Check one gap that doesn't fit and one that does */
	rcu_read_lock();
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 217, 9) != 0);
	MT_BUG_ON(mt, mas.index != 161);
	MT_BUG_ON(mt, mas.last != 169);

	/* Check one gap that does fit above the min */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 218, 3) != 0);
	MT_BUG_ON(mt, mas.index != 216);
	MT_BUG_ON(mt, mas.last != 218);

	/* Check size that doesn't fit any gap */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 218, 16) != -EBUSY);

	/*
	 * Check size that doesn't fit the lower end of the window but
	 * does fit the gap
	 */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 167, 200, 4) != -EBUSY);

	/*
	 * Check size that doesn't fit the upper end of the window but
	 * does fit the gap
	 */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 162, 4) != -EBUSY);

	/* Check mas_empty_area forward */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 9) != 0);
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, mas.last != 8);

	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 4) != 0);
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, mas.last != 3);

	/* Size 11 exceeds the hole at [0, 9] */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 11) != -EBUSY);

	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area(&mas, 5, 100, 6) != -EBUSY);

	/* Window smaller than the requested size is invalid */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area(&mas, 0, 8, 10) != -EINVAL);

	/* Exercise the forward search; the result is intentionally unchecked */
	mas_reset(&mas);
	mas_empty_area(&mas, 100, 165, 3);

	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area(&mas, 100, 163, 6) != -EBUSY);
	rcu_read_unlock();
}
2778 
2779 static noinline void __init check_empty_area_fill(struct maple_tree *mt)
2780 {
2781 	const unsigned long max = 0x25D78000;
2782 	unsigned long size;
2783 	int loop, shift;
2784 	MA_STATE(mas, mt, 0, 0);
2785 
2786 	mt_set_non_kernel(99999);
2787 	for (shift = 12; shift <= 16; shift++) {
2788 		loop = 5000;
2789 		size = 1 << shift;
2790 		while (loop--) {
2791 			mas_set(&mas, 0);
2792 			mas_lock(&mas);
2793 			MT_BUG_ON(mt, mas_empty_area(&mas, 0, max, size) != 0);
2794 			MT_BUG_ON(mt, mas.last != mas.index + size - 1);
2795 			mas_store_gfp(&mas, (void *)size, GFP_KERNEL);
2796 			mas_unlock(&mas);
2797 			mas_reset(&mas);
2798 		}
2799 	}
2800 
2801 	/* No space left. */
2802 	size = 0x1000;
2803 	rcu_read_lock();
2804 	MT_BUG_ON(mt, mas_empty_area(&mas, 0, max, size) != -EBUSY);
2805 	rcu_read_unlock();
2806 
2807 	/* Fill a depth 3 node to the maximum */
2808 	for (unsigned long i = 629440511; i <= 629440800; i += 6)
2809 		mtree_store_range(mt, i, i + 5, (void *)i, GFP_KERNEL);
2810 	/* Make space in the second-last depth 4 node */
2811 	mtree_erase(mt, 631668735);
2812 	/* Make space in the last depth 4 node */
2813 	mtree_erase(mt, 629506047);
2814 	mas_reset(&mas);
2815 	/* Search from just after the gap in the second-last depth 4 */
2816 	rcu_read_lock();
2817 	MT_BUG_ON(mt, mas_empty_area(&mas, 629506048, 690000000, 0x5000) != 0);
2818 	rcu_read_unlock();
2819 	mt_set_non_kernel(0);
2820 }
2821 
2822 /*
2823  * Check MAS_START, MAS_PAUSE, active (implied), and MAS_NONE transitions.
2824  *
2825  * The table below shows the single entry tree (0-0 pointer) and normal tree
2826  * with nodes.
2827  *
2828  * Function	ENTRY	Start		Result		index & last
2829  *     ┬          ┬       ┬               ┬                ┬
2830  *     │          │       │               │                └─ the final range
2831  *     │          │       │               └─ The node value after execution
2832  *     │          │       └─ The node value before execution
 *     │          └─ If the entry exists or does not exist (DNE)
2834  *     └─ The function name
2835  *
2836  * Function	ENTRY	Start		Result		index & last
2837  * mas_next()
2838  *  - after last
2839  *			Single entry tree at 0-0
2840  *			------------------------
2841  *		DNE	MAS_START	MAS_NONE	1 - oo
2842  *		DNE	MAS_PAUSE	MAS_NONE	1 - oo
2843  *		DNE	MAS_ROOT	MAS_NONE	1 - oo
2844  *			when index = 0
2845  *		DNE	MAS_NONE	MAS_ROOT	0
2846  *			when index > 0
2847  *		DNE	MAS_NONE	MAS_NONE	1 - oo
2848  *
2849  *			Normal tree
2850  *			-----------
2851  *		exists	MAS_START	active		range
2852  *		DNE	MAS_START	active		set to last range
2853  *		exists	MAS_PAUSE	active		range
2854  *		DNE	MAS_PAUSE	active		set to last range
2855  *		exists	MAS_NONE	active		range
2856  *		exists	active		active		range
2857  *		DNE	active		active		set to last range
2858  *
2859  * Function	ENTRY	Start		Result		index & last
2860  * mas_prev()
2861  * - before index
2862  *			Single entry tree at 0-0
2863  *			------------------------
2864  *				if index > 0
2865  *		exists	MAS_START	MAS_ROOT	0
2866  *		exists	MAS_PAUSE	MAS_ROOT	0
2867  *		exists	MAS_NONE	MAS_ROOT	0
2868  *
2869  *				if index == 0
2870  *		DNE	MAS_START	MAS_NONE	0
2871  *		DNE	MAS_PAUSE	MAS_NONE	0
2872  *		DNE	MAS_NONE	MAS_NONE	0
2873  *		DNE	MAS_ROOT	MAS_NONE	0
2874  *
2875  *			Normal tree
2876  *			-----------
2877  *		exists	MAS_START	active		range
2878  *		DNE	MAS_START	active		set to min
2879  *		exists	MAS_PAUSE	active		range
2880  *		DNE	MAS_PAUSE	active		set to min
2881  *		exists	MAS_NONE	active		range
2882  *		DNE	MAS_NONE	MAS_NONE	set to min
2883  *		any	MAS_ROOT	MAS_NONE	0
2884  *		exists	active		active		range
2885  *		DNE	active		active		last range
2886  *
2887  * Function	ENTRY	Start		Result		index & last
2888  * mas_find()
2889  *  - at index or next
2890  *			Single entry tree at 0-0
2891  *			------------------------
2892  *				if index >  0
2893  *		DNE	MAS_START	MAS_NONE	0
2894  *		DNE	MAS_PAUSE	MAS_NONE	0
2895  *		DNE	MAS_ROOT	MAS_NONE	0
2896  *		DNE	MAS_NONE	MAS_NONE	0
2897  *				if index ==  0
2898  *		exists	MAS_START	MAS_ROOT	0
2899  *		exists	MAS_PAUSE	MAS_ROOT	0
2900  *		exists	MAS_NONE	MAS_ROOT	0
2901  *
2902  *			Normal tree
2903  *			-----------
2904  *		exists	MAS_START	active		range
2905  *		DNE	MAS_START	active		set to max
2906  *		exists	MAS_PAUSE	active		range
2907  *		DNE	MAS_PAUSE	active		set to max
2908  *		exists	MAS_NONE	active		range
2909  *		exists	active		active		range
2910  *		DNE	active		active		last range (max < last)
2911  *
2912  * Function	ENTRY	Start		Result		index & last
2913  * mas_find_rev()
2914  *  - at index or before
2915  *			Single entry tree at 0-0
2916  *			------------------------
2917  *				if index >  0
2918  *		exists	MAS_START	MAS_ROOT	0
2919  *		exists	MAS_PAUSE	MAS_ROOT	0
2920  *		exists	MAS_NONE	MAS_ROOT	0
2921  *				if index ==  0
2922  *		DNE	MAS_START	MAS_NONE	0
2923  *		DNE	MAS_PAUSE	MAS_NONE	0
2924  *		DNE	MAS_NONE	MAS_NONE	0
2925  *		DNE	MAS_ROOT	MAS_NONE	0
2926  *
2927  *			Normal tree
2928  *			-----------
2929  *		exists	MAS_START	active		range
2930  *		DNE	MAS_START	active		set to min
2931  *		exists	MAS_PAUSE	active		range
2932  *		DNE	MAS_PAUSE	active		set to min
2933  *		exists	MAS_NONE	active		range
2934  *		exists	active		active		range
2935  *		DNE	active		active		last range (min > index)
2936  *
2937  * Function	ENTRY	Start		Result		index & last
2938  * mas_walk()
2939  * - Look up index
2940  *			Single entry tree at 0-0
2941  *			------------------------
2942  *				if index >  0
2943  *		DNE	MAS_START	MAS_ROOT	1 - oo
2944  *		DNE	MAS_PAUSE	MAS_ROOT	1 - oo
2945  *		DNE	MAS_NONE	MAS_ROOT	1 - oo
2946  *		DNE	MAS_ROOT	MAS_ROOT	1 - oo
2947  *				if index ==  0
2948  *		exists	MAS_START	MAS_ROOT	0
2949  *		exists	MAS_PAUSE	MAS_ROOT	0
2950  *		exists	MAS_NONE	MAS_ROOT	0
2951  *		exists	MAS_ROOT	MAS_ROOT	0
2952  *
2953  *			Normal tree
2954  *			-----------
2955  *		exists	MAS_START	active		range
2956  *		DNE	MAS_START	active		range of NULL
2957  *		exists	MAS_PAUSE	active		range
2958  *		DNE	MAS_PAUSE	active		range of NULL
2959  *		exists	MAS_NONE	active		range
2960  *		DNE	MAS_NONE	active		range of NULL
2961  *		exists	active		active		range
2962  *		DNE	active		active		range of NULL
2963  */
2964 
/*
 * mas_active() - true when the maple state points at an actual tree node,
 * i.e. none of the special state markers (MAS_ROOT, MAS_START, MAS_PAUSE,
 * MAS_NONE).
 */
#define mas_active(x)		(((x).node != MAS_ROOT) && \
				 ((x).node != MAS_START) && \
				 ((x).node != MAS_PAUSE) && \
				 ((x).node != MAS_NONE))
2969 static noinline void __init check_state_handling(struct maple_tree *mt)
2970 {
2971 	MA_STATE(mas, mt, 0, 0);
2972 	void *entry, *ptr = (void *) 0x1234500;
2973 	void *ptr2 = &ptr;
2974 	void *ptr3 = &ptr2;
2975 
2976 	/* Check MAS_ROOT First */
2977 	mtree_store_range(mt, 0, 0, ptr, GFP_KERNEL);
2978 
2979 	mas_lock(&mas);
2980 	/* prev: Start -> none */
2981 	entry = mas_prev(&mas, 0);
2982 	MT_BUG_ON(mt, entry != NULL);
2983 	MT_BUG_ON(mt, mas.node != MAS_NONE);
2984 
2985 	/* prev: Start -> root */
2986 	mas_set(&mas, 10);
2987 	entry = mas_prev(&mas, 0);
2988 	MT_BUG_ON(mt, entry != ptr);
2989 	MT_BUG_ON(mt, mas.index != 0);
2990 	MT_BUG_ON(mt, mas.last != 0);
2991 	MT_BUG_ON(mt, mas.node != MAS_ROOT);
2992 
2993 	/* prev: pause -> root */
2994 	mas_set(&mas, 10);
2995 	mas_pause(&mas);
2996 	entry = mas_prev(&mas, 0);
2997 	MT_BUG_ON(mt, entry != ptr);
2998 	MT_BUG_ON(mt, mas.index != 0);
2999 	MT_BUG_ON(mt, mas.last != 0);
3000 	MT_BUG_ON(mt, mas.node != MAS_ROOT);
3001 
3002 	/* next: start -> none */
3003 	mas_set(&mas, 0);
3004 	entry = mas_next(&mas, ULONG_MAX);
3005 	MT_BUG_ON(mt, mas.index != 1);
3006 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3007 	MT_BUG_ON(mt, entry != NULL);
3008 	MT_BUG_ON(mt, mas.node != MAS_NONE);
3009 
3010 	/* next: start -> none */
3011 	mas_set(&mas, 10);
3012 	entry = mas_next(&mas, ULONG_MAX);
3013 	MT_BUG_ON(mt, mas.index != 1);
3014 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3015 	MT_BUG_ON(mt, entry != NULL);
3016 	MT_BUG_ON(mt, mas.node != MAS_NONE);
3017 
3018 	/* find: start -> root */
3019 	mas_set(&mas, 0);
3020 	entry = mas_find(&mas, ULONG_MAX);
3021 	MT_BUG_ON(mt, entry != ptr);
3022 	MT_BUG_ON(mt, mas.index != 0);
3023 	MT_BUG_ON(mt, mas.last != 0);
3024 	MT_BUG_ON(mt, mas.node != MAS_ROOT);
3025 
3026 	/* find: root -> none */
3027 	entry = mas_find(&mas, ULONG_MAX);
3028 	MT_BUG_ON(mt, entry != NULL);
3029 	MT_BUG_ON(mt, mas.index != 1);
3030 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3031 	MT_BUG_ON(mt, mas.node != MAS_NONE);
3032 
3033 	/* find: none -> none */
3034 	entry = mas_find(&mas, ULONG_MAX);
3035 	MT_BUG_ON(mt, entry != NULL);
3036 	MT_BUG_ON(mt, mas.index != 1);
3037 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3038 	MT_BUG_ON(mt, mas.node != MAS_NONE);
3039 
3040 	/* find: start -> none */
3041 	mas_set(&mas, 10);
3042 	entry = mas_find(&mas, ULONG_MAX);
3043 	MT_BUG_ON(mt, entry != NULL);
3044 	MT_BUG_ON(mt, mas.index != 1);
3045 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3046 	MT_BUG_ON(mt, mas.node != MAS_NONE);
3047 
3048 	/* find_rev: none -> root */
3049 	entry = mas_find_rev(&mas, 0);
3050 	MT_BUG_ON(mt, entry != ptr);
3051 	MT_BUG_ON(mt, mas.index != 0);
3052 	MT_BUG_ON(mt, mas.last != 0);
3053 	MT_BUG_ON(mt, mas.node != MAS_ROOT);
3054 
3055 	/* find_rev: start -> root */
3056 	mas_set(&mas, 0);
3057 	entry = mas_find_rev(&mas, 0);
3058 	MT_BUG_ON(mt, entry != ptr);
3059 	MT_BUG_ON(mt, mas.index != 0);
3060 	MT_BUG_ON(mt, mas.last != 0);
3061 	MT_BUG_ON(mt, mas.node != MAS_ROOT);
3062 
3063 	/* find_rev: root -> none */
3064 	entry = mas_find_rev(&mas, 0);
3065 	MT_BUG_ON(mt, entry != NULL);
3066 	MT_BUG_ON(mt, mas.index != 0);
3067 	MT_BUG_ON(mt, mas.last != 0);
3068 	MT_BUG_ON(mt, mas.node != MAS_NONE);
3069 
3070 	/* find_rev: none -> none */
3071 	entry = mas_find_rev(&mas, 0);
3072 	MT_BUG_ON(mt, entry != NULL);
3073 	MT_BUG_ON(mt, mas.index != 0);
3074 	MT_BUG_ON(mt, mas.last != 0);
3075 	MT_BUG_ON(mt, mas.node != MAS_NONE);
3076 
3077 	/* find_rev: start -> root */
3078 	mas_set(&mas, 10);
3079 	entry = mas_find_rev(&mas, 0);
3080 	MT_BUG_ON(mt, entry != ptr);
3081 	MT_BUG_ON(mt, mas.index != 0);
3082 	MT_BUG_ON(mt, mas.last != 0);
3083 	MT_BUG_ON(mt, mas.node != MAS_ROOT);
3084 
3085 	/* walk: start -> none */
3086 	mas_set(&mas, 10);
3087 	entry = mas_walk(&mas);
3088 	MT_BUG_ON(mt, entry != NULL);
3089 	MT_BUG_ON(mt, mas.index != 1);
3090 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3091 	MT_BUG_ON(mt, mas.node != MAS_NONE);
3092 
3093 	/* walk: pause -> none*/
3094 	mas_set(&mas, 10);
3095 	mas_pause(&mas);
3096 	entry = mas_walk(&mas);
3097 	MT_BUG_ON(mt, entry != NULL);
3098 	MT_BUG_ON(mt, mas.index != 1);
3099 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3100 	MT_BUG_ON(mt, mas.node != MAS_NONE);
3101 
3102 	/* walk: none -> none */
3103 	mas.index = mas.last = 10;
3104 	entry = mas_walk(&mas);
3105 	MT_BUG_ON(mt, entry != NULL);
3106 	MT_BUG_ON(mt, mas.index != 1);
3107 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3108 	MT_BUG_ON(mt, mas.node != MAS_NONE);
3109 
3110 	/* walk: none -> none */
3111 	entry = mas_walk(&mas);
3112 	MT_BUG_ON(mt, entry != NULL);
3113 	MT_BUG_ON(mt, mas.index != 1);
3114 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3115 	MT_BUG_ON(mt, mas.node != MAS_NONE);
3116 
3117 	/* walk: start -> root */
3118 	mas_set(&mas, 0);
3119 	entry = mas_walk(&mas);
3120 	MT_BUG_ON(mt, entry != ptr);
3121 	MT_BUG_ON(mt, mas.index != 0);
3122 	MT_BUG_ON(mt, mas.last != 0);
3123 	MT_BUG_ON(mt, mas.node != MAS_ROOT);
3124 
3125 	/* walk: pause -> root */
3126 	mas_set(&mas, 0);
3127 	mas_pause(&mas);
3128 	entry = mas_walk(&mas);
3129 	MT_BUG_ON(mt, entry != ptr);
3130 	MT_BUG_ON(mt, mas.index != 0);
3131 	MT_BUG_ON(mt, mas.last != 0);
3132 	MT_BUG_ON(mt, mas.node != MAS_ROOT);
3133 
3134 	/* walk: none -> root */
3135 	mas.node = MAS_NONE;
3136 	entry = mas_walk(&mas);
3137 	MT_BUG_ON(mt, entry != ptr);
3138 	MT_BUG_ON(mt, mas.index != 0);
3139 	MT_BUG_ON(mt, mas.last != 0);
3140 	MT_BUG_ON(mt, mas.node != MAS_ROOT);
3141 
3142 	/* walk: root -> root */
3143 	entry = mas_walk(&mas);
3144 	MT_BUG_ON(mt, entry != ptr);
3145 	MT_BUG_ON(mt, mas.index != 0);
3146 	MT_BUG_ON(mt, mas.last != 0);
3147 	MT_BUG_ON(mt, mas.node != MAS_ROOT);
3148 
3149 	/* walk: root -> none */
3150 	mas_set(&mas, 10);
3151 	entry = mas_walk(&mas);
3152 	MT_BUG_ON(mt, entry != NULL);
3153 	MT_BUG_ON(mt, mas.index != 1);
3154 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3155 	MT_BUG_ON(mt, mas.node != MAS_NONE);
3156 
3157 	/* walk: none -> root */
3158 	mas.index = mas.last = 0;
3159 	entry = mas_walk(&mas);
3160 	MT_BUG_ON(mt, entry != ptr);
3161 	MT_BUG_ON(mt, mas.index != 0);
3162 	MT_BUG_ON(mt, mas.last != 0);
3163 	MT_BUG_ON(mt, mas.node != MAS_ROOT);
3164 
3165 	mas_unlock(&mas);
3166 
3167 	/* Check when there is an actual node */
3168 	mtree_store_range(mt, 0, 0, NULL, GFP_KERNEL);
3169 	mtree_store_range(mt, 0x1000, 0x1500, ptr, GFP_KERNEL);
3170 	mtree_store_range(mt, 0x2000, 0x2500, ptr2, GFP_KERNEL);
3171 	mtree_store_range(mt, 0x3000, 0x3500, ptr3, GFP_KERNEL);
3172 
3173 	mas_lock(&mas);
3174 
3175 	/* next: start ->active */
3176 	mas_set(&mas, 0);
3177 	entry = mas_next(&mas, ULONG_MAX);
3178 	MT_BUG_ON(mt, entry != ptr);
3179 	MT_BUG_ON(mt, mas.index != 0x1000);
3180 	MT_BUG_ON(mt, mas.last != 0x1500);
3181 	MT_BUG_ON(mt, !mas_active(mas));
3182 
3183 	/* next: pause ->active */
3184 	mas_set(&mas, 0);
3185 	mas_pause(&mas);
3186 	entry = mas_next(&mas, ULONG_MAX);
3187 	MT_BUG_ON(mt, entry != ptr);
3188 	MT_BUG_ON(mt, mas.index != 0x1000);
3189 	MT_BUG_ON(mt, mas.last != 0x1500);
3190 	MT_BUG_ON(mt, !mas_active(mas));
3191 
3192 	/* next: none ->active */
3193 	mas.index = mas.last = 0;
3194 	mas.offset = 0;
3195 	mas.node = MAS_NONE;
3196 	entry = mas_next(&mas, ULONG_MAX);
3197 	MT_BUG_ON(mt, entry != ptr);
3198 	MT_BUG_ON(mt, mas.index != 0x1000);
3199 	MT_BUG_ON(mt, mas.last != 0x1500);
3200 	MT_BUG_ON(mt, !mas_active(mas));
3201 
3202 	/* next:active ->active */
3203 	entry = mas_next(&mas, ULONG_MAX);
3204 	MT_BUG_ON(mt, entry != ptr2);
3205 	MT_BUG_ON(mt, mas.index != 0x2000);
3206 	MT_BUG_ON(mt, mas.last != 0x2500);
3207 	MT_BUG_ON(mt, !mas_active(mas));
3208 
3209 	/* next:active -> active out of range*/
3210 	entry = mas_next(&mas, 0x2999);
3211 	MT_BUG_ON(mt, entry != NULL);
3212 	MT_BUG_ON(mt, mas.index != 0x2501);
3213 	MT_BUG_ON(mt, mas.last != 0x2fff);
3214 	MT_BUG_ON(mt, !mas_active(mas));
3215 
3216 	/* Continue after out of range*/
3217 	entry = mas_next(&mas, ULONG_MAX);
3218 	MT_BUG_ON(mt, entry != ptr3);
3219 	MT_BUG_ON(mt, mas.index != 0x3000);
3220 	MT_BUG_ON(mt, mas.last != 0x3500);
3221 	MT_BUG_ON(mt, !mas_active(mas));
3222 
3223 	/* next:active -> active out of range*/
3224 	entry = mas_next(&mas, ULONG_MAX);
3225 	MT_BUG_ON(mt, entry != NULL);
3226 	MT_BUG_ON(mt, mas.index != 0x3501);
3227 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3228 	MT_BUG_ON(mt, !mas_active(mas));
3229 
3230 	/* next: none -> active, skip value at location */
3231 	mas_set(&mas, 0);
3232 	entry = mas_next(&mas, ULONG_MAX);
3233 	mas.node = MAS_NONE;
3234 	mas.offset = 0;
3235 	entry = mas_next(&mas, ULONG_MAX);
3236 	MT_BUG_ON(mt, entry != ptr2);
3237 	MT_BUG_ON(mt, mas.index != 0x2000);
3238 	MT_BUG_ON(mt, mas.last != 0x2500);
3239 	MT_BUG_ON(mt, !mas_active(mas));
3240 
3241 	/* prev:active ->active */
3242 	entry = mas_prev(&mas, 0);
3243 	MT_BUG_ON(mt, entry != ptr);
3244 	MT_BUG_ON(mt, mas.index != 0x1000);
3245 	MT_BUG_ON(mt, mas.last != 0x1500);
3246 	MT_BUG_ON(mt, !mas_active(mas));
3247 
3248 	/* prev:active -> active out of range*/
3249 	entry = mas_prev(&mas, 0);
3250 	MT_BUG_ON(mt, entry != NULL);
3251 	MT_BUG_ON(mt, mas.index != 0);
3252 	MT_BUG_ON(mt, mas.last != 0x0FFF);
3253 	MT_BUG_ON(mt, !mas_active(mas));
3254 
3255 	/* prev: pause ->active */
3256 	mas_set(&mas, 0x3600);
3257 	entry = mas_prev(&mas, 0);
3258 	MT_BUG_ON(mt, entry != ptr3);
3259 	mas_pause(&mas);
3260 	entry = mas_prev(&mas, 0);
3261 	MT_BUG_ON(mt, entry != ptr2);
3262 	MT_BUG_ON(mt, mas.index != 0x2000);
3263 	MT_BUG_ON(mt, mas.last != 0x2500);
3264 	MT_BUG_ON(mt, !mas_active(mas));
3265 
3266 	/* prev:active -> active out of range*/
3267 	entry = mas_prev(&mas, 0x1600);
3268 	MT_BUG_ON(mt, entry != NULL);
3269 	MT_BUG_ON(mt, mas.index != 0x1501);
3270 	MT_BUG_ON(mt, mas.last != 0x1FFF);
3271 	MT_BUG_ON(mt, !mas_active(mas));
3272 
3273 	/* prev: active ->active, continue*/
3274 	entry = mas_prev(&mas, 0);
3275 	MT_BUG_ON(mt, entry != ptr);
3276 	MT_BUG_ON(mt, mas.index != 0x1000);
3277 	MT_BUG_ON(mt, mas.last != 0x1500);
3278 	MT_BUG_ON(mt, !mas_active(mas));
3279 
3280 	/* find: start ->active */
3281 	mas_set(&mas, 0);
3282 	entry = mas_find(&mas, ULONG_MAX);
3283 	MT_BUG_ON(mt, entry != ptr);
3284 	MT_BUG_ON(mt, mas.index != 0x1000);
3285 	MT_BUG_ON(mt, mas.last != 0x1500);
3286 	MT_BUG_ON(mt, !mas_active(mas));
3287 
3288 	/* find: pause ->active */
3289 	mas_set(&mas, 0);
3290 	mas_pause(&mas);
3291 	entry = mas_find(&mas, ULONG_MAX);
3292 	MT_BUG_ON(mt, entry != ptr);
3293 	MT_BUG_ON(mt, mas.index != 0x1000);
3294 	MT_BUG_ON(mt, mas.last != 0x1500);
3295 	MT_BUG_ON(mt, !mas_active(mas));
3296 
3297 	/* find: start ->active on value */;
3298 	mas_set(&mas, 1200);
3299 	entry = mas_find(&mas, ULONG_MAX);
3300 	MT_BUG_ON(mt, entry != ptr);
3301 	MT_BUG_ON(mt, mas.index != 0x1000);
3302 	MT_BUG_ON(mt, mas.last != 0x1500);
3303 	MT_BUG_ON(mt, !mas_active(mas));
3304 
3305 	/* find:active ->active */
3306 	entry = mas_find(&mas, ULONG_MAX);
3307 	MT_BUG_ON(mt, entry != ptr2);
3308 	MT_BUG_ON(mt, mas.index != 0x2000);
3309 	MT_BUG_ON(mt, mas.last != 0x2500);
3310 	MT_BUG_ON(mt, !mas_active(mas));
3311 
3312 
3313 	/* find:active -> active (NULL)*/
3314 	entry = mas_find(&mas, 0x2700);
3315 	MT_BUG_ON(mt, entry != NULL);
3316 	MT_BUG_ON(mt, mas.index != 0x2501);
3317 	MT_BUG_ON(mt, mas.last != 0x2FFF);
3318 	MT_BUG_ON(mt, !mas_active(mas));
3319 
3320 	/* find: none ->active */
3321 	entry = mas_find(&mas, 0x5000);
3322 	MT_BUG_ON(mt, entry != ptr3);
3323 	MT_BUG_ON(mt, mas.index != 0x3000);
3324 	MT_BUG_ON(mt, mas.last != 0x3500);
3325 	MT_BUG_ON(mt, !mas_active(mas));
3326 
3327 	/* find:active -> active (NULL) end*/
3328 	entry = mas_find(&mas, ULONG_MAX);
3329 	MT_BUG_ON(mt, entry != NULL);
3330 	MT_BUG_ON(mt, mas.index != 0x3501);
3331 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3332 	MT_BUG_ON(mt, !mas_active(mas));
3333 
3334 	/* find_rev: active (END) ->active */
3335 	entry = mas_find_rev(&mas, 0);
3336 	MT_BUG_ON(mt, entry != ptr3);
3337 	MT_BUG_ON(mt, mas.index != 0x3000);
3338 	MT_BUG_ON(mt, mas.last != 0x3500);
3339 	MT_BUG_ON(mt, !mas_active(mas));
3340 
3341 	/* find_rev:active ->active */
3342 	entry = mas_find_rev(&mas, 0);
3343 	MT_BUG_ON(mt, entry != ptr2);
3344 	MT_BUG_ON(mt, mas.index != 0x2000);
3345 	MT_BUG_ON(mt, mas.last != 0x2500);
3346 	MT_BUG_ON(mt, !mas_active(mas));
3347 
3348 	/* find_rev: pause ->active */
3349 	mas_pause(&mas);
3350 	entry = mas_find_rev(&mas, 0);
3351 	MT_BUG_ON(mt, entry != ptr);
3352 	MT_BUG_ON(mt, mas.index != 0x1000);
3353 	MT_BUG_ON(mt, mas.last != 0x1500);
3354 	MT_BUG_ON(mt, !mas_active(mas));
3355 
3356 	/* find_rev:active -> active */
3357 	entry = mas_find_rev(&mas, 0);
3358 	MT_BUG_ON(mt, entry != NULL);
3359 	MT_BUG_ON(mt, mas.index != 0);
3360 	MT_BUG_ON(mt, mas.last != 0x0FFF);
3361 	MT_BUG_ON(mt, !mas_active(mas));
3362 
3363 	/* find_rev: start ->active */
3364 	mas_set(&mas, 0x1200);
3365 	entry = mas_find_rev(&mas, 0);
3366 	MT_BUG_ON(mt, entry != ptr);
3367 	MT_BUG_ON(mt, mas.index != 0x1000);
3368 	MT_BUG_ON(mt, mas.last != 0x1500);
3369 	MT_BUG_ON(mt, !mas_active(mas));
3370 
3371 	/* mas_walk start ->active */
3372 	mas_set(&mas, 0x1200);
3373 	entry = mas_walk(&mas);
3374 	MT_BUG_ON(mt, entry != ptr);
3375 	MT_BUG_ON(mt, mas.index != 0x1000);
3376 	MT_BUG_ON(mt, mas.last != 0x1500);
3377 	MT_BUG_ON(mt, !mas_active(mas));
3378 
3379 	/* mas_walk start ->active */
3380 	mas_set(&mas, 0x1600);
3381 	entry = mas_walk(&mas);
3382 	MT_BUG_ON(mt, entry != NULL);
3383 	MT_BUG_ON(mt, mas.index != 0x1501);
3384 	MT_BUG_ON(mt, mas.last != 0x1fff);
3385 	MT_BUG_ON(mt, !mas_active(mas));
3386 
3387 	/* mas_walk pause ->active */
3388 	mas_set(&mas, 0x1200);
3389 	mas_pause(&mas);
3390 	entry = mas_walk(&mas);
3391 	MT_BUG_ON(mt, entry != ptr);
3392 	MT_BUG_ON(mt, mas.index != 0x1000);
3393 	MT_BUG_ON(mt, mas.last != 0x1500);
3394 	MT_BUG_ON(mt, !mas_active(mas));
3395 
3396 	/* mas_walk pause -> active */
3397 	mas_set(&mas, 0x1600);
3398 	mas_pause(&mas);
3399 	entry = mas_walk(&mas);
3400 	MT_BUG_ON(mt, entry != NULL);
3401 	MT_BUG_ON(mt, mas.index != 0x1501);
3402 	MT_BUG_ON(mt, mas.last != 0x1fff);
3403 	MT_BUG_ON(mt, !mas_active(mas));
3404 
3405 	/* mas_walk none -> active */
3406 	mas_set(&mas, 0x1200);
3407 	mas.node = MAS_NONE;
3408 	entry = mas_walk(&mas);
3409 	MT_BUG_ON(mt, entry != ptr);
3410 	MT_BUG_ON(mt, mas.index != 0x1000);
3411 	MT_BUG_ON(mt, mas.last != 0x1500);
3412 	MT_BUG_ON(mt, !mas_active(mas));
3413 
3414 	/* mas_walk none -> active */
3415 	mas_set(&mas, 0x1600);
3416 	mas.node = MAS_NONE;
3417 	entry = mas_walk(&mas);
3418 	MT_BUG_ON(mt, entry != NULL);
3419 	MT_BUG_ON(mt, mas.index != 0x1501);
3420 	MT_BUG_ON(mt, mas.last != 0x1fff);
3421 	MT_BUG_ON(mt, !mas_active(mas));
3422 
3423 	/* mas_walk active -> active */
3424 	mas.index = 0x1200;
3425 	mas.last = 0x1200;
3426 	mas.offset = 0;
3427 	entry = mas_walk(&mas);
3428 	MT_BUG_ON(mt, entry != ptr);
3429 	MT_BUG_ON(mt, mas.index != 0x1000);
3430 	MT_BUG_ON(mt, mas.last != 0x1500);
3431 	MT_BUG_ON(mt, !mas_active(mas));
3432 
3433 	/* mas_walk active -> active */
3434 	mas.index = 0x1600;
3435 	mas.last = 0x1600;
3436 	entry = mas_walk(&mas);
3437 	MT_BUG_ON(mt, entry != NULL);
3438 	MT_BUG_ON(mt, mas.index != 0x1501);
3439 	MT_BUG_ON(mt, mas.last != 0x1fff);
3440 	MT_BUG_ON(mt, !mas_active(mas));
3441 
3442 	mas_unlock(&mas);
3443 }
3444 
/* Single tree instance shared by all tests below; each case re-inits and destroys it. */
static DEFINE_MTREE(tree);
/*
 * maple_tree_seed() - module init: run every maple tree unit test in sequence.
 *
 * Each test gets a freshly initialized tree (mt_init_flags()) and destroys it
 * afterwards (mtree_destroy()).  If any BENCH_* macro is enabled at the top of
 * the file, only that one benchmark runs and the unit tests are skipped.
 *
 * Return: 0 if every MT_BUG_ON() check passed, -EINVAL otherwise.
 */
static int __init maple_tree_seed(void)
{
	/* Keys used by the basic insert/load tests; indices referenced below. */
	unsigned long set[] = { 5015, 5014, 5017, 25, 1000,
				1001, 1002, 1003, 1005, 0,
				5003, 5002};
	/* Arbitrary non-NULL pointer used as a stored value (never dereferenced). */
	void *ptr = &set;

	pr_info("\nTEST STARTING\n\n");

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_root_expand(&tree);
	mtree_destroy(&tree);

	/*
	 * Benchmark modes: when one of the BENCH_* defines is enabled, run just
	 * that benchmark and jump past the unit tests.  Each block also defines
	 * BENCH so the "skip" label below is compiled in.
	 */
#if defined(BENCH_SLOT_STORE)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_slot_store(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_NODE_STORE)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_node_store(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_AWALK)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_awalk(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_WALK)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_walk(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_FORK)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_forking(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_MT_FOR_EACH)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_mt_for_each(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_iteration(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_forking(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_mas_store_gfp(&tree);
	mtree_destroy(&tree);

	/* Test ranges (store and insert) */
	mt_init_flags(&tree, 0);
	check_ranges(&tree);
	mtree_destroy(&tree);

#if defined(CONFIG_64BIT)
	/* These tests have ranges outside of 4GB */
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_alloc_range(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_alloc_rev_range(&tree);
	mtree_destroy(&tree);
#endif

	mt_init_flags(&tree, 0);

	check_load(&tree, set[0], NULL);       /* See if 5015 -> NULL */

	check_insert(&tree, set[9], &tree);     /* Insert 0 */
	check_load(&tree, set[9], &tree);       /* See if 0 -> &tree */
	check_load(&tree, set[0], NULL);       /* See if 5015 -> NULL */

	check_insert(&tree, set[10], ptr);      /* Insert 5003 */
	check_load(&tree, set[9], &tree);       /* See if 0 -> &tree */
	check_load(&tree, set[11], NULL);       /* See if 5002 -> NULL */
	check_load(&tree, set[10], ptr);       /* See if 5003 -> ptr */

	/* Clear out the tree */
	mtree_destroy(&tree);

	/* Try to insert, insert a dup, and load back what was inserted. */
	mt_init_flags(&tree, 0);
	check_insert(&tree, set[0], &tree);     /* Insert 5015 */
	check_dup_insert(&tree, set[0], &tree); /* Insert 5015 again */
	check_load(&tree, set[0], &tree);       /* See if 5015 -> &tree */

	/*
	 * Second set of tests try to load a value that doesn't exist, inserts
	 * a second value, then loads the value again
	 */
	check_load(&tree, set[1], NULL);        /* See if 5014 -> NULL */
	check_insert(&tree, set[1], ptr);       /* insert 5014 -> ptr */
	check_load(&tree, set[1], ptr);         /* See if 5014 -> ptr */
	check_load(&tree, set[0], &tree);       /* See if 5015 -> &tree */
	/*
	 * Tree currently contains:
	 * 5014 -> ptr and 5015 -> &tree (the two entries inserted above).
	 */
	check_insert(&tree, set[6], ptr);       /* insert 1002 -> ptr */
	check_insert(&tree, set[7], &tree);       /* insert 1003 -> &tree */

	check_load(&tree, set[0], &tree);       /* See if 5015 -> &tree */
	check_load(&tree, set[1], ptr);         /* See if 5014 -> ptr */
	check_load(&tree, set[6], ptr);         /* See if 1002 -> ptr */
	check_load(&tree, set[7], &tree);       /* 1003 = &tree ? */

	/* Clear out tree */
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	/* Test inserting into a NULL hole. */
	check_insert(&tree, set[5], ptr);       /* insert 1001 -> ptr */
	check_insert(&tree, set[7], &tree);       /* insert 1003 -> &tree */
	check_insert(&tree, set[6], ptr);       /* insert 1002 -> ptr */
	check_load(&tree, set[5], ptr);         /* See if 1001 -> ptr */
	check_load(&tree, set[6], ptr);         /* See if 1002 -> ptr */
	check_load(&tree, set[7], &tree);       /* See if 1003 -> &tree */

	/* Clear out the tree */
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	/*
	 *       set[] = {5015, 5014, 5017, 25, 1000,
	 *                1001, 1002, 1003, 1005, 0,
	 *                5003, 5002};
	 */

	check_insert(&tree, set[0], ptr); /* 5015 */
	check_insert(&tree, set[1], &tree); /* 5014 */
	check_insert(&tree, set[2], ptr); /* 5017 */
	check_insert(&tree, set[3], &tree); /* 25 */
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_insert(&tree, set[4], ptr); /* 1000 < Should split. */
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree); /*25 */
	check_load(&tree, set[4], ptr);
	check_insert(&tree, set[5], &tree); /* 1001 */
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_load(&tree, set[4], ptr);
	check_load(&tree, set[5], &tree);
	check_insert(&tree, set[6], ptr); /* 1002 */
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_load(&tree, set[4], ptr);
	check_load(&tree, set[5], &tree);
	check_load(&tree, set[6], ptr);
	check_insert(&tree, set[7], &tree); /* 1003 */
	check_load(&tree, set[0], ptr);
	check_insert(&tree, set[8], ptr); /* 1005 */

	check_insert(&tree, set[9], &tree); /* 0 */

	/* Re-verify every prior entry survived the inserts/splits above. */
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_load(&tree, set[4], ptr);
	check_load(&tree, set[5], &tree);
	check_load(&tree, set[6], ptr);
	check_load(&tree, set[9], &tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	check_seq(&tree, 16, false);
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	check_seq(&tree, 1000, true);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_rev_seq(&tree, 1000, true);
	mtree_destroy(&tree);

	check_lower_bound_split(&tree);
	check_upper_bound_split(&tree);
	check_mid_split(&tree);

	mt_init_flags(&tree, 0);
	check_next_entry(&tree);
	check_find(&tree);
	check_find_2(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_prev_entry(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_gap_combining(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_node_overwrite(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	next_prev_test(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_spanning_relatives(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_rev_find(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	check_fuzzer(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_dup(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_bnode_min_spanning(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_empty_area_window(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_empty_area_fill(&tree);
	mtree_destroy(&tree);


	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_state_handling(&tree);
	mtree_destroy(&tree);

#if defined(BENCH)
skip:
#endif
	/* Wait for RCU callbacks (node frees) before tallying results. */
	rcu_barrier();
	pr_info("maple_tree: %u of %u tests passed\n",
			atomic_read(&maple_tree_tests_passed),
			atomic_read(&maple_tree_tests_run));
	if (atomic_read(&maple_tree_tests_run) ==
	    atomic_read(&maple_tree_tests_passed))
		return 0;

	return -EINVAL;
}
3723 
3724 static void __exit maple_tree_harvest(void)
3725 {
3726 
3727 }
3728 
/* Register the test runner as module init and the (no-op) exit hook. */
module_init(maple_tree_seed);
module_exit(maple_tree_harvest);
MODULE_AUTHOR("Liam R. Howlett <Liam.Howlett@Oracle.com>");
MODULE_LICENSE("GPL");
3733