// SPDX-License-Identifier: GPL-2.0+
/*
 * test_maple_tree.c: Test the maple tree API
 * Copyright (c) 2018-2022 Oracle Corporation
 * Author: Liam R. Howlett <Liam.Howlett@Oracle.com>
 *
 * Any tests that only require the interface of the tree.
 */

#include <linux/maple_tree.h>
#include <linux/module.h>

#define MTREE_ALLOC_MAX 0x2000000000000UL
#ifndef CONFIG_DEBUG_MAPLE_TREE
#define CONFIG_DEBUG_MAPLE_TREE
#endif
#define CONFIG_MAPLE_SEARCH
#define MAPLE_32BIT (MAPLE_NODE_SLOTS > 31)

/* #define BENCH_SLOT_STORE */
/* #define BENCH_NODE_STORE */
/* #define BENCH_AWALK */
/* #define BENCH_WALK */
/* #define BENCH_MT_FOR_EACH */
/* #define BENCH_FORK */

#ifdef __KERNEL__
#define mt_set_non_kernel(x)		do {} while (0)
#define mt_zero_nr_tallocated(x)	do {} while (0)
#else
#define cond_resched()			do {} while (0)
#endif

static
int mtree_insert_index(struct maple_tree *mt, unsigned long index, gfp_t gfp)
{
	return mtree_insert(mt, index, xa_mk_value(index & LONG_MAX), gfp);
}

static void mtree_erase_index(struct maple_tree *mt, unsigned long index)
{
	MT_BUG_ON(mt, mtree_erase(mt, index) != xa_mk_value(index & LONG_MAX));
	MT_BUG_ON(mt, mtree_load(mt, index) != NULL);
}

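/*
 * Note: the helpers above store the index itself, encoded with
 * xa_mk_value(), so any load can be verified against the index alone.
 * An illustrative sketch (index 5 is arbitrary):
 *
 *	mtree_insert_index(mt, 5, GFP_KERNEL);
 *	MT_BUG_ON(mt, mtree_load(mt, 5) != xa_mk_value(5));
 *	mtree_erase_index(mt, 5);
 */
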
static int mtree_test_insert(struct maple_tree *mt, unsigned long index,
				void *ptr)
{
	return mtree_insert(mt, index, ptr, GFP_KERNEL);
}

static int mtree_test_store_range(struct maple_tree *mt, unsigned long start,
				unsigned long end, void *ptr)
{
	return mtree_store_range(mt, start, end, ptr, GFP_KERNEL);
}

static int mtree_test_store(struct maple_tree *mt, unsigned long start,
				void *ptr)
{
	return mtree_test_store_range(mt, start, start, ptr);
}

static int mtree_test_insert_range(struct maple_tree *mt, unsigned long start,
				unsigned long end, void *ptr)
{
	return mtree_insert_range(mt, start, end, ptr, GFP_KERNEL);
}

static void *mtree_test_load(struct maple_tree *mt, unsigned long index)
{
	return mtree_load(mt, index);
}

static void *mtree_test_erase(struct maple_tree *mt, unsigned long index)
{
	return mtree_erase(mt, index);
}

#if defined(CONFIG_64BIT)
static noinline void check_mtree_alloc_range(struct maple_tree *mt,
		unsigned long start, unsigned long end, unsigned long size,
		unsigned long expected, int eret, void *ptr)
{
	unsigned long result = expected + 1;
	int ret;

	ret = mtree_alloc_range(mt, &result, ptr, size, start, end,
			GFP_KERNEL);
	MT_BUG_ON(mt, ret != eret);
	if (ret)
		return;

	MT_BUG_ON(mt, result != expected);
}

static noinline void check_mtree_alloc_rrange(struct maple_tree *mt,
		unsigned long start, unsigned long end, unsigned long size,
		unsigned long expected, int eret, void *ptr)
{
	unsigned long result = expected + 1;
	int ret;

	ret = mtree_alloc_rrange(mt, &result, ptr, size, start, end - 1,
			GFP_KERNEL);
	MT_BUG_ON(mt, ret != eret);
	if (ret)
		return;

	MT_BUG_ON(mt, result != expected);
}
#endif

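/*
 * The checkers above verify both the return code and, on success, the
 * address written back through &result.  A hedged usage sketch (all
 * numbers are made up): request a 0x10-long gap between 0x1000 and
 * 0x2000, expecting it to land at 0x1000 in an otherwise empty region:
 *
 *	check_mtree_alloc_range(mt, 0x1000, 0x2000, 0x10, 0x1000, 0, ptr);
 */
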
static noinline void check_load(struct maple_tree *mt, unsigned long index,
				void *ptr)
{
	void *ret = mtree_test_load(mt, index);

	if (ret != ptr)
		pr_err("Load %lu returned %p expect %p\n", index, ret, ptr);
	MT_BUG_ON(mt, ret != ptr);
}

static noinline void check_store_range(struct maple_tree *mt,
		unsigned long start, unsigned long end, void *ptr, int expected)
{
	int ret = -EINVAL;
	unsigned long i;

	ret = mtree_test_store_range(mt, start, end, ptr);
	MT_BUG_ON(mt, ret != expected);

	if (ret)
		return;

	for (i = start; i <= end; i++)
		check_load(mt, i, ptr);
}

static noinline void check_insert_range(struct maple_tree *mt,
		unsigned long start, unsigned long end, void *ptr, int expected)
{
	int ret = -EINVAL;
	unsigned long i;

	ret = mtree_test_insert_range(mt, start, end, ptr);
	MT_BUG_ON(mt, ret != expected);

	if (ret)
		return;

	for (i = start; i <= end; i++)
		check_load(mt, i, ptr);
}

static noinline void check_insert(struct maple_tree *mt, unsigned long index,
		void *ptr)
{
	int ret = -EINVAL;

	ret = mtree_test_insert(mt, index, ptr);
	MT_BUG_ON(mt, ret != 0);
}

static noinline void check_dup_insert(struct maple_tree *mt,
				      unsigned long index, void *ptr)
{
	int ret = -EINVAL;

	ret = mtree_test_insert(mt, index, ptr);
	MT_BUG_ON(mt, ret != -EEXIST);
}

static noinline
void check_index_load(struct maple_tree *mt, unsigned long index)
{
	return check_load(mt, index, xa_mk_value(index & LONG_MAX));
}

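/* not_empty() - check whether a maple node still has a parent or any slots set. */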
static inline int not_empty(struct maple_node *node)
{
	int i;

	if (node->parent)
		return 1;

	for (i = 0; i < ARRAY_SIZE(node->slot); i++)
		if (node->slot[i])
			return 1;

	return 0;
}

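/*
 * check_rev_seq() - insert max down to 1 in reverse order, verifying all
 * existing entries after each insert; check_seq() below does the same in
 * ascending order from 0 to max.
 */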
static noinline void check_rev_seq(struct maple_tree *mt, unsigned long max,
		bool verbose)
{
	unsigned long i = max, j;

	MT_BUG_ON(mt, !mtree_empty(mt));

	mt_zero_nr_tallocated();
	while (i) {
		MT_BUG_ON(mt, mtree_insert_index(mt, i, GFP_KERNEL));
		for (j = i; j <= max; j++)
			check_index_load(mt, j);

		check_load(mt, i - 1, NULL);
		mt_set_in_rcu(mt);
		MT_BUG_ON(mt, !mt_height(mt));
		mt_clear_in_rcu(mt);
		MT_BUG_ON(mt, !mt_height(mt));
		i--;
	}
	check_load(mt, max + 1, NULL);

#ifndef __KERNEL__
	if (verbose) {
		rcu_barrier();
		mt_dump(mt);
		pr_info(" %s test of 0-%lu %luK in %d active (%d total)\n",
			__func__, max, mt_get_alloc_size()/1024, mt_nr_allocated(),
			mt_nr_tallocated());
	}
#endif
}

static noinline void check_seq(struct maple_tree *mt, unsigned long max,
		bool verbose)
{
	unsigned long i, j;

	MT_BUG_ON(mt, !mtree_empty(mt));

	mt_zero_nr_tallocated();
	for (i = 0; i <= max; i++) {
		MT_BUG_ON(mt, mtree_insert_index(mt, i, GFP_KERNEL));
		for (j = 0; j <= i; j++)
			check_index_load(mt, j);

		if (i)
			MT_BUG_ON(mt, !mt_height(mt));
		check_load(mt, i + 1, NULL);
	}

#ifndef __KERNEL__
	if (verbose) {
		rcu_barrier();
		mt_dump(mt);
		pr_info(" seq test of 0-%lu %luK in %d active (%d total)\n",
			max, mt_get_alloc_size()/1024, mt_nr_allocated(),
			mt_nr_tallocated());
	}
#endif
}

static noinline void check_lb_not_empty(struct maple_tree *mt)
{
	unsigned long i, j;
	unsigned long huge = 4000UL * 1000 * 1000;

	i = huge;
	while (i > 4096) {
		check_insert(mt, i, (void *) i);
		for (j = huge; j >= i; j /= 2) {
			check_load(mt, j-1, NULL);
			check_load(mt, j, (void *) j);
			check_load(mt, j+1, NULL);
		}
		i /= 2;
	}
	mtree_destroy(mt);
}

static noinline void check_lower_bound_split(struct maple_tree *mt)
{
	MT_BUG_ON(mt, !mtree_empty(mt));
	check_lb_not_empty(mt);
}

static noinline void check_upper_bound_split(struct maple_tree *mt)
{
	unsigned long i, j;
	unsigned long huge;

	MT_BUG_ON(mt, !mtree_empty(mt));

	if (MAPLE_32BIT)
		huge = 2147483647UL;
	else
		huge = 4000UL * 1000 * 1000;

	i = 4096;
	while (i < huge) {
		check_insert(mt, i, (void *) i);
		/* Verify every entry inserted so far. */
		for (j = i; j >= 4096; j /= 2) {
			check_load(mt, j-1, NULL);
			check_load(mt, j, (void *) j);
			check_load(mt, j+1, NULL);
		}
		i *= 2;
	}
	mtree_destroy(mt);
}

static noinline void check_mid_split(struct maple_tree *mt)
{
	unsigned long huge = 8000UL * 1000 * 1000;

	check_insert(mt, huge, (void *) huge);
	check_insert(mt, 0, xa_mk_value(0));
	check_lb_not_empty(mt);
}

static noinline void check_rev_find(struct maple_tree *mt)
{
	int i, nr_entries = 200;
	void *val;
	MA_STATE(mas, mt, 0, 0);

	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 5,
				  xa_mk_value(i), GFP_KERNEL);

	rcu_read_lock();
	mas_set(&mas, 1000);
	val = mas_find_rev(&mas, 1000);
	MT_BUG_ON(mt, val != xa_mk_value(100));
	val = mas_find_rev(&mas, 1000);
	MT_BUG_ON(mt, val != NULL);

	mas_set(&mas, 999);
	val = mas_find_rev(&mas, 997);
	MT_BUG_ON(mt, val != NULL);

	mas_set(&mas, 1000);
	val = mas_find_rev(&mas, 900);
	MT_BUG_ON(mt, val != xa_mk_value(100));
	val = mas_find_rev(&mas, 900);
	MT_BUG_ON(mt, val != xa_mk_value(99));

	mas_set(&mas, 20);
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(2));
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(1));
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(0));
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != NULL);
	rcu_read_unlock();
}

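/*
 * check_find() - walk a set of power-of-four indices (with an
 * XA_ZERO_ENTRY at 64) using mas_find(), mas_for_each(), mt_for_each(),
 * mt_find() and mas_prev(), verifying each iterator against the expected
 * entries.
 */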
static noinline void check_find(struct maple_tree *mt)
{
	unsigned long val = 0;
	unsigned long count;
	unsigned long max;
	unsigned long top;
	unsigned long last = 0, index = 0;
	void *entry, *entry2;

	MA_STATE(mas, mt, 0, 0);

	/* Insert 0. */
	MT_BUG_ON(mt, mtree_insert_index(mt, val++, GFP_KERNEL));

#if defined(CONFIG_64BIT)
	top = 4398046511104UL;
#else
	top = ULONG_MAX;
#endif

	if (MAPLE_32BIT)
		count = 15;
	else
		count = 20;

	for (int i = 0; i <= count; i++) {
		if (val != 64)
			MT_BUG_ON(mt, mtree_insert_index(mt, val, GFP_KERNEL));
		else
			MT_BUG_ON(mt, mtree_insert(mt, val,
				XA_ZERO_ENTRY, GFP_KERNEL));

		val <<= 2;
	}

	val = 0;
	mas_set(&mas, val);
	mas_lock(&mas);
	while ((entry = mas_find(&mas, 268435456)) != NULL) {
		if (val != 64)
			MT_BUG_ON(mt, xa_mk_value(val) != entry);
		else
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);

		val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
	}
	mas_unlock(&mas);

	val = 0;
	mas_set(&mas, val);
	mas_lock(&mas);
	mas_for_each(&mas, entry, ULONG_MAX) {
		if (val != 64)
			MT_BUG_ON(mt, xa_mk_value(val) != entry);
		else
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
		val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
	}
	mas_unlock(&mas);

	/* Test mas_pause */
	val = 0;
	mas_set(&mas, val);
	mas_lock(&mas);
	mas_for_each(&mas, entry, ULONG_MAX) {
		if (val != 64)
			MT_BUG_ON(mt, xa_mk_value(val) != entry);
		else
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
		val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;

		mas_pause(&mas);
		mas_unlock(&mas);
		mas_lock(&mas);
	}
	mas_unlock(&mas);

	val = 0;
	max = 300; /* A value big enough to include XA_ZERO_ENTRY at 64. */
	mt_for_each(mt, entry, index, max) {
		MT_BUG_ON(mt, xa_mk_value(val) != entry);
		val <<= 2;
		if (val == 64) /* Skip zero entry. */
			val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
	}

	val = 0;
	max = 0;
	index = 0;
	MT_BUG_ON(mt, mtree_insert_index(mt, ULONG_MAX, GFP_KERNEL));
	mt_for_each(mt, entry, index, ULONG_MAX) {
		if (val == top)
			MT_BUG_ON(mt, entry != xa_mk_value(LONG_MAX));
		else
			MT_BUG_ON(mt, xa_mk_value(val) != entry);

		/* Workaround for 32bit */
		if ((val << 2) < val)
			val = ULONG_MAX;
		else
			val <<= 2;

		if (val == 64) /* Skip zero entry. */
			val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
		max++;
		MT_BUG_ON(mt, max > 25);
	}
	mtree_erase_index(mt, ULONG_MAX);

	mas_reset(&mas);
	index = 17;
	entry = mt_find(mt, &index, 512);
	MT_BUG_ON(mt, xa_mk_value(256) != entry);

	mas_reset(&mas);
	index = 17;
	entry = mt_find(mt, &index, 20);
	MT_BUG_ON(mt, entry != NULL);

	/* Range check. */
	/* Insert ULONG_MAX */
	MT_BUG_ON(mt, mtree_insert_index(mt, ULONG_MAX, GFP_KERNEL));

	val = 0;
	mas_set(&mas, 0);
	mas_lock(&mas);
	mas_for_each(&mas, entry, ULONG_MAX) {
		if (val == 64)
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
		else if (val == top)
			MT_BUG_ON(mt, entry != xa_mk_value(LONG_MAX));
		else
			MT_BUG_ON(mt, xa_mk_value(val) != entry);

		/* Workaround for 32bit */
		if ((val << 2) < val)
			val = ULONG_MAX;
		else
			val <<= 2;

		/* For zero check. */
		if (!val)
			val = 1;
		mas_pause(&mas);
		mas_unlock(&mas);
		mas_lock(&mas);
	}
	mas_unlock(&mas);

	mas_set(&mas, 1048576);
	mas_lock(&mas);
	entry = mas_find(&mas, 1048576);
	mas_unlock(&mas);
	MT_BUG_ON(mas.tree, entry == NULL);

	/*
	 * Find the last value.
	 * 1. get the expected value, leveraging the existence of an end entry
	 * 2. delete the end entry
	 * 3. find the last value by searching for ULONG_MAX and then using
	 * prev
	 */
	/* First, get the expected result. */
	mas_lock(&mas);
	mas_reset(&mas);
	mas.index = ULONG_MAX; /* start at max.. */
	entry = mas_find(&mas, ULONG_MAX);
	entry = mas_prev(&mas, 0);
	index = mas.index;
	last = mas.last;

	/* Erase the last entry. */
	mas_reset(&mas);
	mas.index = ULONG_MAX;
	mas.last = ULONG_MAX;
	mas_erase(&mas);

	/* Get the previous value from MAS_START */
	mas_reset(&mas);
	entry2 = mas_prev(&mas, 0);

	/* Check results. */
	MT_BUG_ON(mt, entry != entry2);
	MT_BUG_ON(mt, index != mas.index);
	MT_BUG_ON(mt, last != mas.last);

	mas.node = MAS_NONE;
	mas.index = ULONG_MAX;
	mas.last = ULONG_MAX;
	entry2 = mas_prev(&mas, 0);
	MT_BUG_ON(mt, entry != entry2);

	mas_set(&mas, 0);
	MT_BUG_ON(mt, mas_prev(&mas, 0) != NULL);

	mas_unlock(&mas);
	mtree_destroy(mt);
}

static noinline void check_find_2(struct maple_tree *mt)
{
	unsigned long i, j;
	void *entry;

	MA_STATE(mas, mt, 0, 0);
	rcu_read_lock();
	mas_for_each(&mas, entry, ULONG_MAX)
		MT_BUG_ON(mt, true);
	rcu_read_unlock();

	for (i = 0; i < 256; i++) {
		mtree_insert_index(mt, i, GFP_KERNEL);
		j = 0;
		mas_set(&mas, 0);
		rcu_read_lock();
		mas_for_each(&mas, entry, ULONG_MAX) {
			MT_BUG_ON(mt, entry != xa_mk_value(j));
			j++;
		}
		rcu_read_unlock();
		MT_BUG_ON(mt, j != i + 1);
	}

	for (i = 0; i < 256; i++) {
		mtree_erase_index(mt, i);
		j = i + 1;
		mas_set(&mas, 0);
		rcu_read_lock();
		mas_for_each(&mas, entry, ULONG_MAX) {
			if (xa_is_zero(entry))
				continue;

			MT_BUG_ON(mt, entry != xa_mk_value(j));
			j++;
		}
		rcu_read_unlock();
		MT_BUG_ON(mt, j != 256);
	}

	/*MT_BUG_ON(mt, !mtree_empty(mt)); */
}

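/*
 * The 64-bit allocation tests below shift all addresses right by 12, so
 * a /proc/self/maps line such as 0x565234af2000-0x565234af4000 is stored
 * as the inclusive range 0x565234af2-0x565234af3.
 */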
#if defined(CONFIG_64BIT)
static noinline void check_alloc_rev_range(struct maple_tree *mt)
{
	/*
	 * Generated by:
	 * cat /proc/self/maps | awk '{print $1}'|
	 * awk -F "-" '{printf "0x%s, 0x%s, ", $1, $2}'
	 */

	unsigned long range[] = {
	/*      Inclusive     , Exclusive. */
		0x565234af2000, 0x565234af4000,
		0x565234af4000, 0x565234af9000,
		0x565234af9000, 0x565234afb000,
		0x565234afc000, 0x565234afd000,
		0x565234afd000, 0x565234afe000,
		0x565235def000, 0x565235e10000,
		0x7f36d4bfd000, 0x7f36d4ee2000,
		0x7f36d4ee2000, 0x7f36d4f04000,
		0x7f36d4f04000, 0x7f36d504c000,
		0x7f36d504c000, 0x7f36d5098000,
		0x7f36d5098000, 0x7f36d5099000,
		0x7f36d5099000, 0x7f36d509d000,
		0x7f36d509d000, 0x7f36d509f000,
		0x7f36d509f000, 0x7f36d50a5000,
		0x7f36d50b9000, 0x7f36d50db000,
		0x7f36d50db000, 0x7f36d50dc000,
		0x7f36d50dc000, 0x7f36d50fa000,
		0x7f36d50fa000, 0x7f36d5102000,
		0x7f36d5102000, 0x7f36d5103000,
		0x7f36d5103000, 0x7f36d5104000,
		0x7f36d5104000, 0x7f36d5105000,
		0x7fff5876b000, 0x7fff5878d000,
		0x7fff5878e000, 0x7fff58791000,
		0x7fff58791000, 0x7fff58793000,
	};

	unsigned long holes[] = {
		/*
		 * Note: start of hole is INCLUSIVE
		 *        end of hole is EXCLUSIVE
		 *        (opposite of the above table.)
		 * Start of hole, end of hole,  size of hole (+1)
		 */
		0x565234afb000, 0x565234afc000, 0x1000,
		0x565234afe000, 0x565235def000, 0x12F1000,
		0x565235e10000, 0x7f36d4bfd000, 0x28E49EDED000,
	};

	/*
	 * req_range consists of 5 values per request:
	 * 1. min index
	 * 2. max index
	 * 3. size
	 * 4. number that should be returned
	 * 5. return value
	 */
	unsigned long req_range[] = {
		0x565234af9000, /* Min */
		0x7fff58791000, /* Max */
		0x1000,         /* Size */
		0x7fff5878d << 12,  /* First rev hole of size 0x1000 */
		0,              /* Return value success. */

		0x0,            /* Min */
		0x565234AF1 << 12,    /* Max */
		0x3000,         /* Size */
		0x565234AEE << 12,  /* max - 3. */
		0,              /* Return value success. */

		0x0,            /* Min */
		-1,             /* Max */
		0x1000,         /* Size */
		562949953421311 << 12,/* First rev hole of size 0x1000 */
		0,              /* Return value success. */

		0x0,            /* Min */
		0x7F36D510A << 12,    /* Max */
		0x4000,         /* Size */
		0x7F36D5106 << 12,    /* First rev hole of size 0x4000 */
		0,              /* Return value success. */

		/* Ascend test. */
		0x0,
		34148798629 << 12,
		19 << 12,
		34148797418 << 12,
		0x0,

		/* Too big test. */
		0x0,
		18446744073709551615UL,
		562915594369134UL << 12,
		0x0,
		-EBUSY,
	};

	int i, range_count = ARRAY_SIZE(range);
	int req_range_count = ARRAY_SIZE(req_range);
	unsigned long min = 0;

	MA_STATE(mas, mt, 0, 0);

	mtree_store_range(mt, MTREE_ALLOC_MAX, ULONG_MAX, XA_ZERO_ENTRY,
			  GFP_KERNEL);
#define DEBUG_REV_RANGE 0
	for (i = 0; i < range_count; i += 2) {
		/* Inclusive, Inclusive (with the -1) */

#if DEBUG_REV_RANGE
		pr_debug("\t%s: Insert %lu-%lu\n", __func__, range[i] >> 12,
				(range[i + 1] >> 12) - 1);
#endif
		check_insert_range(mt, range[i] >> 12, (range[i + 1] >> 12) - 1,
				xa_mk_value(range[i] >> 12), 0);
		mt_validate(mt);
	}

	mas_lock(&mas);
	for (i = 0; i < ARRAY_SIZE(holes); i += 3) {
#if DEBUG_REV_RANGE
		pr_debug("Search from %lu-%lu for gap %lu should be at %lu\n",
				min, holes[i+1]>>12, holes[i+2]>>12,
				holes[i] >> 12);
#endif
		MT_BUG_ON(mt, mas_empty_area_rev(&mas, min,
					holes[i+1] >> 12,
					holes[i+2] >> 12));
#if DEBUG_REV_RANGE
		pr_debug("Found %lu %lu\n", mas.index, mas.last);
		pr_debug("gap %lu %lu\n", (holes[i] >> 12),
				(holes[i+1] >> 12));
#endif
		MT_BUG_ON(mt, mas.last + 1 != (holes[i+1] >> 12));
		MT_BUG_ON(mt, mas.index != (holes[i+1] >> 12) - (holes[i+2] >> 12));
		min = holes[i+1] >> 12;
		mas_reset(&mas);
	}

	mas_unlock(&mas);
	for (i = 0; i < req_range_count; i += 5) {
#if DEBUG_REV_RANGE
		pr_debug("\tReverse request between %lu-%lu size %lu, should get %lu\n",
				req_range[i] >> 12,
				(req_range[i + 1] >> 12) - 1,
				req_range[i+2] >> 12,
				req_range[i+3] >> 12);
#endif
		check_mtree_alloc_rrange(mt,
				req_range[i]   >> 12, /* start */
				req_range[i+1] >> 12, /* end */
				req_range[i+2] >> 12, /* size */
				req_range[i+3] >> 12, /* expected address */
				req_range[i+4],       /* expected return */
				xa_mk_value(req_range[i] >> 12)); /* pointer */
		mt_validate(mt);
	}

	mt_set_non_kernel(1);
	mtree_erase(mt, 34148798727); /* create a deleted range. */
	check_mtree_alloc_rrange(mt, 0, 34359052173, 210253414,
			34148798725, 0, mt);

	mtree_destroy(mt);
}

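/*
 * check_alloc_range() - the forward-allocation counterpart of the test
 * above: the lowest suitable gap is chosen instead of the highest.
 */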
static noinline void check_alloc_range(struct maple_tree *mt)
{
	/*
	 * Generated by:
	 * cat /proc/self/maps|awk '{print $1}'|
	 * awk -F "-" '{printf "0x%s, 0x%s, ", $1, $2}'
	 */

	unsigned long range[] = {
	/*      Inclusive     , Exclusive. */
		0x565234af2000, 0x565234af4000,
		0x565234af4000, 0x565234af9000,
		0x565234af9000, 0x565234afb000,
		0x565234afc000, 0x565234afd000,
		0x565234afd000, 0x565234afe000,
		0x565235def000, 0x565235e10000,
		0x7f36d4bfd000, 0x7f36d4ee2000,
		0x7f36d4ee2000, 0x7f36d4f04000,
		0x7f36d4f04000, 0x7f36d504c000,
		0x7f36d504c000, 0x7f36d5098000,
		0x7f36d5098000, 0x7f36d5099000,
		0x7f36d5099000, 0x7f36d509d000,
		0x7f36d509d000, 0x7f36d509f000,
		0x7f36d509f000, 0x7f36d50a5000,
		0x7f36d50b9000, 0x7f36d50db000,
		0x7f36d50db000, 0x7f36d50dc000,
		0x7f36d50dc000, 0x7f36d50fa000,
		0x7f36d50fa000, 0x7f36d5102000,
		0x7f36d5102000, 0x7f36d5103000,
		0x7f36d5103000, 0x7f36d5104000,
		0x7f36d5104000, 0x7f36d5105000,
		0x7fff5876b000, 0x7fff5878d000,
		0x7fff5878e000, 0x7fff58791000,
		0x7fff58791000, 0x7fff58793000,
	};
	unsigned long holes[] = {
		/* Start of hole, end of hole,  size of hole (+1) */
		0x565234afb000, 0x565234afc000, 0x1000,
		0x565234afe000, 0x565235def000, 0x12F1000,
		0x565235e10000, 0x7f36d4bfd000, 0x28E49EDED000,
	};

	/*
	 * req_range consists of 5 values per request:
	 * 1. min index
	 * 2. max index
	 * 3. size
	 * 4. number that should be returned
	 * 5. return value
	 */
	unsigned long req_range[] = {
		0x565234af9000, /* Min */
		0x7fff58791000, /* Max */
		0x1000,         /* Size */
		0x565234afb000, /* First hole in our data of size 0x1000. */
		0,              /* Return value success. */

		0x0,            /* Min */
		0x7fff58791000, /* Max */
		0x1F00,         /* Size */
		0x0,            /* First hole in our data of at least 0x1F00. */
		0,              /* Return value success. */

		/* Test ascend. */
		34148797436 << 12, /* Min */
		0x7fff587AF000,    /* Max */
		0x3000,            /* Size */
		34148798629 << 12, /* Expected location */
		0,                 /* Return value success. */

		/* Test failing. */
		34148798623 << 12, /* Min */
		34148798683 << 12, /* Max */
		0x15000,           /* Size */
		0,                 /* Expected location */
		-EBUSY,            /* Return value failed. */

		/* Test filling entire gap. */
		34148798623 << 12, /* Min */
		0x7fff587AF000,    /* Max */
		0x10000,           /* Size */
		34148798632 << 12, /* Expected location */
		0,                 /* Return value success. */

		/* Test walking off the end of root. */
		0,                 /* Min */
		-1,                /* Max */
		-1,                /* Size */
		0,                 /* Expected location */
		-EBUSY,            /* Return value failure. */

		/* Test looking for too large a hole across entire range. */
		0,                 /* Min */
		-1,                /* Max */
		4503599618982063UL << 12,  /* Size */
		34359052178 << 12, /* Expected location */
		-EBUSY,            /* Return failure. */
	};
	int i, range_count = ARRAY_SIZE(range);
	int req_range_count = ARRAY_SIZE(req_range);
	unsigned long min = 0x565234af2000;
	MA_STATE(mas, mt, 0, 0);

	mtree_store_range(mt, MTREE_ALLOC_MAX, ULONG_MAX, XA_ZERO_ENTRY,
			  GFP_KERNEL);
	for (i = 0; i < range_count; i += 2) {
#define DEBUG_ALLOC_RANGE 0
#if DEBUG_ALLOC_RANGE
		pr_debug("\tInsert %lu-%lu\n", range[i] >> 12,
			 (range[i + 1] >> 12) - 1);
		mt_dump(mt);
#endif
		check_insert_range(mt, range[i] >> 12, (range[i + 1] >> 12) - 1,
				xa_mk_value(range[i] >> 12), 0);
		mt_validate(mt);
	}

	mas_lock(&mas);
	for (i = 0; i < ARRAY_SIZE(holes); i += 3) {
#if DEBUG_ALLOC_RANGE
		pr_debug("\tGet empty %lu-%lu size %lu (%lx-%lx)\n", min >> 12,
			holes[i+1] >> 12, holes[i+2] >> 12,
			min, holes[i+1]);
#endif
		MT_BUG_ON(mt, mas_empty_area(&mas, min >> 12,
					holes[i+1] >> 12,
					holes[i+2] >> 12));
		MT_BUG_ON(mt, mas.index != holes[i] >> 12);
		min = holes[i+1];
		mas_reset(&mas);
	}
	mas_unlock(&mas);
	for (i = 0; i < req_range_count; i += 5) {
#if DEBUG_ALLOC_RANGE
		pr_debug("\tTest %d: %lu-%lu size %lu expected %lu (%lu-%lu)\n",
			 i/5, req_range[i]   >> 12, req_range[i + 1]   >> 12,
			 req_range[i + 2]   >> 12, req_range[i + 3]   >> 12,
			 req_range[i], req_range[i+1]);
#endif
		check_mtree_alloc_range(mt,
				req_range[i]   >> 12, /* start */
				req_range[i+1] >> 12, /* end */
				req_range[i+2] >> 12, /* size */
				req_range[i+3] >> 12, /* expected address */
				req_range[i+4],       /* expected return */
				xa_mk_value(req_range[i] >> 12)); /* pointer */
		mt_validate(mt);
#if DEBUG_ALLOC_RANGE
		mt_dump(mt);
#endif
	}

	mtree_destroy(mt);
}
#endif

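/*
 * check_ranges() - exercise range stores and overwrites across node
 * boundaries, splits, and rebalances; see the list of interesting cases
 * within.
 */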
static noinline void check_ranges(struct maple_tree *mt)
{
	int i, val, val2;
	unsigned long r[] = {
		10, 15,
		20, 25,
		17, 22, /* Overlaps previous range. */
		9, 1000, /* Huge. */
		100, 200,
		45, 168,
		118, 128,
	};

	MT_BUG_ON(mt, !mtree_empty(mt));
	check_insert_range(mt, r[0], r[1], xa_mk_value(r[0]), 0);
	check_insert_range(mt, r[2], r[3], xa_mk_value(r[2]), 0);
	check_insert_range(mt, r[4], r[5], xa_mk_value(r[4]), -EEXIST);
	MT_BUG_ON(mt, !mt_height(mt));
	/* Store */
	check_store_range(mt, r[4], r[5], xa_mk_value(r[4]), 0);
	check_store_range(mt, r[6], r[7], xa_mk_value(r[6]), 0);
	check_store_range(mt, r[8], r[9], xa_mk_value(r[8]), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);
	MT_BUG_ON(mt, mt_height(mt));

	check_seq(mt, 50, false);
	mt_set_non_kernel(4);
	check_store_range(mt, 5, 47,  xa_mk_value(47), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	/* Create a tree of 0-100 */
	check_seq(mt, 100, false);
	/* Store 45-168 */
	mt_set_non_kernel(10);
	check_store_range(mt, r[10], r[11], xa_mk_value(r[10]), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	/* Create a tree of 0-200 */
	check_seq(mt, 200, false);
	/* Store 45-168 */
	check_store_range(mt, r[10], r[11], xa_mk_value(r[10]), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	check_seq(mt, 30, false);
	check_store_range(mt, 6, 18, xa_mk_value(6), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	/* Overwrite across multiple levels. */
	/* Create a tree of 0-400 */
	check_seq(mt, 400, false);
	mt_set_non_kernel(50);
	/* Store 118-128 */
	check_store_range(mt, r[12], r[13], xa_mk_value(r[12]), 0);
	mt_set_non_kernel(50);
	mtree_test_erase(mt, 140);
	mtree_test_erase(mt, 141);
	mtree_test_erase(mt, 142);
	mtree_test_erase(mt, 143);
	mtree_test_erase(mt, 130);
	mtree_test_erase(mt, 131);
	mtree_test_erase(mt, 132);
	mtree_test_erase(mt, 133);
	mtree_test_erase(mt, 134);
	mtree_test_erase(mt, 135);
	check_load(mt, r[12], xa_mk_value(r[12]));
	check_load(mt, r[13], xa_mk_value(r[12]));
	check_load(mt, r[13] - 1, xa_mk_value(r[12]));
	check_load(mt, r[13] + 1, xa_mk_value(r[13] + 1));
	check_load(mt, 135, NULL);
	check_load(mt, 140, NULL);
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	/* Overwrite multiple levels at the end of the tree (slot 7) */
	mt_set_non_kernel(50);
	check_seq(mt, 400, false);
	check_store_range(mt, 353, 361, xa_mk_value(353), 0);
	check_store_range(mt, 347, 352, xa_mk_value(347), 0);

	check_load(mt, 346, xa_mk_value(346));
	for (i = 347; i <= 352; i++)
		check_load(mt, i, xa_mk_value(347));
	for (i = 353; i <= 361; i++)
		check_load(mt, i, xa_mk_value(353));
	check_load(mt, 362, xa_mk_value(362));
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_set_non_kernel(50);
	check_seq(mt, 400, false);
	check_store_range(mt, 352, 364, NULL, 0);
	check_store_range(mt, 351, 363, xa_mk_value(352), 0);
	check_load(mt, 350, xa_mk_value(350));
	check_load(mt, 351, xa_mk_value(352));
	for (i = 352; i <= 363; i++)
		check_load(mt, i, xa_mk_value(352));
	check_load(mt, 364, NULL);
	check_load(mt, 365, xa_mk_value(365));
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_set_non_kernel(5);
	check_seq(mt, 400, false);
	check_store_range(mt, 352, 364, NULL, 0);
	check_store_range(mt, 351, 364, xa_mk_value(352), 0);
	check_load(mt, 350, xa_mk_value(350));
	check_load(mt, 351, xa_mk_value(352));
	for (i = 352; i <= 364; i++)
		check_load(mt, i, xa_mk_value(352));
	check_load(mt, 365, xa_mk_value(365));
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_set_non_kernel(50);
	check_seq(mt, 400, false);
	check_store_range(mt, 362, 367, xa_mk_value(362), 0);
	check_store_range(mt, 353, 361, xa_mk_value(353), 0);
	mt_set_non_kernel(0);
	mt_validate(mt);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);
	/*
	 * Interesting cases:
	 * 1. Overwrite the end of a node and end in the first entry of the
	 * next node.
	 * 2. Split a single range (sketched below).
	 * 3. Overwrite the start of a range.
	 * 4. Overwrite the end of a range.
	 * 5. Overwrite the entire range.
	 * 6. Overwrite a range that causes multiple parent nodes to be
	 * combined.
	 * 7. Overwrite a range that causes multiple parent nodes and part of
	 * root to be combined.
	 * 8. Overwrite the whole tree.
	 * 9. Try to overwrite the zero entry of an alloc tree.
	 * 10. Write a range larger than a node's current pivot.
	 */
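	/*
	 * An illustrative sketch of case 2 (the indices are made up):
	 * storing one value over 5-10 and then a different value over 7-8
	 * splits the single range into 5-6, 7-8 and 9-10:
	 *
	 *	mtree_store_range(mt, 5, 10, xa_mk_value(0), GFP_KERNEL);
	 *	mtree_store_range(mt, 7, 8, xa_mk_value(7), GFP_KERNEL);
	 *	MT_BUG_ON(mt, mtree_load(mt, 6) != xa_mk_value(0));
	 *	MT_BUG_ON(mt, mtree_load(mt, 7) != xa_mk_value(7));
	 *	MT_BUG_ON(mt, mtree_load(mt, 9) != xa_mk_value(0));
	 */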

	mt_set_non_kernel(50);
	for (i = 0; i <= 500; i++) {
		val = i*5;
		val2 = (i+1)*5;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 2400, 2400, xa_mk_value(2400), 0);
	check_store_range(mt, 2411, 2411, xa_mk_value(2411), 0);
	check_store_range(mt, 2412, 2412, xa_mk_value(2412), 0);
	check_store_range(mt, 2396, 2400, xa_mk_value(4052020), 0);
	check_store_range(mt, 2402, 2402, xa_mk_value(2402), 0);
	mtree_destroy(mt);
	mt_set_non_kernel(0);

	mt_set_non_kernel(50);
	for (i = 0; i <= 500; i++) {
		val = i*5;
		val2 = (i+1)*5;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 2422, 2422, xa_mk_value(2422), 0);
	check_store_range(mt, 2424, 2424, xa_mk_value(2424), 0);
	check_store_range(mt, 2425, 2425, xa_mk_value(2), 0);
	check_store_range(mt, 2460, 2470, NULL, 0);
	check_store_range(mt, 2435, 2460, xa_mk_value(2435), 0);
	check_store_range(mt, 2461, 2470, xa_mk_value(2461), 0);
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	/* Test rebalance gaps */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mt_set_non_kernel(50);
	for (i = 0; i <= 50; i++) {
		val = i*10;
		val2 = (i+1)*10;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 161, 161, xa_mk_value(161), 0);
	check_store_range(mt, 162, 162, xa_mk_value(162), 0);
	check_store_range(mt, 163, 163, xa_mk_value(163), 0);
	check_store_range(mt, 240, 249, NULL, 0);
	mtree_erase(mt, 200);
	mtree_erase(mt, 210);
	mtree_erase(mt, 220);
	mtree_erase(mt, 230);
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	for (i = 0; i <= 500; i++) {
		val = i*10;
		val2 = (i+1)*10;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 4600, 4959, xa_mk_value(1), 0);
	mt_validate(mt);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	for (i = 0; i <= 500; i++) {
		val = i*10;
		val2 = (i+1)*10;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 4811, 4811, xa_mk_value(4811), 0);
	check_store_range(mt, 4812, 4812, xa_mk_value(4812), 0);
	check_store_range(mt, 4861, 4861, xa_mk_value(4861), 0);
	check_store_range(mt, 4862, 4862, xa_mk_value(4862), 0);
	check_store_range(mt, 4842, 4849, NULL, 0);
	mt_validate(mt);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	for (i = 0; i <= 1300; i++) {
		val = i*10;
		val2 = (i+1)*10;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
		MT_BUG_ON(mt, mt_height(mt) >= 4);
	}
	/* Cause a 3 child split all the way up the tree. */
	for (i = 5; i < 215; i += 10)
		check_store_range(mt, 11450 + i, 11450 + i + 1, NULL, 0);
	for (i = 5; i < 65; i += 10)
		check_store_range(mt, 11770 + i, 11770 + i + 1, NULL, 0);

	MT_BUG_ON(mt, mt_height(mt) >= 4);
	for (i = 5; i < 45; i += 10)
		check_store_range(mt, 11700 + i, 11700 + i + 1, NULL, 0);
	if (!MAPLE_32BIT)
		MT_BUG_ON(mt, mt_height(mt) < 4);
	mtree_destroy(mt);

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	for (i = 0; i <= 1200; i++) {
		val = i*10;
		val2 = (i+1)*10;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
		MT_BUG_ON(mt, mt_height(mt) >= 4);
	}
	/* Fill parents and leaves before split. */
	for (i = 5; i < 455; i += 10)
		check_store_range(mt, 7800 + i, 7800 + i + 1, NULL, 0);

	for (i = 1; i < 16; i++)
		check_store_range(mt, 8185 + i, 8185 + i + 1,
				  xa_mk_value(8185+i), 0);
	MT_BUG_ON(mt, mt_height(mt) >= 4);
	/* Triple split across multiple levels. */
	check_store_range(mt, 8184, 8184, xa_mk_value(8184), 0);
	if (!MAPLE_32BIT)
		MT_BUG_ON(mt, mt_height(mt) != 4);
}

static noinline void check_next_entry(struct maple_tree *mt)
{
	void *entry = NULL;
	unsigned long limit = 30, i = 0;
	MA_STATE(mas, mt, i, i);

	MT_BUG_ON(mt, !mtree_empty(mt));

	check_seq(mt, limit, false);
	rcu_read_lock();

	/* Check the first one and get ma_state in the correct state. */
	MT_BUG_ON(mt, mas_walk(&mas) != xa_mk_value(i++));
	for ( ; i <= limit + 1; i++) {
		entry = mas_next(&mas, limit);
		if (i > limit)
			MT_BUG_ON(mt, entry != NULL);
		else
			MT_BUG_ON(mt, xa_mk_value(i) != entry);
	}
	rcu_read_unlock();
	mtree_destroy(mt);
}

static noinline void check_prev_entry(struct maple_tree *mt)
{
	unsigned long index = 16;
	void *value;
	int i;

	MA_STATE(mas, mt, index, index);

	MT_BUG_ON(mt, !mtree_empty(mt));
	check_seq(mt, 30, false);

	rcu_read_lock();
	value = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, value != xa_mk_value(index));
	value = mas_prev(&mas, 0);
	MT_BUG_ON(mt, value != xa_mk_value(index - 1));
	rcu_read_unlock();
	mtree_destroy(mt);

	/* Check limits on prev */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mas_lock(&mas);
	for (i = 0; i <= index; i++) {
		mas_set_range(&mas, i*10, i*10+5);
		mas_store_gfp(&mas, xa_mk_value(i), GFP_KERNEL);
	}

	mas_set(&mas, 20);
	value = mas_walk(&mas);
	MT_BUG_ON(mt, value != xa_mk_value(2));

	value = mas_prev(&mas, 19);
	MT_BUG_ON(mt, value != NULL);

	mas_set(&mas, 80);
	value = mas_walk(&mas);
	MT_BUG_ON(mt, value != xa_mk_value(8));

	value = mas_prev(&mas, 76);
	MT_BUG_ON(mt, value != NULL);

	mas_unlock(&mas);
}

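/*
 * check_root_expand() - check walks and stores against a root-only tree,
 * including pointers with low bits set (xarray-style tagged values).
 */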
static noinline void check_root_expand(struct maple_tree *mt)
{
	MA_STATE(mas, mt, 0, 0);
	void *ptr;

	mas_lock(&mas);
	mas_set(&mas, 3);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, mas.last != ULONG_MAX);

	ptr = &check_prev_entry;
	mas_set(&mas, 1);
	mas_store_gfp(&mas, ptr, GFP_KERNEL);

	mas_set(&mas, 0);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != NULL);

	mas_set(&mas, 1);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != &check_prev_entry);

	mas_set(&mas, 2);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != NULL);
	mas_unlock(&mas);
	mtree_destroy(mt);

	mt_init_flags(mt, 0);
	mas_lock(&mas);

	mas_set(&mas, 0);
	ptr = &check_prev_entry;
	mas_store_gfp(&mas, ptr, GFP_KERNEL);

	mas_set(&mas, 5);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, mas.index != 1);
	MT_BUG_ON(mt, mas.last != ULONG_MAX);

	mas_set_range(&mas, 0, 100);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != &check_prev_entry);
	MT_BUG_ON(mt, mas.last != 0);
	mas_unlock(&mas);
	mtree_destroy(mt);

	mt_init_flags(mt, 0);
	mas_lock(&mas);

	mas_set(&mas, 0);
	ptr = (void *)((unsigned long) check_prev_entry | 1UL);
	mas_store_gfp(&mas, ptr, GFP_KERNEL);
	ptr = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, (mas.index != 1) && (mas.last != ULONG_MAX));

	mas_set(&mas, 1);
	ptr = mas_prev(&mas, 0);
	MT_BUG_ON(mt, (mas.index != 0) && (mas.last != 0));
	MT_BUG_ON(mt, ptr != (void *)((unsigned long) check_prev_entry | 1UL));

	mas_unlock(&mas);

	mtree_destroy(mt);

	mt_init_flags(mt, 0);
	mas_lock(&mas);
	mas_set(&mas, 0);
	ptr = (void *)((unsigned long) check_prev_entry | 2UL);
	mas_store_gfp(&mas, ptr, GFP_KERNEL);
	ptr = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, (mas.index != 1) && (mas.last != ULONG_MAX));

	mas_set(&mas, 1);
	ptr = mas_prev(&mas, 0);
	MT_BUG_ON(mt, (mas.index != 0) && (mas.last != 0));
	MT_BUG_ON(mt, ptr != (void *)((unsigned long) check_prev_entry | 2UL));

	mas_unlock(&mas);
}

static noinline void check_gap_combining(struct maple_tree *mt)
{
	struct maple_enode *mn1, *mn2;
	void *entry;
	unsigned long singletons = 100;
	unsigned long *seq100;
	unsigned long seq100_64[] = {
		/* 0-5 */
		74, 75, 76,
		50, 100, 2,

		/* 6-12 */
		44, 45, 46, 43,
		20, 50, 3,

		/* 13-20 */
		80, 81, 82,
		76, 2, 79, 85, 4,
	};

	unsigned long seq100_32[] = {
		/* 0-5 */
		61, 62, 63,
		50, 100, 2,

		/* 6-12 */
		31, 32, 33, 30,
		20, 50, 3,

		/* 13-20 */
		80, 81, 82,
		76, 2, 79, 85, 4,
	};

	unsigned long seq2000[] = {
		1152, 1151,
		1100, 1200, 2,
	};
	unsigned long seq400[] = {
		286, 318,
		256, 260, 266, 270, 275, 280, 290, 398,
		286, 310,
	};

	unsigned long index;

	MA_STATE(mas, mt, 0, 0);

	if (MAPLE_32BIT)
		seq100 = seq100_32;
	else
		seq100 = seq100_64;

	index = seq100[0];
	mas_set(&mas, index);
	MT_BUG_ON(mt, !mtree_empty(mt));
	check_seq(mt, singletons, false); /* create 100 singletons. */

	mt_set_non_kernel(1);
	mtree_test_erase(mt, seq100[2]);
	check_load(mt, seq100[2], NULL);
	mtree_test_erase(mt, seq100[1]);
	check_load(mt, seq100[1], NULL);

	rcu_read_lock();
	entry = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index));
	mn1 = mas.node;
	mas_next(&mas, ULONG_MAX);
	entry = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index + 4));
	mn2 = mas.node;
	MT_BUG_ON(mt, mn1 == mn2); /* test the test. */

	/*
	 * At this point, there is a gap of 2 at index + 1 between seq100[3]
	 * and seq100[4]. Search for the gap.
	 */
	mt_set_non_kernel(1);
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[3], seq100[4],
					     seq100[5]));
	MT_BUG_ON(mt, mas.index != index + 1);
	rcu_read_unlock();

	mtree_test_erase(mt, seq100[6]);
	check_load(mt, seq100[6], NULL);
	mtree_test_erase(mt, seq100[7]);
	check_load(mt, seq100[7], NULL);
	mtree_test_erase(mt, seq100[8]);
	index = seq100[9];

	rcu_read_lock();
	mas.index = index;
	mas.last = index;
	mas_reset(&mas);
	entry = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index));
	mn1 = mas.node;
	entry = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index + 4));
	mas_next(&mas, ULONG_MAX); /* go to the next entry. */
	mn2 = mas.node;
	MT_BUG_ON(mt, mn1 == mn2); /* test the next entry is in the next node. */

	/*
	 * At this point, there is a gap of 3 at seq100[6].  Find it by
	 * searching 20 - 50 for size 3.
	 */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[10], seq100[11],
					     seq100[12]));
	MT_BUG_ON(mt, mas.index != seq100[6]);
	rcu_read_unlock();

	mt_set_non_kernel(1);
	mtree_store(mt, seq100[13], NULL, GFP_KERNEL);
	check_load(mt, seq100[13], NULL);
	check_load(mt, seq100[14], xa_mk_value(seq100[14]));
	mtree_store(mt, seq100[14], NULL, GFP_KERNEL);
	check_load(mt, seq100[13], NULL);
	check_load(mt, seq100[14], NULL);

	mas_reset(&mas);
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[16], seq100[15],
					     seq100[17]));
	MT_BUG_ON(mt, mas.index != seq100[13]);
	mt_validate(mt);
	rcu_read_unlock();

	/*
	 * Test a retry entry at the start of a gap.  (Deprecated: retry
	 * entries are no longer used.)
	 */
	mt_set_non_kernel(2);
	mtree_test_store_range(mt, seq100[18], seq100[14], NULL);
	mtree_test_erase(mt, seq100[15]);
	mas_reset(&mas);
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[16], seq100[19],
					     seq100[20]));
	rcu_read_unlock();
	MT_BUG_ON(mt, mas.index != seq100[18]);
	mt_validate(mt);
	mtree_destroy(mt);

	/* seq 2000 tests are for multi-level tree gaps */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	check_seq(mt, 2000, false);
	mt_set_non_kernel(1);
	mtree_test_erase(mt, seq2000[0]);
	mtree_test_erase(mt, seq2000[1]);

	mt_set_non_kernel(2);
	mas_reset(&mas);
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq2000[2], seq2000[3],
					     seq2000[4]));
	MT_BUG_ON(mt, mas.index != seq2000[1]);
	rcu_read_unlock();
	mt_validate(mt);
	mtree_destroy(mt);

	/* seq 400 tests rebalancing over two levels. */
	mt_set_non_kernel(99);
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	check_seq(mt, 400, false);
	mtree_test_store_range(mt, seq400[0], seq400[1], NULL);
	mt_set_non_kernel(0);
	mtree_destroy(mt);

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	check_seq(mt, 400, false);
	mt_set_non_kernel(50);
	mtree_test_store_range(mt, seq400[2], seq400[9],
			       xa_mk_value(seq400[2]));
	mtree_test_store_range(mt, seq400[3], seq400[9],
			       xa_mk_value(seq400[3]));
	mtree_test_store_range(mt, seq400[4], seq400[9],
			       xa_mk_value(seq400[4]));
	mtree_test_store_range(mt, seq400[5], seq400[9],
			       xa_mk_value(seq400[5]));
	mtree_test_store_range(mt, seq400[0], seq400[9],
			       xa_mk_value(seq400[0]));
	mtree_test_store_range(mt, seq400[6], seq400[9],
			       xa_mk_value(seq400[6]));
	mtree_test_store_range(mt, seq400[7], seq400[9],
			       xa_mk_value(seq400[7]));
	mtree_test_store_range(mt, seq400[8], seq400[9],
			       xa_mk_value(seq400[8]));
	mtree_test_store_range(mt, seq400[10], seq400[11],
			       xa_mk_value(seq400[10]));
	mt_validate(mt);
	mt_set_non_kernel(0);
	mtree_destroy(mt);
}

static noinline void check_node_overwrite(struct maple_tree *mt)
{
	int i, max = 4000;

	for (i = 0; i < max; i++)
		mtree_test_store_range(mt, i*100, i*100 + 50, xa_mk_value(i*100));

	mtree_test_store_range(mt, 319951, 367950, NULL);
	/*mt_dump(mt); */
	mt_validate(mt);
}

#if defined(BENCH_SLOT_STORE)
static noinline void bench_slot_store(struct maple_tree *mt)
{
	int i, brk = 105, max = 1040, brk_start = 100, count = 20000000;

	for (i = 0; i < max; i += 10)
		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);

	for (i = 0; i < count; i++) {
		mtree_store_range(mt, brk, brk, NULL, GFP_KERNEL);
		mtree_store_range(mt, brk_start, brk, xa_mk_value(brk),
				  GFP_KERNEL);
	}
}
#endif

#if defined(BENCH_NODE_STORE)
static noinline void bench_node_store(struct maple_tree *mt)
{
	int i, overwrite = 76, max = 240, count = 20000000;

	for (i = 0; i < max; i += 10)
		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);

	for (i = 0; i < count; i++) {
		mtree_store_range(mt, overwrite,  overwrite + 15,
				  xa_mk_value(overwrite), GFP_KERNEL);

		overwrite += 5;
		if (overwrite >= 135)
			overwrite = 76;
	}
}
#endif

#if defined(BENCH_AWALK)
static noinline void bench_awalk(struct maple_tree *mt)
{
	int i, max = 2500, count = 50000000;
	MA_STATE(mas, mt, 1470, 1470);

	for (i = 0; i < max; i += 10)
		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);

	mtree_store_range(mt, 1470, 1475, NULL, GFP_KERNEL);

	for (i = 0; i < count; i++) {
		mas_empty_area_rev(&mas, 0, 2000, 10);
		mas_reset(&mas);
	}
}
#endif
#if defined(BENCH_WALK)
static noinline void bench_walk(struct maple_tree *mt)
{
	int i, max = 2500, count = 550000000;
	MA_STATE(mas, mt, 1470, 1470);

	for (i = 0; i < max; i += 10)
		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);

	for (i = 0; i < count; i++) {
		mas_walk(&mas);
		mas_reset(&mas);
	}
}
#endif

#if defined(BENCH_MT_FOR_EACH)
static noinline void bench_mt_for_each(struct maple_tree *mt)
{
	int i, count = 1000000;
	unsigned long max = 2500, index = 0;
	void *entry;

	for (i = 0; i < max; i += 5)
		mtree_store_range(mt, i, i + 4, xa_mk_value(i), GFP_KERNEL);

	for (i = 0; i < count; i++) {
		unsigned long j = 0;

		mt_for_each(mt, entry, index, max) {
			MT_BUG_ON(mt, entry != xa_mk_value(j));
			j += 5;
		}

		index = 0;
	}
}
#endif

/* check_forking - simulate the kernel forking sequence with the tree. */
static noinline void check_forking(struct maple_tree *mt)
{
	struct maple_tree newmt;
	int i, nr_entries = 134;
	void *val;
	MA_STATE(mas, mt, 0, 0);
	MA_STATE(newmas, mt, 0, 0);

	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 5,
				  xa_mk_value(i), GFP_KERNEL);

	mt_set_non_kernel(99999);
	mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
	newmas.tree = &newmt;
	mas_reset(&newmas);
	mas_reset(&mas);
	mas_lock(&newmas);
	mas.index = 0;
	mas.last = 0;
	if (mas_expected_entries(&newmas, nr_entries)) {
		pr_err("OOM!\n");
		BUG_ON(1);
	}
	rcu_read_lock();
	mas_for_each(&mas, val, ULONG_MAX) {
		newmas.index = mas.index;
		newmas.last = mas.last;
		mas_store(&newmas, val);
	}
	rcu_read_unlock();
	mas_destroy(&newmas);
	mas_unlock(&newmas);
	mt_validate(&newmt);
	mt_set_non_kernel(0);
	mtree_destroy(&newmt);
}

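/*
 * check_iteration() - overwrite entries from within a live mas_for_each()
 * loop and confirm the iterator continues at the expected entry.
 */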
static noinline void check_iteration(struct maple_tree *mt)
{
	int i, nr_entries = 125;
	void *val;
	MA_STATE(mas, mt, 0, 0);

	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i * 10, i * 10 + 9,
				  xa_mk_value(i), GFP_KERNEL);

	mt_set_non_kernel(99999);

	i = 0;
	mas_lock(&mas);
	mas_for_each(&mas, val, 925) {
		MT_BUG_ON(mt, mas.index != i * 10);
		MT_BUG_ON(mt, mas.last != i * 10 + 9);
		/* Overwrite end of entry 92 */
		if (i == 92) {
			mas.index = 925;
			mas.last = 929;
			mas_store(&mas, val);
		}
		i++;
	}
	/* Ensure mas_find() gets the next value */
	val = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, val != xa_mk_value(i));

	mas_set(&mas, 0);
	i = 0;
	mas_for_each(&mas, val, 785) {
		MT_BUG_ON(mt, mas.index != i * 10);
		MT_BUG_ON(mt, mas.last != i * 10 + 9);
		/* Overwrite start of entry 78 */
		if (i == 78) {
			mas.index = 780;
			mas.last = 785;
			mas_store(&mas, val);
		} else {
			i++;
		}
	}
	val = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, val != xa_mk_value(i));

	mas_set(&mas, 0);
	i = 0;
	mas_for_each(&mas, val, 765) {
		MT_BUG_ON(mt, mas.index != i * 10);
		MT_BUG_ON(mt, mas.last != i * 10 + 9);
		/* Overwrite end of entry 76 and advance to the end */
		if (i == 76) {
			mas.index = 760;
			mas.last = 765;
			mas_store(&mas, val);
			mas_next(&mas, ULONG_MAX);
		}
		i++;
	}
	/* Make sure the next find returns the one after 765, 766-769 */
	val = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, val != xa_mk_value(76));
	mas_unlock(&mas);
	mas_destroy(&mas);
	mt_set_non_kernel(0);
}

static noinline void check_mas_store_gfp(struct maple_tree *mt)
{
	struct maple_tree newmt;
	int i, nr_entries = 135;
	void *val;
	MA_STATE(mas, mt, 0, 0);
	MA_STATE(newmas, mt, 0, 0);

	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 5,
				  xa_mk_value(i), GFP_KERNEL);

	mt_set_non_kernel(99999);
	mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
	newmas.tree = &newmt;
	rcu_read_lock();
	mas_lock(&newmas);
	mas_reset(&newmas);
	mas_set(&mas, 0);
	mas_for_each(&mas, val, ULONG_MAX) {
		newmas.index = mas.index;
		newmas.last = mas.last;
		mas_store_gfp(&newmas, val, GFP_KERNEL);
	}
	mas_unlock(&newmas);
	rcu_read_unlock();
	mt_validate(&newmt);
	mt_set_non_kernel(0);
	mtree_destroy(&newmt);
}

#if defined(BENCH_FORK)
static noinline void bench_forking(struct maple_tree *mt)
{
	struct maple_tree newmt;
	int i, nr_entries = 134, nr_fork = 80000;
	void *val;
	MA_STATE(mas, mt, 0, 0);
	MA_STATE(newmas, mt, 0, 0);

	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 5,
				  xa_mk_value(i), GFP_KERNEL);

	for (i = 0; i < nr_fork; i++) {
		mt_set_non_kernel(99999);
		mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
		newmas.tree = &newmt;
		mas_reset(&newmas);
		mas_reset(&mas);
		mas.index = 0;
		mas.last = 0;
		rcu_read_lock();
		mas_lock(&newmas);
		if (mas_expected_entries(&newmas, nr_entries)) {
			printk("OOM!\n");
			BUG_ON(1);
		}
		mas_for_each(&mas, val, ULONG_MAX) {
			newmas.index = mas.index;
			newmas.last = mas.last;
			mas_store(&newmas, val);
		}
		mas_destroy(&newmas);
		mas_unlock(&newmas);
		rcu_read_unlock();
		mt_validate(&newmt);
		mt_set_non_kernel(0);
		mtree_destroy(&newmt);
	}
}
#endif

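/*
 * next_prev_test() - walk forward and backward with mas_next()/mas_prev()
 * across node and level boundaries, including running off both ends of
 * the tree.
 */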
1855 static noinline void next_prev_test(struct maple_tree *mt)
1856 {
1857 	int i, nr_entries;
1858 	void *val;
1859 	MA_STATE(mas, mt, 0, 0);
1860 	struct maple_enode *mn;
1861 	unsigned long *level2;
1862 	unsigned long level2_64[] = {707, 1000, 710, 715, 720, 725};
1863 	unsigned long level2_32[] = {1747, 2000, 1750, 1755, 1760, 1765};
1864 
1865 	if (MAPLE_32BIT) {
1866 		nr_entries = 500;
1867 		level2 = level2_32;
1868 	} else {
1869 		nr_entries = 200;
1870 		level2 = level2_64;
1871 	}
1872 
1873 	for (i = 0; i <= nr_entries; i++)
1874 		mtree_store_range(mt, i*10, i*10 + 5,
1875 				  xa_mk_value(i), GFP_KERNEL);
1876 
1877 	mas_lock(&mas);
1878 	for (i = 0; i <= nr_entries / 2; i++) {
1879 		mas_next(&mas, 1000);
1880 		if (mas_is_none(&mas))
1881 			break;
1882 
1883 	}
1884 	mas_reset(&mas);
1885 	mas_set(&mas, 0);
1886 	i = 0;
1887 	mas_for_each(&mas, val, 1000) {
1888 		i++;
1889 	}
1890 
1891 	mas_reset(&mas);
1892 	mas_set(&mas, 0);
1893 	i = 0;
1894 	mas_for_each(&mas, val, 1000) {
1895 		mas_pause(&mas);
1896 		i++;
1897 	}
1898 
1899 	/*
1900 	 * 680 - 685 = 0x61a00001930c
1901 	 * 686 - 689 = NULL;
1902 	 * 690 - 695 = 0x61a00001930c
1903 	 * Check simple next/prev
1904 	 */
1905 	mas_set(&mas, 686);
1906 	val = mas_walk(&mas);
1907 	MT_BUG_ON(mt, val != NULL);
1908 
1909 	val = mas_next(&mas, 1000);
1910 	MT_BUG_ON(mt, val != xa_mk_value(690 / 10));
1911 	MT_BUG_ON(mt, mas.index != 690);
1912 	MT_BUG_ON(mt, mas.last != 695);
1913 
1914 	val = mas_prev(&mas, 0);
1915 	MT_BUG_ON(mt, val != xa_mk_value(680 / 10));
1916 	MT_BUG_ON(mt, mas.index != 680);
1917 	MT_BUG_ON(mt, mas.last != 685);
1918 
1919 	val = mas_next(&mas, 1000);
1920 	MT_BUG_ON(mt, val != xa_mk_value(690 / 10));
1921 	MT_BUG_ON(mt, mas.index != 690);
1922 	MT_BUG_ON(mt, mas.last != 695);
1923 
1924 	val = mas_next(&mas, 1000);
1925 	MT_BUG_ON(mt, val != xa_mk_value(700 / 10));
1926 	MT_BUG_ON(mt, mas.index != 700);
1927 	MT_BUG_ON(mt, mas.last != 705);
1928 
1929 	/* Check across node boundaries of the tree */
1930 	mas_set(&mas, 70);
1931 	val = mas_walk(&mas);
1932 	MT_BUG_ON(mt, val != xa_mk_value(70 / 10));
1933 	MT_BUG_ON(mt, mas.index != 70);
1934 	MT_BUG_ON(mt, mas.last != 75);
1935 
1936 	val = mas_next(&mas, 1000);
1937 	MT_BUG_ON(mt, val != xa_mk_value(80 / 10));
1938 	MT_BUG_ON(mt, mas.index != 80);
1939 	MT_BUG_ON(mt, mas.last != 85);
1940 
1941 	val = mas_prev(&mas, 70);
1942 	MT_BUG_ON(mt, val != xa_mk_value(70 / 10));
1943 	MT_BUG_ON(mt, mas.index != 70);
1944 	MT_BUG_ON(mt, mas.last != 75);
1945 
1946 	/* Check across two levels of the tree */
1947 	mas_reset(&mas);
1948 	mas_set(&mas, level2[0]);
1949 	val = mas_walk(&mas);
1950 	MT_BUG_ON(mt, val != NULL);
1951 	val = mas_next(&mas, level2[1]);
1952 	MT_BUG_ON(mt, val != xa_mk_value(level2[2] / 10));
1953 	MT_BUG_ON(mt, mas.index != level2[2]);
1954 	MT_BUG_ON(mt, mas.last != level2[3]);
1955 	mn = mas.node;
1956 
1957 	val = mas_next(&mas, level2[1]);
1958 	MT_BUG_ON(mt, val != xa_mk_value(level2[4] / 10));
1959 	MT_BUG_ON(mt, mas.index != level2[4]);
1960 	MT_BUG_ON(mt, mas.last != level2[5]);
1961 	MT_BUG_ON(mt, mn == mas.node);
1962 
1963 	val = mas_prev(&mas, 0);
1964 	MT_BUG_ON(mt, val != xa_mk_value(level2[2] / 10));
1965 	MT_BUG_ON(mt, mas.index != level2[2]);
1966 	MT_BUG_ON(mt, mas.last != level2[3]);
1967 
1968 	/* Check running off the end and back on */
1969 	mas_set(&mas, nr_entries * 10);
1970 	val = mas_walk(&mas);
1971 	MT_BUG_ON(mt, val != xa_mk_value(nr_entries));
1972 	MT_BUG_ON(mt, mas.index != (nr_entries * 10));
1973 	MT_BUG_ON(mt, mas.last != (nr_entries * 10 + 5));
1974 
1975 	val = mas_next(&mas, ULONG_MAX);
1976 	MT_BUG_ON(mt, val != NULL);
1977 	MT_BUG_ON(mt, mas.index != ULONG_MAX);
1978 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
1979 
1980 	val = mas_prev(&mas, 0);
1981 	MT_BUG_ON(mt, val != xa_mk_value(nr_entries));
1982 	MT_BUG_ON(mt, mas.index != (nr_entries * 10));
1983 	MT_BUG_ON(mt, mas.last != (nr_entries * 10 + 5));
1984 
1985 	/* Check running off the start and back on */
1986 	mas_reset(&mas);
1987 	mas_set(&mas, 10);
1988 	val = mas_walk(&mas);
1989 	MT_BUG_ON(mt, val != xa_mk_value(1));
1990 	MT_BUG_ON(mt, mas.index != 10);
1991 	MT_BUG_ON(mt, mas.last != 15);
1992 
1993 	val = mas_prev(&mas, 0);
1994 	MT_BUG_ON(mt, val != xa_mk_value(0));
1995 	MT_BUG_ON(mt, mas.index != 0);
1996 	MT_BUG_ON(mt, mas.last != 5);
1997 
1998 	val = mas_prev(&mas, 0);
1999 	MT_BUG_ON(mt, val != NULL);
2000 	MT_BUG_ON(mt, mas.index != 0);
2001 	MT_BUG_ON(mt, mas.last != 0);
2002 
2003 	mas.index = 0;
2004 	mas.last = 5;
2005 	mas_store(&mas, NULL);
2006 	mas_reset(&mas);
2007 	mas_set(&mas, 10);
2008 	mas_walk(&mas);
2009 
2010 	val = mas_prev(&mas, 0);
2011 	MT_BUG_ON(mt, val != NULL);
2012 	MT_BUG_ON(mt, mas.index != 0);
2013 	MT_BUG_ON(mt, mas.last != 0);
2014 	mas_unlock(&mas);
2015 
2016 	mtree_destroy(mt);
2017 
2018 	mt_init(mt);
2019 	mtree_store_range(mt, 0, 0, xa_mk_value(0), GFP_KERNEL);
2020 	mtree_store_range(mt, 5, 5, xa_mk_value(5), GFP_KERNEL);
2021 	rcu_read_lock();
2022 	mas_set(&mas, 5);
2023 	val = mas_prev(&mas, 4);
2024 	MT_BUG_ON(mt, val != NULL);
2025 	rcu_read_unlock();
2026 }
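
/*
 * Illustrative sketch, not part of the original suite: the core
 * mas_next()/mas_prev() contract exercised above, reduced to two entries.
 * The function name is hypothetical; only the public mas_* API is assumed.
 */
static __maybe_unused void example_next_prev(struct maple_tree *mt)
{
	void *val;
	MA_STATE(mas, mt, 0, 0);

	mtree_store_range(mt, 10, 15, xa_mk_value(1), GFP_KERNEL);
	mtree_store_range(mt, 20, 25, xa_mk_value(2), GFP_KERNEL);

	mas_lock(&mas);
	val = mas_walk(&mas);			/* index 0 is empty */
	MT_BUG_ON(mt, val != NULL);

	val = mas_next(&mas, ULONG_MAX);	/* advances to [10, 15] */
	MT_BUG_ON(mt, val != xa_mk_value(1));
	MT_BUG_ON(mt, mas.index != 10 || mas.last != 15);

	val = mas_next(&mas, ULONG_MAX);	/* advances to [20, 25] */
	MT_BUG_ON(mt, val != xa_mk_value(2));

	val = mas_prev(&mas, 0);		/* steps back to [10, 15] */
	MT_BUG_ON(mt, val != xa_mk_value(1));
	mas_unlock(&mas);
}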
2027 
2030 /* Test spanning writes that require balancing right sibling or right cousin */
2031 static noinline void check_spanning_relatives(struct maple_tree *mt)
2032 {
2034 	unsigned long i, nr_entries = 1000;
2035 
2036 	for (i = 0; i <= nr_entries; i++)
2037 		mtree_store_range(mt, i*10, i*10 + 5,
2038 				  xa_mk_value(i), GFP_KERNEL);
2039 
2041 	mtree_store_range(mt, 9365, 9955, NULL, GFP_KERNEL);
2042 }
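
/*
 * Illustrative sketch, not part of the original suite: a spanning store is
 * simply a store whose range crosses existing entries (and, once the tree
 * is tall enough, node boundaries); a single call replaces everything it
 * covers.  The function name is hypothetical.
 */
static __maybe_unused void example_spanning_store(struct maple_tree *mt)
{
	mtree_store_range(mt, 0, 5, xa_mk_value(0), GFP_KERNEL);
	mtree_store_range(mt, 10, 15, xa_mk_value(1), GFP_KERNEL);
	mtree_store_range(mt, 20, 25, xa_mk_value(2), GFP_KERNEL);

	/* One store spanning all three ranges overwrites them all. */
	mtree_store_range(mt, 0, 30, xa_mk_value(9), GFP_KERNEL);
	MT_BUG_ON(mt, mtree_load(mt, 12) != xa_mk_value(9));
	MT_BUG_ON(mt, mtree_load(mt, 31) != NULL);
}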
2043 
2044 static noinline void check_fuzzer(struct maple_tree *mt)
2045 {
2046 	/*
2047 	 * 1. Causes a spanning rebalance of a single root node.
2048 	 * Fixed by setting the correct limit in mast_cp_to_nodes() when the
2049 	 * entire right side is consumed.
2050 	 */
2051 	mtree_test_insert(mt, 88, (void *)0xb1);
2052 	mtree_test_insert(mt, 84, (void *)0xa9);
2053 	mtree_test_insert(mt, 2,  (void *)0x5);
2054 	mtree_test_insert(mt, 4,  (void *)0x9);
2055 	mtree_test_insert(mt, 14, (void *)0x1d);
2056 	mtree_test_insert(mt, 7,  (void *)0xf);
2057 	mtree_test_insert(mt, 12, (void *)0x19);
2058 	mtree_test_insert(mt, 18, (void *)0x25);
2059 	mtree_test_store_range(mt, 8, 18, (void *)0x11);
2060 	mtree_destroy(mt);
2061 
	/*
	 * 2. Causes a spanning rebalance of two nodes in the root.
	 * Fixed by setting mast->r->max correctly.
	 */
2067 	mt_init_flags(mt, 0);
2068 	mtree_test_store(mt, 87, (void *)0xaf);
2069 	mtree_test_store(mt, 0, (void *)0x1);
2070 	mtree_test_load(mt, 4);
2071 	mtree_test_insert(mt, 4, (void *)0x9);
2072 	mtree_test_store(mt, 8, (void *)0x11);
2073 	mtree_test_store(mt, 44, (void *)0x59);
2074 	mtree_test_store(mt, 68, (void *)0x89);
2075 	mtree_test_store(mt, 2, (void *)0x5);
2076 	mtree_test_insert(mt, 43, (void *)0x57);
2077 	mtree_test_insert(mt, 24, (void *)0x31);
2078 	mtree_test_insert(mt, 844, (void *)0x699);
2079 	mtree_test_store(mt, 84, (void *)0xa9);
2080 	mtree_test_store(mt, 4, (void *)0x9);
2081 	mtree_test_erase(mt, 4);
2082 	mtree_test_load(mt, 5);
2083 	mtree_test_erase(mt, 0);
2084 	mtree_destroy(mt);
2085 
	/*
	 * 3. Causes a node overflow on copy.
	 * Fixed by using the correct check for node size in mas_wr_modify().
	 * Also discovered an issue with metadata setting.
	 */
2091 	mt_init_flags(mt, 0);
2092 	mtree_test_store_range(mt, 0, ULONG_MAX, (void *)0x1);
2093 	mtree_test_store(mt, 4, (void *)0x9);
2094 	mtree_test_erase(mt, 5);
2095 	mtree_test_erase(mt, 0);
2096 	mtree_test_erase(mt, 4);
2097 	mtree_test_store(mt, 5, (void *)0xb);
2098 	mtree_test_erase(mt, 5);
2099 	mtree_test_store(mt, 5, (void *)0xb);
2100 	mtree_test_erase(mt, 5);
2101 	mtree_test_erase(mt, 4);
2102 	mtree_test_store(mt, 4, (void *)0x9);
2103 	mtree_test_store(mt, 444, (void *)0x379);
2104 	mtree_test_store(mt, 0, (void *)0x1);
2105 	mtree_test_load(mt, 0);
2106 	mtree_test_store(mt, 5, (void *)0xb);
2107 	mtree_test_erase(mt, 0);
2108 	mtree_destroy(mt);
2109 
	/*
	 * 4. Spanning store failure due to writing an incorrect pivot value
	 * at the last slot.
	 * Fixed by setting mast->r->max correctly in mast_cp_to_nodes().
	 */
2116 	mt_init_flags(mt, 0);
2117 	mtree_test_insert(mt, 261, (void *)0x20b);
2118 	mtree_test_store(mt, 516, (void *)0x409);
2119 	mtree_test_store(mt, 6, (void *)0xd);
2120 	mtree_test_insert(mt, 5, (void *)0xb);
2121 	mtree_test_insert(mt, 1256, (void *)0x9d1);
2122 	mtree_test_store(mt, 4, (void *)0x9);
2123 	mtree_test_erase(mt, 1);
2124 	mtree_test_store(mt, 56, (void *)0x71);
2125 	mtree_test_insert(mt, 1, (void *)0x3);
2126 	mtree_test_store(mt, 24, (void *)0x31);
2127 	mtree_test_erase(mt, 1);
2128 	mtree_test_insert(mt, 2263, (void *)0x11af);
2129 	mtree_test_insert(mt, 446, (void *)0x37d);
2130 	mtree_test_store_range(mt, 6, 45, (void *)0xd);
2131 	mtree_test_store_range(mt, 3, 446, (void *)0x7);
2132 	mtree_destroy(mt);
2133 
	/*
	 * 5. mas_wr_extend_null() may overflow slots.
	 * Fixed by checking against wr_mas->node_end.
	 */
2138 	mt_init_flags(mt, 0);
2139 	mtree_test_store(mt, 48, (void *)0x61);
2140 	mtree_test_store(mt, 3, (void *)0x7);
2141 	mtree_test_load(mt, 0);
2142 	mtree_test_store(mt, 88, (void *)0xb1);
2143 	mtree_test_store(mt, 81, (void *)0xa3);
2144 	mtree_test_insert(mt, 0, (void *)0x1);
2145 	mtree_test_insert(mt, 8, (void *)0x11);
2146 	mtree_test_insert(mt, 4, (void *)0x9);
2147 	mtree_test_insert(mt, 2480, (void *)0x1361);
2148 	mtree_test_insert(mt, ULONG_MAX,
2149 			  (void *)0xffffffffffffffff);
2150 	mtree_test_erase(mt, ULONG_MAX);
2151 	mtree_destroy(mt);
2152 
	/*
	 * 6. When reusing a node with an implied pivot and the node is
	 * shrinking, old data would be left in the implied slot.
	 * Fixed by checking the last pivot against mas->max and clearing
	 * accordingly.  This only affected the left-most node, as that is
	 * the only node allowed to end in NULL.
	 */
2160 	mt_init_flags(mt, 0);
2161 	mtree_test_erase(mt, 3);
2162 	mtree_test_insert(mt, 22, (void *)0x2d);
2163 	mtree_test_insert(mt, 15, (void *)0x1f);
2164 	mtree_test_load(mt, 2);
2165 	mtree_test_insert(mt, 1, (void *)0x3);
2166 	mtree_test_insert(mt, 1, (void *)0x3);
2167 	mtree_test_insert(mt, 5, (void *)0xb);
2168 	mtree_test_erase(mt, 1);
2169 	mtree_test_insert(mt, 1, (void *)0x3);
2170 	mtree_test_insert(mt, 4, (void *)0x9);
2171 	mtree_test_insert(mt, 1, (void *)0x3);
2172 	mtree_test_erase(mt, 1);
2173 	mtree_test_insert(mt, 2, (void *)0x5);
2174 	mtree_test_insert(mt, 1, (void *)0x3);
2175 	mtree_test_erase(mt, 3);
2176 	mtree_test_insert(mt, 22, (void *)0x2d);
2177 	mtree_test_insert(mt, 15, (void *)0x1f);
2178 	mtree_test_insert(mt, 2, (void *)0x5);
2179 	mtree_test_insert(mt, 1, (void *)0x3);
2180 	mtree_test_insert(mt, 8, (void *)0x11);
2181 	mtree_test_load(mt, 2);
2182 	mtree_test_insert(mt, 1, (void *)0x3);
2183 	mtree_test_insert(mt, 1, (void *)0x3);
2184 	mtree_test_store(mt, 1, (void *)0x3);
2185 	mtree_test_insert(mt, 5, (void *)0xb);
2186 	mtree_test_erase(mt, 1);
2187 	mtree_test_insert(mt, 1, (void *)0x3);
2188 	mtree_test_insert(mt, 4, (void *)0x9);
2189 	mtree_test_insert(mt, 1, (void *)0x3);
2190 	mtree_test_erase(mt, 1);
2191 	mtree_test_insert(mt, 2, (void *)0x5);
2192 	mtree_test_insert(mt, 1, (void *)0x3);
2193 	mtree_test_erase(mt, 3);
2194 	mtree_test_insert(mt, 22, (void *)0x2d);
2195 	mtree_test_insert(mt, 15, (void *)0x1f);
2196 	mtree_test_insert(mt, 2, (void *)0x5);
2197 	mtree_test_insert(mt, 1, (void *)0x3);
2198 	mtree_test_insert(mt, 8, (void *)0x11);
2199 	mtree_test_insert(mt, 12, (void *)0x19);
2200 	mtree_test_erase(mt, 1);
2201 	mtree_test_store_range(mt, 4, 62, (void *)0x9);
2202 	mtree_test_erase(mt, 62);
2203 	mtree_test_store_range(mt, 1, 0, (void *)0x3);
2204 	mtree_test_insert(mt, 11, (void *)0x17);
2205 	mtree_test_insert(mt, 3, (void *)0x7);
2206 	mtree_test_insert(mt, 3, (void *)0x7);
2207 	mtree_test_store(mt, 62, (void *)0x7d);
2208 	mtree_test_erase(mt, 62);
2209 	mtree_test_store_range(mt, 1, 15, (void *)0x3);
2210 	mtree_test_erase(mt, 1);
2211 	mtree_test_insert(mt, 22, (void *)0x2d);
2212 	mtree_test_insert(mt, 12, (void *)0x19);
2213 	mtree_test_erase(mt, 1);
2214 	mtree_test_insert(mt, 3, (void *)0x7);
2215 	mtree_test_store(mt, 62, (void *)0x7d);
2216 	mtree_test_erase(mt, 62);
2217 	mtree_test_insert(mt, 122, (void *)0xf5);
2218 	mtree_test_store(mt, 3, (void *)0x7);
2219 	mtree_test_insert(mt, 0, (void *)0x1);
2220 	mtree_test_store_range(mt, 0, 1, (void *)0x1);
2221 	mtree_test_insert(mt, 85, (void *)0xab);
2222 	mtree_test_insert(mt, 72, (void *)0x91);
2223 	mtree_test_insert(mt, 81, (void *)0xa3);
2224 	mtree_test_insert(mt, 726, (void *)0x5ad);
2225 	mtree_test_insert(mt, 0, (void *)0x1);
2226 	mtree_test_insert(mt, 1, (void *)0x3);
2227 	mtree_test_store(mt, 51, (void *)0x67);
2228 	mtree_test_insert(mt, 611, (void *)0x4c7);
2229 	mtree_test_insert(mt, 485, (void *)0x3cb);
2230 	mtree_test_insert(mt, 1, (void *)0x3);
2231 	mtree_test_erase(mt, 1);
2232 	mtree_test_insert(mt, 0, (void *)0x1);
2233 	mtree_test_insert(mt, 1, (void *)0x3);
2234 	mtree_test_insert_range(mt, 26, 1, (void *)0x35);
2235 	mtree_test_load(mt, 1);
2236 	mtree_test_store_range(mt, 1, 22, (void *)0x3);
2237 	mtree_test_insert(mt, 1, (void *)0x3);
2238 	mtree_test_erase(mt, 1);
2239 	mtree_test_load(mt, 53);
2240 	mtree_test_load(mt, 1);
2241 	mtree_test_store_range(mt, 1, 1, (void *)0x3);
2242 	mtree_test_insert(mt, 222, (void *)0x1bd);
2243 	mtree_test_insert(mt, 485, (void *)0x3cb);
2244 	mtree_test_insert(mt, 1, (void *)0x3);
2245 	mtree_test_erase(mt, 1);
2246 	mtree_test_load(mt, 0);
2247 	mtree_test_insert(mt, 21, (void *)0x2b);
2248 	mtree_test_insert(mt, 3, (void *)0x7);
2249 	mtree_test_store(mt, 621, (void *)0x4db);
2250 	mtree_test_insert(mt, 0, (void *)0x1);
2251 	mtree_test_erase(mt, 5);
2252 	mtree_test_insert(mt, 1, (void *)0x3);
2253 	mtree_test_store(mt, 62, (void *)0x7d);
2254 	mtree_test_erase(mt, 62);
2255 	mtree_test_store_range(mt, 1, 0, (void *)0x3);
2256 	mtree_test_insert(mt, 22, (void *)0x2d);
2257 	mtree_test_insert(mt, 12, (void *)0x19);
2258 	mtree_test_erase(mt, 1);
2259 	mtree_test_insert(mt, 1, (void *)0x3);
2260 	mtree_test_store_range(mt, 4, 62, (void *)0x9);
2261 	mtree_test_erase(mt, 62);
2262 	mtree_test_erase(mt, 1);
2263 	mtree_test_load(mt, 1);
2264 	mtree_test_store_range(mt, 1, 22, (void *)0x3);
2265 	mtree_test_insert(mt, 1, (void *)0x3);
2266 	mtree_test_erase(mt, 1);
2267 	mtree_test_load(mt, 53);
2268 	mtree_test_load(mt, 1);
2269 	mtree_test_store_range(mt, 1, 1, (void *)0x3);
2270 	mtree_test_insert(mt, 222, (void *)0x1bd);
2271 	mtree_test_insert(mt, 485, (void *)0x3cb);
2272 	mtree_test_insert(mt, 1, (void *)0x3);
2273 	mtree_test_erase(mt, 1);
2274 	mtree_test_insert(mt, 1, (void *)0x3);
2275 	mtree_test_load(mt, 0);
2276 	mtree_test_load(mt, 0);
2277 	mtree_destroy(mt);
2278 
	/*
	 * 7. The previous fix was incomplete; fix mas_reuse_node() clearing of
	 * old data by overwriting it first - that way the metadata is of no
	 * concern.
	 */
2283 	mt_init_flags(mt, 0);
2284 	mtree_test_load(mt, 1);
2285 	mtree_test_insert(mt, 102, (void *)0xcd);
2286 	mtree_test_erase(mt, 2);
2287 	mtree_test_erase(mt, 0);
2288 	mtree_test_load(mt, 0);
2289 	mtree_test_insert(mt, 4, (void *)0x9);
2290 	mtree_test_insert(mt, 2, (void *)0x5);
2291 	mtree_test_insert(mt, 110, (void *)0xdd);
2292 	mtree_test_insert(mt, 1, (void *)0x3);
2293 	mtree_test_insert_range(mt, 5, 0, (void *)0xb);
2294 	mtree_test_erase(mt, 2);
2295 	mtree_test_store(mt, 0, (void *)0x1);
2296 	mtree_test_store(mt, 112, (void *)0xe1);
2297 	mtree_test_insert(mt, 21, (void *)0x2b);
2298 	mtree_test_store(mt, 1, (void *)0x3);
2299 	mtree_test_insert_range(mt, 110, 2, (void *)0xdd);
2300 	mtree_test_store(mt, 2, (void *)0x5);
2301 	mtree_test_load(mt, 22);
2302 	mtree_test_erase(mt, 2);
2303 	mtree_test_store(mt, 210, (void *)0x1a5);
2304 	mtree_test_store_range(mt, 0, 2, (void *)0x1);
2305 	mtree_test_store(mt, 2, (void *)0x5);
2306 	mtree_test_erase(mt, 2);
2307 	mtree_test_erase(mt, 22);
2308 	mtree_test_erase(mt, 1);
2309 	mtree_test_erase(mt, 2);
2310 	mtree_test_store(mt, 0, (void *)0x1);
2311 	mtree_test_load(mt, 112);
2312 	mtree_test_insert(mt, 2, (void *)0x5);
2313 	mtree_test_erase(mt, 2);
2314 	mtree_test_store(mt, 1, (void *)0x3);
2315 	mtree_test_insert_range(mt, 1, 2, (void *)0x3);
2316 	mtree_test_erase(mt, 0);
2317 	mtree_test_erase(mt, 2);
2318 	mtree_test_store(mt, 2, (void *)0x5);
2319 	mtree_test_erase(mt, 0);
2320 	mtree_test_erase(mt, 2);
2321 	mtree_test_store(mt, 0, (void *)0x1);
2322 	mtree_test_store(mt, 0, (void *)0x1);
2323 	mtree_test_erase(mt, 2);
2324 	mtree_test_store(mt, 2, (void *)0x5);
2325 	mtree_test_erase(mt, 2);
2326 	mtree_test_insert(mt, 2, (void *)0x5);
2327 	mtree_test_insert_range(mt, 1, 2, (void *)0x3);
2328 	mtree_test_erase(mt, 0);
2329 	mtree_test_erase(mt, 2);
2330 	mtree_test_store(mt, 0, (void *)0x1);
2331 	mtree_test_load(mt, 112);
2332 	mtree_test_store_range(mt, 110, 12, (void *)0xdd);
2333 	mtree_test_store(mt, 2, (void *)0x5);
2334 	mtree_test_load(mt, 110);
2335 	mtree_test_insert_range(mt, 4, 71, (void *)0x9);
2336 	mtree_test_load(mt, 2);
2337 	mtree_test_store(mt, 2, (void *)0x5);
2338 	mtree_test_insert_range(mt, 11, 22, (void *)0x17);
2339 	mtree_test_erase(mt, 12);
2340 	mtree_test_store(mt, 2, (void *)0x5);
2341 	mtree_test_load(mt, 22);
2342 	mtree_destroy(mt);
2343 
	/*
	 * 8. When rebalancing or in mas_spanning_rebalance(), the max of the
	 * new node may be set incorrectly to the final pivot rather than the
	 * correct max.
	 * Fixed by setting the left max to the original right max if the
	 * entire node is consumed.
	 */
2351 	mt_init_flags(mt, 0);
2352 	mtree_test_store(mt, 6, (void *)0xd);
2353 	mtree_test_store(mt, 67, (void *)0x87);
2354 	mtree_test_insert(mt, 15, (void *)0x1f);
2355 	mtree_test_insert(mt, 6716, (void *)0x3479);
2356 	mtree_test_store(mt, 61, (void *)0x7b);
2357 	mtree_test_insert(mt, 13, (void *)0x1b);
2358 	mtree_test_store(mt, 8, (void *)0x11);
2359 	mtree_test_insert(mt, 1, (void *)0x3);
2360 	mtree_test_load(mt, 0);
2361 	mtree_test_erase(mt, 67167);
2362 	mtree_test_insert_range(mt, 6, 7167, (void *)0xd);
2363 	mtree_test_insert(mt, 6, (void *)0xd);
2364 	mtree_test_erase(mt, 67);
2365 	mtree_test_insert(mt, 1, (void *)0x3);
2366 	mtree_test_erase(mt, 667167);
2367 	mtree_test_insert(mt, 6, (void *)0xd);
2368 	mtree_test_store(mt, 67, (void *)0x87);
2369 	mtree_test_insert(mt, 5, (void *)0xb);
2370 	mtree_test_erase(mt, 1);
2371 	mtree_test_insert(mt, 6, (void *)0xd);
2372 	mtree_test_erase(mt, 67);
2373 	mtree_test_insert(mt, 15, (void *)0x1f);
2374 	mtree_test_insert(mt, 67167, (void *)0x20cbf);
2375 	mtree_test_insert(mt, 1, (void *)0x3);
2376 	mtree_test_load(mt, 7);
2377 	mtree_test_insert(mt, 16, (void *)0x21);
2378 	mtree_test_insert(mt, 36, (void *)0x49);
2379 	mtree_test_store(mt, 67, (void *)0x87);
2380 	mtree_test_store(mt, 6, (void *)0xd);
2381 	mtree_test_insert(mt, 367, (void *)0x2df);
2382 	mtree_test_insert(mt, 115, (void *)0xe7);
2383 	mtree_test_store(mt, 0, (void *)0x1);
2384 	mtree_test_store_range(mt, 1, 3, (void *)0x3);
2385 	mtree_test_store(mt, 1, (void *)0x3);
2386 	mtree_test_erase(mt, 67167);
2387 	mtree_test_insert_range(mt, 6, 47, (void *)0xd);
2388 	mtree_test_store(mt, 1, (void *)0x3);
2389 	mtree_test_insert_range(mt, 1, 67, (void *)0x3);
2390 	mtree_test_load(mt, 67);
2391 	mtree_test_insert(mt, 1, (void *)0x3);
2392 	mtree_test_erase(mt, 67167);
2393 	mtree_destroy(mt);
2394 
	/*
	 * 9. A spanning store to the end of data caused an invalid metadata
	 * length which eventually resulted in a crash.
	 * Fixed by checking if there is a value in the pivot before
	 * incrementing the metadata end in mab_mas_cp().  To ensure this
	 * doesn't happen again, the two locations where this happens were
	 * abstracted into a function called mas_leaf_set_meta().
	 */
2403 	mt_init_flags(mt, 0);
2404 	mtree_test_insert(mt, 21, (void *)0x2b);
2405 	mtree_test_insert(mt, 12, (void *)0x19);
2406 	mtree_test_insert(mt, 6, (void *)0xd);
2407 	mtree_test_insert(mt, 8, (void *)0x11);
2408 	mtree_test_insert(mt, 2, (void *)0x5);
2409 	mtree_test_insert(mt, 91, (void *)0xb7);
2410 	mtree_test_insert(mt, 18, (void *)0x25);
2411 	mtree_test_insert(mt, 81, (void *)0xa3);
2412 	mtree_test_store_range(mt, 0, 128, (void *)0x1);
2413 	mtree_test_store(mt, 1, (void *)0x3);
2414 	mtree_test_erase(mt, 8);
2415 	mtree_test_insert(mt, 11, (void *)0x17);
2416 	mtree_test_insert(mt, 8, (void *)0x11);
2417 	mtree_test_insert(mt, 21, (void *)0x2b);
2418 	mtree_test_insert(mt, 2, (void *)0x5);
2419 	mtree_test_insert(mt, ULONG_MAX - 10, (void *)0xffffffffffffffeb);
2420 	mtree_test_erase(mt, ULONG_MAX - 10);
2421 	mtree_test_store_range(mt, 0, 281, (void *)0x1);
2422 	mtree_test_erase(mt, 2);
2423 	mtree_test_insert(mt, 1211, (void *)0x977);
2424 	mtree_test_insert(mt, 111, (void *)0xdf);
2425 	mtree_test_insert(mt, 13, (void *)0x1b);
2426 	mtree_test_insert(mt, 211, (void *)0x1a7);
2427 	mtree_test_insert(mt, 11, (void *)0x17);
2428 	mtree_test_insert(mt, 5, (void *)0xb);
2429 	mtree_test_insert(mt, 1218, (void *)0x985);
2430 	mtree_test_insert(mt, 61, (void *)0x7b);
2431 	mtree_test_store(mt, 1, (void *)0x3);
2432 	mtree_test_insert(mt, 121, (void *)0xf3);
2433 	mtree_test_insert(mt, 8, (void *)0x11);
2434 	mtree_test_insert(mt, 21, (void *)0x2b);
2435 	mtree_test_insert(mt, 2, (void *)0x5);
2436 	mtree_test_insert(mt, ULONG_MAX - 10, (void *)0xffffffffffffffeb);
2437 	mtree_test_erase(mt, ULONG_MAX - 10);
2438 }
2439 
/* Duplicate the tree with a specific gap. */
2441 static noinline void check_dup_gaps(struct maple_tree *mt,
2442 				    unsigned long nr_entries, bool zero_start,
2443 				    unsigned long gap)
2444 {
2445 	unsigned long i = 0;
2446 	struct maple_tree newmt;
2447 	int ret;
2448 	void *tmp;
2449 	MA_STATE(mas, mt, 0, 0);
2450 	MA_STATE(newmas, &newmt, 0, 0);
2451 
2452 	if (!zero_start)
2453 		i = 1;
2454 
2455 	mt_zero_nr_tallocated();
2456 	for (; i <= nr_entries; i++)
2457 		mtree_store_range(mt, i*10, (i+1)*10 - gap,
2458 				  xa_mk_value(i), GFP_KERNEL);
2459 
2460 	mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
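	/*
	 * mt_set_non_kernel() is a knob for the userspace test harness;
	 * when built in-kernel it is a no-op.
	 */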
2461 	mt_set_non_kernel(99999);
2462 	mas_lock(&newmas);
2463 	ret = mas_expected_entries(&newmas, nr_entries);
2464 	mt_set_non_kernel(0);
2465 	MT_BUG_ON(mt, ret != 0);
2466 
2467 	rcu_read_lock();
2468 	mas_for_each(&mas, tmp, ULONG_MAX) {
2469 		newmas.index = mas.index;
2470 		newmas.last = mas.last;
2471 		mas_store(&newmas, tmp);
2472 	}
2473 	rcu_read_unlock();
2474 	mas_destroy(&newmas);
2475 	mas_unlock(&newmas);
2476 
2477 	mtree_destroy(&newmt);
2478 }
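
/*
 * Illustrative sketch, not part of the original suite: the bulk-load
 * pattern check_dup_gaps() relies on.  mas_expected_entries() preallocates
 * nodes for the stores that follow, and mas_destroy() ends bulk mode and
 * frees anything unused; both must run under the tree lock.  The function
 * name is hypothetical.
 */
static __maybe_unused void example_bulk_load(struct maple_tree *mt,
					     unsigned long nr)
{
	unsigned long i;
	MA_STATE(mas, mt, 0, 0);

	mas_lock(&mas);
	if (!mas_expected_entries(&mas, nr)) {
		for (i = 0; i < nr; i++) {
			mas_set_range(&mas, i * 10, i * 10 + 5);
			mas_store(&mas, xa_mk_value(i));
		}
	}
	mas_destroy(&mas);	/* Ends bulk mode, frees unused nodes. */
	mas_unlock(&mas);
}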
2479 
2480 /* Duplicate many sizes of trees.  Mainly to test expected entry values */
2481 static noinline void check_dup(struct maple_tree *mt)
2482 {
2483 	int i;
2484 	int big_start = 100010;
2485 
2486 	/* Check with a value at zero */
2487 	for (i = 10; i < 1000; i++) {
2488 		mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2489 		check_dup_gaps(mt, i, true, 5);
2490 		mtree_destroy(mt);
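		/* Wait for deferred RCU frees of old nodes before the next round. */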
2491 		rcu_barrier();
2492 	}
2493 
2494 	cond_resched();
2495 	mt_cache_shrink();
2496 	/* Check with a value at zero, no gap */
2497 	for (i = 1000; i < 2000; i++) {
2498 		mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2499 		check_dup_gaps(mt, i, true, 0);
2500 		mtree_destroy(mt);
2501 		rcu_barrier();
2502 	}
2503 
2504 	cond_resched();
2505 	mt_cache_shrink();
	/* Check with a value at zero and an unreasonably large entry count */
2507 	for (i = big_start; i < big_start + 10; i++) {
2508 		mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2509 		check_dup_gaps(mt, i, true, 5);
2510 		mtree_destroy(mt);
2511 		rcu_barrier();
2512 	}
2513 
2514 	cond_resched();
2515 	mt_cache_shrink();
	/* Small to medium size, not starting at zero */
2517 	for (i = 200; i < 1000; i++) {
2518 		mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2519 		check_dup_gaps(mt, i, false, 5);
2520 		mtree_destroy(mt);
2521 		rcu_barrier();
2522 	}
2523 
2524 	cond_resched();
2525 	mt_cache_shrink();
	/* Unreasonably large, not starting at zero */
2527 	for (i = big_start; i < big_start + 10; i++) {
2528 		mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2529 		check_dup_gaps(mt, i, false, 5);
2530 		mtree_destroy(mt);
2531 		rcu_barrier();
2532 		cond_resched();
2533 		mt_cache_shrink();
2534 	}
2535 
2536 	/* Check non-allocation tree not starting at zero */
2537 	for (i = 1500; i < 3000; i++) {
2538 		mt_init_flags(mt, 0);
2539 		check_dup_gaps(mt, i, false, 5);
2540 		mtree_destroy(mt);
2541 		rcu_barrier();
2542 		cond_resched();
2543 		if (i % 2 == 0)
2544 			mt_cache_shrink();
2545 	}
2546 
2547 	mt_cache_shrink();
2548 	/* Check non-allocation tree starting at zero */
2549 	for (i = 200; i < 1000; i++) {
2550 		mt_init_flags(mt, 0);
2551 		check_dup_gaps(mt, i, true, 5);
2552 		mtree_destroy(mt);
2553 		rcu_barrier();
2554 		cond_resched();
2555 	}
2556 
2557 	mt_cache_shrink();
2558 	/* Unreasonably large */
2559 	for (i = big_start + 5; i < big_start + 10; i++) {
2560 		mt_init_flags(mt, 0);
2561 		check_dup_gaps(mt, i, true, 5);
2562 		mtree_destroy(mt);
2563 		rcu_barrier();
2564 		mt_cache_shrink();
2565 		cond_resched();
2566 	}
2567 }
2568 
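/*
 * Store 51 adjacent ranges, then overwrite most of them with one spanning
 * NULL store so the resulting big node is rebuilt from a minimal number of
 * entries.
 */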
2569 static noinline void check_bnode_min_spanning(struct maple_tree *mt)
2570 {
2571 	int i = 50;
2572 	MA_STATE(mas, mt, 0, 0);
2573 
2574 	mt_set_non_kernel(9999);
2575 	mas_lock(&mas);
2576 	do {
2577 		mas_set_range(&mas, i*10, i*10+9);
2578 		mas_store(&mas, check_bnode_min_spanning);
2579 	} while (i--);
2580 
2581 	mas_set_range(&mas, 240, 509);
2582 	mas_store(&mas, NULL);
2583 	mas_unlock(&mas);
2584 	mas_destroy(&mas);
2585 	mt_set_non_kernel(0);
2586 }
2587 
2588 static noinline void check_empty_area_window(struct maple_tree *mt)
2589 {
2590 	unsigned long i, nr_entries = 20;
2591 	MA_STATE(mas, mt, 0, 0);
2592 
2593 	for (i = 1; i <= nr_entries; i++)
2594 		mtree_store_range(mt, i*10, i*10 + 9,
2595 				  xa_mk_value(i), GFP_KERNEL);
2596 
2597 	/* Create another hole besides the one at 0 */
2598 	mtree_store_range(mt, 160, 169, NULL, GFP_KERNEL);
2599 
2600 	/* Check lower bounds that don't fit */
2601 	rcu_read_lock();
2602 	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 90, 10) != -EBUSY);
2603 
2604 	mas_reset(&mas);
2605 	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 6, 90, 5) != -EBUSY);
2606 
2607 	/* Check lower bound that does fit */
2608 	mas_reset(&mas);
2609 	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 90, 5) != 0);
2610 	MT_BUG_ON(mt, mas.index != 5);
2611 	MT_BUG_ON(mt, mas.last != 9);
2612 	rcu_read_unlock();
2613 
2614 	/* Check one gap that doesn't fit and one that does */
2615 	rcu_read_lock();
2616 	mas_reset(&mas);
2617 	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 217, 9) != 0);
2618 	MT_BUG_ON(mt, mas.index != 161);
2619 	MT_BUG_ON(mt, mas.last != 169);
2620 
2621 	/* Check one gap that does fit above the min */
2622 	mas_reset(&mas);
2623 	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 218, 3) != 0);
2624 	MT_BUG_ON(mt, mas.index != 216);
2625 	MT_BUG_ON(mt, mas.last != 218);
2626 
2627 	/* Check size that doesn't fit any gap */
2628 	mas_reset(&mas);
2629 	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 218, 16) != -EBUSY);
2630 
2631 	/*
2632 	 * Check size that doesn't fit the lower end of the window but
2633 	 * does fit the gap
2634 	 */
2635 	mas_reset(&mas);
2636 	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 167, 200, 4) != -EBUSY);
2637 
2638 	/*
2639 	 * Check size that doesn't fit the upper end of the window but
2640 	 * does fit the gap
2641 	 */
2642 	mas_reset(&mas);
2643 	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 162, 4) != -EBUSY);
2644 
2645 	/* Check mas_empty_area forward */
2646 	mas_reset(&mas);
2647 	MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 9) != 0);
2648 	MT_BUG_ON(mt, mas.index != 0);
2649 	MT_BUG_ON(mt, mas.last != 8);
2650 
2651 	mas_reset(&mas);
2652 	MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 4) != 0);
2653 	MT_BUG_ON(mt, mas.index != 0);
2654 	MT_BUG_ON(mt, mas.last != 3);
2655 
2656 	mas_reset(&mas);
2657 	MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 11) != -EBUSY);
2658 
2659 	mas_reset(&mas);
2660 	MT_BUG_ON(mt, mas_empty_area(&mas, 5, 100, 6) != -EBUSY);
2661 
2662 	mas_reset(&mas);
2663 	MT_BUG_ON(mt, mas_empty_area(&mas, 0, 8, 10) != -EBUSY);
2664 
2665 	mas_reset(&mas);
2666 	mas_empty_area(&mas, 100, 165, 3);
2667 
2668 	mas_reset(&mas);
2669 	MT_BUG_ON(mt, mas_empty_area(&mas, 100, 163, 6) != -EBUSY);
2670 	rcu_read_unlock();
2671 }
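
/*
 * Illustrative sketch, not part of the original suite: the gap-search
 * contract checked above.  On success both calls return 0 and set
 * mas.index/mas.last to a span of exactly @size slots; mas_empty_area()
 * finds the lowest fit, mas_empty_area_rev() the highest.  The function
 * name is hypothetical.
 */
static __maybe_unused void example_empty_area(struct maple_tree *mt)
{
	MA_STATE(mas, mt, 0, 0);

	/* Occupy [10, 19]; gaps remain at [0, 9] and [20, ULONG_MAX]. */
	mtree_store_range(mt, 10, 19, xa_mk_value(1), GFP_KERNEL);

	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 5) != 0);
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, mas.last != 4);

	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 0, 100, 5) != 0);
	MT_BUG_ON(mt, mas.index != 96);
	MT_BUG_ON(mt, mas.last != 100);
	rcu_read_unlock();
}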
2672 
2673 static noinline void check_empty_area_fill(struct maple_tree *mt)
2674 {
2675 	const unsigned long max = 0x25D78000;
2676 	unsigned long size;
2677 	int loop, shift;
2678 	MA_STATE(mas, mt, 0, 0);
2679 
2680 	mt_set_non_kernel(99999);
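	/* Claim the lowest-fitting gap repeatedly until [0, max] fills up. */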
2681 	for (shift = 12; shift <= 16; shift++) {
2682 		loop = 5000;
2683 		size = 1 << shift;
2684 		while (loop--) {
2685 			mas_set(&mas, 0);
2686 			mas_lock(&mas);
2687 			MT_BUG_ON(mt, mas_empty_area(&mas, 0, max, size) != 0);
2688 			MT_BUG_ON(mt, mas.last != mas.index + size - 1);
2689 			mas_store_gfp(&mas, (void *)size, GFP_KERNEL);
2690 			mas_unlock(&mas);
2691 			mas_reset(&mas);
2692 		}
2693 	}
2694 
2695 	/* No space left. */
2696 	size = 0x1000;
2697 	rcu_read_lock();
2698 	MT_BUG_ON(mt, mas_empty_area(&mas, 0, max, size) != -EBUSY);
2699 	rcu_read_unlock();
2700 
2701 	/* Fill a depth 3 node to the maximum */
2702 	for (unsigned long i = 629440511; i <= 629440800; i += 6)
2703 		mtree_store_range(mt, i, i + 5, (void *)i, GFP_KERNEL);
2704 	/* Make space in the second-last depth 4 node */
2705 	mtree_erase(mt, 631668735);
2706 	/* Make space in the last depth 4 node */
2707 	mtree_erase(mt, 629506047);
2708 	mas_reset(&mas);
	/* Search from just after the gap in the second-last depth 4 node */
2710 	rcu_read_lock();
2711 	MT_BUG_ON(mt, mas_empty_area(&mas, 629506048, 690000000, 0x5000) != 0);
2712 	rcu_read_unlock();
2713 	mt_set_non_kernel(0);
2714 }
2715 
2716 static DEFINE_MTREE(tree);
2717 static int maple_tree_seed(void)
2718 {
2719 	unsigned long set[] = {5015, 5014, 5017, 25, 1000,
2720 			       1001, 1002, 1003, 1005, 0,
2721 			       5003, 5002};
2722 	void *ptr = &set;
2723 
2724 	pr_info("\nTEST STARTING\n\n");
2725 
2726 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
2727 	check_root_expand(&tree);
2728 	mtree_destroy(&tree);
2729 
2730 #if defined(BENCH_SLOT_STORE)
2731 #define BENCH
2732 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
2733 	bench_slot_store(&tree);
2734 	mtree_destroy(&tree);
2735 	goto skip;
2736 #endif
2737 #if defined(BENCH_NODE_STORE)
2738 #define BENCH
2739 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
2740 	bench_node_store(&tree);
2741 	mtree_destroy(&tree);
2742 	goto skip;
2743 #endif
2744 #if defined(BENCH_AWALK)
2745 #define BENCH
2746 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
2747 	bench_awalk(&tree);
2748 	mtree_destroy(&tree);
2749 	goto skip;
2750 #endif
2751 #if defined(BENCH_WALK)
2752 #define BENCH
2753 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
2754 	bench_walk(&tree);
2755 	mtree_destroy(&tree);
2756 	goto skip;
2757 #endif
2758 #if defined(BENCH_FORK)
2759 #define BENCH
2760 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
2761 	bench_forking(&tree);
2762 	mtree_destroy(&tree);
2763 	goto skip;
2764 #endif
2765 #if defined(BENCH_MT_FOR_EACH)
2766 #define BENCH
2767 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
2768 	bench_mt_for_each(&tree);
2769 	mtree_destroy(&tree);
2770 	goto skip;
2771 #endif
2772 
2773 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
2774 	check_iteration(&tree);
2775 	mtree_destroy(&tree);
2776 
2777 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
2778 	check_forking(&tree);
2779 	mtree_destroy(&tree);
2780 
2781 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
2782 	check_mas_store_gfp(&tree);
2783 	mtree_destroy(&tree);
2784 
2785 	/* Test ranges (store and insert) */
2786 	mt_init_flags(&tree, 0);
2787 	check_ranges(&tree);
2788 	mtree_destroy(&tree);
2789 
2790 #if defined(CONFIG_64BIT)
2791 	/* These tests have ranges outside of 4GB */
2792 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
2793 	check_alloc_range(&tree);
2794 	mtree_destroy(&tree);
2795 
2796 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
2797 	check_alloc_rev_range(&tree);
2798 	mtree_destroy(&tree);
2799 #endif
2800 
2801 	mt_init_flags(&tree, 0);
2802 
2803 	check_load(&tree, set[0], NULL);       /* See if 5015 -> NULL */
2804 
2805 	check_insert(&tree, set[9], &tree);     /* Insert 0 */
2806 	check_load(&tree, set[9], &tree);       /* See if 0 -> &tree */
2807 	check_load(&tree, set[0], NULL);       /* See if 5015 -> NULL */
2808 
2809 	check_insert(&tree, set[10], ptr);      /* Insert 5003 */
2810 	check_load(&tree, set[9], &tree);       /* See if 0 -> &tree */
2811 	check_load(&tree, set[11], NULL);       /* See if 5002 -> NULL */
2812 	check_load(&tree, set[10], ptr);       /* See if 5003 -> ptr */
2813 
2814 	/* Clear out the tree */
2815 	mtree_destroy(&tree);
2816 
2817 	/* Try to insert, insert a dup, and load back what was inserted. */
2818 	mt_init_flags(&tree, 0);
2819 	check_insert(&tree, set[0], &tree);     /* Insert 5015 */
2820 	check_dup_insert(&tree, set[0], &tree); /* Insert 5015 again */
2821 	check_load(&tree, set[0], &tree);       /* See if 5015 -> &tree */
2822 
2823 	/*
2824 	 * Second set of tests try to load a value that doesn't exist, inserts
2825 	 * a second value, then loads the value again
2826 	 */
2827 	check_load(&tree, set[1], NULL);        /* See if 5014 -> NULL */
2828 	check_insert(&tree, set[1], ptr);       /* insert 5014 -> ptr */
2829 	check_load(&tree, set[1], ptr);         /* See if 5014 -> ptr */
2830 	check_load(&tree, set[0], &tree);       /* See if 5015 -> &tree */
	/*
	 * Tree currently contains:
	 * p[0]: 0 -> 5013 = (nil), p[1]: 5014 -> ptr, p[2]: 5015 -> &tree,
	 * p[3]: 5016 -> ULONG_MAX = (nil)
	 */
2835 	check_insert(&tree, set[6], ptr);       /* insert 1002 -> ptr */
2836 	check_insert(&tree, set[7], &tree);       /* insert 1003 -> &tree */
2837 
2838 	check_load(&tree, set[0], &tree);       /* See if 5015 -> &tree */
2839 	check_load(&tree, set[1], ptr);         /* See if 5014 -> ptr */
2840 	check_load(&tree, set[6], ptr);         /* See if 1002 -> ptr */
2841 	check_load(&tree, set[7], &tree);       /* 1003 = &tree ? */
2842 
2843 	/* Clear out tree */
2844 	mtree_destroy(&tree);
2845 
2846 	mt_init_flags(&tree, 0);
2847 	/* Test inserting into a NULL hole. */
2848 	check_insert(&tree, set[5], ptr);       /* insert 1001 -> ptr */
2849 	check_insert(&tree, set[7], &tree);       /* insert 1003 -> &tree */
2850 	check_insert(&tree, set[6], ptr);       /* insert 1002 -> ptr */
2851 	check_load(&tree, set[5], ptr);         /* See if 1001 -> ptr */
2852 	check_load(&tree, set[6], ptr);         /* See if 1002 -> ptr */
2853 	check_load(&tree, set[7], &tree);       /* See if 1003 -> &tree */
2854 
2855 	/* Clear out the tree */
2856 	mtree_destroy(&tree);
2857 
2858 	mt_init_flags(&tree, 0);
2859 	/*
2860 	 *       set[] = {5015, 5014, 5017, 25, 1000,
2861 	 *                1001, 1002, 1003, 1005, 0,
2862 	 *                5003, 5002};
2863 	 */
2864 
2865 	check_insert(&tree, set[0], ptr); /* 5015 */
2866 	check_insert(&tree, set[1], &tree); /* 5014 */
2867 	check_insert(&tree, set[2], ptr); /* 5017 */
2868 	check_insert(&tree, set[3], &tree); /* 25 */
2869 	check_load(&tree, set[0], ptr);
2870 	check_load(&tree, set[1], &tree);
2871 	check_load(&tree, set[2], ptr);
2872 	check_load(&tree, set[3], &tree);
2873 	check_insert(&tree, set[4], ptr); /* 1000 < Should split. */
2874 	check_load(&tree, set[0], ptr);
2875 	check_load(&tree, set[1], &tree);
2876 	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree); /* 25 */
2878 	check_load(&tree, set[4], ptr);
2879 	check_insert(&tree, set[5], &tree); /* 1001 */
2880 	check_load(&tree, set[0], ptr);
2881 	check_load(&tree, set[1], &tree);
2882 	check_load(&tree, set[2], ptr);
2883 	check_load(&tree, set[3], &tree);
2884 	check_load(&tree, set[4], ptr);
2885 	check_load(&tree, set[5], &tree);
2886 	check_insert(&tree, set[6], ptr);
2887 	check_load(&tree, set[0], ptr);
2888 	check_load(&tree, set[1], &tree);
2889 	check_load(&tree, set[2], ptr);
2890 	check_load(&tree, set[3], &tree);
2891 	check_load(&tree, set[4], ptr);
2892 	check_load(&tree, set[5], &tree);
2893 	check_load(&tree, set[6], ptr);
2894 	check_insert(&tree, set[7], &tree);
2895 	check_load(&tree, set[0], ptr);
2896 	check_insert(&tree, set[8], ptr);
2897 
2898 	check_insert(&tree, set[9], &tree);
2899 
2900 	check_load(&tree, set[0], ptr);
2901 	check_load(&tree, set[1], &tree);
2902 	check_load(&tree, set[2], ptr);
2903 	check_load(&tree, set[3], &tree);
2904 	check_load(&tree, set[4], ptr);
2905 	check_load(&tree, set[5], &tree);
2906 	check_load(&tree, set[6], ptr);
2907 	check_load(&tree, set[9], &tree);
2908 	mtree_destroy(&tree);
2909 
2910 	mt_init_flags(&tree, 0);
2911 	check_seq(&tree, 16, false);
2912 	mtree_destroy(&tree);
2913 
2914 	mt_init_flags(&tree, 0);
2915 	check_seq(&tree, 1000, true);
2916 	mtree_destroy(&tree);
2917 
2918 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
2919 	check_rev_seq(&tree, 1000, true);
2920 	mtree_destroy(&tree);
2921 
2922 	check_lower_bound_split(&tree);
2923 	check_upper_bound_split(&tree);
2924 	check_mid_split(&tree);
2925 
2926 	mt_init_flags(&tree, 0);
2927 	check_next_entry(&tree);
2928 	check_find(&tree);
2929 	check_find_2(&tree);
2930 	mtree_destroy(&tree);
2931 
2932 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
2933 	check_prev_entry(&tree);
2934 	mtree_destroy(&tree);
2935 
2936 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
2937 	check_gap_combining(&tree);
2938 	mtree_destroy(&tree);
2939 
2940 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
2941 	check_node_overwrite(&tree);
2942 	mtree_destroy(&tree);
2943 
2944 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
2945 	next_prev_test(&tree);
2946 	mtree_destroy(&tree);
2947 
2948 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
2949 	check_spanning_relatives(&tree);
2950 	mtree_destroy(&tree);
2951 
2952 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
2953 	check_rev_find(&tree);
2954 	mtree_destroy(&tree);
2955 
2956 	mt_init_flags(&tree, 0);
2957 	check_fuzzer(&tree);
2958 	mtree_destroy(&tree);
2959 
2960 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
2961 	check_dup(&tree);
2962 	mtree_destroy(&tree);
2963 
2964 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
2965 	check_bnode_min_spanning(&tree);
2966 	mtree_destroy(&tree);
2967 
2968 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
2969 	check_empty_area_window(&tree);
2970 	mtree_destroy(&tree);
2971 
2972 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
2973 	check_empty_area_fill(&tree);
2974 	mtree_destroy(&tree);
2975 
2977 #if defined(BENCH)
2978 skip:
2979 #endif
2980 	rcu_barrier();
2981 	pr_info("maple_tree: %u of %u tests passed\n",
2982 			atomic_read(&maple_tree_tests_passed),
2983 			atomic_read(&maple_tree_tests_run));
2984 	if (atomic_read(&maple_tree_tests_run) ==
2985 	    atomic_read(&maple_tree_tests_passed))
2986 		return 0;
2987 
2988 	return -EINVAL;
2989 }
2990 
2991 static void maple_tree_harvest(void)
2992 {
2994 }
2995 
2996 module_init(maple_tree_seed);
2997 module_exit(maple_tree_harvest);
2998 MODULE_AUTHOR("Liam R. Howlett <Liam.Howlett@Oracle.com>");
2999 MODULE_LICENSE("GPL");
3000