// SPDX-License-Identifier: GPL-2.0+
/*
 * test_maple_tree.c: Test the maple tree API
 * Copyright (c) 2018-2022 Oracle Corporation
 * Author: Liam R. Howlett <Liam.Howlett@Oracle.com>
 *
 * Any tests that only require the interface of the tree.
 */

#include <linux/maple_tree.h>
#include <linux/module.h>
#include <linux/rwsem.h>

#define MTREE_ALLOC_MAX 0x2000000000000UL
#define CONFIG_MAPLE_SEARCH
#define MAPLE_32BIT (MAPLE_NODE_SLOTS > 31)

#ifndef CONFIG_DEBUG_MAPLE_TREE
#define mt_dump(mt, fmt)	do {} while (0)
#define mt_validate(mt)		do {} while (0)
#define mt_cache_shrink()	do {} while (0)
#define mas_dump(mas)		do {} while (0)
#define mas_wr_dump(mas)	do {} while (0)
atomic_t maple_tree_tests_run;
atomic_t maple_tree_tests_passed;
#undef MT_BUG_ON

#define MT_BUG_ON(__tree, __x) do {					\
	atomic_inc(&maple_tree_tests_run);				\
	if (__x) {							\
		pr_info("BUG at %s:%d (%u)\n",				\
			__func__, __LINE__, __x);			\
		pr_info("Pass: %u Run:%u\n",				\
			atomic_read(&maple_tree_tests_passed),		\
			atomic_read(&maple_tree_tests_run));		\
	} else {							\
		atomic_inc(&maple_tree_tests_passed);			\
	}								\
} while (0)
#endif
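
/*
 * Usage sketch (illustrative, not part of the test suite): with the
 * fallback MT_BUG_ON() above, a failing check is logged and counted but
 * does not stop the run, so one test function can report several failures:
 *
 *	MT_BUG_ON(mt, mtree_load(mt, 5) != NULL);	// one test run
 *	MT_BUG_ON(mt, !mtree_empty(mt));		// counted separately
 */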

/* #define BENCH_SLOT_STORE */
/* #define BENCH_NODE_STORE */
/* #define BENCH_AWALK */
/* #define BENCH_WALK */
/* #define BENCH_MT_FOR_EACH */
/* #define BENCH_FORK */
/* #define BENCH_MAS_FOR_EACH */
/* #define BENCH_MAS_PREV */

#ifdef __KERNEL__
#define mt_set_non_kernel(x)		do {} while (0)
#define mt_zero_nr_tallocated(x)	do {} while (0)
#else
#define cond_resched()			do {} while (0)
#endif
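
/*
 * The no-op stubs above exist because this file is also built by the
 * userspace test harness (tools/testing/radix-tree), where
 * mt_set_non_kernel() and mt_zero_nr_tallocated() have real
 * implementations; cond_resched(), conversely, only exists in the kernel.
 */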

static int __init mtree_insert_index(struct maple_tree *mt,
				     unsigned long index, gfp_t gfp)
{
	return mtree_insert(mt, index, xa_mk_value(index & LONG_MAX), gfp);
}

static void __init mtree_erase_index(struct maple_tree *mt, unsigned long index)
{
	MT_BUG_ON(mt, mtree_erase(mt, index) != xa_mk_value(index & LONG_MAX));
	MT_BUG_ON(mt, mtree_load(mt, index) != NULL);
}
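
/*
 * Quick illustration (added, not from the original source) of the encoding
 * the two helpers above rely on: every index is stored as
 * xa_mk_value(index), so a later load can verify both presence and value:
 *
 *	mtree_insert_index(mt, 5, GFP_KERNEL);
 *	MT_BUG_ON(mt, mtree_load(mt, 5) != xa_mk_value(5));
 */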

static int __init mtree_test_insert(struct maple_tree *mt, unsigned long index,
				    void *ptr)
{
	return mtree_insert(mt, index, ptr, GFP_KERNEL);
}

static int __init mtree_test_store_range(struct maple_tree *mt,
			unsigned long start, unsigned long end, void *ptr)
{
	return mtree_store_range(mt, start, end, ptr, GFP_KERNEL);
}

static int __init mtree_test_store(struct maple_tree *mt, unsigned long start,
				   void *ptr)
{
	return mtree_test_store_range(mt, start, start, ptr);
}

static int __init mtree_test_insert_range(struct maple_tree *mt,
			unsigned long start, unsigned long end, void *ptr)
{
	return mtree_insert_range(mt, start, end, ptr, GFP_KERNEL);
}

static void __init *mtree_test_load(struct maple_tree *mt, unsigned long index)
{
	return mtree_load(mt, index);
}

static void __init *mtree_test_erase(struct maple_tree *mt, unsigned long index)
{
	return mtree_erase(mt, index);
}

#if defined(CONFIG_64BIT)
static noinline void __init check_mtree_alloc_range(struct maple_tree *mt,
		unsigned long start, unsigned long end, unsigned long size,
		unsigned long expected, int eret, void *ptr)
{
	unsigned long result = expected + 1;
	int ret;

	ret = mtree_alloc_range(mt, &result, ptr, size, start, end,
				GFP_KERNEL);
	MT_BUG_ON(mt, ret != eret);
	if (ret)
		return;

	MT_BUG_ON(mt, result != expected);
}

static noinline void __init check_mtree_alloc_rrange(struct maple_tree *mt,
		unsigned long start, unsigned long end, unsigned long size,
		unsigned long expected, int eret, void *ptr)
{
	unsigned long result = expected + 1;
	int ret;

	ret = mtree_alloc_rrange(mt, &result, ptr, size, start, end,
				 GFP_KERNEL);
	MT_BUG_ON(mt, ret != eret);
	if (ret)
		return;

	MT_BUG_ON(mt, result != expected);
}
#endif
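
/*
 * A minimal sketch of the allocation API the two checkers above exercise
 * (illustrative; assumes an empty tree created with MT_FLAGS_ALLOC_RANGE):
 *
 *	unsigned long where;
 *
 *	// Lowest gap of 0x1000 slots in [0, 0xffff] -> where == 0.
 *	mtree_alloc_range(mt, &where, ptr, 0x1000, 0, 0xffff, GFP_KERNEL);
 *	// Highest remaining gap of that size -> where == 0xf000.
 *	mtree_alloc_rrange(mt, &where, ptr, 0x1000, 0, 0xffff, GFP_KERNEL);
 */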

static noinline void __init check_load(struct maple_tree *mt,
				       unsigned long index, void *ptr)
{
	void *ret = mtree_test_load(mt, index);

	if (ret != ptr)
		pr_err("Load %lu returned %p expect %p\n", index, ret, ptr);
	MT_BUG_ON(mt, ret != ptr);
}

static noinline void __init check_store_range(struct maple_tree *mt,
		unsigned long start, unsigned long end, void *ptr, int expected)
{
	int ret = -EINVAL;
	unsigned long i;

	ret = mtree_test_store_range(mt, start, end, ptr);
	MT_BUG_ON(mt, ret != expected);

	if (ret)
		return;

	for (i = start; i <= end; i++)
		check_load(mt, i, ptr);
}

static noinline void __init check_insert_range(struct maple_tree *mt,
		unsigned long start, unsigned long end, void *ptr, int expected)
{
	int ret = -EINVAL;
	unsigned long i;

	ret = mtree_test_insert_range(mt, start, end, ptr);
	MT_BUG_ON(mt, ret != expected);

	if (ret)
		return;

	for (i = start; i <= end; i++)
		check_load(mt, i, ptr);
}

static noinline void __init check_insert(struct maple_tree *mt,
					 unsigned long index, void *ptr)
{
	int ret = -EINVAL;

	ret = mtree_test_insert(mt, index, ptr);
	MT_BUG_ON(mt, ret != 0);
}

static noinline void __init check_dup_insert(struct maple_tree *mt,
					     unsigned long index, void *ptr)
{
	int ret = -EINVAL;

	ret = mtree_test_insert(mt, index, ptr);
	MT_BUG_ON(mt, ret != -EEXIST);
}

static noinline void __init check_index_load(struct maple_tree *mt,
					     unsigned long index)
{
	return check_load(mt, index, xa_mk_value(index & LONG_MAX));
}

static inline __init int not_empty(struct maple_node *node)
{
	int i;

	if (node->parent)
		return 1;

	for (i = 0; i < ARRAY_SIZE(node->slot); i++)
		if (node->slot[i])
			return 1;

	return 0;
}

static noinline void __init check_rev_seq(struct maple_tree *mt,
					  unsigned long max, bool verbose)
{
	unsigned long i = max, j;

	MT_BUG_ON(mt, !mtree_empty(mt));

	mt_zero_nr_tallocated();
	while (i) {
		MT_BUG_ON(mt, mtree_insert_index(mt, i, GFP_KERNEL));
		for (j = i; j <= max; j++)
			check_index_load(mt, j);

		check_load(mt, i - 1, NULL);
		mt_set_in_rcu(mt);
		MT_BUG_ON(mt, !mt_height(mt));
		mt_clear_in_rcu(mt);
		MT_BUG_ON(mt, !mt_height(mt));
		i--;
	}
	check_load(mt, max + 1, NULL);

#ifndef __KERNEL__
	if (verbose) {
		rcu_barrier();
		mt_dump(mt, mt_dump_dec);
		pr_info(" %s test of 0-%lu %luK in %d active (%d total)\n",
			__func__, max, mt_get_alloc_size()/1024,
			mt_nr_allocated(), mt_nr_tallocated());
	}
#endif
}

static noinline void __init check_seq(struct maple_tree *mt, unsigned long max,
		bool verbose)
{
	unsigned long i, j;

	MT_BUG_ON(mt, !mtree_empty(mt));

	mt_zero_nr_tallocated();
	for (i = 0; i <= max; i++) {
		MT_BUG_ON(mt, mtree_insert_index(mt, i, GFP_KERNEL));
		for (j = 0; j <= i; j++)
			check_index_load(mt, j);

		if (i)
			MT_BUG_ON(mt, !mt_height(mt));
		check_load(mt, i + 1, NULL);
	}

#ifndef __KERNEL__
	if (verbose) {
		rcu_barrier();
		mt_dump(mt, mt_dump_dec);
		pr_info(" seq test of 0-%lu %luK in %d active (%d total)\n",
			max, mt_get_alloc_size()/1024, mt_nr_allocated(),
			mt_nr_tallocated());
	}
#endif
}

static noinline void __init check_lb_not_empty(struct maple_tree *mt)
{
	unsigned long i, j;
	unsigned long huge = 4000UL * 1000 * 1000;

	i = huge;
	while (i > 4096) {
		check_insert(mt, i, (void *) i);
		for (j = huge; j >= i; j /= 2) {
			check_load(mt, j-1, NULL);
			check_load(mt, j, (void *) j);
			check_load(mt, j+1, NULL);
		}
		i /= 2;
	}
	mtree_destroy(mt);
}

static noinline void __init check_lower_bound_split(struct maple_tree *mt)
{
	MT_BUG_ON(mt, !mtree_empty(mt));
	check_lb_not_empty(mt);
}

static noinline void __init check_upper_bound_split(struct maple_tree *mt)
{
	unsigned long i, j;
	unsigned long huge;

	MT_BUG_ON(mt, !mtree_empty(mt));

	if (MAPLE_32BIT)
		huge = 2147483647UL;
	else
		huge = 4000UL * 1000 * 1000;

	i = 4096;
	while (i < huge) {
		check_insert(mt, i, (void *) i);
		for (j = i; j >= 4096; j /= 2) {
			check_load(mt, j-1, NULL);
			check_load(mt, j, (void *) j);
			check_load(mt, j+1, NULL);
		}
		i *= 2;
	}
	mtree_destroy(mt);
}

static noinline void __init check_mid_split(struct maple_tree *mt)
{
	unsigned long huge = 8000UL * 1000 * 1000;

	check_insert(mt, huge, (void *) huge);
	check_insert(mt, 0, xa_mk_value(0));
	check_lb_not_empty(mt);
}

static noinline void __init check_rev_find(struct maple_tree *mt)
{
	int i, nr_entries = 200;
	void *val;
	MA_STATE(mas, mt, 0, 0);

	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 5,
				  xa_mk_value(i), GFP_KERNEL);

	rcu_read_lock();
	mas_set(&mas, 1000);
	val = mas_find_rev(&mas, 1000);
	MT_BUG_ON(mt, val != xa_mk_value(100));
	val = mas_find_rev(&mas, 1000);
	MT_BUG_ON(mt, val != NULL);

	mas_set(&mas, 999);
	val = mas_find_rev(&mas, 997);
	MT_BUG_ON(mt, val != NULL);

	mas_set(&mas, 1000);
	val = mas_find_rev(&mas, 900);
	MT_BUG_ON(mt, val != xa_mk_value(100));
	val = mas_find_rev(&mas, 900);
	MT_BUG_ON(mt, val != xa_mk_value(99));

	mas_set(&mas, 20);
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(2));
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(1));
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(0));
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != NULL);
	rcu_read_unlock();
}

static noinline void __init check_find(struct maple_tree *mt)
{
	unsigned long val = 0;
	unsigned long count;
	unsigned long max;
	unsigned long top;
	unsigned long last = 0, index = 0;
	void *entry, *entry2;

	MA_STATE(mas, mt, 0, 0);

	/* Insert 0. */
	MT_BUG_ON(mt, mtree_insert_index(mt, val++, GFP_KERNEL));

#if defined(CONFIG_64BIT)
	top = 4398046511104UL;
#else
	top = ULONG_MAX;
#endif

	if (MAPLE_32BIT) {
		count = 15;
	} else {
		count = 20;
	}

	for (int i = 0; i <= count; i++) {
		if (val != 64)
			MT_BUG_ON(mt, mtree_insert_index(mt, val, GFP_KERNEL));
		else
			MT_BUG_ON(mt, mtree_insert(mt, val,
						   XA_ZERO_ENTRY, GFP_KERNEL));

		val <<= 2;
	}

	val = 0;
	mas_set(&mas, val);
	mas_lock(&mas);
	while ((entry = mas_find(&mas, 268435456)) != NULL) {
		if (val != 64)
			MT_BUG_ON(mt, xa_mk_value(val) != entry);
		else
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);

		val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
	}
	mas_unlock(&mas);

	val = 0;
	mas_set(&mas, val);
	mas_lock(&mas);
	mas_for_each(&mas, entry, ULONG_MAX) {
		if (val != 64)
			MT_BUG_ON(mt, xa_mk_value(val) != entry);
		else
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
		val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
	}
	mas_unlock(&mas);

	/* Test mas_pause */
	val = 0;
	mas_set(&mas, val);
	mas_lock(&mas);
	mas_for_each(&mas, entry, ULONG_MAX) {
		if (val != 64)
			MT_BUG_ON(mt, xa_mk_value(val) != entry);
		else
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
		val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;

		mas_pause(&mas);
		mas_unlock(&mas);
		mas_lock(&mas);
	}
	mas_unlock(&mas);

	val = 0;
	max = 300; /* A value big enough to include XA_ZERO_ENTRY at 64. */
	mt_for_each(mt, entry, index, max) {
		MT_BUG_ON(mt, xa_mk_value(val) != entry);
		val <<= 2;
		if (val == 64) /* Skip zero entry. */
			val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
	}

	val = 0;
	max = 0;
	index = 0;
	MT_BUG_ON(mt, mtree_insert_index(mt, ULONG_MAX, GFP_KERNEL));
	mt_for_each(mt, entry, index, ULONG_MAX) {
		if (val == top)
			MT_BUG_ON(mt, entry != xa_mk_value(LONG_MAX));
		else
			MT_BUG_ON(mt, xa_mk_value(val) != entry);

		/* Workaround for 32bit */
		if ((val << 2) < val)
			val = ULONG_MAX;
		else
			val <<= 2;

		if (val == 64) /* Skip zero entry. */
			val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
		max++;
		MT_BUG_ON(mt, max > 25);
	}
	mtree_erase_index(mt, ULONG_MAX);

	mas_reset(&mas);
	index = 17;
	entry = mt_find(mt, &index, 512);
	MT_BUG_ON(mt, xa_mk_value(256) != entry);

	mas_reset(&mas);
	index = 17;
	entry = mt_find(mt, &index, 20);
	MT_BUG_ON(mt, entry != NULL);

	/* Range check. */
	/* Insert ULONG_MAX */
	MT_BUG_ON(mt, mtree_insert_index(mt, ULONG_MAX, GFP_KERNEL));

	val = 0;
	mas_set(&mas, 0);
	mas_lock(&mas);
	mas_for_each(&mas, entry, ULONG_MAX) {
		if (val == 64)
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
		else if (val == top)
			MT_BUG_ON(mt, entry != xa_mk_value(LONG_MAX));
		else
			MT_BUG_ON(mt, xa_mk_value(val) != entry);

		/* Workaround for 32bit */
		if ((val << 2) < val)
			val = ULONG_MAX;
		else
			val <<= 2;

		/* For zero check. */
		if (!val)
			val = 1;
		mas_pause(&mas);
		mas_unlock(&mas);
		mas_lock(&mas);
	}
	mas_unlock(&mas);

	mas_set(&mas, 1048576);
	mas_lock(&mas);
	entry = mas_find(&mas, 1048576);
	mas_unlock(&mas);
	MT_BUG_ON(mas.tree, entry == NULL);

	/*
	 * Find last value.
	 * 1. get the expected value, leveraging the existence of an end entry
	 * 2. delete end entry
	 * 3. find the last value by searching for ULONG_MAX and then using
	 *    prev
	 */
	/* First, get the expected result. */
	mas_lock(&mas);
	mas_reset(&mas);
	mas.index = ULONG_MAX; /* start at max.. */
	entry = mas_find(&mas, ULONG_MAX);
	entry = mas_prev(&mas, 0);
	index = mas.index;
	last = mas.last;

	/* Erase the last entry. */
	mas_reset(&mas);
	mas.index = ULONG_MAX;
	mas.last = ULONG_MAX;
	mas_erase(&mas);

	/* Get the previous value from MAS_START */
	mas_reset(&mas);
	entry2 = mas_prev(&mas, 0);

	/* Check results. */
	MT_BUG_ON(mt, entry != entry2);
	MT_BUG_ON(mt, index != mas.index);
	MT_BUG_ON(mt, last != mas.last);

	mas.node = MAS_NONE;
	mas.index = ULONG_MAX;
	mas.last = ULONG_MAX;
	entry2 = mas_prev(&mas, 0);
	MT_BUG_ON(mt, entry != entry2);

	mas_set(&mas, 0);
	MT_BUG_ON(mt, mas_prev(&mas, 0) != NULL);

	mas_unlock(&mas);
	mtree_destroy(mt);
}

static noinline void __init check_find_2(struct maple_tree *mt)
{
	unsigned long i, j;
	void *entry;

	MA_STATE(mas, mt, 0, 0);
	rcu_read_lock();
	mas_for_each(&mas, entry, ULONG_MAX)
		MT_BUG_ON(mt, true);
	rcu_read_unlock();

	for (i = 0; i < 256; i++) {
		mtree_insert_index(mt, i, GFP_KERNEL);
		j = 0;
		mas_set(&mas, 0);
		rcu_read_lock();
		mas_for_each(&mas, entry, ULONG_MAX) {
			MT_BUG_ON(mt, entry != xa_mk_value(j));
			j++;
		}
		rcu_read_unlock();
		MT_BUG_ON(mt, j != i + 1);
	}

	for (i = 0; i < 256; i++) {
		mtree_erase_index(mt, i);
		j = i + 1;
		mas_set(&mas, 0);
		rcu_read_lock();
		mas_for_each(&mas, entry, ULONG_MAX) {
			if (xa_is_zero(entry))
				continue;

			MT_BUG_ON(mt, entry != xa_mk_value(j));
			j++;
		}
		rcu_read_unlock();
		MT_BUG_ON(mt, j != 256);
	}

	/* MT_BUG_ON(mt, !mtree_empty(mt)); */
}

#if defined(CONFIG_64BIT)
static noinline void __init check_alloc_rev_range(struct maple_tree *mt)
{
	/*
	 * Generated by:
	 * cat /proc/self/maps | awk '{print $1}' |
	 * awk -F "-" '{printf "0x%s, 0x%s, ", $1, $2}'
	 */

	static const unsigned long range[] = {
		/* Inclusive, Exclusive. */
		0x565234af2000, 0x565234af4000,
		0x565234af4000, 0x565234af9000,
		0x565234af9000, 0x565234afb000,
		0x565234afc000, 0x565234afd000,
		0x565234afd000, 0x565234afe000,
		0x565235def000, 0x565235e10000,
		0x7f36d4bfd000, 0x7f36d4ee2000,
		0x7f36d4ee2000, 0x7f36d4f04000,
		0x7f36d4f04000, 0x7f36d504c000,
		0x7f36d504c000, 0x7f36d5098000,
		0x7f36d5098000, 0x7f36d5099000,
		0x7f36d5099000, 0x7f36d509d000,
		0x7f36d509d000, 0x7f36d509f000,
		0x7f36d509f000, 0x7f36d50a5000,
		0x7f36d50b9000, 0x7f36d50db000,
		0x7f36d50db000, 0x7f36d50dc000,
		0x7f36d50dc000, 0x7f36d50fa000,
		0x7f36d50fa000, 0x7f36d5102000,
		0x7f36d5102000, 0x7f36d5103000,
		0x7f36d5103000, 0x7f36d5104000,
		0x7f36d5104000, 0x7f36d5105000,
		0x7fff5876b000, 0x7fff5878d000,
		0x7fff5878e000, 0x7fff58791000,
		0x7fff58791000, 0x7fff58793000,
	};

	static const unsigned long holes[] = {
		/*
		 * Note: start of hole is INCLUSIVE,
		 *       end of hole is EXCLUSIVE
		 *       (opposite of the above table.)
		 * Start of hole, end of hole, size of hole (+1)
		 */
		0x565234afb000, 0x565234afc000, 0x1000,
		0x565234afe000, 0x565235def000, 0x12F1000,
		0x565235e10000, 0x7f36d4bfd000, 0x28E49EDED000,
	};

	/*
	 * Each req_range record consists of 5 values:
	 * 1. min index
	 * 2. max index
	 * 3. size
	 * 4. number that should be returned
	 * 5. return value
	 */
	static const unsigned long req_range[] = {
		0x565234af9000, /* Min */
		0x7fff58791000, /* Max */
		0x1000,		/* Size */
		0x7fff5878d << 12, /* First rev hole of size 0x1000 */
		0,		/* Return value success. */

		0x0,		/* Min */
		0x565234AF0 << 12, /* Max */
		0x3000,		/* Size */
		0x565234AEE << 12, /* max - 3. */
		0,		/* Return value success. */

		0x0,		/* Min */
		-1,		/* Max */
		0x1000,		/* Size */
		562949953421311 << 12, /* First rev hole of size 0x1000 */
		0,		/* Return value success. */

		0x0,		/* Min */
		0x7F36D5109 << 12, /* Max */
		0x4000,		/* Size */
		0x7F36D5106 << 12, /* First rev hole of size 0x4000 */
		0,		/* Return value success. */

		/* Ascend test. */
		0x0,
		34148798628 << 12,
		19 << 12,
		34148797418 << 12,
		0x0,

		/* Too big test. */
		0x0,
		18446744073709551615UL,
		562915594369134UL << 12,
		0x0,
		-EBUSY,

		/* Single space test. */
		34148798725 << 12,
		34148798725 << 12,
		1 << 12,
		34148798725 << 12,
		0,
	};
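
	/*
	 * Worked example for the first record above (note added for
	 * clarity): a reverse search of [0x565234af9000, 0x7fff58791000]
	 * for 0x1000 bytes must land in the highest hole, the single free
	 * page at 0x7fff5878d000 just before the 0x7fff5878e000 mapping,
	 * hence the expected address 0x7fff5878d << 12.
	 */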

	int i, range_count = ARRAY_SIZE(range);
	int req_range_count = ARRAY_SIZE(req_range);
	unsigned long min = 0;

	MA_STATE(mas, mt, 0, 0);

	mtree_store_range(mt, MTREE_ALLOC_MAX, ULONG_MAX, XA_ZERO_ENTRY,
			  GFP_KERNEL);
#define DEBUG_REV_RANGE 0
	for (i = 0; i < range_count; i += 2) {
		/* Inclusive, Inclusive (with the -1) */

#if DEBUG_REV_RANGE
		pr_debug("\t%s: Insert %lu-%lu\n", __func__, range[i] >> 12,
			 (range[i + 1] >> 12) - 1);
#endif
		check_insert_range(mt, range[i] >> 12, (range[i + 1] >> 12) - 1,
				   xa_mk_value(range[i] >> 12), 0);
		mt_validate(mt);
	}

	mas_lock(&mas);
	for (i = 0; i < ARRAY_SIZE(holes); i += 3) {
#if DEBUG_REV_RANGE
		pr_debug("Search from %lu-%lu for gap %lu should be at %lu\n",
			 min, holes[i+1]>>12, holes[i+2]>>12,
			 holes[i] >> 12);
#endif
		MT_BUG_ON(mt, mas_empty_area_rev(&mas, min,
						 holes[i+1] >> 12,
						 holes[i+2] >> 12));
#if DEBUG_REV_RANGE
		pr_debug("Found %lu %lu\n", mas.index, mas.last);
		pr_debug("gap %lu %lu\n", (holes[i] >> 12),
			 (holes[i+1] >> 12));
#endif
		MT_BUG_ON(mt, mas.last + 1 != (holes[i+1] >> 12));
		MT_BUG_ON(mt, mas.index != (holes[i+1] >> 12) - (holes[i+2] >> 12));
		min = holes[i+1] >> 12;
		mas_reset(&mas);
	}

	mas_unlock(&mas);
	for (i = 0; i < req_range_count; i += 5) {
#if DEBUG_REV_RANGE
		pr_debug("\tReverse request %d between %lu-%lu size %lu, should get %lu\n",
			 i, req_range[i] >> 12,
			 (req_range[i + 1] >> 12),
			 req_range[i+2] >> 12,
			 req_range[i+3] >> 12);
#endif
		check_mtree_alloc_rrange(mt,
				req_range[i] >> 12, /* start */
				req_range[i+1] >> 12, /* end */
				req_range[i+2] >> 12, /* size */
				req_range[i+3] >> 12, /* expected address */
				req_range[i+4], /* expected return */
				xa_mk_value(req_range[i] >> 12)); /* pointer */
		mt_validate(mt);
	}

	mt_set_non_kernel(1);
	mtree_erase(mt, 34148798727); /* create a deleted range. */
	mtree_erase(mt, 34148798725);
	check_mtree_alloc_rrange(mt, 0, 34359052173, 210253414,
				 34148798725, 0, mt);

	mtree_destroy(mt);
}

static noinline void __init check_alloc_range(struct maple_tree *mt)
{
	/*
	 * Generated by:
	 * cat /proc/self/maps | awk '{print $1}' |
	 * awk -F "-" '{printf "0x%s, 0x%s, ", $1, $2}'
	 */

	static const unsigned long range[] = {
		/* Inclusive, Exclusive. */
		0x565234af2000, 0x565234af4000,
		0x565234af4000, 0x565234af9000,
		0x565234af9000, 0x565234afb000,
		0x565234afc000, 0x565234afd000,
		0x565234afd000, 0x565234afe000,
		0x565235def000, 0x565235e10000,
		0x7f36d4bfd000, 0x7f36d4ee2000,
		0x7f36d4ee2000, 0x7f36d4f04000,
		0x7f36d4f04000, 0x7f36d504c000,
		0x7f36d504c000, 0x7f36d5098000,
		0x7f36d5098000, 0x7f36d5099000,
		0x7f36d5099000, 0x7f36d509d000,
		0x7f36d509d000, 0x7f36d509f000,
		0x7f36d509f000, 0x7f36d50a5000,
		0x7f36d50b9000, 0x7f36d50db000,
		0x7f36d50db000, 0x7f36d50dc000,
		0x7f36d50dc000, 0x7f36d50fa000,
		0x7f36d50fa000, 0x7f36d5102000,
		0x7f36d5102000, 0x7f36d5103000,
		0x7f36d5103000, 0x7f36d5104000,
		0x7f36d5104000, 0x7f36d5105000,
		0x7fff5876b000, 0x7fff5878d000,
		0x7fff5878e000, 0x7fff58791000,
		0x7fff58791000, 0x7fff58793000,
	};
	static const unsigned long holes[] = {
		/* Start of hole, end of hole, size of hole (+1) */
		0x565234afb000, 0x565234afc000, 0x1000,
		0x565234afe000, 0x565235def000, 0x12F1000,
		0x565235e10000, 0x7f36d4bfd000, 0x28E49EDED000,
	};

	/*
	 * Each req_range record consists of 5 values:
	 * 1. min index
	 * 2. max index
	 * 3. size
	 * 4. number that should be returned
	 * 5. return value
	 */
	static const unsigned long req_range[] = {
		0x565234af9000, /* Min */
		0x7fff58791000, /* Max */
		0x1000,		/* Size */
		0x565234afb000, /* First hole in our data of size 0x1000. */
		0,		/* Return value success. */

		0x0,		/* Min */
		0x7fff58791000, /* Max */
		0x1F00,		/* Size */
		0x0,		/* First hole in our data large enough for 0x1F00. */
		0,		/* Return value success. */

		/* Test ascend. */
		34148797436 << 12, /* Min */
		0x7fff587AF000,	/* Max */
		0x3000,		/* Size */
		34148798629 << 12, /* Expected location */
		0,		/* Return value success. */

		/* Test failing. */
		34148798623 << 12, /* Min */
		34148798683 << 12, /* Max */
		0x15000,	/* Size */
		0,		/* Expected location */
		-EBUSY,		/* Return value failed. */

		/* Test filling entire gap. */
		34148798623 << 12, /* Min */
		0x7fff587AF000,	/* Max */
		0x10000,	/* Size */
		34148798632 << 12, /* Expected location */
		0,		/* Return value success. */

		/* Test walking off the end of root. */
		0,		/* Min */
		-1,		/* Max */
		-1,		/* Size */
		0,		/* Expected location */
		-EBUSY,		/* Return value failure. */

		/* Test looking for too large a hole across entire range. */
		0,		/* Min */
		-1,		/* Max */
		4503599618982063UL << 12, /* Size */
		34359052178 << 12, /* Expected location */
		-EBUSY,		/* Return failure. */

		/* Test a single entry. */
		34148798648 << 12, /* Min */
		34148798648 << 12, /* Max */
		4096,		/* Size of 1 */
		34148798648 << 12, /* Location is the same as min/max */
		0,		/* Success */
	};
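
	/*
	 * Reading the table above: -1 stands for ULONG_MAX, so "Min 0,
	 * Max -1" spans the whole index space (in pages, after the >> 12
	 * shift below); the second record expects address 0 because forward
	 * allocation returns the lowest hole that fits.
	 */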
	int i, range_count = ARRAY_SIZE(range);
	int req_range_count = ARRAY_SIZE(req_range);
	unsigned long min = 0x565234af2000;
	MA_STATE(mas, mt, 0, 0);

	mtree_store_range(mt, MTREE_ALLOC_MAX, ULONG_MAX, XA_ZERO_ENTRY,
			  GFP_KERNEL);
	for (i = 0; i < range_count; i += 2) {
#define DEBUG_ALLOC_RANGE 0
#if DEBUG_ALLOC_RANGE
		pr_debug("\tInsert %lu-%lu\n", range[i] >> 12,
			 (range[i + 1] >> 12) - 1);
		mt_dump(mt, mt_dump_hex);
#endif
		check_insert_range(mt, range[i] >> 12, (range[i + 1] >> 12) - 1,
				   xa_mk_value(range[i] >> 12), 0);
		mt_validate(mt);
	}

	mas_lock(&mas);
	for (i = 0; i < ARRAY_SIZE(holes); i += 3) {
#if DEBUG_ALLOC_RANGE
		pr_debug("\tGet empty %lu-%lu size %lu (%lx-%lx)\n", min >> 12,
			 holes[i+1] >> 12, holes[i+2] >> 12,
			 min, holes[i+1]);
#endif
		MT_BUG_ON(mt, mas_empty_area(&mas, min >> 12,
					     holes[i+1] >> 12,
					     holes[i+2] >> 12));
		MT_BUG_ON(mt, mas.index != holes[i] >> 12);
		min = holes[i+1];
		mas_reset(&mas);
	}
	mas_unlock(&mas);
	for (i = 0; i < req_range_count; i += 5) {
#if DEBUG_ALLOC_RANGE
		pr_debug("\tTest %d: %lu-%lu size %lu expected %lu (%lu-%lu)\n",
			 i/5, req_range[i] >> 12, req_range[i + 1] >> 12,
			 req_range[i + 2] >> 12, req_range[i + 3] >> 12,
			 req_range[i], req_range[i+1]);
#endif
		check_mtree_alloc_range(mt,
				req_range[i] >> 12, /* start */
				req_range[i+1] >> 12, /* end */
				req_range[i+2] >> 12, /* size */
				req_range[i+3] >> 12, /* expected address */
				req_range[i+4], /* expected return */
				xa_mk_value(req_range[i] >> 12)); /* pointer */
		mt_validate(mt);
#if DEBUG_ALLOC_RANGE
		mt_dump(mt, mt_dump_hex);
#endif
	}

	mtree_destroy(mt);
}
#endif

static noinline void __init check_ranges(struct maple_tree *mt)
{
	int i, val, val2;
	static const unsigned long r[] = {
		10, 15,
		20, 25,
		17, 22, /* Overlaps previous range. */
		9, 1000, /* Huge. */
		100, 200,
		45, 168,
		118, 128,
	};

	MT_BUG_ON(mt, !mtree_empty(mt));
	check_insert_range(mt, r[0], r[1], xa_mk_value(r[0]), 0);
	check_insert_range(mt, r[2], r[3], xa_mk_value(r[2]), 0);
	check_insert_range(mt, r[4], r[5], xa_mk_value(r[4]), -EEXIST);
	MT_BUG_ON(mt, !mt_height(mt));
	/* Store */
	check_store_range(mt, r[4], r[5], xa_mk_value(r[4]), 0);
	check_store_range(mt, r[6], r[7], xa_mk_value(r[6]), 0);
	check_store_range(mt, r[8], r[9], xa_mk_value(r[8]), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);
	MT_BUG_ON(mt, mt_height(mt));

	check_seq(mt, 50, false);
	mt_set_non_kernel(4);
	check_store_range(mt, 5, 47, xa_mk_value(47), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	/* Create tree of 1-100 */
	check_seq(mt, 100, false);
	/* Store 45-168 */
	mt_set_non_kernel(10);
	check_store_range(mt, r[10], r[11], xa_mk_value(r[10]), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	/* Create tree of 1-200 */
	check_seq(mt, 200, false);
	/* Store 45-168 */
	check_store_range(mt, r[10], r[11], xa_mk_value(r[10]), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	check_seq(mt, 30, false);
	check_store_range(mt, 6, 18, xa_mk_value(6), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	/* Overwrite across multiple levels. */
	/* Create tree of 1-400 */
	check_seq(mt, 400, false);
	mt_set_non_kernel(50);
	/* Store 118-128 */
	check_store_range(mt, r[12], r[13], xa_mk_value(r[12]), 0);
	mt_set_non_kernel(50);
	mtree_test_erase(mt, 140);
	mtree_test_erase(mt, 141);
	mtree_test_erase(mt, 142);
	mtree_test_erase(mt, 143);
	mtree_test_erase(mt, 130);
	mtree_test_erase(mt, 131);
	mtree_test_erase(mt, 132);
	mtree_test_erase(mt, 133);
	mtree_test_erase(mt, 134);
	mtree_test_erase(mt, 135);
	check_load(mt, r[12], xa_mk_value(r[12]));
	check_load(mt, r[13], xa_mk_value(r[12]));
	check_load(mt, r[13] - 1, xa_mk_value(r[12]));
	check_load(mt, r[13] + 1, xa_mk_value(r[13] + 1));
	check_load(mt, 135, NULL);
	check_load(mt, 140, NULL);
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	/* Overwrite multiple levels at the end of the tree (slot 7) */
	mt_set_non_kernel(50);
	check_seq(mt, 400, false);
	check_store_range(mt, 353, 361, xa_mk_value(353), 0);
	check_store_range(mt, 347, 352, xa_mk_value(347), 0);

	check_load(mt, 346, xa_mk_value(346));
	for (i = 347; i <= 352; i++)
		check_load(mt, i, xa_mk_value(347));
	for (i = 353; i <= 361; i++)
		check_load(mt, i, xa_mk_value(353));
	check_load(mt, 362, xa_mk_value(362));
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_set_non_kernel(50);
	check_seq(mt, 400, false);
	check_store_range(mt, 352, 364, NULL, 0);
	check_store_range(mt, 351, 363, xa_mk_value(352), 0);
	check_load(mt, 350, xa_mk_value(350));
	check_load(mt, 351, xa_mk_value(352));
	for (i = 352; i <= 363; i++)
		check_load(mt, i, xa_mk_value(352));
	check_load(mt, 364, NULL);
	check_load(mt, 365, xa_mk_value(365));
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_set_non_kernel(5);
	check_seq(mt, 400, false);
	check_store_range(mt, 352, 364, NULL, 0);
	check_store_range(mt, 351, 364, xa_mk_value(352), 0);
	check_load(mt, 350, xa_mk_value(350));
	check_load(mt, 351, xa_mk_value(352));
	for (i = 352; i <= 364; i++)
		check_load(mt, i, xa_mk_value(352));
	check_load(mt, 365, xa_mk_value(365));
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_set_non_kernel(50);
	check_seq(mt, 400, false);
	check_store_range(mt, 362, 367, xa_mk_value(362), 0);
	check_store_range(mt, 353, 361, xa_mk_value(353), 0);
	mt_set_non_kernel(0);
	mt_validate(mt);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);
	/*
	 * Interesting cases:
	 * 1. Overwrite the end of a node and end in the first entry of the
	 *    next node.
	 * 2. Split a single range.
	 * 3. Overwrite the start of a range.
	 * 4. Overwrite the end of a range.
	 * 5. Overwrite the entire range.
	 * 6. Overwrite a range that causes multiple parent nodes to be
	 *    combined.
	 * 7. Overwrite a range that causes multiple parent nodes and part of
	 *    root to be combined.
	 * 8. Overwrite the whole tree.
	 * 9. Try to overwrite the zero entry of an alloc tree.
	 * 10. Write a range larger than a node's current pivot.
1131 */
1132
1133 mt_set_non_kernel(50);
1134 for (i = 0; i <= 500; i++) {
1135 val = i*5;
1136 val2 = (i+1)*5;
1137 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1138 }
1139 check_store_range(mt, 2400, 2400, xa_mk_value(2400), 0);
1140 check_store_range(mt, 2411, 2411, xa_mk_value(2411), 0);
1141 check_store_range(mt, 2412, 2412, xa_mk_value(2412), 0);
1142 check_store_range(mt, 2396, 2400, xa_mk_value(4052020), 0);
1143 check_store_range(mt, 2402, 2402, xa_mk_value(2402), 0);
1144 mtree_destroy(mt);
1145 mt_set_non_kernel(0);
1146
1147 mt_set_non_kernel(50);
1148 for (i = 0; i <= 500; i++) {
1149 val = i*5;
1150 val2 = (i+1)*5;
1151 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1152 }
1153 check_store_range(mt, 2422, 2422, xa_mk_value(2422), 0);
1154 check_store_range(mt, 2424, 2424, xa_mk_value(2424), 0);
1155 check_store_range(mt, 2425, 2425, xa_mk_value(2), 0);
1156 check_store_range(mt, 2460, 2470, NULL, 0);
1157 check_store_range(mt, 2435, 2460, xa_mk_value(2435), 0);
1158 check_store_range(mt, 2461, 2470, xa_mk_value(2461), 0);
1159 mt_set_non_kernel(0);
1160 MT_BUG_ON(mt, !mt_height(mt));
1161 mtree_destroy(mt);
1162
1163 /* Check in-place modifications */
1164 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1165 /* Append to the start of last range */
1166 mt_set_non_kernel(50);
1167 for (i = 0; i <= 500; i++) {
1168 val = i * 5 + 1;
1169 val2 = val + 4;
1170 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1171 }
1172
1173 /* Append to the last range without touching any boundaries */
1174 for (i = 0; i < 10; i++) {
1175 val = val2 + 5;
1176 val2 = val + 4;
1177 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1178 }
1179
1180 /* Append to the end of last range */
1181 val = val2;
1182 for (i = 0; i < 10; i++) {
1183 val += 5;
1184 MT_BUG_ON(mt, mtree_test_store_range(mt, val, ULONG_MAX,
1185 xa_mk_value(val)) != 0);
1186 }
1187
1188 /* Overwriting the range and over a part of the next range */
1189 for (i = 10; i < 30; i += 2) {
1190 val = i * 5 + 1;
1191 val2 = val + 5;
1192 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1193 }
1194
1195 /* Overwriting a part of the range and over the next range */
1196 for (i = 50; i < 70; i += 2) {
1197 val2 = i * 5;
1198 val = val2 - 5;
1199 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1200 }
1201
1202 /*
1203 * Expand the range, only partially overwriting the previous and
1204 * next ranges
1205 */
1206 for (i = 100; i < 130; i += 3) {
1207 val = i * 5 - 5;
1208 val2 = i * 5 + 1;
1209 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1210 }
1211
1212 /*
1213 * Expand the range, only partially overwriting the previous and
1214 * next ranges, in RCU mode
1215 */
1216 mt_set_in_rcu(mt);
1217 for (i = 150; i < 180; i += 3) {
1218 val = i * 5 - 5;
1219 val2 = i * 5 + 1;
1220 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1221 }
1222
1223 MT_BUG_ON(mt, !mt_height(mt));
1224 mt_validate(mt);
1225 mt_set_non_kernel(0);
1226 mtree_destroy(mt);
1227
1228 /* Test rebalance gaps */
1229 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1230 mt_set_non_kernel(50);
1231 for (i = 0; i <= 50; i++) {
1232 val = i*10;
1233 val2 = (i+1)*10;
1234 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1235 }
1236 check_store_range(mt, 161, 161, xa_mk_value(161), 0);
1237 check_store_range(mt, 162, 162, xa_mk_value(162), 0);
1238 check_store_range(mt, 163, 163, xa_mk_value(163), 0);
1239 check_store_range(mt, 240, 249, NULL, 0);
1240 mtree_erase(mt, 200);
1241 mtree_erase(mt, 210);
1242 mtree_erase(mt, 220);
1243 mtree_erase(mt, 230);
1244 mt_set_non_kernel(0);
1245 MT_BUG_ON(mt, !mt_height(mt));
1246 mtree_destroy(mt);
1247
1248 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1249 for (i = 0; i <= 500; i++) {
1250 val = i*10;
1251 val2 = (i+1)*10;
1252 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1253 }
1254 check_store_range(mt, 4600, 4959, xa_mk_value(1), 0);
1255 mt_validate(mt);
1256 MT_BUG_ON(mt, !mt_height(mt));
1257 mtree_destroy(mt);
1258
1259 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1260 for (i = 0; i <= 500; i++) {
1261 val = i*10;
1262 val2 = (i+1)*10;
1263 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1264 }
1265 check_store_range(mt, 4811, 4811, xa_mk_value(4811), 0);
1266 check_store_range(mt, 4812, 4812, xa_mk_value(4812), 0);
1267 check_store_range(mt, 4861, 4861, xa_mk_value(4861), 0);
1268 check_store_range(mt, 4862, 4862, xa_mk_value(4862), 0);
1269 check_store_range(mt, 4842, 4849, NULL, 0);
1270 mt_validate(mt);
1271 MT_BUG_ON(mt, !mt_height(mt));
1272 mtree_destroy(mt);
1273
1274 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1275 for (i = 0; i <= 1300; i++) {
1276 val = i*10;
1277 val2 = (i+1)*10;
1278 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1279 MT_BUG_ON(mt, mt_height(mt) >= 4);
1280 }
1281 /* Cause a 3 child split all the way up the tree. */
1282 for (i = 5; i < 215; i += 10)
1283 check_store_range(mt, 11450 + i, 11450 + i + 1, NULL, 0);
1284 for (i = 5; i < 65; i += 10)
1285 check_store_range(mt, 11770 + i, 11770 + i + 1, NULL, 0);
1286
1287 MT_BUG_ON(mt, mt_height(mt) >= 4);
1288 for (i = 5; i < 45; i += 10)
1289 check_store_range(mt, 11700 + i, 11700 + i + 1, NULL, 0);
1290 if (!MAPLE_32BIT)
1291 MT_BUG_ON(mt, mt_height(mt) < 4);
1292 mtree_destroy(mt);
1293
1294
1295 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1296 for (i = 0; i <= 1200; i++) {
1297 val = i*10;
1298 val2 = (i+1)*10;
1299 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1300 MT_BUG_ON(mt, mt_height(mt) >= 4);
1301 }
1302 /* Fill parents and leaves before split. */
1303 for (i = 5; i < 455; i += 10)
1304 check_store_range(mt, 7800 + i, 7800 + i + 1, NULL, 0);
1305
1306 for (i = 1; i < 16; i++)
1307 check_store_range(mt, 8185 + i, 8185 + i + 1,
1308 xa_mk_value(8185+i), 0);
1309 MT_BUG_ON(mt, mt_height(mt) >= 4);
1310 /* triple split across multiple levels. */
1311 check_store_range(mt, 8184, 8184, xa_mk_value(8184), 0);
1312 if (!MAPLE_32BIT)
1313 MT_BUG_ON(mt, mt_height(mt) != 4);
1314 }
1315
check_next_entry(struct maple_tree * mt)1316 static noinline void __init check_next_entry(struct maple_tree *mt)
1317 {
1318 void *entry = NULL;
1319 unsigned long limit = 30, i = 0;
1320 MA_STATE(mas, mt, i, i);
1321
1322 MT_BUG_ON(mt, !mtree_empty(mt));
1323
1324 check_seq(mt, limit, false);
1325 rcu_read_lock();
1326
1327 /* Check the first one and get ma_state in the correct state. */
1328 MT_BUG_ON(mt, mas_walk(&mas) != xa_mk_value(i++));
1329 for ( ; i <= limit + 1; i++) {
1330 entry = mas_next(&mas, limit);
1331 if (i > limit)
1332 MT_BUG_ON(mt, entry != NULL);
1333 else
1334 MT_BUG_ON(mt, xa_mk_value(i) != entry);
1335 }
1336 rcu_read_unlock();
1337 mtree_destroy(mt);
1338 }
1339
check_prev_entry(struct maple_tree * mt)1340 static noinline void __init check_prev_entry(struct maple_tree *mt)
1341 {
1342 unsigned long index = 16;
1343 void *value;
1344 int i;
1345
1346 MA_STATE(mas, mt, index, index);
1347
1348 MT_BUG_ON(mt, !mtree_empty(mt));
1349 check_seq(mt, 30, false);
1350
1351 rcu_read_lock();
1352 value = mas_find(&mas, ULONG_MAX);
1353 MT_BUG_ON(mt, value != xa_mk_value(index));
1354 value = mas_prev(&mas, 0);
1355 MT_BUG_ON(mt, value != xa_mk_value(index - 1));
1356 rcu_read_unlock();
1357 mtree_destroy(mt);
1358
1359 /* Check limits on prev */
1360 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1361 mas_lock(&mas);
1362 for (i = 0; i <= index; i++) {
1363 mas_set_range(&mas, i*10, i*10+5);
1364 mas_store_gfp(&mas, xa_mk_value(i), GFP_KERNEL);
1365 }
1366
1367 mas_set(&mas, 20);
1368 value = mas_walk(&mas);
1369 MT_BUG_ON(mt, value != xa_mk_value(2));
1370
1371 value = mas_prev(&mas, 19);
1372 MT_BUG_ON(mt, value != NULL);
1373
1374 mas_set(&mas, 80);
1375 value = mas_walk(&mas);
1376 MT_BUG_ON(mt, value != xa_mk_value(8));
1377
1378 value = mas_prev(&mas, 76);
1379 MT_BUG_ON(mt, value != NULL);
1380
1381 mas_unlock(&mas);
1382 }
1383
check_root_expand(struct maple_tree * mt)1384 static noinline void __init check_root_expand(struct maple_tree *mt)
1385 {
1386 MA_STATE(mas, mt, 0, 0);
1387 void *ptr;
1388
1389
1390 mas_lock(&mas);
1391 mas_set(&mas, 3);
1392 ptr = mas_walk(&mas);
1393 MT_BUG_ON(mt, mas.index != 0);
1394 MT_BUG_ON(mt, ptr != NULL);
1395 MT_BUG_ON(mt, mas.index != 0);
1396 MT_BUG_ON(mt, mas.last != ULONG_MAX);
1397
1398 ptr = &check_prev_entry;
1399 mas_set(&mas, 1);
1400 mas_store_gfp(&mas, ptr, GFP_KERNEL);
1401
1402 mas_set(&mas, 0);
1403 ptr = mas_walk(&mas);
1404 MT_BUG_ON(mt, ptr != NULL);
1405
1406 mas_set(&mas, 1);
1407 ptr = mas_walk(&mas);
1408 MT_BUG_ON(mt, ptr != &check_prev_entry);
1409
1410 mas_set(&mas, 2);
1411 ptr = mas_walk(&mas);
1412 MT_BUG_ON(mt, ptr != NULL);
1413 mas_unlock(&mas);
1414 mtree_destroy(mt);
1415
1416
1417 mt_init_flags(mt, 0);
1418 mas_lock(&mas);
1419
1420 mas_set(&mas, 0);
1421 ptr = &check_prev_entry;
1422 mas_store_gfp(&mas, ptr, GFP_KERNEL);
1423
1424 mas_set(&mas, 5);
1425 ptr = mas_walk(&mas);
1426 MT_BUG_ON(mt, ptr != NULL);
1427 MT_BUG_ON(mt, mas.index != 1);
1428 MT_BUG_ON(mt, mas.last != ULONG_MAX);
1429
1430 mas_set_range(&mas, 0, 100);
1431 ptr = mas_walk(&mas);
1432 MT_BUG_ON(mt, ptr != &check_prev_entry);
1433 MT_BUG_ON(mt, mas.last != 0);
1434 mas_unlock(&mas);
1435 mtree_destroy(mt);
1436
1437 mt_init_flags(mt, 0);
1438 mas_lock(&mas);
1439
1440 mas_set(&mas, 0);
1441 ptr = (void *)((unsigned long) check_prev_entry | 1UL);
1442 mas_store_gfp(&mas, ptr, GFP_KERNEL);
1443 ptr = mas_next(&mas, ULONG_MAX);
1444 MT_BUG_ON(mt, ptr != NULL);
1445 MT_BUG_ON(mt, (mas.index != 1) && (mas.last != ULONG_MAX));
1446
1447 mas_set(&mas, 1);
1448 ptr = mas_prev(&mas, 0);
1449 MT_BUG_ON(mt, (mas.index != 0) && (mas.last != 0));
1450 MT_BUG_ON(mt, ptr != (void *)((unsigned long) check_prev_entry | 1UL));
1451
1452 mas_unlock(&mas);
1453
1454 mtree_destroy(mt);
1455
1456 mt_init_flags(mt, 0);
1457 mas_lock(&mas);
1458 mas_set(&mas, 0);
1459 ptr = (void *)((unsigned long) check_prev_entry | 2UL);
1460 mas_store_gfp(&mas, ptr, GFP_KERNEL);
1461 ptr = mas_next(&mas, ULONG_MAX);
1462 MT_BUG_ON(mt, ptr != NULL);
1463 MT_BUG_ON(mt, (mas.index != ULONG_MAX) && (mas.last != ULONG_MAX));
1464
1465 mas_set(&mas, 1);
1466 ptr = mas_prev(&mas, 0);
1467 MT_BUG_ON(mt, (mas.index != 0) && (mas.last != 0));
1468 MT_BUG_ON(mt, ptr != (void *)((unsigned long) check_prev_entry | 2UL));
1469
1470
1471 mas_unlock(&mas);
1472 }
1473
check_gap_combining(struct maple_tree * mt)1474 static noinline void __init check_gap_combining(struct maple_tree *mt)
1475 {
1476 struct maple_enode *mn1, *mn2;
1477 void *entry;
1478 unsigned long singletons = 100;
1479 static const unsigned long *seq100;
1480 static const unsigned long seq100_64[] = {
1481 /* 0-5 */
1482 74, 75, 76,
1483 50, 100, 2,
1484
1485 /* 6-12 */
1486 44, 45, 46, 43,
1487 20, 50, 3,
1488
1489 /* 13-20*/
1490 80, 81, 82,
1491 76, 2, 79, 85, 4,
1492 };
1493
1494 static const unsigned long seq100_32[] = {
1495 /* 0-5 */
1496 61, 62, 63,
1497 50, 100, 2,
1498
1499 /* 6-12 */
1500 31, 32, 33, 30,
1501 20, 50, 3,
1502
1503 /* 13-20*/
1504 80, 81, 82,
1505 76, 2, 79, 85, 4,
1506 };
1507
1508 static const unsigned long seq2000[] = {
1509 1152, 1151,
1510 1100, 1200, 2,
1511 };
1512 static const unsigned long seq400[] = {
1513 286, 318,
1514 256, 260, 266, 270, 275, 280, 290, 398,
1515 286, 310,
1516 };
1517
1518 unsigned long index;
1519
1520 MA_STATE(mas, mt, 0, 0);
1521
1522 if (MAPLE_32BIT)
1523 seq100 = seq100_32;
1524 else
1525 seq100 = seq100_64;
1526
1527 index = seq100[0];
1528 mas_set(&mas, index);
1529 MT_BUG_ON(mt, !mtree_empty(mt));
1530 check_seq(mt, singletons, false); /* create 100 singletons. */
1531
1532 mt_set_non_kernel(1);
1533 mtree_test_erase(mt, seq100[2]);
1534 check_load(mt, seq100[2], NULL);
1535 mtree_test_erase(mt, seq100[1]);
1536 check_load(mt, seq100[1], NULL);
1537
1538 rcu_read_lock();
1539 entry = mas_find(&mas, ULONG_MAX);
1540 MT_BUG_ON(mt, entry != xa_mk_value(index));
1541 mn1 = mas.node;
1542 mas_next(&mas, ULONG_MAX);
1543 entry = mas_next(&mas, ULONG_MAX);
1544 MT_BUG_ON(mt, entry != xa_mk_value(index + 4));
1545 mn2 = mas.node;
1546 MT_BUG_ON(mt, mn1 == mn2); /* test the test. */
1547
1548 /*
1549 * At this point, there is a gap of 2 at index + 1 between seq100[3] and
1550 * seq100[4]. Search for the gap.
1551 */
1552 mt_set_non_kernel(1);
1553 mas_reset(&mas);
1554 MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[3], seq100[4],
1555 seq100[5]));
1556 MT_BUG_ON(mt, mas.index != index + 1);
1557 rcu_read_unlock();
1558
1559 mtree_test_erase(mt, seq100[6]);
1560 check_load(mt, seq100[6], NULL);
1561 mtree_test_erase(mt, seq100[7]);
1562 check_load(mt, seq100[7], NULL);
1563 mtree_test_erase(mt, seq100[8]);
1564 index = seq100[9];
1565
1566 rcu_read_lock();
1567 mas.index = index;
1568 mas.last = index;
1569 mas_reset(&mas);
1570 entry = mas_find(&mas, ULONG_MAX);
1571 MT_BUG_ON(mt, entry != xa_mk_value(index));
1572 mn1 = mas.node;
1573 entry = mas_next(&mas, ULONG_MAX);
1574 MT_BUG_ON(mt, entry != xa_mk_value(index + 4));
1575 mas_next(&mas, ULONG_MAX); /* go to the next entry. */
1576 mn2 = mas.node;
1577 MT_BUG_ON(mt, mn1 == mn2); /* test the next entry is in the next node. */
1578
1579 /*
1580 * At this point, there is a gap of 3 at seq100[6]. Find it by
1581 * searching 20 - 50 for size 3.
1582 */
1583 mas_reset(&mas);
1584 MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[10], seq100[11],
1585 seq100[12]));
1586 MT_BUG_ON(mt, mas.index != seq100[6]);
1587 rcu_read_unlock();
1588
1589 mt_set_non_kernel(1);
1590 mtree_store(mt, seq100[13], NULL, GFP_KERNEL);
1591 check_load(mt, seq100[13], NULL);
1592 check_load(mt, seq100[14], xa_mk_value(seq100[14]));
1593 mtree_store(mt, seq100[14], NULL, GFP_KERNEL);
1594 check_load(mt, seq100[13], NULL);
1595 check_load(mt, seq100[14], NULL);
1596
1597 mas_reset(&mas);
1598 rcu_read_lock();
1599 MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[16], seq100[15],
1600 seq100[17]));
1601 MT_BUG_ON(mt, mas.index != seq100[13]);
1602 mt_validate(mt);
1603 rcu_read_unlock();
1604
1605 /*
1606 * *DEPRECATED: no retries anymore* Test retry entry in the start of a
1607 * gap.
1608 */
1609 mt_set_non_kernel(2);
1610 mtree_test_store_range(mt, seq100[18], seq100[14], NULL);
1611 mtree_test_erase(mt, seq100[15]);
1612 mas_reset(&mas);
1613 rcu_read_lock();
1614 MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[16], seq100[19],
1615 seq100[20]));
1616 rcu_read_unlock();
1617 MT_BUG_ON(mt, mas.index != seq100[18]);
1618 mt_validate(mt);
1619 mtree_destroy(mt);
1620
1621 /* seq 2000 tests are for multi-level tree gaps */
1622 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1623 check_seq(mt, 2000, false);
1624 mt_set_non_kernel(1);
1625 mtree_test_erase(mt, seq2000[0]);
1626 mtree_test_erase(mt, seq2000[1]);
1627
1628 mt_set_non_kernel(2);
1629 mas_reset(&mas);
1630 rcu_read_lock();
1631 MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq2000[2], seq2000[3],
1632 seq2000[4]));
1633 MT_BUG_ON(mt, mas.index != seq2000[1]);
1634 rcu_read_unlock();
1635 mt_validate(mt);
1636 mtree_destroy(mt);
1637
1638 /* seq 400 tests rebalancing over two levels. */
1639 mt_set_non_kernel(99);
1640 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1641 check_seq(mt, 400, false);
1642 mtree_test_store_range(mt, seq400[0], seq400[1], NULL);
1643 mt_set_non_kernel(0);
1644 mtree_destroy(mt);
1645
1646 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1647 check_seq(mt, 400, false);
1648 mt_set_non_kernel(50);
1649 mtree_test_store_range(mt, seq400[2], seq400[9],
1650 xa_mk_value(seq400[2]));
1651 mtree_test_store_range(mt, seq400[3], seq400[9],
1652 xa_mk_value(seq400[3]));
1653 mtree_test_store_range(mt, seq400[4], seq400[9],
1654 xa_mk_value(seq400[4]));
1655 mtree_test_store_range(mt, seq400[5], seq400[9],
1656 xa_mk_value(seq400[5]));
1657 mtree_test_store_range(mt, seq400[0], seq400[9],
1658 xa_mk_value(seq400[0]));
1659 mtree_test_store_range(mt, seq400[6], seq400[9],
1660 xa_mk_value(seq400[6]));
1661 mtree_test_store_range(mt, seq400[7], seq400[9],
1662 xa_mk_value(seq400[7]));
1663 mtree_test_store_range(mt, seq400[8], seq400[9],
1664 xa_mk_value(seq400[8]));
1665 mtree_test_store_range(mt, seq400[10], seq400[11],
1666 xa_mk_value(seq400[10]));
1667 mt_validate(mt);
1668 mt_set_non_kernel(0);
1669 mtree_destroy(mt);
1670 }
check_node_overwrite(struct maple_tree * mt)1671 static noinline void __init check_node_overwrite(struct maple_tree *mt)
1672 {
1673 int i, max = 4000;
1674
1675 for (i = 0; i < max; i++)
1676 mtree_test_store_range(mt, i*100, i*100 + 50, xa_mk_value(i*100));
1677
1678 mtree_test_store_range(mt, 319951, 367950, NULL);
1679 /*mt_dump(mt, mt_dump_dec); */
1680 mt_validate(mt);
1681 }
1682
1683 #if defined(BENCH_SLOT_STORE)
bench_slot_store(struct maple_tree * mt)1684 static noinline void __init bench_slot_store(struct maple_tree *mt)
1685 {
1686 int i, brk = 105, max = 1040, brk_start = 100, count = 20000000;
1687
1688 for (i = 0; i < max; i += 10)
1689 mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
1690
1691 for (i = 0; i < count; i++) {
1692 mtree_store_range(mt, brk, brk, NULL, GFP_KERNEL);
1693 mtree_store_range(mt, brk_start, brk, xa_mk_value(brk),
1694 GFP_KERNEL);
1695 }
1696 }
1697 #endif
1698
1699 #if defined(BENCH_NODE_STORE)
bench_node_store(struct maple_tree * mt)1700 static noinline void __init bench_node_store(struct maple_tree *mt)
1701 {
1702 int i, overwrite = 76, max = 240, count = 20000000;
1703
1704 for (i = 0; i < max; i += 10)
1705 mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
1706
1707 for (i = 0; i < count; i++) {
1708 mtree_store_range(mt, overwrite, overwrite + 15,
1709 xa_mk_value(overwrite), GFP_KERNEL);
1710
1711 overwrite += 5;
1712 if (overwrite >= 135)
1713 overwrite = 76;
1714 }
1715 }
1716 #endif
1717
1718 #if defined(BENCH_AWALK)
bench_awalk(struct maple_tree * mt)1719 static noinline void __init bench_awalk(struct maple_tree *mt)
1720 {
1721 int i, max = 2500, count = 50000000;
1722 MA_STATE(mas, mt, 1470, 1470);
1723
1724 for (i = 0; i < max; i += 10)
1725 mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
1726
1727 mtree_store_range(mt, 1470, 1475, NULL, GFP_KERNEL);
1728
1729 for (i = 0; i < count; i++) {
1730 mas_empty_area_rev(&mas, 0, 2000, 10);
1731 mas_reset(&mas);
1732 }
1733 }
1734 #endif
1735 #if defined(BENCH_WALK)
bench_walk(struct maple_tree * mt)1736 static noinline void __init bench_walk(struct maple_tree *mt)
1737 {
1738 int i, max = 2500, count = 550000000;
1739 MA_STATE(mas, mt, 1470, 1470);
1740
1741 for (i = 0; i < max; i += 10)
1742 mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
1743
1744 for (i = 0; i < count; i++) {
1745 mas_walk(&mas);
1746 mas_reset(&mas);
1747 }
1748
1749 }
1750 #endif
1751
1752 #if defined(BENCH_MT_FOR_EACH)
bench_mt_for_each(struct maple_tree * mt)1753 static noinline void __init bench_mt_for_each(struct maple_tree *mt)
1754 {
1755 int i, count = 1000000;
1756 unsigned long max = 2500, index = 0;
1757 void *entry;
1758
1759 for (i = 0; i < max; i += 5)
1760 mtree_store_range(mt, i, i + 4, xa_mk_value(i), GFP_KERNEL);
1761
1762 for (i = 0; i < count; i++) {
1763 unsigned long j = 0;
1764
1765 mt_for_each(mt, entry, index, max) {
1766 MT_BUG_ON(mt, entry != xa_mk_value(j));
1767 j += 5;
1768 }
1769
1770 index = 0;
1771 }
1772
1773 }
1774 #endif
1775
1776 #if defined(BENCH_MAS_FOR_EACH)
bench_mas_for_each(struct maple_tree * mt)1777 static noinline void __init bench_mas_for_each(struct maple_tree *mt)
1778 {
1779 int i, count = 1000000;
1780 unsigned long max = 2500;
1781 void *entry;
1782 MA_STATE(mas, mt, 0, 0);
1783
1784 for (i = 0; i < max; i += 5) {
1785 int gap = 4;
1786
1787 if (i % 30 == 0)
1788 gap = 3;
1789 mtree_store_range(mt, i, i + gap, xa_mk_value(i), GFP_KERNEL);
1790 }
1791
1792 rcu_read_lock();
1793 for (i = 0; i < count; i++) {
1794 unsigned long j = 0;
1795
1796 mas_for_each(&mas, entry, max) {
1797 MT_BUG_ON(mt, entry != xa_mk_value(j));
1798 j += 5;
1799 }
1800 mas_set(&mas, 0);
1801 }
1802 rcu_read_unlock();
1803
1804 }
1805 #endif
1806 #if defined(BENCH_MAS_PREV)
bench_mas_prev(struct maple_tree * mt)1807 static noinline void __init bench_mas_prev(struct maple_tree *mt)
1808 {
1809 int i, count = 1000000;
1810 unsigned long max = 2500;
1811 void *entry;
1812 MA_STATE(mas, mt, 0, 0);
1813
1814 for (i = 0; i < max; i += 5) {
1815 int gap = 4;
1816
1817 if (i % 30 == 0)
1818 gap = 3;
1819 mtree_store_range(mt, i, i + gap, xa_mk_value(i), GFP_KERNEL);
1820 }
1821
1822 rcu_read_lock();
1823 for (i = 0; i < count; i++) {
1824 unsigned long j = 2495;
1825
1826 mas_set(&mas, ULONG_MAX);
1827 while ((entry = mas_prev(&mas, 0)) != NULL) {
1828 MT_BUG_ON(mt, entry != xa_mk_value(j));
1829 j -= 5;
1830 }
1831 }
1832 rcu_read_unlock();
1833
1834 }
1835 #endif
1836 /* check_forking - simulate the kernel forking sequence with the tree. */
check_forking(struct maple_tree * mt)1837 static noinline void __init check_forking(struct maple_tree *mt)
1838 {
1839
1840 struct maple_tree newmt;
1841 int i, nr_entries = 134;
1842 void *val;
1843 MA_STATE(mas, mt, 0, 0);
1844 MA_STATE(newmas, mt, 0, 0);
1845 struct rw_semaphore newmt_lock;
1846
1847 init_rwsem(&newmt_lock);
1848
1849 for (i = 0; i <= nr_entries; i++)
1850 mtree_store_range(mt, i*10, i*10 + 5,
1851 xa_mk_value(i), GFP_KERNEL);
1852
1853 mt_set_non_kernel(99999);
1854 mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN);
1855 mt_set_external_lock(&newmt, &newmt_lock);
1856 newmas.tree = &newmt;
1857 mas_reset(&newmas);
1858 mas_reset(&mas);
1859 down_write(&newmt_lock);
1860 mas.index = 0;
1861 mas.last = 0;
1862 if (mas_expected_entries(&newmas, nr_entries)) {
1863 pr_err("OOM!");
1864 BUG_ON(1);
1865 }
1866 rcu_read_lock();
1867 mas_for_each(&mas, val, ULONG_MAX) {
1868 newmas.index = mas.index;
1869 newmas.last = mas.last;
1870 mas_store(&newmas, val);
1871 }
1872 rcu_read_unlock();
1873 mas_destroy(&newmas);
1874 mt_validate(&newmt);
1875 mt_set_non_kernel(0);
1876 __mt_destroy(&newmt);
1877 up_write(&newmt_lock);
1878 }
1879
check_iteration(struct maple_tree * mt)1880 static noinline void __init check_iteration(struct maple_tree *mt)
1881 {
1882 int i, nr_entries = 125;
1883 void *val;
1884 MA_STATE(mas, mt, 0, 0);
1885
1886 for (i = 0; i <= nr_entries; i++)
1887 mtree_store_range(mt, i * 10, i * 10 + 9,
1888 xa_mk_value(i), GFP_KERNEL);
1889
1890 mt_set_non_kernel(99999);
1891
1892 i = 0;
1893 mas_lock(&mas);
1894 mas_for_each(&mas, val, 925) {
1895 MT_BUG_ON(mt, mas.index != i * 10);
1896 MT_BUG_ON(mt, mas.last != i * 10 + 9);
1897 /* Overwrite end of entry 92 */
1898 if (i == 92) {
1899 mas.index = 925;
1900 mas.last = 929;
1901 mas_store(&mas, val);
1902 }
1903 i++;
1904 }
1905 /* Ensure mas_find() gets the next value */
1906 val = mas_find(&mas, ULONG_MAX);
1907 MT_BUG_ON(mt, val != xa_mk_value(i));
1908
1909 mas_set(&mas, 0);
1910 i = 0;
1911 mas_for_each(&mas, val, 785) {
1912 MT_BUG_ON(mt, mas.index != i * 10);
1913 MT_BUG_ON(mt, mas.last != i * 10 + 9);
1914 /* Overwrite start of entry 78 */
1915 if (i == 78) {
1916 mas.index = 780;
1917 mas.last = 785;
1918 mas_store(&mas, val);
1919 } else {
1920 i++;
1921 }
1922 }
1923 val = mas_find(&mas, ULONG_MAX);
1924 MT_BUG_ON(mt, val != xa_mk_value(i));
1925
1926 mas_set(&mas, 0);
1927 i = 0;
1928 mas_for_each(&mas, val, 765) {
1929 MT_BUG_ON(mt, mas.index != i * 10);
1930 MT_BUG_ON(mt, mas.last != i * 10 + 9);
1931 /* Overwrite end of entry 76 and advance to the end */
1932 if (i == 76) {
1933 mas.index = 760;
1934 mas.last = 765;
1935 mas_store(&mas, val);
1936 }
1937 i++;
1938 }
1939 /* Make sure the next find returns the one after 765, 766-769 */
1940 val = mas_find(&mas, ULONG_MAX);
1941 MT_BUG_ON(mt, val != xa_mk_value(76));
1942 mas_unlock(&mas);
1943 mas_destroy(&mas);
1944 mt_set_non_kernel(0);
1945 }
1946
check_mas_store_gfp(struct maple_tree * mt)1947 static noinline void __init check_mas_store_gfp(struct maple_tree *mt)
1948 {
1949
1950 struct maple_tree newmt;
1951 int i, nr_entries = 135;
1952 void *val;
1953 MA_STATE(mas, mt, 0, 0);
1954 MA_STATE(newmas, mt, 0, 0);
1955
1956 for (i = 0; i <= nr_entries; i++)
1957 mtree_store_range(mt, i*10, i*10 + 5,
1958 xa_mk_value(i), GFP_KERNEL);
1959
1960 mt_set_non_kernel(99999);
1961 mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
1962 newmas.tree = &newmt;
1963 rcu_read_lock();
1964 mas_lock(&newmas);
1965 mas_reset(&newmas);
1966 mas_set(&mas, 0);
1967 mas_for_each(&mas, val, ULONG_MAX) {
1968 newmas.index = mas.index;
1969 newmas.last = mas.last;
1970 mas_store_gfp(&newmas, val, GFP_KERNEL);
1971 }
1972 mas_unlock(&newmas);
1973 rcu_read_unlock();
1974 mt_validate(&newmt);
1975 mt_set_non_kernel(0);
1976 mtree_destroy(&newmt);
1977 }
1978
1979 #if defined(BENCH_FORK)
1980 static noinline void __init bench_forking(struct maple_tree *mt)
1981 {
1983 struct maple_tree newmt;
1984 int i, nr_entries = 134, nr_fork = 80000;
1985 void *val;
1986 MA_STATE(mas, mt, 0, 0);
1987 MA_STATE(newmas, mt, 0, 0);
1988 struct rw_semaphore newmt_lock;
1989
1990 init_rwsem(&newmt_lock);
1992
1993 for (i = 0; i <= nr_entries; i++)
1994 mtree_store_range(mt, i*10, i*10 + 5,
1995 xa_mk_value(i), GFP_KERNEL);
1996
1997 for (i = 0; i < nr_fork; i++) {
1998 mt_set_non_kernel(99999);
1999 mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN);
mt_set_external_lock(&newmt, &newmt_lock);
2000 newmas.tree = &newmt;
2001 mas_reset(&newmas);
2002 mas_reset(&mas);
2003 mas.index = 0;
2004 mas.last = 0;
2005 rcu_read_lock();
2006 down_write(&newmt_lock);
2007 if (mas_expected_entries(&newmas, nr_entries)) {
2008 pr_err("OOM!");
2009 BUG_ON(1);
2010 }
2011 mas_for_each(&mas, val, ULONG_MAX) {
2012 newmas.index = mas.index;
2013 newmas.last = mas.last;
2014 mas_store(&newmas, val);
2015 }
2016 mas_destroy(&newmas);
2017 rcu_read_unlock();
2018 mt_validate(&newmt);
2019 mt_set_non_kernel(0);
2020 __mt_destroy(&newmt);
2021 up_write(&newmt_lock);
2022 }
2023 }
2024 #endif
2025
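/*
 * next_prev_test() - check mas_next()/mas_prev() within a node, across node
 * boundaries and levels, and past both ends of the tree.
 */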
2026 static noinline void __init next_prev_test(struct maple_tree *mt)
2027 {
2028 int i, nr_entries;
2029 void *val;
2030 MA_STATE(mas, mt, 0, 0);
2031 struct maple_enode *mn;
2032 static const unsigned long *level2;
2033 static const unsigned long level2_64[] = { 707, 1000, 710, 715, 720,
2034 725};
2035 static const unsigned long level2_32[] = { 1747, 2000, 1750, 1755,
2036 1760, 1765};
2037 unsigned long last_index;
2038
2039 if (MAPLE_32BIT) {
2040 nr_entries = 500;
2041 level2 = level2_32;
2042 last_index = 0x138e;
2043 } else {
2044 nr_entries = 200;
2045 level2 = level2_64;
2046 last_index = 0x7d6;
2047 }
2048
2049 for (i = 0; i <= nr_entries; i++)
2050 mtree_store_range(mt, i*10, i*10 + 5,
2051 xa_mk_value(i), GFP_KERNEL);
2052
2053 mas_lock(&mas);
2054 for (i = 0; i <= nr_entries / 2; i++) {
2055 mas_next(&mas, 1000);
2056 if (mas_is_none(&mas))
2057 break;
2059 }
2060 mas_reset(&mas);
2061 mas_set(&mas, 0);
2062 i = 0;
2063 mas_for_each(&mas, val, 1000) {
2064 i++;
2065 }
2066
2067 mas_reset(&mas);
2068 mas_set(&mas, 0);
2069 i = 0;
2070 mas_for_each(&mas, val, 1000) {
2071 mas_pause(&mas);
2072 i++;
2073 }
2074
2075 /*
2076 * 680 - 685 = 0x61a00001930c
2077 * 686 - 689 = NULL;
2078 * 690 - 695 = 0x61a00001930c
2079 * Check simple next/prev
2080 */
2081 mas_set(&mas, 686);
2082 val = mas_walk(&mas);
2083 MT_BUG_ON(mt, val != NULL);
2084
2085 val = mas_next(&mas, 1000);
2086 MT_BUG_ON(mt, val != xa_mk_value(690 / 10));
2087 MT_BUG_ON(mt, mas.index != 690);
2088 MT_BUG_ON(mt, mas.last != 695);
2089
2090 val = mas_prev(&mas, 0);
2091 MT_BUG_ON(mt, val != xa_mk_value(680 / 10));
2092 MT_BUG_ON(mt, mas.index != 680);
2093 MT_BUG_ON(mt, mas.last != 685);
2094
2095 val = mas_next(&mas, 1000);
2096 MT_BUG_ON(mt, val != xa_mk_value(690 / 10));
2097 MT_BUG_ON(mt, mas.index != 690);
2098 MT_BUG_ON(mt, mas.last != 695);
2099
2100 val = mas_next(&mas, 1000);
2101 MT_BUG_ON(mt, val != xa_mk_value(700 / 10));
2102 MT_BUG_ON(mt, mas.index != 700);
2103 MT_BUG_ON(mt, mas.last != 705);
2104
2105 /* Check across node boundaries of the tree */
2106 mas_set(&mas, 70);
2107 val = mas_walk(&mas);
2108 MT_BUG_ON(mt, val != xa_mk_value(70 / 10));
2109 MT_BUG_ON(mt, mas.index != 70);
2110 MT_BUG_ON(mt, mas.last != 75);
2111
2112 val = mas_next(&mas, 1000);
2113 MT_BUG_ON(mt, val != xa_mk_value(80 / 10));
2114 MT_BUG_ON(mt, mas.index != 80);
2115 MT_BUG_ON(mt, mas.last != 85);
2116
2117 val = mas_prev(&mas, 70);
2118 MT_BUG_ON(mt, val != xa_mk_value(70 / 10));
2119 MT_BUG_ON(mt, mas.index != 70);
2120 MT_BUG_ON(mt, mas.last != 75);
2121
2122 /* Check across two levels of the tree */
2123 mas_reset(&mas);
2124 mas_set(&mas, level2[0]);
2125 val = mas_walk(&mas);
2126 MT_BUG_ON(mt, val != NULL);
2127 val = mas_next(&mas, level2[1]);
2128 MT_BUG_ON(mt, val != xa_mk_value(level2[2] / 10));
2129 MT_BUG_ON(mt, mas.index != level2[2]);
2130 MT_BUG_ON(mt, mas.last != level2[3]);
2131 mn = mas.node;
2132
2133 val = mas_next(&mas, level2[1]);
2134 MT_BUG_ON(mt, val != xa_mk_value(level2[4] / 10));
2135 MT_BUG_ON(mt, mas.index != level2[4]);
2136 MT_BUG_ON(mt, mas.last != level2[5]);
2137 MT_BUG_ON(mt, mn == mas.node);
2138
2139 val = mas_prev(&mas, 0);
2140 MT_BUG_ON(mt, val != xa_mk_value(level2[2] / 10));
2141 MT_BUG_ON(mt, mas.index != level2[2]);
2142 MT_BUG_ON(mt, mas.last != level2[3]);
2143
2144 /* Check running off the end and back on */
2145 mas_set(&mas, nr_entries * 10);
2146 val = mas_walk(&mas);
2147 MT_BUG_ON(mt, val != xa_mk_value(nr_entries));
2148 MT_BUG_ON(mt, mas.index != (nr_entries * 10));
2149 MT_BUG_ON(mt, mas.last != (nr_entries * 10 + 5));
2150
2151 val = mas_next(&mas, ULONG_MAX);
2152 MT_BUG_ON(mt, val != NULL);
2153 MT_BUG_ON(mt, mas.index != last_index);
2154 MT_BUG_ON(mt, mas.last != ULONG_MAX);
2155
2156 val = mas_prev(&mas, 0);
2157 MT_BUG_ON(mt, val != xa_mk_value(nr_entries));
2158 MT_BUG_ON(mt, mas.index != (nr_entries * 10));
2159 MT_BUG_ON(mt, mas.last != (nr_entries * 10 + 5));
2160
2161 /* Check running off the start and back on */
2162 mas_reset(&mas);
2163 mas_set(&mas, 10);
2164 val = mas_walk(&mas);
2165 MT_BUG_ON(mt, val != xa_mk_value(1));
2166 MT_BUG_ON(mt, mas.index != 10);
2167 MT_BUG_ON(mt, mas.last != 15);
2168
2169 val = mas_prev(&mas, 0);
2170 MT_BUG_ON(mt, val != xa_mk_value(0));
2171 MT_BUG_ON(mt, mas.index != 0);
2172 MT_BUG_ON(mt, mas.last != 5);
2173
2174 val = mas_prev(&mas, 0);
2175 MT_BUG_ON(mt, val != NULL);
2176 MT_BUG_ON(mt, mas.index != 0);
2177 MT_BUG_ON(mt, mas.last != 5);
2178 MT_BUG_ON(mt, mas.node != MAS_UNDERFLOW);
2179
2180 mas.index = 0;
2181 mas.last = 5;
2182 mas_store(&mas, NULL);
2183 mas_reset(&mas);
2184 mas_set(&mas, 10);
2185 mas_walk(&mas);
2186
2187 val = mas_prev(&mas, 0);
2188 MT_BUG_ON(mt, val != NULL);
2189 MT_BUG_ON(mt, mas.index != 0);
2190 MT_BUG_ON(mt, mas.last != 9);
2191 mas_unlock(&mas);
2192
2193 mtree_destroy(mt);
2194
2195 mt_init(mt);
2196 mtree_store_range(mt, 0, 0, xa_mk_value(0), GFP_KERNEL);
2197 mtree_store_range(mt, 5, 5, xa_mk_value(5), GFP_KERNEL);
2198 rcu_read_lock();
2199 mas_set(&mas, 5);
2200 val = mas_prev(&mas, 4);
2201 MT_BUG_ON(mt, val != NULL);
2202 rcu_read_unlock();
2203 }
2204
2207 /* Test spanning writes that require balancing right sibling or right cousin */
2208 static noinline void __init check_spanning_relatives(struct maple_tree *mt)
2209 {
2211 unsigned long i, nr_entries = 1000;
2212
2213 for (i = 0; i <= nr_entries; i++)
2214 mtree_store_range(mt, i*10, i*10 + 5,
2215 xa_mk_value(i), GFP_KERNEL);
2216
2218 mtree_store_range(mt, 9365, 9955, NULL, GFP_KERNEL);
2219 }
2220
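/*
 * check_fuzzer() - replay operation sequences found by fuzzing; each block
 * below documents the bug it originally triggered and how it was fixed.
 */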
2221 static noinline void __init check_fuzzer(struct maple_tree *mt)
2222 {
2223 /*
2224 * 1. Causes a spanning rebalance of a single root node.
2225 * Fixed by setting the correct limit in mast_cp_to_nodes() when the
2226 * entire right side is consumed.
2227 */
2228 mtree_test_insert(mt, 88, (void *)0xb1);
2229 mtree_test_insert(mt, 84, (void *)0xa9);
2230 mtree_test_insert(mt, 2, (void *)0x5);
2231 mtree_test_insert(mt, 4, (void *)0x9);
2232 mtree_test_insert(mt, 14, (void *)0x1d);
2233 mtree_test_insert(mt, 7, (void *)0xf);
2234 mtree_test_insert(mt, 12, (void *)0x19);
2235 mtree_test_insert(mt, 18, (void *)0x25);
2236 mtree_test_store_range(mt, 8, 18, (void *)0x11);
2237 mtree_destroy(mt);
2238
2240 /*
2241 * 2. Cause a spanning rebalance of two nodes in root.
2242 * Fixed by setting mast->r->max correctly.
2243 */
2244 mt_init_flags(mt, 0);
2245 mtree_test_store(mt, 87, (void *)0xaf);
2246 mtree_test_store(mt, 0, (void *)0x1);
2247 mtree_test_load(mt, 4);
2248 mtree_test_insert(mt, 4, (void *)0x9);
2249 mtree_test_store(mt, 8, (void *)0x11);
2250 mtree_test_store(mt, 44, (void *)0x59);
2251 mtree_test_store(mt, 68, (void *)0x89);
2252 mtree_test_store(mt, 2, (void *)0x5);
2253 mtree_test_insert(mt, 43, (void *)0x57);
2254 mtree_test_insert(mt, 24, (void *)0x31);
2255 mtree_test_insert(mt, 844, (void *)0x699);
2256 mtree_test_store(mt, 84, (void *)0xa9);
2257 mtree_test_store(mt, 4, (void *)0x9);
2258 mtree_test_erase(mt, 4);
2259 mtree_test_load(mt, 5);
2260 mtree_test_erase(mt, 0);
2261 mtree_destroy(mt);
2262
2263 /*
2264 * 3. Cause a node overflow on copy
2265 * Fixed by using the correct check for node size in mas_wr_modify()
2266 * Also discovered issue with metadata setting.
2267 */
2268 mt_init_flags(mt, 0);
2269 mtree_test_store_range(mt, 0, ULONG_MAX, (void *)0x1);
2270 mtree_test_store(mt, 4, (void *)0x9);
2271 mtree_test_erase(mt, 5);
2272 mtree_test_erase(mt, 0);
2273 mtree_test_erase(mt, 4);
2274 mtree_test_store(mt, 5, (void *)0xb);
2275 mtree_test_erase(mt, 5);
2276 mtree_test_store(mt, 5, (void *)0xb);
2277 mtree_test_erase(mt, 5);
2278 mtree_test_erase(mt, 4);
2279 mtree_test_store(mt, 4, (void *)0x9);
2280 mtree_test_store(mt, 444, (void *)0x379);
2281 mtree_test_store(mt, 0, (void *)0x1);
2282 mtree_test_load(mt, 0);
2283 mtree_test_store(mt, 5, (void *)0xb);
2284 mtree_test_erase(mt, 0);
2285 mtree_destroy(mt);
2286
2287 /*
2288 * 4. A spanning store failure due to writing an incorrect pivot value at
2289 * the last slot.
2290 * Fixed by setting mast->r->max correctly in mast_cp_to_nodes()
2292 */
2293 mt_init_flags(mt, 0);
2294 mtree_test_insert(mt, 261, (void *)0x20b);
2295 mtree_test_store(mt, 516, (void *)0x409);
2296 mtree_test_store(mt, 6, (void *)0xd);
2297 mtree_test_insert(mt, 5, (void *)0xb);
2298 mtree_test_insert(mt, 1256, (void *)0x9d1);
2299 mtree_test_store(mt, 4, (void *)0x9);
2300 mtree_test_erase(mt, 1);
2301 mtree_test_store(mt, 56, (void *)0x71);
2302 mtree_test_insert(mt, 1, (void *)0x3);
2303 mtree_test_store(mt, 24, (void *)0x31);
2304 mtree_test_erase(mt, 1);
2305 mtree_test_insert(mt, 2263, (void *)0x11af);
2306 mtree_test_insert(mt, 446, (void *)0x37d);
2307 mtree_test_store_range(mt, 6, 45, (void *)0xd);
2308 mtree_test_store_range(mt, 3, 446, (void *)0x7);
2309 mtree_destroy(mt);
2310
2311 /*
2312 * 5. mas_wr_extend_null() may overflow slots.
2313 * Fix by checking against wr_mas->node_end.
2314 */
2315 mt_init_flags(mt, 0);
2316 mtree_test_store(mt, 48, (void *)0x61);
2317 mtree_test_store(mt, 3, (void *)0x7);
2318 mtree_test_load(mt, 0);
2319 mtree_test_store(mt, 88, (void *)0xb1);
2320 mtree_test_store(mt, 81, (void *)0xa3);
2321 mtree_test_insert(mt, 0, (void *)0x1);
2322 mtree_test_insert(mt, 8, (void *)0x11);
2323 mtree_test_insert(mt, 4, (void *)0x9);
2324 mtree_test_insert(mt, 2480, (void *)0x1361);
2325 mtree_test_insert(mt, ULONG_MAX,
2326 (void *)0xffffffffffffffff);
2327 mtree_test_erase(mt, ULONG_MAX);
2328 mtree_destroy(mt);
2329
2330 /*
2331 * 6. When reusing a node with an implied pivot while the node is
2332 * shrinking, old data would be left in the implied slot.
2333 * Fixed by checking the last pivot against mas->max and clearing
2334 * accordingly. This only affected the left-most node, as that node is
2335 * the only one allowed to end in NULL.
2336 */
2337 mt_init_flags(mt, 0);
2338 mtree_test_erase(mt, 3);
2339 mtree_test_insert(mt, 22, (void *)0x2d);
2340 mtree_test_insert(mt, 15, (void *)0x1f);
2341 mtree_test_load(mt, 2);
2342 mtree_test_insert(mt, 1, (void *)0x3);
2343 mtree_test_insert(mt, 1, (void *)0x3);
2344 mtree_test_insert(mt, 5, (void *)0xb);
2345 mtree_test_erase(mt, 1);
2346 mtree_test_insert(mt, 1, (void *)0x3);
2347 mtree_test_insert(mt, 4, (void *)0x9);
2348 mtree_test_insert(mt, 1, (void *)0x3);
2349 mtree_test_erase(mt, 1);
2350 mtree_test_insert(mt, 2, (void *)0x5);
2351 mtree_test_insert(mt, 1, (void *)0x3);
2352 mtree_test_erase(mt, 3);
2353 mtree_test_insert(mt, 22, (void *)0x2d);
2354 mtree_test_insert(mt, 15, (void *)0x1f);
2355 mtree_test_insert(mt, 2, (void *)0x5);
2356 mtree_test_insert(mt, 1, (void *)0x3);
2357 mtree_test_insert(mt, 8, (void *)0x11);
2358 mtree_test_load(mt, 2);
2359 mtree_test_insert(mt, 1, (void *)0x3);
2360 mtree_test_insert(mt, 1, (void *)0x3);
2361 mtree_test_store(mt, 1, (void *)0x3);
2362 mtree_test_insert(mt, 5, (void *)0xb);
2363 mtree_test_erase(mt, 1);
2364 mtree_test_insert(mt, 1, (void *)0x3);
2365 mtree_test_insert(mt, 4, (void *)0x9);
2366 mtree_test_insert(mt, 1, (void *)0x3);
2367 mtree_test_erase(mt, 1);
2368 mtree_test_insert(mt, 2, (void *)0x5);
2369 mtree_test_insert(mt, 1, (void *)0x3);
2370 mtree_test_erase(mt, 3);
2371 mtree_test_insert(mt, 22, (void *)0x2d);
2372 mtree_test_insert(mt, 15, (void *)0x1f);
2373 mtree_test_insert(mt, 2, (void *)0x5);
2374 mtree_test_insert(mt, 1, (void *)0x3);
2375 mtree_test_insert(mt, 8, (void *)0x11);
2376 mtree_test_insert(mt, 12, (void *)0x19);
2377 mtree_test_erase(mt, 1);
2378 mtree_test_store_range(mt, 4, 62, (void *)0x9);
2379 mtree_test_erase(mt, 62);
2380 mtree_test_store_range(mt, 1, 0, (void *)0x3);
2381 mtree_test_insert(mt, 11, (void *)0x17);
2382 mtree_test_insert(mt, 3, (void *)0x7);
2383 mtree_test_insert(mt, 3, (void *)0x7);
2384 mtree_test_store(mt, 62, (void *)0x7d);
2385 mtree_test_erase(mt, 62);
2386 mtree_test_store_range(mt, 1, 15, (void *)0x3);
2387 mtree_test_erase(mt, 1);
2388 mtree_test_insert(mt, 22, (void *)0x2d);
2389 mtree_test_insert(mt, 12, (void *)0x19);
2390 mtree_test_erase(mt, 1);
2391 mtree_test_insert(mt, 3, (void *)0x7);
2392 mtree_test_store(mt, 62, (void *)0x7d);
2393 mtree_test_erase(mt, 62);
2394 mtree_test_insert(mt, 122, (void *)0xf5);
2395 mtree_test_store(mt, 3, (void *)0x7);
2396 mtree_test_insert(mt, 0, (void *)0x1);
2397 mtree_test_store_range(mt, 0, 1, (void *)0x1);
2398 mtree_test_insert(mt, 85, (void *)0xab);
2399 mtree_test_insert(mt, 72, (void *)0x91);
2400 mtree_test_insert(mt, 81, (void *)0xa3);
2401 mtree_test_insert(mt, 726, (void *)0x5ad);
2402 mtree_test_insert(mt, 0, (void *)0x1);
2403 mtree_test_insert(mt, 1, (void *)0x3);
2404 mtree_test_store(mt, 51, (void *)0x67);
2405 mtree_test_insert(mt, 611, (void *)0x4c7);
2406 mtree_test_insert(mt, 485, (void *)0x3cb);
2407 mtree_test_insert(mt, 1, (void *)0x3);
2408 mtree_test_erase(mt, 1);
2409 mtree_test_insert(mt, 0, (void *)0x1);
2410 mtree_test_insert(mt, 1, (void *)0x3);
2411 mtree_test_insert_range(mt, 26, 1, (void *)0x35);
2412 mtree_test_load(mt, 1);
2413 mtree_test_store_range(mt, 1, 22, (void *)0x3);
2414 mtree_test_insert(mt, 1, (void *)0x3);
2415 mtree_test_erase(mt, 1);
2416 mtree_test_load(mt, 53);
2417 mtree_test_load(mt, 1);
2418 mtree_test_store_range(mt, 1, 1, (void *)0x3);
2419 mtree_test_insert(mt, 222, (void *)0x1bd);
2420 mtree_test_insert(mt, 485, (void *)0x3cb);
2421 mtree_test_insert(mt, 1, (void *)0x3);
2422 mtree_test_erase(mt, 1);
2423 mtree_test_load(mt, 0);
2424 mtree_test_insert(mt, 21, (void *)0x2b);
2425 mtree_test_insert(mt, 3, (void *)0x7);
2426 mtree_test_store(mt, 621, (void *)0x4db);
2427 mtree_test_insert(mt, 0, (void *)0x1);
2428 mtree_test_erase(mt, 5);
2429 mtree_test_insert(mt, 1, (void *)0x3);
2430 mtree_test_store(mt, 62, (void *)0x7d);
2431 mtree_test_erase(mt, 62);
2432 mtree_test_store_range(mt, 1, 0, (void *)0x3);
2433 mtree_test_insert(mt, 22, (void *)0x2d);
2434 mtree_test_insert(mt, 12, (void *)0x19);
2435 mtree_test_erase(mt, 1);
2436 mtree_test_insert(mt, 1, (void *)0x3);
2437 mtree_test_store_range(mt, 4, 62, (void *)0x9);
2438 mtree_test_erase(mt, 62);
2439 mtree_test_erase(mt, 1);
2440 mtree_test_load(mt, 1);
2441 mtree_test_store_range(mt, 1, 22, (void *)0x3);
2442 mtree_test_insert(mt, 1, (void *)0x3);
2443 mtree_test_erase(mt, 1);
2444 mtree_test_load(mt, 53);
2445 mtree_test_load(mt, 1);
2446 mtree_test_store_range(mt, 1, 1, (void *)0x3);
2447 mtree_test_insert(mt, 222, (void *)0x1bd);
2448 mtree_test_insert(mt, 485, (void *)0x3cb);
2449 mtree_test_insert(mt, 1, (void *)0x3);
2450 mtree_test_erase(mt, 1);
2451 mtree_test_insert(mt, 1, (void *)0x3);
2452 mtree_test_load(mt, 0);
2453 mtree_test_load(mt, 0);
2454 mtree_destroy(mt);
2455
2456 /*
2457 * 7. The previous fix was incomplete; fix mas_reuse_node() clearing of old
2458 * data by overwriting it first - that way metadata is of no concern.
2459 */
2460 mt_init_flags(mt, 0);
2461 mtree_test_load(mt, 1);
2462 mtree_test_insert(mt, 102, (void *)0xcd);
2463 mtree_test_erase(mt, 2);
2464 mtree_test_erase(mt, 0);
2465 mtree_test_load(mt, 0);
2466 mtree_test_insert(mt, 4, (void *)0x9);
2467 mtree_test_insert(mt, 2, (void *)0x5);
2468 mtree_test_insert(mt, 110, (void *)0xdd);
2469 mtree_test_insert(mt, 1, (void *)0x3);
2470 mtree_test_insert_range(mt, 5, 0, (void *)0xb);
2471 mtree_test_erase(mt, 2);
2472 mtree_test_store(mt, 0, (void *)0x1);
2473 mtree_test_store(mt, 112, (void *)0xe1);
2474 mtree_test_insert(mt, 21, (void *)0x2b);
2475 mtree_test_store(mt, 1, (void *)0x3);
2476 mtree_test_insert_range(mt, 110, 2, (void *)0xdd);
2477 mtree_test_store(mt, 2, (void *)0x5);
2478 mtree_test_load(mt, 22);
2479 mtree_test_erase(mt, 2);
2480 mtree_test_store(mt, 210, (void *)0x1a5);
2481 mtree_test_store_range(mt, 0, 2, (void *)0x1);
2482 mtree_test_store(mt, 2, (void *)0x5);
2483 mtree_test_erase(mt, 2);
2484 mtree_test_erase(mt, 22);
2485 mtree_test_erase(mt, 1);
2486 mtree_test_erase(mt, 2);
2487 mtree_test_store(mt, 0, (void *)0x1);
2488 mtree_test_load(mt, 112);
2489 mtree_test_insert(mt, 2, (void *)0x5);
2490 mtree_test_erase(mt, 2);
2491 mtree_test_store(mt, 1, (void *)0x3);
2492 mtree_test_insert_range(mt, 1, 2, (void *)0x3);
2493 mtree_test_erase(mt, 0);
2494 mtree_test_erase(mt, 2);
2495 mtree_test_store(mt, 2, (void *)0x5);
2496 mtree_test_erase(mt, 0);
2497 mtree_test_erase(mt, 2);
2498 mtree_test_store(mt, 0, (void *)0x1);
2499 mtree_test_store(mt, 0, (void *)0x1);
2500 mtree_test_erase(mt, 2);
2501 mtree_test_store(mt, 2, (void *)0x5);
2502 mtree_test_erase(mt, 2);
2503 mtree_test_insert(mt, 2, (void *)0x5);
2504 mtree_test_insert_range(mt, 1, 2, (void *)0x3);
2505 mtree_test_erase(mt, 0);
2506 mtree_test_erase(mt, 2);
2507 mtree_test_store(mt, 0, (void *)0x1);
2508 mtree_test_load(mt, 112);
2509 mtree_test_store_range(mt, 110, 12, (void *)0xdd);
2510 mtree_test_store(mt, 2, (void *)0x5);
2511 mtree_test_load(mt, 110);
2512 mtree_test_insert_range(mt, 4, 71, (void *)0x9);
2513 mtree_test_load(mt, 2);
2514 mtree_test_store(mt, 2, (void *)0x5);
2515 mtree_test_insert_range(mt, 11, 22, (void *)0x17);
2516 mtree_test_erase(mt, 12);
2517 mtree_test_store(mt, 2, (void *)0x5);
2518 mtree_test_load(mt, 22);
2519 mtree_destroy(mt);
2520
2522 /*
2523 * 8. When rebalancing or spanning_rebalance(), the max of the new node
2524 * may be set incorrectly to the final pivot rather than the correct max.
2525 * Fix by setting the left max to the orig right max if the entire node
2526 * is consumed.
2527 */
2528 mt_init_flags(mt, 0);
2529 mtree_test_store(mt, 6, (void *)0xd);
2530 mtree_test_store(mt, 67, (void *)0x87);
2531 mtree_test_insert(mt, 15, (void *)0x1f);
2532 mtree_test_insert(mt, 6716, (void *)0x3479);
2533 mtree_test_store(mt, 61, (void *)0x7b);
2534 mtree_test_insert(mt, 13, (void *)0x1b);
2535 mtree_test_store(mt, 8, (void *)0x11);
2536 mtree_test_insert(mt, 1, (void *)0x3);
2537 mtree_test_load(mt, 0);
2538 mtree_test_erase(mt, 67167);
2539 mtree_test_insert_range(mt, 6, 7167, (void *)0xd);
2540 mtree_test_insert(mt, 6, (void *)0xd);
2541 mtree_test_erase(mt, 67);
2542 mtree_test_insert(mt, 1, (void *)0x3);
2543 mtree_test_erase(mt, 667167);
2544 mtree_test_insert(mt, 6, (void *)0xd);
2545 mtree_test_store(mt, 67, (void *)0x87);
2546 mtree_test_insert(mt, 5, (void *)0xb);
2547 mtree_test_erase(mt, 1);
2548 mtree_test_insert(mt, 6, (void *)0xd);
2549 mtree_test_erase(mt, 67);
2550 mtree_test_insert(mt, 15, (void *)0x1f);
2551 mtree_test_insert(mt, 67167, (void *)0x20cbf);
2552 mtree_test_insert(mt, 1, (void *)0x3);
2553 mtree_test_load(mt, 7);
2554 mtree_test_insert(mt, 16, (void *)0x21);
2555 mtree_test_insert(mt, 36, (void *)0x49);
2556 mtree_test_store(mt, 67, (void *)0x87);
2557 mtree_test_store(mt, 6, (void *)0xd);
2558 mtree_test_insert(mt, 367, (void *)0x2df);
2559 mtree_test_insert(mt, 115, (void *)0xe7);
2560 mtree_test_store(mt, 0, (void *)0x1);
2561 mtree_test_store_range(mt, 1, 3, (void *)0x3);
2562 mtree_test_store(mt, 1, (void *)0x3);
2563 mtree_test_erase(mt, 67167);
2564 mtree_test_insert_range(mt, 6, 47, (void *)0xd);
2565 mtree_test_store(mt, 1, (void *)0x3);
2566 mtree_test_insert_range(mt, 1, 67, (void *)0x3);
2567 mtree_test_load(mt, 67);
2568 mtree_test_insert(mt, 1, (void *)0x3);
2569 mtree_test_erase(mt, 67167);
2570 mtree_destroy(mt);
2571
2572 /*
2573 * 9. A spanning store to the end of data caused an invalid metadata
2574 * length, which eventually resulted in a crash.
2575 * Fix by checking if there is a value in pivot before incrementing the
2576 * metadata end in mab_mas_cp(). To ensure this doesn't happen again,
2577 * abstract the two locations this happens into a function called
2578 * mas_leaf_set_meta().
2579 */
2580 mt_init_flags(mt, 0);
2581 mtree_test_insert(mt, 21, (void *)0x2b);
2582 mtree_test_insert(mt, 12, (void *)0x19);
2583 mtree_test_insert(mt, 6, (void *)0xd);
2584 mtree_test_insert(mt, 8, (void *)0x11);
2585 mtree_test_insert(mt, 2, (void *)0x5);
2586 mtree_test_insert(mt, 91, (void *)0xb7);
2587 mtree_test_insert(mt, 18, (void *)0x25);
2588 mtree_test_insert(mt, 81, (void *)0xa3);
2589 mtree_test_store_range(mt, 0, 128, (void *)0x1);
2590 mtree_test_store(mt, 1, (void *)0x3);
2591 mtree_test_erase(mt, 8);
2592 mtree_test_insert(mt, 11, (void *)0x17);
2593 mtree_test_insert(mt, 8, (void *)0x11);
2594 mtree_test_insert(mt, 21, (void *)0x2b);
2595 mtree_test_insert(mt, 2, (void *)0x5);
2596 mtree_test_insert(mt, ULONG_MAX - 10, (void *)0xffffffffffffffeb);
2597 mtree_test_erase(mt, ULONG_MAX - 10);
2598 mtree_test_store_range(mt, 0, 281, (void *)0x1);
2599 mtree_test_erase(mt, 2);
2600 mtree_test_insert(mt, 1211, (void *)0x977);
2601 mtree_test_insert(mt, 111, (void *)0xdf);
2602 mtree_test_insert(mt, 13, (void *)0x1b);
2603 mtree_test_insert(mt, 211, (void *)0x1a7);
2604 mtree_test_insert(mt, 11, (void *)0x17);
2605 mtree_test_insert(mt, 5, (void *)0xb);
2606 mtree_test_insert(mt, 1218, (void *)0x985);
2607 mtree_test_insert(mt, 61, (void *)0x7b);
2608 mtree_test_store(mt, 1, (void *)0x3);
2609 mtree_test_insert(mt, 121, (void *)0xf3);
2610 mtree_test_insert(mt, 8, (void *)0x11);
2611 mtree_test_insert(mt, 21, (void *)0x2b);
2612 mtree_test_insert(mt, 2, (void *)0x5);
2613 mtree_test_insert(mt, ULONG_MAX - 10, (void *)0xffffffffffffffeb);
2614 mtree_test_erase(mt, ULONG_MAX - 10);
2615 }
2616
2617 /* duplicate the tree with a specific gap */
2618 static noinline void __init check_dup_gaps(struct maple_tree *mt,
2619 unsigned long nr_entries, bool zero_start,
2620 unsigned long gap)
2621 {
2622 unsigned long i = 0;
2623 struct maple_tree newmt;
2624 int ret;
2625 void *tmp;
2626 MA_STATE(mas, mt, 0, 0);
2627 MA_STATE(newmas, &newmt, 0, 0);
2628 struct rw_semaphore newmt_lock;
2629
2630 init_rwsem(&newmt_lock);
2631 mt_set_external_lock(&newmt, &newmt_lock);
2632
2633 if (!zero_start)
2634 i = 1;
2635
2636 mt_zero_nr_tallocated();
2637 for (; i <= nr_entries; i++)
2638 mtree_store_range(mt, i*10, (i+1)*10 - gap,
2639 xa_mk_value(i), GFP_KERNEL);
2640
2641 mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN);
2642 mt_set_non_kernel(99999);
2643 down_write(&newmt_lock);
2644 ret = mas_expected_entries(&newmas, nr_entries);
2645 mt_set_non_kernel(0);
2646 MT_BUG_ON(mt, ret != 0);
2647
2648 rcu_read_lock();
2649 mas_for_each(&mas, tmp, ULONG_MAX) {
2650 newmas.index = mas.index;
2651 newmas.last = mas.last;
2652 mas_store(&newmas, tmp);
2653 }
2654 rcu_read_unlock();
2655 mas_destroy(&newmas);
2656
2657 __mt_destroy(&newmt);
2658 up_write(&newmt_lock);
2659 }
2660
2661 /* Duplicate many sizes of trees. Mainly to test expected entry values */
2662 static noinline void __init check_dup(struct maple_tree *mt)
2663 {
2664 int i;
2665 int big_start = 100010;
2666
2667 /* Check with a value at zero */
2668 for (i = 10; i < 1000; i++) {
2669 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2670 check_dup_gaps(mt, i, true, 5);
2671 mtree_destroy(mt);
2672 rcu_barrier();
2673 }
2674
2675 cond_resched();
2676 mt_cache_shrink();
2677 /* Check with a value at zero, no gap */
2678 for (i = 1000; i < 2000; i++) {
2679 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2680 check_dup_gaps(mt, i, true, 0);
2681 mtree_destroy(mt);
2682 rcu_barrier();
2683 }
2684
2685 cond_resched();
2686 mt_cache_shrink();
2687 /* Check with a value at zero and unreasonably large */
2688 for (i = big_start; i < big_start + 10; i++) {
2689 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2690 check_dup_gaps(mt, i, true, 5);
2691 mtree_destroy(mt);
2692 rcu_barrier();
2693 }
2694
2695 cond_resched();
2696 mt_cache_shrink();
2697 /* Small to medium size, not starting at zero */
2698 for (i = 200; i < 1000; i++) {
2699 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2700 check_dup_gaps(mt, i, false, 5);
2701 mtree_destroy(mt);
2702 rcu_barrier();
2703 }
2704
2705 cond_resched();
2706 mt_cache_shrink();
2707 /* Unreasonably large, not starting at zero */
2708 for (i = big_start; i < big_start + 10; i++) {
2709 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2710 check_dup_gaps(mt, i, false, 5);
2711 mtree_destroy(mt);
2712 rcu_barrier();
2713 cond_resched();
2714 mt_cache_shrink();
2715 }
2716
2717 /* Check non-allocation tree not starting at zero */
2718 for (i = 1500; i < 3000; i++) {
2719 mt_init_flags(mt, 0);
2720 check_dup_gaps(mt, i, false, 5);
2721 mtree_destroy(mt);
2722 rcu_barrier();
2723 cond_resched();
2724 if (i % 2 == 0)
2725 mt_cache_shrink();
2726 }
2727
2728 mt_cache_shrink();
2729 /* Check non-allocation tree starting at zero */
2730 for (i = 200; i < 1000; i++) {
2731 mt_init_flags(mt, 0);
2732 check_dup_gaps(mt, i, true, 5);
2733 mtree_destroy(mt);
2734 rcu_barrier();
2735 cond_resched();
2736 }
2737
2738 mt_cache_shrink();
2739 /* Unreasonably large */
2740 for (i = big_start + 5; i < big_start + 10; i++) {
2741 mt_init_flags(mt, 0);
2742 check_dup_gaps(mt, i, true, 5);
2743 mtree_destroy(mt);
2744 rcu_barrier();
2745 mt_cache_shrink();
2746 cond_resched();
2747 }
2748 }
2749
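/*
 * check_bnode_min_spanning() - wipe ranges 240-509 in one store so the
 * spanning rebalance is left with minimal big-node data.
 */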
2750 static noinline void __init check_bnode_min_spanning(struct maple_tree *mt)
2751 {
2752 int i = 50;
2753 MA_STATE(mas, mt, 0, 0);
2754
2755 mt_set_non_kernel(9999);
2756 mas_lock(&mas);
2757 do {
2758 mas_set_range(&mas, i*10, i*10+9);
2759 mas_store(&mas, check_bnode_min_spanning);
2760 } while (i--);
2761
2762 mas_set_range(&mas, 240, 509);
2763 mas_store(&mas, NULL);
2764 mas_unlock(&mas);
2765 mas_destroy(&mas);
2766 mt_set_non_kernel(0);
2767 }
2768
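/*
 * check_empty_area_window() - check mas_empty_area()/mas_empty_area_rev()
 * against gaps that do and do not fit the requested window.
 */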
2769 static noinline void __init check_empty_area_window(struct maple_tree *mt)
2770 {
2771 unsigned long i, nr_entries = 20;
2772 MA_STATE(mas, mt, 0, 0);
2773
2774 for (i = 1; i <= nr_entries; i++)
2775 mtree_store_range(mt, i*10, i*10 + 9,
2776 xa_mk_value(i), GFP_KERNEL);
2777
2778 /* Create another hole besides the one at 0 */
2779 mtree_store_range(mt, 160, 169, NULL, GFP_KERNEL);
2780
2781 /* Check lower bounds that don't fit */
2782 rcu_read_lock();
2783 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 90, 10) != -EBUSY);
2784
2785 mas_reset(&mas);
2786 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 6, 90, 5) != -EBUSY);
2787
2788 /* Check lower bound that does fit */
2789 mas_reset(&mas);
2790 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 90, 5) != 0);
2791 MT_BUG_ON(mt, mas.index != 5);
2792 MT_BUG_ON(mt, mas.last != 9);
2793 rcu_read_unlock();
2794
2795 /* Check one gap that doesn't fit and one that does */
2796 rcu_read_lock();
2797 mas_reset(&mas);
2798 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 217, 9) != 0);
2799 MT_BUG_ON(mt, mas.index != 161);
2800 MT_BUG_ON(mt, mas.last != 169);
2801
2802 /* Check one gap that does fit above the min */
2803 mas_reset(&mas);
2804 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 218, 3) != 0);
2805 MT_BUG_ON(mt, mas.index != 216);
2806 MT_BUG_ON(mt, mas.last != 218);
2807
2808 /* Check size that doesn't fit any gap */
2809 mas_reset(&mas);
2810 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 218, 16) != -EBUSY);
2811
2812 /*
2813 * Check size that doesn't fit the lower end of the window but
2814 * does fit the gap
2815 */
2816 mas_reset(&mas);
2817 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 167, 200, 4) != -EBUSY);
2818
2819 /*
2820 * Check size that doesn't fit the upper end of the window but
2821 * does fit the gap
2822 */
2823 mas_reset(&mas);
2824 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 162, 4) != -EBUSY);
2825
2826 /* Check mas_empty_area forward */
2827 mas_reset(&mas);
2828 MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 9) != 0);
2829 MT_BUG_ON(mt, mas.index != 0);
2830 MT_BUG_ON(mt, mas.last != 8);
2831
2832 mas_reset(&mas);
2833 MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 4) != 0);
2834 MT_BUG_ON(mt, mas.index != 0);
2835 MT_BUG_ON(mt, mas.last != 3);
2836
2837 mas_reset(&mas);
2838 MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 11) != -EBUSY);
2839
2840 mas_reset(&mas);
2841 MT_BUG_ON(mt, mas_empty_area(&mas, 5, 100, 6) != -EBUSY);
2842
2843 mas_reset(&mas);
2844 MT_BUG_ON(mt, mas_empty_area(&mas, 0, 8, 10) != -EINVAL);
2845
2846 mas_reset(&mas);
2847 mas_empty_area(&mas, 100, 165, 3);
2848
2849 mas_reset(&mas);
2850 MT_BUG_ON(mt, mas_empty_area(&mas, 100, 163, 6) != -EBUSY);
2851 rcu_read_unlock();
2852 }
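/*
 * A minimal usage sketch of the interface exercised above (not wired into
 * the test run; the helper name, range, and size are illustrative
 * assumptions): find a free window with mas_empty_area() and claim it.
 */
static void __init __maybe_unused example_empty_area_claim(struct maple_tree *mt)
{
	MA_STATE(mas, mt, 0, 0);

	mas_lock(&mas);
	/* On success, mas.index/mas.last span the start of the found gap. */
	if (mas_empty_area(&mas, 0, 1000, 0x10) == 0)
		mas_store_gfp(&mas, xa_mk_value(0x10), GFP_KERNEL);
	mas_unlock(&mas);
}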
2853
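/*
 * check_empty_area_fill() - repeatedly allocate and fill free areas until
 * the search range is exhausted, then probe gaps deep in the tree.
 */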
2854 static noinline void __init check_empty_area_fill(struct maple_tree *mt)
2855 {
2856 const unsigned long max = 0x25D78000;
2857 unsigned long size;
2858 int loop, shift;
2859 MA_STATE(mas, mt, 0, 0);
2860
2861 mt_set_non_kernel(99999);
2862 for (shift = 12; shift <= 16; shift++) {
2863 loop = 5000;
2864 size = 1 << shift;
2865 while (loop--) {
2866 mas_set(&mas, 0);
2867 mas_lock(&mas);
2868 MT_BUG_ON(mt, mas_empty_area(&mas, 0, max, size) != 0);
2869 MT_BUG_ON(mt, mas.last != mas.index + size - 1);
2870 mas_store_gfp(&mas, (void *)size, GFP_KERNEL);
2871 mas_unlock(&mas);
2872 mas_reset(&mas);
2873 }
2874 }
2875
2876 /* No space left. */
2877 size = 0x1000;
2878 rcu_read_lock();
2879 MT_BUG_ON(mt, mas_empty_area(&mas, 0, max, size) != -EBUSY);
2880 rcu_read_unlock();
2881
2882 /* Fill a depth 3 node to the maximum */
2883 for (unsigned long i = 629440511; i <= 629440800; i += 6)
2884 mtree_store_range(mt, i, i + 5, (void *)i, GFP_KERNEL);
2885 /* Make space in the second-last depth 4 node */
2886 mtree_erase(mt, 631668735);
2887 /* Make space in the last depth 4 node */
2888 mtree_erase(mt, 629506047);
2889 mas_reset(&mas);
2890 /* Search from just after the gap in the second-last depth 4 */
2891 rcu_read_lock();
2892 MT_BUG_ON(mt, mas_empty_area(&mas, 629506048, 690000000, 0x5000) != 0);
2893 rcu_read_unlock();
2894 mt_set_non_kernel(0);
2895 }
2896
2897 /*
2898 * Check MAS_START, MAS_PAUSE, active (implied), and MAS_NONE transitions.
2899 *
2900 * The table below shows the single entry tree (0-0 pointer) and normal tree
2901 * with nodes.
2902 *
2903 * Function ENTRY Start Result index & last
2904 * ┬ ┬ ┬ ┬ ┬
2905 * │ │ │ │ └─ the final range
2906 * │ │ │ └─ The node value after execution
2907 * │ │ └─ The node value before execution
2908 * │ └─ If the entry exists or does not exist (DNE)
2909 * └─ The function name
2910 *
2911 * Function ENTRY Start Result index & last
2912 * mas_next()
2913 * - after last
2914 * Single entry tree at 0-0
2915 * ------------------------
2916 * DNE MAS_START MAS_NONE 1 - oo
2917 * DNE MAS_PAUSE MAS_NONE 1 - oo
2918 * DNE MAS_ROOT MAS_NONE 1 - oo
2919 * when index = 0
2920 * DNE MAS_NONE MAS_ROOT 0
2921 * when index > 0
2922 * DNE MAS_NONE MAS_NONE 1 - oo
2923 *
2924 * Normal tree
2925 * -----------
2926 * exists MAS_START active range
2927 * DNE MAS_START active set to last range
2928 * exists MAS_PAUSE active range
2929 * DNE MAS_PAUSE active set to last range
2930 * exists MAS_NONE active range
2931 * exists active active range
2932 * DNE active active set to last range
2933 * ERANGE active MAS_OVERFLOW last range
2934 *
2935 * Function ENTRY Start Result index & last
2936 * mas_prev()
2937 * - before index
2938 * Single entry tree at 0-0
2939 * ------------------------
2940 * if index > 0
2941 * exists MAS_START MAS_ROOT 0
2942 * exists MAS_PAUSE MAS_ROOT 0
2943 * exists MAS_NONE MAS_ROOT 0
2944 *
2945 * if index == 0
2946 * DNE MAS_START MAS_NONE 0
2947 * DNE MAS_PAUSE MAS_NONE 0
2948 * DNE MAS_NONE MAS_NONE 0
2949 * DNE MAS_ROOT MAS_NONE 0
2950 *
2951 * Normal tree
2952 * -----------
2953 * exists MAS_START active range
2954 * DNE MAS_START active set to min
2955 * exists MAS_PAUSE active range
2956 * DNE MAS_PAUSE active set to min
2957 * exists MAS_NONE active range
2958 * DNE MAS_NONE MAS_NONE set to min
2959 * any MAS_ROOT MAS_NONE 0
2960 * exists active active range
2961 * DNE active active last range
2962 * ERANGE active MAS_UNDERFLOW last range
2963 *
2964 * Function ENTRY Start Result index & last
2965 * mas_find()
2966 * - at index or next
2967 * Single entry tree at 0-0
2968 * ------------------------
2969 * if index > 0
2970 * DNE MAS_START MAS_NONE 0
2971 * DNE MAS_PAUSE MAS_NONE 0
2972 * DNE MAS_ROOT MAS_NONE 0
2973 * DNE MAS_NONE MAS_NONE 1
2974 * if index == 0
2975 * exists MAS_START MAS_ROOT 0
2976 * exists MAS_PAUSE MAS_ROOT 0
2977 * exists MAS_NONE MAS_ROOT 0
2978 *
2979 * Normal tree
2980 * -----------
2981 * exists MAS_START active range
2982 * DNE MAS_START active set to max
2983 * exists MAS_PAUSE active range
2984 * DNE MAS_PAUSE active set to max
2985 * exists MAS_NONE active range (start at last)
2986 * exists active active range
2987 * DNE active active last range (max < last)
2988 *
2989 * Function ENTRY Start Result index & last
2990 * mas_find_rev()
2991 * - at index or before
2992 * Single entry tree at 0-0
2993 * ------------------------
2994 * if index > 0
2995 * exists MAS_START MAS_ROOT 0
2996 * exists MAS_PAUSE MAS_ROOT 0
2997 * exists MAS_NONE MAS_ROOT 0
2998 * if index == 0
2999 * DNE MAS_START MAS_NONE 0
3000 * DNE MAS_PAUSE MAS_NONE 0
3001 * DNE MAS_NONE MAS_NONE 0
3002 * DNE MAS_ROOT MAS_NONE 0
3003 *
3004 * Normal tree
3005 * -----------
3006 * exists MAS_START active range
3007 * DNE MAS_START active set to min
3008 * exists MAS_PAUSE active range
3009 * DNE MAS_PAUSE active set to min
3010 * exists MAS_NONE active range (start at index)
3011 * exists active active range
3012 * DNE active active last range (min > index)
3013 *
3014 * Function ENTRY Start Result index & last
3015 * mas_walk()
3016 * - Look up index
3017 * Single entry tree at 0-0
3018 * ------------------------
3019 * if index > 0
3020 * DNE MAS_START MAS_ROOT 1 - oo
3021 * DNE MAS_PAUSE MAS_ROOT 1 - oo
3022 * DNE MAS_NONE MAS_ROOT 1 - oo
3023 * DNE MAS_ROOT MAS_ROOT 1 - oo
3024 * if index == 0
3025 * exists MAS_START MAS_ROOT 0
3026 * exists MAS_PAUSE MAS_ROOT 0
3027 * exists MAS_NONE MAS_ROOT 0
3028 * exists MAS_ROOT MAS_ROOT 0
3029 *
3030 * Normal tree
3031 * -----------
3032 * exists MAS_START active range
3033 * DNE MAS_START active range of NULL
3034 * exists MAS_PAUSE active range
3035 * DNE MAS_PAUSE active range of NULL
3036 * exists MAS_NONE active range
3037 * DNE MAS_NONE active range of NULL
3038 * exists active active range
3039 * DNE active active range of NULL
3040 */
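/*
 * As a concrete example, the first mas_next() row above plays out as
 * follows (a sketch, assuming a tree holding only a 0-0 entry):
 *
 *	MA_STATE(mas, mt, 0, 0);
 *	mas_next(&mas, ULONG_MAX);	// entry DNE: MAS_START -> MAS_NONE
 *	// mas.index == 1, mas.last == ULONG_MAX
 */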
3041
3042 #define mas_active(x) (((x).node != MAS_ROOT) && \
3043 ((x).node != MAS_START) && \
3044 ((x).node != MAS_PAUSE) && \
3045 ((x).node != MAS_NONE))
3046 static noinline void __init check_state_handling(struct maple_tree *mt)
3047 {
3048 MA_STATE(mas, mt, 0, 0);
3049 void *entry, *ptr = (void *) 0x1234500;
3050 void *ptr2 = &ptr;
3051 void *ptr3 = &ptr2;
3052
3053 /* Check MAS_ROOT First */
3054 mtree_store_range(mt, 0, 0, ptr, GFP_KERNEL);
3055
3056 mas_lock(&mas);
3057 /* prev: Start -> underflow */
3058 entry = mas_prev(&mas, 0);
3059 MT_BUG_ON(mt, entry != NULL);
3060 MT_BUG_ON(mt, mas.node != MAS_UNDERFLOW);
3061
3062 /* prev: Start -> root */
3063 mas_set(&mas, 10);
3064 entry = mas_prev(&mas, 0);
3065 MT_BUG_ON(mt, entry != ptr);
3066 MT_BUG_ON(mt, mas.index != 0);
3067 MT_BUG_ON(mt, mas.last != 0);
3068 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3069
3070 /* prev: pause -> root */
3071 mas_set(&mas, 10);
3072 mas_pause(&mas);
3073 entry = mas_prev(&mas, 0);
3074 MT_BUG_ON(mt, entry != ptr);
3075 MT_BUG_ON(mt, mas.index != 0);
3076 MT_BUG_ON(mt, mas.last != 0);
3077 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3078
3079 /* next: start -> none */
3080 mas_set(&mas, 0);
3081 entry = mas_next(&mas, ULONG_MAX);
3082 MT_BUG_ON(mt, mas.index != 1);
3083 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3084 MT_BUG_ON(mt, entry != NULL);
3085 MT_BUG_ON(mt, mas.node != MAS_NONE);
3086
3087 /* next: start -> none */
3088 mas_set(&mas, 10);
3089 entry = mas_next(&mas, ULONG_MAX);
3090 MT_BUG_ON(mt, mas.index != 1);
3091 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3092 MT_BUG_ON(mt, entry != NULL);
3093 MT_BUG_ON(mt, mas.node != MAS_NONE);
3094
3095 /* find: start -> root */
3096 mas_set(&mas, 0);
3097 entry = mas_find(&mas, ULONG_MAX);
3098 MT_BUG_ON(mt, entry != ptr);
3099 MT_BUG_ON(mt, mas.index != 0);
3100 MT_BUG_ON(mt, mas.last != 0);
3101 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3102
3103 /* find: root -> none */
3104 entry = mas_find(&mas, ULONG_MAX);
3105 MT_BUG_ON(mt, entry != NULL);
3106 MT_BUG_ON(mt, mas.index != 1);
3107 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3108 MT_BUG_ON(mt, mas.node != MAS_NONE);
3109
3110 /* find: none -> none */
3111 entry = mas_find(&mas, ULONG_MAX);
3112 MT_BUG_ON(mt, entry != NULL);
3113 MT_BUG_ON(mt, mas.index != 1);
3114 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3115 MT_BUG_ON(mt, mas.node != MAS_NONE);
3116
3117 /* find: start -> none */
3118 mas_set(&mas, 10);
3119 entry = mas_find(&mas, ULONG_MAX);
3120 MT_BUG_ON(mt, entry != NULL);
3121 MT_BUG_ON(mt, mas.index != 1);
3122 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3123 MT_BUG_ON(mt, mas.node != MAS_NONE);
3124
3125 /* find_rev: none -> root */
3126 entry = mas_find_rev(&mas, 0);
3127 MT_BUG_ON(mt, entry != ptr);
3128 MT_BUG_ON(mt, mas.index != 0);
3129 MT_BUG_ON(mt, mas.last != 0);
3130 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3131
3132 /* find_rev: start -> root */
3133 mas_set(&mas, 0);
3134 entry = mas_find_rev(&mas, 0);
3135 MT_BUG_ON(mt, entry != ptr);
3136 MT_BUG_ON(mt, mas.index != 0);
3137 MT_BUG_ON(mt, mas.last != 0);
3138 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3139
3140 /* find_rev: root -> none */
3141 entry = mas_find_rev(&mas, 0);
3142 MT_BUG_ON(mt, entry != NULL);
3143 MT_BUG_ON(mt, mas.index != 0);
3144 MT_BUG_ON(mt, mas.last != 0);
3145 MT_BUG_ON(mt, mas.node != MAS_NONE);
3146
3147 /* find_rev: none -> none */
3148 entry = mas_find_rev(&mas, 0);
3149 MT_BUG_ON(mt, entry != NULL);
3150 MT_BUG_ON(mt, mas.index != 0);
3151 MT_BUG_ON(mt, mas.last != 0);
3152 MT_BUG_ON(mt, mas.node != MAS_NONE);
3153
3154 /* find_rev: start -> root */
3155 mas_set(&mas, 10);
3156 entry = mas_find_rev(&mas, 0);
3157 MT_BUG_ON(mt, entry != ptr);
3158 MT_BUG_ON(mt, mas.index != 0);
3159 MT_BUG_ON(mt, mas.last != 0);
3160 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3161
3162 /* walk: start -> none */
3163 mas_set(&mas, 10);
3164 entry = mas_walk(&mas);
3165 MT_BUG_ON(mt, entry != NULL);
3166 MT_BUG_ON(mt, mas.index != 1);
3167 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3168 MT_BUG_ON(mt, mas.node != MAS_NONE);
3169
3170 /* walk: pause -> none */
3171 mas_set(&mas, 10);
3172 mas_pause(&mas);
3173 entry = mas_walk(&mas);
3174 MT_BUG_ON(mt, entry != NULL);
3175 MT_BUG_ON(mt, mas.index != 1);
3176 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3177 MT_BUG_ON(mt, mas.node != MAS_NONE);
3178
3179 /* walk: none -> none */
3180 mas.index = mas.last = 10;
3181 entry = mas_walk(&mas);
3182 MT_BUG_ON(mt, entry != NULL);
3183 MT_BUG_ON(mt, mas.index != 1);
3184 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3185 MT_BUG_ON(mt, mas.node != MAS_NONE);
3186
3187 /* walk: none -> none */
3188 entry = mas_walk(&mas);
3189 MT_BUG_ON(mt, entry != NULL);
3190 MT_BUG_ON(mt, mas.index != 1);
3191 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3192 MT_BUG_ON(mt, mas.node != MAS_NONE);
3193
3194 /* walk: start -> root */
3195 mas_set(&mas, 0);
3196 entry = mas_walk(&mas);
3197 MT_BUG_ON(mt, entry != ptr);
3198 MT_BUG_ON(mt, mas.index != 0);
3199 MT_BUG_ON(mt, mas.last != 0);
3200 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3201
3202 /* walk: pause -> root */
3203 mas_set(&mas, 0);
3204 mas_pause(&mas);
3205 entry = mas_walk(&mas);
3206 MT_BUG_ON(mt, entry != ptr);
3207 MT_BUG_ON(mt, mas.index != 0);
3208 MT_BUG_ON(mt, mas.last != 0);
3209 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3210
3211 /* walk: none -> root */
3212 mas.node = MAS_NONE;
3213 entry = mas_walk(&mas);
3214 MT_BUG_ON(mt, entry != ptr);
3215 MT_BUG_ON(mt, mas.index != 0);
3216 MT_BUG_ON(mt, mas.last != 0);
3217 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3218
3219 /* walk: root -> root */
3220 entry = mas_walk(&mas);
3221 MT_BUG_ON(mt, entry != ptr);
3222 MT_BUG_ON(mt, mas.index != 0);
3223 MT_BUG_ON(mt, mas.last != 0);
3224 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3225
3226 /* walk: root -> none */
3227 mas_set(&mas, 10);
3228 entry = mas_walk(&mas);
3229 MT_BUG_ON(mt, entry != NULL);
3230 MT_BUG_ON(mt, mas.index != 1);
3231 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3232 MT_BUG_ON(mt, mas.node != MAS_NONE);
3233
3234 /* walk: none -> root */
3235 mas.index = mas.last = 0;
3236 entry = mas_walk(&mas);
3237 MT_BUG_ON(mt, entry != ptr);
3238 MT_BUG_ON(mt, mas.index != 0);
3239 MT_BUG_ON(mt, mas.last != 0);
3240 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3241
3242 mas_unlock(&mas);
3243
3244 /* Check when there is an actual node */
3245 mtree_store_range(mt, 0, 0, NULL, GFP_KERNEL);
3246 mtree_store_range(mt, 0x1000, 0x1500, ptr, GFP_KERNEL);
3247 mtree_store_range(mt, 0x2000, 0x2500, ptr2, GFP_KERNEL);
3248 mtree_store_range(mt, 0x3000, 0x3500, ptr3, GFP_KERNEL);
3249
3250 mas_lock(&mas);
3251
3252 /* next: start ->active */
3253 mas_set(&mas, 0);
3254 entry = mas_next(&mas, ULONG_MAX);
3255 MT_BUG_ON(mt, entry != ptr);
3256 MT_BUG_ON(mt, mas.index != 0x1000);
3257 MT_BUG_ON(mt, mas.last != 0x1500);
3258 MT_BUG_ON(mt, !mas_active(mas));
3259
3260 /* next: pause ->active */
3261 mas_set(&mas, 0);
3262 mas_pause(&mas);
3263 entry = mas_next(&mas, ULONG_MAX);
3264 MT_BUG_ON(mt, entry != ptr);
3265 MT_BUG_ON(mt, mas.index != 0x1000);
3266 MT_BUG_ON(mt, mas.last != 0x1500);
3267 MT_BUG_ON(mt, !mas_active(mas));
3268
3269 /* next: none ->active */
3270 mas.index = mas.last = 0;
3271 mas.offset = 0;
3272 mas.node = MAS_NONE;
3273 entry = mas_next(&mas, ULONG_MAX);
3274 MT_BUG_ON(mt, entry != ptr);
3275 MT_BUG_ON(mt, mas.index != 0x1000);
3276 MT_BUG_ON(mt, mas.last != 0x1500);
3277 MT_BUG_ON(mt, !mas_active(mas));
3278
3279 /* next:active ->active */
3280 entry = mas_next(&mas, ULONG_MAX);
3281 MT_BUG_ON(mt, entry != ptr2);
3282 MT_BUG_ON(mt, mas.index != 0x2000);
3283 MT_BUG_ON(mt, mas.last != 0x2500);
3284 MT_BUG_ON(mt, !mas_active(mas));
3285
3286 /* next:active -> active beyond data */
3287 entry = mas_next(&mas, 0x2999);
3288 MT_BUG_ON(mt, entry != NULL);
3289 MT_BUG_ON(mt, mas.index != 0x2501);
3290 MT_BUG_ON(mt, mas.last != 0x2fff);
3291 MT_BUG_ON(mt, !mas_active(mas));
3292
3293 /* Continue after last range ends after max */
3294 entry = mas_next(&mas, ULONG_MAX);
3295 MT_BUG_ON(mt, entry != ptr3);
3296 MT_BUG_ON(mt, mas.index != 0x3000);
3297 MT_BUG_ON(mt, mas.last != 0x3500);
3298 MT_BUG_ON(mt, !mas_active(mas));
3299
3300 /* next:active -> active continued */
3301 entry = mas_next(&mas, ULONG_MAX);
3302 MT_BUG_ON(mt, entry != NULL);
3303 MT_BUG_ON(mt, mas.index != 0x3501);
3304 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3305 MT_BUG_ON(mt, !mas_active(mas));
3306
3307 /* next:active -> overflow */
3308 entry = mas_next(&mas, ULONG_MAX);
3309 MT_BUG_ON(mt, entry != NULL);
3310 MT_BUG_ON(mt, mas.index != 0x3501);
3311 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3312 MT_BUG_ON(mt, mas.node != MAS_OVERFLOW);
3313
3314 /* next:overflow -> overflow */
3315 entry = mas_next(&mas, ULONG_MAX);
3316 MT_BUG_ON(mt, entry != NULL);
3317 MT_BUG_ON(mt, mas.index != 0x3501);
3318 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3319 MT_BUG_ON(mt, mas.node != MAS_OVERFLOW);
3320
3321 /* prev:overflow -> active */
3322 entry = mas_prev(&mas, 0);
3323 MT_BUG_ON(mt, entry != ptr3);
3324 MT_BUG_ON(mt, mas.index != 0x3000);
3325 MT_BUG_ON(mt, mas.last != 0x3500);
3326 MT_BUG_ON(mt, !mas_active(mas));
3327
3328 /* next: none -> active, skip value at location */
3329 mas_set(&mas, 0);
3330 entry = mas_next(&mas, ULONG_MAX);
3331 mas.node = MAS_NONE;
3332 mas.offset = 0;
3333 entry = mas_next(&mas, ULONG_MAX);
3334 MT_BUG_ON(mt, entry != ptr2);
3335 MT_BUG_ON(mt, mas.index != 0x2000);
3336 MT_BUG_ON(mt, mas.last != 0x2500);
3337 MT_BUG_ON(mt, !mas_active(mas));
3338
3339 /* prev:active ->active */
3340 entry = mas_prev(&mas, 0);
3341 MT_BUG_ON(mt, entry != ptr);
3342 MT_BUG_ON(mt, mas.index != 0x1000);
3343 MT_BUG_ON(mt, mas.last != 0x1500);
3344 MT_BUG_ON(mt, !mas_active(mas));
3345
3346 /* prev:active -> active spanning end range */
3347 entry = mas_prev(&mas, 0x0100);
3348 MT_BUG_ON(mt, entry != NULL);
3349 MT_BUG_ON(mt, mas.index != 0);
3350 MT_BUG_ON(mt, mas.last != 0x0FFF);
3351 MT_BUG_ON(mt, !mas_active(mas));
3352
3353 /* prev:active -> underflow */
3354 entry = mas_prev(&mas, 0);
3355 MT_BUG_ON(mt, entry != NULL);
3356 MT_BUG_ON(mt, mas.index != 0);
3357 MT_BUG_ON(mt, mas.last != 0x0FFF);
3358 MT_BUG_ON(mt, mas.node != MAS_UNDERFLOW);
3359
3360 /* prev:underflow -> underflow */
3361 entry = mas_prev(&mas, 0);
3362 MT_BUG_ON(mt, entry != NULL);
3363 MT_BUG_ON(mt, mas.index != 0);
3364 MT_BUG_ON(mt, mas.last != 0x0FFF);
3365 MT_BUG_ON(mt, mas.node != MAS_UNDERFLOW);
3366
3367 /* next:underflow -> active */
3368 entry = mas_next(&mas, ULONG_MAX);
3369 MT_BUG_ON(mt, entry != ptr);
3370 MT_BUG_ON(mt, mas.index != 0x1000);
3371 MT_BUG_ON(mt, mas.last != 0x1500);
3372 MT_BUG_ON(mt, !mas_active(mas));
3373
3374 /* prev:first value -> underflow */
3375 entry = mas_prev(&mas, 0x1000);
3376 MT_BUG_ON(mt, entry != NULL);
3377 MT_BUG_ON(mt, mas.index != 0x1000);
3378 MT_BUG_ON(mt, mas.last != 0x1500);
3379 MT_BUG_ON(mt, mas.node != MAS_UNDERFLOW);
3380
3381 /* find:underflow -> first value */
3382 entry = mas_find(&mas, ULONG_MAX);
3383 MT_BUG_ON(mt, entry != ptr);
3384 MT_BUG_ON(mt, mas.index != 0x1000);
3385 MT_BUG_ON(mt, mas.last != 0x1500);
3386 MT_BUG_ON(mt, !mas_active(mas));
3387
3388 /* prev: pause ->active */
3389 mas_set(&mas, 0x3600);
3390 entry = mas_prev(&mas, 0);
3391 MT_BUG_ON(mt, entry != ptr3);
3392 mas_pause(&mas);
3393 entry = mas_prev(&mas, 0);
3394 MT_BUG_ON(mt, entry != ptr2);
3395 MT_BUG_ON(mt, mas.index != 0x2000);
3396 MT_BUG_ON(mt, mas.last != 0x2500);
3397 MT_BUG_ON(mt, !mas_active(mas));
3398
3399 /* prev:active -> active spanning min */
3400 entry = mas_prev(&mas, 0x1600);
3401 MT_BUG_ON(mt, entry != NULL);
3402 MT_BUG_ON(mt, mas.index != 0x1501);
3403 MT_BUG_ON(mt, mas.last != 0x1FFF);
3404 MT_BUG_ON(mt, !mas_active(mas));
3405
3406 /* prev: active ->active, continue */
3407 entry = mas_prev(&mas, 0);
3408 MT_BUG_ON(mt, entry != ptr);
3409 MT_BUG_ON(mt, mas.index != 0x1000);
3410 MT_BUG_ON(mt, mas.last != 0x1500);
3411 MT_BUG_ON(mt, !mas_active(mas));
3412
3413 /* find: start ->active */
3414 mas_set(&mas, 0);
3415 entry = mas_find(&mas, ULONG_MAX);
3416 MT_BUG_ON(mt, entry != ptr);
3417 MT_BUG_ON(mt, mas.index != 0x1000);
3418 MT_BUG_ON(mt, mas.last != 0x1500);
3419 MT_BUG_ON(mt, !mas_active(mas));
3420
3421 /* find: pause ->active */
3422 mas_set(&mas, 0);
3423 mas_pause(&mas);
3424 entry = mas_find(&mas, ULONG_MAX);
3425 MT_BUG_ON(mt, entry != ptr);
3426 MT_BUG_ON(mt, mas.index != 0x1000);
3427 MT_BUG_ON(mt, mas.last != 0x1500);
3428 MT_BUG_ON(mt, !mas_active(mas));
3429
3430 /* find: start -> active on value */
3431 mas_set(&mas, 1200);
3432 entry = mas_find(&mas, ULONG_MAX);
3433 MT_BUG_ON(mt, entry != ptr);
3434 MT_BUG_ON(mt, mas.index != 0x1000);
3435 MT_BUG_ON(mt, mas.last != 0x1500);
3436 MT_BUG_ON(mt, !mas_active(mas));
3437
3438 /* find:active ->active */
3439 entry = mas_find(&mas, ULONG_MAX);
3440 MT_BUG_ON(mt, entry != ptr2);
3441 MT_BUG_ON(mt, mas.index != 0x2000);
3442 MT_BUG_ON(mt, mas.last != 0x2500);
3443 MT_BUG_ON(mt, !mas_active(mas));
3444
3446 /* find: active -> active (NULL) */
3447 entry = mas_find(&mas, 0x2700);
3448 MT_BUG_ON(mt, entry != NULL);
3449 MT_BUG_ON(mt, mas.index != 0x2501);
3450 MT_BUG_ON(mt, mas.last != 0x2FFF);
3451 MT_BUG_ON(mt, !mas_active(mas));
3452
3453 /* find: overflow ->active */
3454 entry = mas_find(&mas, 0x5000);
3455 MT_BUG_ON(mt, entry != ptr3);
3456 MT_BUG_ON(mt, mas.index != 0x3000);
3457 MT_BUG_ON(mt, mas.last != 0x3500);
3458 MT_BUG_ON(mt, !mas_active(mas));
3459
3460 /* find: active -> active (NULL) end */
3461 entry = mas_find(&mas, ULONG_MAX);
3462 MT_BUG_ON(mt, entry != NULL);
3463 MT_BUG_ON(mt, mas.index != 0x3501);
3464 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3465 MT_BUG_ON(mt, !mas_active(mas));
3466
3467 /* find_rev: active (END) ->active */
3468 entry = mas_find_rev(&mas, 0);
3469 MT_BUG_ON(mt, entry != ptr3);
3470 MT_BUG_ON(mt, mas.index != 0x3000);
3471 MT_BUG_ON(mt, mas.last != 0x3500);
3472 MT_BUG_ON(mt, !mas_active(mas));
3473
3474 /* find_rev:active ->active */
3475 entry = mas_find_rev(&mas, 0);
3476 MT_BUG_ON(mt, entry != ptr2);
3477 MT_BUG_ON(mt, mas.index != 0x2000);
3478 MT_BUG_ON(mt, mas.last != 0x2500);
3479 MT_BUG_ON(mt, !mas_active(mas));
3480
3481 /* find_rev: pause ->active */
3482 mas_pause(&mas);
3483 entry = mas_find_rev(&mas, 0);
3484 MT_BUG_ON(mt, entry != ptr);
3485 MT_BUG_ON(mt, mas.index != 0x1000);
3486 MT_BUG_ON(mt, mas.last != 0x1500);
3487 MT_BUG_ON(mt, !mas_active(mas));
3488
3489 /* find_rev:active -> active */
3490 entry = mas_find_rev(&mas, 0);
3491 MT_BUG_ON(mt, entry != NULL);
3492 MT_BUG_ON(mt, mas.index != 0);
3493 MT_BUG_ON(mt, mas.last != 0x0FFF);
3494 MT_BUG_ON(mt, !mas_active(mas));
3495
3496 /* find_rev: start ->active */
3497 mas_set(&mas, 0x1200);
3498 entry = mas_find_rev(&mas, 0);
3499 MT_BUG_ON(mt, entry != ptr);
3500 MT_BUG_ON(mt, mas.index != 0x1000);
3501 MT_BUG_ON(mt, mas.last != 0x1500);
3502 MT_BUG_ON(mt, !mas_active(mas));
3503
3504 /* mas_walk start ->active */
3505 mas_set(&mas, 0x1200);
3506 entry = mas_walk(&mas);
3507 MT_BUG_ON(mt, entry != ptr);
3508 MT_BUG_ON(mt, mas.index != 0x1000);
3509 MT_BUG_ON(mt, mas.last != 0x1500);
3510 MT_BUG_ON(mt, !mas_active(mas));
3511
3512 /* mas_walk start ->active */
3513 mas_set(&mas, 0x1600);
3514 entry = mas_walk(&mas);
3515 MT_BUG_ON(mt, entry != NULL);
3516 MT_BUG_ON(mt, mas.index != 0x1501);
3517 MT_BUG_ON(mt, mas.last != 0x1fff);
3518 MT_BUG_ON(mt, !mas_active(mas));
3519
3520 /* mas_walk pause ->active */
3521 mas_set(&mas, 0x1200);
3522 mas_pause(&mas);
3523 entry = mas_walk(&mas);
3524 MT_BUG_ON(mt, entry != ptr);
3525 MT_BUG_ON(mt, mas.index != 0x1000);
3526 MT_BUG_ON(mt, mas.last != 0x1500);
3527 MT_BUG_ON(mt, !mas_active(mas));
3528
3529 /* mas_walk pause -> active */
3530 mas_set(&mas, 0x1600);
3531 mas_pause(&mas);
3532 entry = mas_walk(&mas);
3533 MT_BUG_ON(mt, entry != NULL);
3534 MT_BUG_ON(mt, mas.index != 0x1501);
3535 MT_BUG_ON(mt, mas.last != 0x1fff);
3536 MT_BUG_ON(mt, !mas_active(mas));
3537
3538 /* mas_walk none -> active */
3539 mas_set(&mas, 0x1200);
3540 mas.node = MAS_NONE;
3541 entry = mas_walk(&mas);
3542 MT_BUG_ON(mt, entry != ptr);
3543 MT_BUG_ON(mt, mas.index != 0x1000);
3544 MT_BUG_ON(mt, mas.last != 0x1500);
3545 MT_BUG_ON(mt, !mas_active(mas));
3546
3547 /* mas_walk none -> active */
3548 mas_set(&mas, 0x1600);
3549 mas.node = MAS_NONE;
3550 entry = mas_walk(&mas);
3551 MT_BUG_ON(mt, entry != NULL);
3552 MT_BUG_ON(mt, mas.index != 0x1501);
3553 MT_BUG_ON(mt, mas.last != 0x1fff);
3554 MT_BUG_ON(mt, !mas_active(mas));
3555
3556 /* mas_walk active -> active */
3557 mas.index = 0x1200;
3558 mas.last = 0x1200;
3559 mas.offset = 0;
3560 entry = mas_walk(&mas);
3561 MT_BUG_ON(mt, entry != ptr);
3562 MT_BUG_ON(mt, mas.index != 0x1000);
3563 MT_BUG_ON(mt, mas.last != 0x1500);
3564 MT_BUG_ON(mt, !mas_active(mas));
3565
3566 /* mas_walk active -> active */
3567 mas.index = 0x1600;
3568 mas.last = 0x1600;
3569 entry = mas_walk(&mas);
3570 MT_BUG_ON(mt, entry != NULL);
3571 MT_BUG_ON(mt, mas.index != 0x1501);
3572 MT_BUG_ON(mt, mas.last != 0x1fff);
3573 MT_BUG_ON(mt, !mas_active(mas));
3574
3575 mas_unlock(&mas);
3576 }
3577
3578 static DEFINE_MTREE(tree);
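/*
 * maple_tree_seed() - module init: run every check_*() test, or a single
 * benchmark when one of the BENCH_* macros above is defined.
 */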
3579 static int __init maple_tree_seed(void)
3580 {
3581 unsigned long set[] = { 5015, 5014, 5017, 25, 1000,
3582 1001, 1002, 1003, 1005, 0,
3583 5003, 5002};
3584 void *ptr = &set;
3585
3586 pr_info("\nTEST STARTING\n\n");
3587
3588 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3589 check_root_expand(&tree);
3590 mtree_destroy(&tree);
3591
3592 #if defined(BENCH_SLOT_STORE)
3593 #define BENCH
3594 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3595 bench_slot_store(&tree);
3596 mtree_destroy(&tree);
3597 goto skip;
3598 #endif
3599 #if defined(BENCH_NODE_STORE)
3600 #define BENCH
3601 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3602 bench_node_store(&tree);
3603 mtree_destroy(&tree);
3604 goto skip;
3605 #endif
3606 #if defined(BENCH_AWALK)
3607 #define BENCH
3608 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3609 bench_awalk(&tree);
3610 mtree_destroy(&tree);
3611 goto skip;
3612 #endif
3613 #if defined(BENCH_WALK)
3614 #define BENCH
3615 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3616 bench_walk(&tree);
3617 mtree_destroy(&tree);
3618 goto skip;
3619 #endif
3620 #if defined(BENCH_FORK)
3621 #define BENCH
3622 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3623 bench_forking(&tree);
3624 mtree_destroy(&tree);
3625 goto skip;
3626 #endif
3627 #if defined(BENCH_MT_FOR_EACH)
3628 #define BENCH
3629 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3630 bench_mt_for_each(&tree);
3631 mtree_destroy(&tree);
3632 goto skip;
3633 #endif
3634 #if defined(BENCH_MAS_FOR_EACH)
3635 #define BENCH
3636 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3637 bench_mas_for_each(&tree);
3638 mtree_destroy(&tree);
3639 goto skip;
3640 #endif
3641 #if defined(BENCH_MAS_PREV)
3642 #define BENCH
3643 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3644 bench_mas_prev(&tree);
3645 mtree_destroy(&tree);
3646 goto skip;
3647 #endif
3648
3649 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3650 check_iteration(&tree);
3651 mtree_destroy(&tree);
3652
3653 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3654 check_forking(&tree);
3655 mtree_destroy(&tree);
3656
3657 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3658 check_mas_store_gfp(&tree);
3659 mtree_destroy(&tree);
3660
3661 /* Test ranges (store and insert) */
3662 mt_init_flags(&tree, 0);
3663 check_ranges(&tree);
3664 mtree_destroy(&tree);
3665
3666 #if defined(CONFIG_64BIT)
3667 /* These tests have ranges outside of 4GB */
3668 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3669 check_alloc_range(&tree);
3670 mtree_destroy(&tree);
3671
3672 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3673 check_alloc_rev_range(&tree);
3674 mtree_destroy(&tree);
3675 #endif
3676
3677 mt_init_flags(&tree, 0);
3678
3679 check_load(&tree, set[0], NULL); /* See if 5015 -> NULL */
3680
3681 check_insert(&tree, set[9], &tree); /* Insert 0 */
3682 check_load(&tree, set[9], &tree); /* See if 0 -> &tree */
3683 check_load(&tree, set[0], NULL); /* See if 5015 -> NULL */
3684
3685 check_insert(&tree, set[10], ptr); /* Insert 5003 */
3686 check_load(&tree, set[9], &tree); /* See if 0 -> &tree */
3687 check_load(&tree, set[11], NULL); /* See if 5002 -> NULL */
3688 check_load(&tree, set[10], ptr); /* See if 5003 -> ptr */
3689
3690 /* Clear out the tree */
3691 mtree_destroy(&tree);
3692
3693 /* Try to insert, insert a dup, and load back what was inserted. */
3694 mt_init_flags(&tree, 0);
3695 check_insert(&tree, set[0], &tree); /* Insert 5015 */
3696 check_dup_insert(&tree, set[0], &tree); /* Insert 5015 again */
3697 check_load(&tree, set[0], &tree); /* See if 5015 -> &tree */

	/*
	 * The second set of tests tries to load a value that doesn't exist,
	 * inserts a second value, then loads both values back.
	 */
	check_load(&tree, set[1], NULL);	/* See if 5014 -> NULL */
	check_insert(&tree, set[1], ptr);	/* Insert 5014 -> ptr */
	check_load(&tree, set[1], ptr);		/* See if 5014 -> ptr */
	check_load(&tree, set[0], &tree);	/* See if 5015 -> &tree */
	/*
	 * Tree currently contains:
	 * p[0]: 14 -> (nil) p[1]: 15 -> ptr p[2]: 16 -> &tree p[3]: 0 -> (nil)
	 */
	check_insert(&tree, set[6], ptr);	/* Insert 1002 -> ptr */
	check_insert(&tree, set[7], &tree);	/* Insert 1003 -> &tree */

	check_load(&tree, set[0], &tree);	/* See if 5015 -> &tree */
	check_load(&tree, set[1], ptr);		/* See if 5014 -> ptr */
	check_load(&tree, set[6], ptr);		/* See if 1002 -> ptr */
	check_load(&tree, set[7], &tree);	/* See if 1003 -> &tree */

	/* Clear out tree */
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	/* Test inserting into a NULL hole. */
	check_insert(&tree, set[5], ptr);	/* Insert 1001 -> ptr */
	check_insert(&tree, set[7], &tree);	/* Insert 1003 -> &tree */
	check_insert(&tree, set[6], ptr);	/* Insert 1002 -> ptr */
	check_load(&tree, set[5], ptr);		/* See if 1001 -> ptr */
	check_load(&tree, set[6], ptr);		/* See if 1002 -> ptr */
	check_load(&tree, set[7], &tree);	/* See if 1003 -> &tree */
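	/* 1002 fills the hole between 1001 and 1003 without disturbing them. */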

	/* Clear out the tree */
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	/*
	 * set[] = {5015, 5014, 5017, 25, 1000,
	 *	    1001, 1002, 1003, 1005, 0,
	 *	    5003, 5002};
	 */

	check_insert(&tree, set[0], ptr);	/* 5015 */
	check_insert(&tree, set[1], &tree);	/* 5014 */
	check_insert(&tree, set[2], ptr);	/* 5017 */
	check_insert(&tree, set[3], &tree);	/* 25 */
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_insert(&tree, set[4], ptr);	/* 1000 < Should split. */
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);	/* 25 */
	check_load(&tree, set[4], ptr);
	check_insert(&tree, set[5], &tree);	/* 1001 */
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_load(&tree, set[4], ptr);
	check_load(&tree, set[5], &tree);
	check_insert(&tree, set[6], ptr);	/* 1002 */
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_load(&tree, set[4], ptr);
	check_load(&tree, set[5], &tree);
	check_load(&tree, set[6], ptr);
	check_insert(&tree, set[7], &tree);	/* 1003 */
	check_load(&tree, set[0], ptr);
	check_insert(&tree, set[8], ptr);	/* 1005 */

	check_insert(&tree, set[9], &tree);	/* 0 */

	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_load(&tree, set[4], ptr);
	check_load(&tree, set[5], &tree);
	check_load(&tree, set[6], ptr);
	check_load(&tree, set[9], &tree);
	mtree_destroy(&tree);
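	/*
	 * The full reload after every insert above verifies that no entry is
	 * lost while the tree reshapes itself; the "should split" insert of
	 * set[4] is the point where the root node presumably runs out of
	 * slots and must divide.
	 */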

	/* Sequential insert/verify tests. */
	mt_init_flags(&tree, 0);
	check_seq(&tree, 16, false);
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	check_seq(&tree, 1000, true);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_rev_seq(&tree, 1000, true);
	mtree_destroy(&tree);

	check_lower_bound_split(&tree);
	check_upper_bound_split(&tree);
	check_mid_split(&tree);

	mt_init_flags(&tree, 0);
	check_next_entry(&tree);
	check_find(&tree);
	check_find_2(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_prev_entry(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_gap_combining(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_node_overwrite(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	next_prev_test(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_spanning_relatives(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_rev_find(&tree);
	mtree_destroy(&tree);

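	/*
	 * check_fuzzer() replays operation sequences that previously
	 * triggered bugs; it serves as a regression suite rather than live
	 * fuzzing.
	 */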
	mt_init_flags(&tree, 0);
	check_fuzzer(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_dup(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_bnode_min_spanning(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_empty_area_window(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_empty_area_fill(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_state_handling(&tree);
	mtree_destroy(&tree);

#if defined(BENCH)
skip:
#endif
	/* Maple tree nodes are freed via RCU; flush callbacks before reporting. */
	rcu_barrier();
	pr_info("maple_tree: %u of %u tests passed\n",
			atomic_read(&maple_tree_tests_passed),
			atomic_read(&maple_tree_tests_run));
	if (atomic_read(&maple_tree_tests_run) ==
	    atomic_read(&maple_tree_tests_passed))
		return 0;

	return -EINVAL;
}

static void __exit maple_tree_harvest(void)
{
	/* Nothing to tear down; each test destroys its own tree. */
}

module_init(maple_tree_seed);
module_exit(maple_tree_harvest);
MODULE_AUTHOR("Liam R. Howlett <Liam.Howlett@Oracle.com>");
MODULE_LICENSE("GPL");
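
/*
 * Usage sketch (names assumed from this file's role as a self-test):
 * build with CONFIG_TEST_MAPLE_TREE=m, then:
 *
 *	modprobe test_maple_tree
 *	dmesg | grep maple_tree
 *
 * Loading runs maple_tree_seed(); the pass/run summary lands in the log.
 */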