// SPDX-License-Identifier: GPL-2.0+
/*
 * test_maple_tree.c: Test the maple tree API
 * Copyright (c) 2018-2022 Oracle Corporation
 * Author: Liam R. Howlett <Liam.Howlett@Oracle.com>
 *
 * Any tests that only require the interface of the tree.
 */

#include <linux/maple_tree.h>
#include <linux/module.h>
#include <linux/rwsem.h>

#define MTREE_ALLOC_MAX 0x2000000000000Ul
#define CONFIG_MAPLE_SEARCH
#define MAPLE_32BIT (MAPLE_NODE_SLOTS > 31)

#ifndef CONFIG_DEBUG_MAPLE_TREE
#define mt_dump(mt, fmt)		do {} while (0)
#define mt_validate(mt)			do {} while (0)
#define mt_cache_shrink()		do {} while (0)
#define mas_dump(mas)			do {} while (0)
#define mas_wr_dump(mas)		do {} while (0)
atomic_t maple_tree_tests_run;
atomic_t maple_tree_tests_passed;
#undef MT_BUG_ON

#define MT_BUG_ON(__tree, __x) do {					\
	atomic_inc(&maple_tree_tests_run);				\
	if (__x) {							\
		pr_info("BUG at %s:%d (%u)\n",				\
			__func__, __LINE__, __x);			\
		pr_info("Pass: %u Run:%u\n",				\
			atomic_read(&maple_tree_tests_passed),		\
			atomic_read(&maple_tree_tests_run));		\
	} else {							\
		atomic_inc(&maple_tree_tests_passed);			\
	}								\
} while (0)
#endif

/* #define BENCH_SLOT_STORE */
/* #define BENCH_NODE_STORE */
/* #define BENCH_AWALK */
/* #define BENCH_WALK */
/* #define BENCH_LOAD */
/* #define BENCH_MT_FOR_EACH */
/* #define BENCH_FORK */
/* #define BENCH_MAS_FOR_EACH */
/* #define BENCH_MAS_PREV */

#ifdef __KERNEL__
#define mt_set_non_kernel(x)		do {} while (0)
#define mt_zero_nr_tallocated(x)	do {} while (0)
#else
#define cond_resched()			do {} while (0)
#endif

#define mas_is_none(x)		((x)->status == ma_none)
#define mas_is_overflow(x)	((x)->status == ma_overflow)
#define mas_is_underflow(x)	((x)->status == ma_underflow)

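/*
 * Thin wrappers around the mtree_*() API used by the checks below: the
 * mtree_test_*() helpers fix the gfp mask to GFP_KERNEL, and the *_index()
 * helpers encode the index itself with xa_mk_value() so loads and erases
 * can be verified against it.
 */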
static int __init mtree_insert_index(struct maple_tree *mt,
				     unsigned long index, gfp_t gfp)
{
	return mtree_insert(mt, index, xa_mk_value(index & LONG_MAX), gfp);
}

static void __init mtree_erase_index(struct maple_tree *mt, unsigned long index)
{
	MT_BUG_ON(mt, mtree_erase(mt, index) != xa_mk_value(index & LONG_MAX));
	MT_BUG_ON(mt, mtree_load(mt, index) != NULL);
}

static int __init mtree_test_insert(struct maple_tree *mt, unsigned long index,
				void *ptr)
{
	return mtree_insert(mt, index, ptr, GFP_KERNEL);
}

static int __init mtree_test_store_range(struct maple_tree *mt,
			unsigned long start, unsigned long end, void *ptr)
{
	return mtree_store_range(mt, start, end, ptr, GFP_KERNEL);
}

static int __init mtree_test_store(struct maple_tree *mt, unsigned long start,
		void *ptr)
{
	return mtree_test_store_range(mt, start, start, ptr);
}

static int __init mtree_test_insert_range(struct maple_tree *mt,
			unsigned long start, unsigned long end, void *ptr)
{
	return mtree_insert_range(mt, start, end, ptr, GFP_KERNEL);
}

static void __init *mtree_test_load(struct maple_tree *mt, unsigned long index)
{
	return mtree_load(mt, index);
}

static void __init *mtree_test_erase(struct maple_tree *mt, unsigned long index)
{
	return mtree_erase(mt, index);
}

#if defined(CONFIG_64BIT)
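/*
 * Request an allocation of @size within [start, end] (forward or reverse)
 * and verify both the returned error code and the address written back
 * against the expected values.
 */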
static noinline void __init check_mtree_alloc_range(struct maple_tree *mt,
		unsigned long start, unsigned long end, unsigned long size,
		unsigned long expected, int eret, void *ptr)
{

	unsigned long result = expected + 1;
	int ret;

	ret = mtree_alloc_range(mt, &result, ptr, size, start, end,
			GFP_KERNEL);
	MT_BUG_ON(mt, ret != eret);
	if (ret)
		return;

	MT_BUG_ON(mt, result != expected);
}

static noinline void __init check_mtree_alloc_rrange(struct maple_tree *mt,
		unsigned long start, unsigned long end, unsigned long size,
		unsigned long expected, int eret, void *ptr)
{

	unsigned long result = expected + 1;
	int ret;

	ret = mtree_alloc_rrange(mt, &result, ptr, size, start, end,
			GFP_KERNEL);
	MT_BUG_ON(mt, ret != eret);
	if (ret)
		return;

	MT_BUG_ON(mt, result != expected);
}
#endif

static noinline void __init check_load(struct maple_tree *mt,
				       unsigned long index, void *ptr)
{
	void *ret = mtree_test_load(mt, index);

	if (ret != ptr)
		pr_err("Load %lu returned %p expect %p\n", index, ret, ptr);
	MT_BUG_ON(mt, ret != ptr);
}

static noinline void __init check_store_range(struct maple_tree *mt,
		unsigned long start, unsigned long end, void *ptr, int expected)
{
	int ret = -EINVAL;
	unsigned long i;

	ret = mtree_test_store_range(mt, start, end, ptr);
	MT_BUG_ON(mt, ret != expected);

	if (ret)
		return;

	for (i = start; i <= end; i++)
		check_load(mt, i, ptr);
}

static noinline void __init check_insert_range(struct maple_tree *mt,
		unsigned long start, unsigned long end, void *ptr, int expected)
{
	int ret = -EINVAL;
	unsigned long i;

	ret = mtree_test_insert_range(mt, start, end, ptr);
	MT_BUG_ON(mt, ret != expected);

	if (ret)
		return;

	for (i = start; i <= end; i++)
		check_load(mt, i, ptr);
}

static noinline void __init check_insert(struct maple_tree *mt,
					 unsigned long index, void *ptr)
{
	int ret = -EINVAL;

	ret = mtree_test_insert(mt, index, ptr);
	MT_BUG_ON(mt, ret != 0);
}

static noinline void __init check_dup_insert(struct maple_tree *mt,
					     unsigned long index, void *ptr)
{
	int ret = -EINVAL;

	ret = mtree_test_insert(mt, index, ptr);
	MT_BUG_ON(mt, ret != -EEXIST);
}


static noinline void __init check_index_load(struct maple_tree *mt,
					     unsigned long index)
{
	return check_load(mt, index, xa_mk_value(index & LONG_MAX));
}

static inline __init int not_empty(struct maple_node *node)
{
	int i;

	if (node->parent)
		return 1;

	for (i = 0; i < ARRAY_SIZE(node->slot); i++)
		if (node->slot[i])
			return 1;

	return 0;
}


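/*
 * Insert indices max..1 in descending order, checking after every insert
 * that all present indices load correctly and that the next lower index is
 * still empty.
 */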
static noinline void __init check_rev_seq(struct maple_tree *mt,
		unsigned long max, bool verbose)
{
	unsigned long i = max, j;

	MT_BUG_ON(mt, !mtree_empty(mt));

	mt_zero_nr_tallocated();
	while (i) {
		MT_BUG_ON(mt, mtree_insert_index(mt, i, GFP_KERNEL));
		for (j = i; j <= max; j++)
			check_index_load(mt, j);

		check_load(mt, i - 1, NULL);
		mt_set_in_rcu(mt);
		MT_BUG_ON(mt, !mt_height(mt));
		mt_clear_in_rcu(mt);
		MT_BUG_ON(mt, !mt_height(mt));
		i--;
	}
	check_load(mt, max + 1, NULL);

#ifndef __KERNEL__
	if (verbose) {
		rcu_barrier();
		mt_dump(mt, mt_dump_dec);
		pr_info(" %s test of 0-%lu %luK in %d active (%d total)\n",
			__func__, max, mt_get_alloc_size()/1024, mt_nr_allocated(),
			mt_nr_tallocated());
	}
#endif
}

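/*
 * The ascending counterpart of check_rev_seq(): insert 0..max in order and
 * re-check every index already stored after each insertion.
 */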
static noinline void __init check_seq(struct maple_tree *mt, unsigned long max,
		bool verbose)
{
	unsigned long i, j;

	MT_BUG_ON(mt, !mtree_empty(mt));

	mt_zero_nr_tallocated();
	for (i = 0; i <= max; i++) {
		MT_BUG_ON(mt, mtree_insert_index(mt, i, GFP_KERNEL));
		for (j = 0; j <= i; j++)
			check_index_load(mt, j);

		if (i)
			MT_BUG_ON(mt, !mt_height(mt));
		check_load(mt, i + 1, NULL);
	}

#ifndef __KERNEL__
	if (verbose) {
		rcu_barrier();
		mt_dump(mt, mt_dump_dec);
		pr_info(" seq test of 0-%lu %luK in %d active (%d total)\n",
			max, mt_get_alloc_size()/1024, mt_nr_allocated(),
			mt_nr_tallocated());
	}
#endif
}

static noinline void __init check_lb_not_empty(struct maple_tree *mt)
{
	unsigned long i, j;
	unsigned long huge = 4000UL * 1000 * 1000;


	i = huge;
	while (i > 4096) {
		check_insert(mt, i, (void *) i);
		for (j = huge; j >= i; j /= 2) {
			check_load(mt, j-1, NULL);
			check_load(mt, j, (void *) j);
			check_load(mt, j+1, NULL);
		}
		i /= 2;
	}
	mtree_destroy(mt);
}

static noinline void __init check_lower_bound_split(struct maple_tree *mt)
{
	MT_BUG_ON(mt, !mtree_empty(mt));
	check_lb_not_empty(mt);
}

static noinline void __init check_upper_bound_split(struct maple_tree *mt)
{
	unsigned long i, j;
	unsigned long huge;

	MT_BUG_ON(mt, !mtree_empty(mt));

	if (MAPLE_32BIT)
		huge = 2147483647UL;
	else
		huge = 4000UL * 1000 * 1000;

	i = 4096;
	while (i < huge) {
		check_insert(mt, i, (void *) i);
		for (j = i; j >= huge; j *= 2) {
			check_load(mt, j-1, NULL);
			check_load(mt, j, (void *) j);
			check_load(mt, j+1, NULL);
		}
		i *= 2;
	}
	mtree_destroy(mt);
}

static noinline void __init check_mid_split(struct maple_tree *mt)
{
	unsigned long huge = 8000UL * 1000 * 1000;

	check_insert(mt, huge, (void *) huge);
	check_insert(mt, 0, xa_mk_value(0));
	check_lb_not_empty(mt);
}

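/*
 * Store ranges [i*10, i*10 + 5] for i = 0..200 and walk them backwards with
 * mas_find_rev(), checking both the returned values and the search limits.
 */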
static noinline void __init check_rev_find(struct maple_tree *mt)
{
	int i, nr_entries = 200;
	void *val;
	MA_STATE(mas, mt, 0, 0);

	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 5,
				  xa_mk_value(i), GFP_KERNEL);

	rcu_read_lock();
	mas_set(&mas, 1000);
	val = mas_find_rev(&mas, 1000);
	MT_BUG_ON(mt, val != xa_mk_value(100));
	val = mas_find_rev(&mas, 1000);
	MT_BUG_ON(mt, val != NULL);

	mas_set(&mas, 999);
	val = mas_find_rev(&mas, 997);
	MT_BUG_ON(mt, val != NULL);

	mas_set(&mas, 1000);
	val = mas_find_rev(&mas, 900);
	MT_BUG_ON(mt, val != xa_mk_value(100));
	val = mas_find_rev(&mas, 900);
	MT_BUG_ON(mt, val != xa_mk_value(99));

	mas_set(&mas, 20);
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(2));
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(1));
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(0));
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != NULL);
	rcu_read_unlock();
}

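/*
 * Exercise the forward iterators (mas_find(), mas_for_each(), mt_for_each())
 * over a tree of power-of-four indices including an XA_ZERO_ENTRY at 64,
 * with and without mas_pause(), then locate the last entry via mas_prev().
 */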
static noinline void __init check_find(struct maple_tree *mt)
{
	unsigned long val = 0;
	unsigned long count;
	unsigned long max;
	unsigned long top;
	unsigned long last = 0, index = 0;
	void *entry, *entry2;

	MA_STATE(mas, mt, 0, 0);

	/* Insert 0. */
	MT_BUG_ON(mt, mtree_insert_index(mt, val++, GFP_KERNEL));

#if defined(CONFIG_64BIT)
	top = 4398046511104UL;
#else
	top = ULONG_MAX;
#endif

	if (MAPLE_32BIT) {
		count = 15;
	} else {
		count = 20;
	}

	for (int i = 0; i <= count; i++) {
		if (val != 64)
			MT_BUG_ON(mt, mtree_insert_index(mt, val, GFP_KERNEL));
		else
			MT_BUG_ON(mt, mtree_insert(mt, val,
				XA_ZERO_ENTRY, GFP_KERNEL));

		val <<= 2;
	}

	val = 0;
	mas_set(&mas, val);
	mas_lock(&mas);
	while ((entry = mas_find(&mas, 268435456)) != NULL) {
		if (val != 64)
			MT_BUG_ON(mt, xa_mk_value(val) != entry);
		else
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);

		val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
	}
	mas_unlock(&mas);

	val = 0;
	mas_set(&mas, val);
	mas_lock(&mas);
	mas_for_each(&mas, entry, ULONG_MAX) {
		if (val != 64)
			MT_BUG_ON(mt, xa_mk_value(val) != entry);
		else
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
		val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
	}
	mas_unlock(&mas);

	/* Test mas_pause */
	val = 0;
	mas_set(&mas, val);
	mas_lock(&mas);
	mas_for_each(&mas, entry, ULONG_MAX) {
		if (val != 64)
			MT_BUG_ON(mt, xa_mk_value(val) != entry);
		else
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
		val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;

		mas_pause(&mas);
		mas_unlock(&mas);
		mas_lock(&mas);
	}
	mas_unlock(&mas);

	val = 0;
	max = 300; /* A value big enough to include XA_ZERO_ENTRY at 64. */
	mt_for_each(mt, entry, index, max) {
		MT_BUG_ON(mt, xa_mk_value(val) != entry);
		val <<= 2;
		if (val == 64) /* Skip zero entry. */
			val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
	}

	val = 0;
	max = 0;
	index = 0;
	MT_BUG_ON(mt, mtree_insert_index(mt, ULONG_MAX, GFP_KERNEL));
	mt_for_each(mt, entry, index, ULONG_MAX) {
		if (val == top)
			MT_BUG_ON(mt, entry != xa_mk_value(LONG_MAX));
		else
			MT_BUG_ON(mt, xa_mk_value(val) != entry);

		/* Workaround for 32bit */
		if ((val << 2) < val)
			val = ULONG_MAX;
		else
			val <<= 2;

		if (val == 64) /* Skip zero entry. */
			val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
		max++;
		MT_BUG_ON(mt, max > 25);
	}
	mtree_erase_index(mt, ULONG_MAX);

	mas_reset(&mas);
	index = 17;
	entry = mt_find(mt, &index, 512);
	MT_BUG_ON(mt, xa_mk_value(256) != entry);

	mas_reset(&mas);
	index = 17;
	entry = mt_find(mt, &index, 20);
	MT_BUG_ON(mt, entry != NULL);


	/* Range check.. */
	/* Insert ULONG_MAX */
	MT_BUG_ON(mt, mtree_insert_index(mt, ULONG_MAX, GFP_KERNEL));

	val = 0;
	mas_set(&mas, 0);
	mas_lock(&mas);
	mas_for_each(&mas, entry, ULONG_MAX) {
		if (val == 64)
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
		else if (val == top)
			MT_BUG_ON(mt, entry != xa_mk_value(LONG_MAX));
		else
			MT_BUG_ON(mt, xa_mk_value(val) != entry);

		/* Workaround for 32bit */
		if ((val << 2) < val)
			val = ULONG_MAX;
		else
			val <<= 2;

		/* For zero check. */
		if (!val)
			val = 1;
		mas_pause(&mas);
		mas_unlock(&mas);
		mas_lock(&mas);
	}
	mas_unlock(&mas);

	mas_set(&mas, 1048576);
	mas_lock(&mas);
	entry = mas_find(&mas, 1048576);
	mas_unlock(&mas);
	MT_BUG_ON(mas.tree, entry == NULL);

	/*
	 * Find last value.
	 * 1. get the expected value, leveraging the existence of an end entry
	 * 2. delete end entry
	 * 3. find the last value but searching for ULONG_MAX and then using
	 * prev
	 */
	/* First, get the expected result. */
	mas_lock(&mas);
	mas_reset(&mas);
	mas.index = ULONG_MAX; /* start at max.. */
	entry = mas_find(&mas, ULONG_MAX);
	entry = mas_prev(&mas, 0);
	index = mas.index;
	last = mas.last;

	/* Erase the last entry. */
	mas_reset(&mas);
	mas.index = ULONG_MAX;
	mas.last = ULONG_MAX;
	mas_erase(&mas);

	/* Get the previous value from MAS_START */
	mas_reset(&mas);
	entry2 = mas_prev(&mas, 0);

	/* Check results. */
	MT_BUG_ON(mt, entry != entry2);
	MT_BUG_ON(mt, index != mas.index);
	MT_BUG_ON(mt, last != mas.last);


	mas.status = ma_none;
	mas.index = ULONG_MAX;
	mas.last = ULONG_MAX;
	entry2 = mas_prev(&mas, 0);
	MT_BUG_ON(mt, entry != entry2);

	mas_set(&mas, 0);
	MT_BUG_ON(mt, mas_prev(&mas, 0) != NULL);

	mas_unlock(&mas);
	mtree_destroy(mt);
}

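/*
 * Grow a tree one index at a time and then shrink it again, re-walking the
 * entire tree with mas_for_each() after every insert and erase.
 */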
static noinline void __init check_find_2(struct maple_tree *mt)
{
	unsigned long i, j;
	void *entry;

	MA_STATE(mas, mt, 0, 0);
	rcu_read_lock();
	mas_for_each(&mas, entry, ULONG_MAX)
		MT_BUG_ON(mt, true);
	rcu_read_unlock();

	for (i = 0; i < 256; i++) {
		mtree_insert_index(mt, i, GFP_KERNEL);
		j = 0;
		mas_set(&mas, 0);
		rcu_read_lock();
		mas_for_each(&mas, entry, ULONG_MAX) {
			MT_BUG_ON(mt, entry != xa_mk_value(j));
			j++;
		}
		rcu_read_unlock();
		MT_BUG_ON(mt, j != i + 1);
	}

	for (i = 0; i < 256; i++) {
		mtree_erase_index(mt, i);
		j = i + 1;
		mas_set(&mas, 0);
		rcu_read_lock();
		mas_for_each(&mas, entry, ULONG_MAX) {
			if (xa_is_zero(entry))
				continue;

			MT_BUG_ON(mt, entry != xa_mk_value(j));
			j++;
		}
		rcu_read_unlock();
		MT_BUG_ON(mt, j != 256);
	}

	/*MT_BUG_ON(mt, !mtree_empty(mt)); */
}


#if defined(CONFIG_64BIT)
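/*
 * Populate the tree with a captured set of VMA-like ranges and verify that
 * mas_empty_area_rev() and mtree_alloc_rrange() find the expected gaps when
 * searching top-down.
 */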
static noinline void __init check_alloc_rev_range(struct maple_tree *mt)
{
	/*
	 * Generated by:
	 * cat /proc/self/maps | awk '{print $1}'|
	 * awk -F "-" '{printf "0x%s, 0x%s, ", $1, $2}'
	 */

	static const unsigned long range[] = {
	/*      Inclusive     , Exclusive. */
		0x565234af2000, 0x565234af4000,
		0x565234af4000, 0x565234af9000,
		0x565234af9000, 0x565234afb000,
		0x565234afc000, 0x565234afd000,
		0x565234afd000, 0x565234afe000,
		0x565235def000, 0x565235e10000,
		0x7f36d4bfd000, 0x7f36d4ee2000,
		0x7f36d4ee2000, 0x7f36d4f04000,
		0x7f36d4f04000, 0x7f36d504c000,
		0x7f36d504c000, 0x7f36d5098000,
		0x7f36d5098000, 0x7f36d5099000,
		0x7f36d5099000, 0x7f36d509d000,
		0x7f36d509d000, 0x7f36d509f000,
		0x7f36d509f000, 0x7f36d50a5000,
		0x7f36d50b9000, 0x7f36d50db000,
		0x7f36d50db000, 0x7f36d50dc000,
		0x7f36d50dc000, 0x7f36d50fa000,
		0x7f36d50fa000, 0x7f36d5102000,
		0x7f36d5102000, 0x7f36d5103000,
		0x7f36d5103000, 0x7f36d5104000,
		0x7f36d5104000, 0x7f36d5105000,
		0x7fff5876b000, 0x7fff5878d000,
		0x7fff5878e000, 0x7fff58791000,
		0x7fff58791000, 0x7fff58793000,
	};

	static const unsigned long holes[] = {
	/*
	 * Note: start of hole is INCLUSIVE
	 *       end of hole is EXCLUSIVE
	 *       (opposite of the above table.)
	 * Start of hole, end of hole, size of hole (+1)
	 */
		0x565234afb000, 0x565234afc000, 0x1000,
		0x565234afe000, 0x565235def000, 0x12F1000,
		0x565235e10000, 0x7f36d4bfd000, 0x28E49EDED000,
	};

	/*
	 * req_range consists of 5 values.
	 * 1. min index
	 * 2. max index
	 * 3. size
	 * 4. number that should be returned.
	 * 5. return value
	 */
	static const unsigned long req_range[] = {
		0x565234af9000, /* Min */
		0x7fff58791000, /* Max */
		0x1000,         /* Size */
		0x7fff5878d << 12,  /* First rev hole of size 0x1000 */
		0,              /* Return value success. */

		0x0,            /* Min */
		0x565234AF0 << 12,  /* Max */
		0x3000,         /* Size */
		0x565234AEE << 12,  /* max - 3. */
		0,              /* Return value success. */

		0x0,            /* Min */
		-1,             /* Max */
		0x1000,         /* Size */
		562949953421311 << 12,/* First rev hole of size 0x1000 */
		0,              /* Return value success. */

		0x0,            /* Min */
		0x7F36D5109 << 12,  /* Max */
		0x4000,         /* Size */
		0x7F36D5106 << 12,  /* First rev hole of size 0x4000 */
		0,              /* Return value success. */

		/* Ascend test. */
		0x0,
		34148798628 << 12,
		19 << 12,
		34148797418 << 12,
		0x0,

		/* Too big test. */
		0x0,
		18446744073709551615UL,
		562915594369134UL << 12,
		0x0,
		-EBUSY,

		/* Single space test. */
		34148798725 << 12,
		34148798725 << 12,
		1 << 12,
		34148798725 << 12,
		0,
	};

	int i, range_count = ARRAY_SIZE(range);
	int req_range_count = ARRAY_SIZE(req_range);
	unsigned long min = 0;

	MA_STATE(mas, mt, 0, 0);

	mtree_store_range(mt, MTREE_ALLOC_MAX, ULONG_MAX, XA_ZERO_ENTRY,
			  GFP_KERNEL);
#define DEBUG_REV_RANGE 0
	for (i = 0; i < range_count; i += 2) {
		/* Inclusive, Inclusive (with the -1) */

#if DEBUG_REV_RANGE
		pr_debug("\t%s: Insert %lu-%lu\n", __func__, range[i] >> 12,
				(range[i + 1] >> 12) - 1);
#endif
		check_insert_range(mt, range[i] >> 12, (range[i + 1] >> 12) - 1,
				xa_mk_value(range[i] >> 12), 0);
		mt_validate(mt);
	}


	mas_lock(&mas);
	for (i = 0; i < ARRAY_SIZE(holes); i += 3) {
#if DEBUG_REV_RANGE
		pr_debug("Search from %lu-%lu for gap %lu should be at %lu\n",
				min, holes[i+1]>>12, holes[i+2]>>12,
				holes[i] >> 12);
#endif
		MT_BUG_ON(mt, mas_empty_area_rev(&mas, min,
					holes[i+1] >> 12,
					holes[i+2] >> 12));
#if DEBUG_REV_RANGE
		pr_debug("Found %lu %lu\n", mas.index, mas.last);
		pr_debug("gap %lu %lu\n", (holes[i] >> 12),
				(holes[i+1] >> 12));
#endif
		MT_BUG_ON(mt, mas.last + 1 != (holes[i+1] >> 12));
		MT_BUG_ON(mt, mas.index != (holes[i+1] >> 12) - (holes[i+2] >> 12));
		min = holes[i+1] >> 12;
		mas_reset(&mas);
	}

	mas_unlock(&mas);
	for (i = 0; i < req_range_count; i += 5) {
#if DEBUG_REV_RANGE
		pr_debug("\tReverse request %d between %lu-%lu size %lu, should get %lu\n",
				i, req_range[i] >> 12,
				(req_range[i + 1] >> 12),
				req_range[i+2] >> 12,
				req_range[i+3] >> 12);
#endif
		check_mtree_alloc_rrange(mt,
				req_range[i]   >> 12, /* start */
				req_range[i+1] >> 12, /* end */
				req_range[i+2] >> 12, /* size */
				req_range[i+3] >> 12, /* expected address */
				req_range[i+4],       /* expected return */
				xa_mk_value(req_range[i] >> 12)); /* pointer */
		mt_validate(mt);
	}

	mt_set_non_kernel(1);
	mtree_erase(mt, 34148798727); /* create a deleted range. */
	mtree_erase(mt, 34148798725);
	check_mtree_alloc_rrange(mt, 0, 34359052173, 210253414,
			34148798725, 0, mt);

	mtree_destroy(mt);
}

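/*
 * Same VMA-like data set as above, but searched bottom-up with
 * mas_empty_area() and mtree_alloc_range().
 */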
static noinline void __init check_alloc_range(struct maple_tree *mt)
{
	/*
	 * Generated by:
	 * cat /proc/self/maps|awk '{print $1}'|
	 * awk -F "-" '{printf "0x%s, 0x%s, ", $1, $2}'
	 */

	static const unsigned long range[] = {
	/*      Inclusive     , Exclusive. */
		0x565234af2000, 0x565234af4000,
		0x565234af4000, 0x565234af9000,
		0x565234af9000, 0x565234afb000,
		0x565234afc000, 0x565234afd000,
		0x565234afd000, 0x565234afe000,
		0x565235def000, 0x565235e10000,
		0x7f36d4bfd000, 0x7f36d4ee2000,
		0x7f36d4ee2000, 0x7f36d4f04000,
		0x7f36d4f04000, 0x7f36d504c000,
		0x7f36d504c000, 0x7f36d5098000,
		0x7f36d5098000, 0x7f36d5099000,
		0x7f36d5099000, 0x7f36d509d000,
		0x7f36d509d000, 0x7f36d509f000,
		0x7f36d509f000, 0x7f36d50a5000,
		0x7f36d50b9000, 0x7f36d50db000,
		0x7f36d50db000, 0x7f36d50dc000,
		0x7f36d50dc000, 0x7f36d50fa000,
		0x7f36d50fa000, 0x7f36d5102000,
		0x7f36d5102000, 0x7f36d5103000,
		0x7f36d5103000, 0x7f36d5104000,
		0x7f36d5104000, 0x7f36d5105000,
		0x7fff5876b000, 0x7fff5878d000,
		0x7fff5878e000, 0x7fff58791000,
		0x7fff58791000, 0x7fff58793000,
	};
	static const unsigned long holes[] = {
		/* Start of hole, end of hole, size of hole (+1) */
		0x565234afb000, 0x565234afc000, 0x1000,
		0x565234afe000, 0x565235def000, 0x12F1000,
		0x565235e10000, 0x7f36d4bfd000, 0x28E49EDED000,
	};

	/*
	 * req_range consists of 5 values.
	 * 1. min index
	 * 2. max index
	 * 3. size
	 * 4. number that should be returned.
	 * 5. return value
	 */
	static const unsigned long req_range[] = {
		0x565234af9000, /* Min */
		0x7fff58791000, /* Max */
		0x1000,         /* Size */
		0x565234afb000, /* First hole in our data of size 1000. */
		0,              /* Return value success. */

		0x0,            /* Min */
		0x7fff58791000, /* Max */
		0x1F00,         /* Size */
		0x0,            /* First hole in our data of size 2000. */
		0,              /* Return value success. */

		/* Test ascend. */
		34148797436 << 12, /* Min */
		0x7fff587AF000,    /* Max */
		0x3000,            /* Size */
		34148798629 << 12, /* Expected location */
		0,                 /* Return value success. */

		/* Test failing. */
		34148798623 << 12, /* Min */
		34148798683 << 12, /* Max */
		0x15000,           /* Size */
		0,                 /* Expected location */
		-EBUSY,            /* Return value failed. */

		/* Test filling entire gap. */
		34148798623 << 12, /* Min */
		0x7fff587AF000,    /* Max */
		0x10000,           /* Size */
		34148798632 << 12, /* Expected location */
		0,                 /* Return value success. */

		/* Test walking off the end of root. */
		0,                 /* Min */
		-1,                /* Max */
		-1,                /* Size */
		0,                 /* Expected location */
		-EBUSY,            /* Return value failure. */

		/* Test looking for too large a hole across entire range. */
		0,                 /* Min */
		-1,                /* Max */
		4503599618982063UL << 12,  /* Size */
		34359052178 << 12, /* Expected location */
		-EBUSY,            /* Return failure. */

		/* Test a single entry */
		34148798648 << 12, /* Min */
		34148798648 << 12, /* Max */
		4096,              /* Size of 1 */
		34148798648 << 12, /* Location is the same as min/max */
		0,                 /* Success */
	};
	int i, range_count = ARRAY_SIZE(range);
	int req_range_count = ARRAY_SIZE(req_range);
	unsigned long min = 0x565234af2000;
	MA_STATE(mas, mt, 0, 0);

	mtree_store_range(mt, MTREE_ALLOC_MAX, ULONG_MAX, XA_ZERO_ENTRY,
			  GFP_KERNEL);
	for (i = 0; i < range_count; i += 2) {
#define DEBUG_ALLOC_RANGE 0
#if DEBUG_ALLOC_RANGE
		pr_debug("\tInsert %lu-%lu\n", range[i] >> 12,
			 (range[i + 1] >> 12) - 1);
		mt_dump(mt, mt_dump_hex);
#endif
		check_insert_range(mt, range[i] >> 12, (range[i + 1] >> 12) - 1,
				xa_mk_value(range[i] >> 12), 0);
		mt_validate(mt);
	}



	mas_lock(&mas);
	for (i = 0; i < ARRAY_SIZE(holes); i += 3) {

#if DEBUG_ALLOC_RANGE
		pr_debug("\tGet empty %lu-%lu size %lu (%lx-%lx)\n", min >> 12,
			holes[i+1] >> 12, holes[i+2] >> 12,
			min, holes[i+1]);
#endif
		MT_BUG_ON(mt, mas_empty_area(&mas, min >> 12,
					holes[i+1] >> 12,
					holes[i+2] >> 12));
		MT_BUG_ON(mt, mas.index != holes[i] >> 12);
		min = holes[i+1];
		mas_reset(&mas);
	}
	mas_unlock(&mas);
	for (i = 0; i < req_range_count; i += 5) {
#if DEBUG_ALLOC_RANGE
		pr_debug("\tTest %d: %lu-%lu size %lu expected %lu (%lu-%lu)\n",
			 i/5, req_range[i] >> 12, req_range[i + 1] >> 12,
			 req_range[i + 2] >> 12, req_range[i + 3] >> 12,
			 req_range[i], req_range[i+1]);
#endif
		check_mtree_alloc_range(mt,
				req_range[i]   >> 12, /* start */
				req_range[i+1] >> 12, /* end */
				req_range[i+2] >> 12, /* size */
				req_range[i+3] >> 12, /* expected address */
				req_range[i+4],       /* expected return */
				xa_mk_value(req_range[i] >> 12)); /* pointer */
		mt_validate(mt);
#if DEBUG_ALLOC_RANGE
		mt_dump(mt, mt_dump_hex);
#endif
	}

	mtree_destroy(mt);
}
#endif

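/*
 * Range store/overwrite tests: overlapping inserts, multi-level overwrites,
 * in-place appends and expansions (also in RCU mode), and stores that force
 * rebalancing and multi-level splits.
 */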
static noinline void __init check_ranges(struct maple_tree *mt)
{
	int i, val, val2;
	static const unsigned long r[] = {
		10, 15,
		20, 25,
		17, 22, /* Overlaps previous range. */
		9, 1000, /* Huge. */
		100, 200,
		45, 168,
		118, 128,
	};

	MT_BUG_ON(mt, !mtree_empty(mt));
	check_insert_range(mt, r[0], r[1], xa_mk_value(r[0]), 0);
	check_insert_range(mt, r[2], r[3], xa_mk_value(r[2]), 0);
	check_insert_range(mt, r[4], r[5], xa_mk_value(r[4]), -EEXIST);
	MT_BUG_ON(mt, !mt_height(mt));
	/* Store */
	check_store_range(mt, r[4], r[5], xa_mk_value(r[4]), 0);
	check_store_range(mt, r[6], r[7], xa_mk_value(r[6]), 0);
	check_store_range(mt, r[8], r[9], xa_mk_value(r[8]), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);
	MT_BUG_ON(mt, mt_height(mt));

	check_seq(mt, 50, false);
	mt_set_non_kernel(4);
	check_store_range(mt, 5, 47, xa_mk_value(47), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	/* Create tree of 1-100 */
	check_seq(mt, 100, false);
	/* Store 45-168 */
	mt_set_non_kernel(10);
	check_store_range(mt, r[10], r[11], xa_mk_value(r[10]), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	/* Create tree of 1-200 */
	check_seq(mt, 200, false);
	/* Store 45-168 */
	check_store_range(mt, r[10], r[11], xa_mk_value(r[10]), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	check_seq(mt, 30, false);
	check_store_range(mt, 6, 18, xa_mk_value(6), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	/* Overwrite across multiple levels. */
	/* Create tree of 1-400 */
	check_seq(mt, 400, false);
	mt_set_non_kernel(50);
	/* Store 118-128 */
	check_store_range(mt, r[12], r[13], xa_mk_value(r[12]), 0);
	mt_set_non_kernel(50);
	mtree_test_erase(mt, 140);
	mtree_test_erase(mt, 141);
	mtree_test_erase(mt, 142);
	mtree_test_erase(mt, 143);
	mtree_test_erase(mt, 130);
	mtree_test_erase(mt, 131);
	mtree_test_erase(mt, 132);
	mtree_test_erase(mt, 133);
	mtree_test_erase(mt, 134);
	mtree_test_erase(mt, 135);
	check_load(mt, r[12], xa_mk_value(r[12]));
	check_load(mt, r[13], xa_mk_value(r[12]));
	check_load(mt, r[13] - 1, xa_mk_value(r[12]));
	check_load(mt, r[13] + 1, xa_mk_value(r[13] + 1));
	check_load(mt, 135, NULL);
	check_load(mt, 140, NULL);
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);



	/* Overwrite multiple levels at the end of the tree (slot 7) */
	mt_set_non_kernel(50);
	check_seq(mt, 400, false);
	check_store_range(mt, 353, 361, xa_mk_value(353), 0);
	check_store_range(mt, 347, 352, xa_mk_value(347), 0);

	check_load(mt, 346, xa_mk_value(346));
	for (i = 347; i <= 352; i++)
		check_load(mt, i, xa_mk_value(347));
	for (i = 353; i <= 361; i++)
		check_load(mt, i, xa_mk_value(353));
	check_load(mt, 362, xa_mk_value(362));
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_set_non_kernel(50);
	check_seq(mt, 400, false);
	check_store_range(mt, 352, 364, NULL, 0);
	check_store_range(mt, 351, 363, xa_mk_value(352), 0);
	check_load(mt, 350, xa_mk_value(350));
	check_load(mt, 351, xa_mk_value(352));
	for (i = 352; i <= 363; i++)
		check_load(mt, i, xa_mk_value(352));
	check_load(mt, 364, NULL);
	check_load(mt, 365, xa_mk_value(365));
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_set_non_kernel(5);
	check_seq(mt, 400, false);
	check_store_range(mt, 352, 364, NULL, 0);
	check_store_range(mt, 351, 364, xa_mk_value(352), 0);
	check_load(mt, 350, xa_mk_value(350));
	check_load(mt, 351, xa_mk_value(352));
	for (i = 352; i <= 364; i++)
		check_load(mt, i, xa_mk_value(352));
	check_load(mt, 365, xa_mk_value(365));
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);


	mt_set_non_kernel(50);
	check_seq(mt, 400, false);
	check_store_range(mt, 362, 367, xa_mk_value(362), 0);
	check_store_range(mt, 353, 361, xa_mk_value(353), 0);
	mt_set_non_kernel(0);
	mt_validate(mt);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);
	/*
	 * Interesting cases:
	 * 1. Overwrite the end of a node and end in the first entry of the next
	 * node.
	 * 2. Split a single range
	 * 3. Overwrite the start of a range
	 * 4. Overwrite the end of a range
	 * 5. Overwrite the entire range
	 * 6. Overwrite a range that causes multiple parent nodes to be
	 * combined
	 * 7. Overwrite a range that causes multiple parent nodes and part of
	 * root to be combined
	 * 8. Overwrite the whole tree
	 * 9. Try to overwrite the zero entry of an alloc tree.
	 * 10. Write a range larger than a node's current pivot
	 */

	mt_set_non_kernel(50);
	for (i = 0; i <= 500; i++) {
		val = i*5;
		val2 = (i+1)*5;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 2400, 2400, xa_mk_value(2400), 0);
	check_store_range(mt, 2411, 2411, xa_mk_value(2411), 0);
	check_store_range(mt, 2412, 2412, xa_mk_value(2412), 0);
	check_store_range(mt, 2396, 2400, xa_mk_value(4052020), 0);
	check_store_range(mt, 2402, 2402, xa_mk_value(2402), 0);
	mtree_destroy(mt);
	mt_set_non_kernel(0);

	mt_set_non_kernel(50);
	for (i = 0; i <= 500; i++) {
		val = i*5;
		val2 = (i+1)*5;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 2422, 2422, xa_mk_value(2422), 0);
	check_store_range(mt, 2424, 2424, xa_mk_value(2424), 0);
	check_store_range(mt, 2425, 2425, xa_mk_value(2), 0);
	check_store_range(mt, 2460, 2470, NULL, 0);
	check_store_range(mt, 2435, 2460, xa_mk_value(2435), 0);
	check_store_range(mt, 2461, 2470, xa_mk_value(2461), 0);
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	/* Check in-place modifications */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	/* Append to the start of last range */
	mt_set_non_kernel(50);
	for (i = 0; i <= 500; i++) {
		val = i * 5 + 1;
		val2 = val + 4;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}

	/* Append to the last range without touching any boundaries */
	for (i = 0; i < 10; i++) {
		val = val2 + 5;
		val2 = val + 4;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}

	/* Append to the end of last range */
	val = val2;
	for (i = 0; i < 10; i++) {
		val += 5;
		MT_BUG_ON(mt, mtree_test_store_range(mt, val, ULONG_MAX,
						     xa_mk_value(val)) != 0);
	}

	/* Overwriting the range and over a part of the next range */
	for (i = 10; i < 30; i += 2) {
		val = i * 5 + 1;
		val2 = val + 5;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}

	/* Overwriting a part of the range and over the next range */
	for (i = 50; i < 70; i += 2) {
		val2 = i * 5;
		val = val2 - 5;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}

	/*
	 * Expand the range, only partially overwriting the previous and
	 * next ranges
	 */
	for (i = 100; i < 130; i += 3) {
		val = i * 5 - 5;
		val2 = i * 5 + 1;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}

	/*
	 * Expand the range, only partially overwriting the previous and
	 * next ranges, in RCU mode
	 */
	mt_set_in_rcu(mt);
	for (i = 150; i < 180; i += 3) {
		val = i * 5 - 5;
		val2 = i * 5 + 1;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}

	MT_BUG_ON(mt, !mt_height(mt));
	mt_validate(mt);
	mt_set_non_kernel(0);
	mtree_destroy(mt);

	/* Test rebalance gaps */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mt_set_non_kernel(50);
	for (i = 0; i <= 50; i++) {
		val = i*10;
		val2 = (i+1)*10;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 161, 161, xa_mk_value(161), 0);
	check_store_range(mt, 162, 162, xa_mk_value(162), 0);
	check_store_range(mt, 163, 163, xa_mk_value(163), 0);
	check_store_range(mt, 240, 249, NULL, 0);
	mtree_erase(mt, 200);
	mtree_erase(mt, 210);
	mtree_erase(mt, 220);
	mtree_erase(mt, 230);
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	for (i = 0; i <= 500; i++) {
		val = i*10;
		val2 = (i+1)*10;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 4600, 4959, xa_mk_value(1), 0);
	mt_validate(mt);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	for (i = 0; i <= 500; i++) {
		val = i*10;
		val2 = (i+1)*10;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 4811, 4811, xa_mk_value(4811), 0);
	check_store_range(mt, 4812, 4812, xa_mk_value(4812), 0);
	check_store_range(mt, 4861, 4861, xa_mk_value(4861), 0);
	check_store_range(mt, 4862, 4862, xa_mk_value(4862), 0);
	check_store_range(mt, 4842, 4849, NULL, 0);
	mt_validate(mt);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	for (i = 0; i <= 1300; i++) {
		val = i*10;
		val2 = (i+1)*10;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
		MT_BUG_ON(mt, mt_height(mt) >= 4);
	}
	/* Cause a 3 child split all the way up the tree. */
	for (i = 5; i < 215; i += 10)
		check_store_range(mt, 11450 + i, 11450 + i + 1, NULL, 0);
	for (i = 5; i < 65; i += 10)
		check_store_range(mt, 11770 + i, 11770 + i + 1, NULL, 0);

	MT_BUG_ON(mt, mt_height(mt) >= 4);
	for (i = 5; i < 45; i += 10)
		check_store_range(mt, 11700 + i, 11700 + i + 1, NULL, 0);
	if (!MAPLE_32BIT)
		MT_BUG_ON(mt, mt_height(mt) < 4);
	mtree_destroy(mt);


	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	for (i = 0; i <= 1200; i++) {
		val = i*10;
		val2 = (i+1)*10;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
		MT_BUG_ON(mt, mt_height(mt) >= 4);
	}
	/* Fill parents and leaves before split. */
	for (i = 5; i < 455; i += 10)
		check_store_range(mt, 7800 + i, 7800 + i + 1, NULL, 0);

	for (i = 1; i < 16; i++)
		check_store_range(mt, 8185 + i, 8185 + i + 1,
				  xa_mk_value(8185+i), 0);
	MT_BUG_ON(mt, mt_height(mt) >= 4);
	/* triple split across multiple levels. */
	check_store_range(mt, 8184, 8184, xa_mk_value(8184), 0);
	if (!MAPLE_32BIT)
		MT_BUG_ON(mt, mt_height(mt) != 4);
}

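/* Walk a small sequential tree forward with mas_next() past its limit. */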
static noinline void __init check_next_entry(struct maple_tree *mt)
{
	void *entry = NULL;
	unsigned long limit = 30, i = 0;
	MA_STATE(mas, mt, i, i);

	MT_BUG_ON(mt, !mtree_empty(mt));

	check_seq(mt, limit, false);
	rcu_read_lock();

	/* Check the first one and get ma_state in the correct state. */
	MT_BUG_ON(mt, mas_walk(&mas) != xa_mk_value(i++));
	for ( ; i <= limit + 1; i++) {
		entry = mas_next(&mas, limit);
		if (i > limit)
			MT_BUG_ON(mt, entry != NULL);
		else
			MT_BUG_ON(mt, xa_mk_value(i) != entry);
	}
	rcu_read_unlock();
	mtree_destroy(mt);
}

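/* Walk backwards with mas_prev() and check that the lower limit is honoured. */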
static noinline void __init check_prev_entry(struct maple_tree *mt)
{
	unsigned long index = 16;
	void *value;
	int i;

	MA_STATE(mas, mt, index, index);

	MT_BUG_ON(mt, !mtree_empty(mt));
	check_seq(mt, 30, false);

	rcu_read_lock();
	value = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, value != xa_mk_value(index));
	value = mas_prev(&mas, 0);
	MT_BUG_ON(mt, value != xa_mk_value(index - 1));
	rcu_read_unlock();
	mtree_destroy(mt);

	/* Check limits on prev */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mas_lock(&mas);
	for (i = 0; i <= index; i++) {
		mas_set_range(&mas, i*10, i*10+5);
		mas_store_gfp(&mas, xa_mk_value(i), GFP_KERNEL);
	}

	mas_set(&mas, 20);
	value = mas_walk(&mas);
	MT_BUG_ON(mt, value != xa_mk_value(2));

	value = mas_prev(&mas, 19);
	MT_BUG_ON(mt, value != NULL);

	mas_set(&mas, 80);
	value = mas_walk(&mas);
	MT_BUG_ON(mt, value != xa_mk_value(8));

	value = mas_prev(&mas, 76);
	MT_BUG_ON(mt, value != NULL);

	mas_unlock(&mas);
}

static noinline void __init check_store_null(struct maple_tree *mt)
{
	MA_STATE(mas, mt, 0, ULONG_MAX);

	/*
	 * Store NULL at range [0, ULONG_MAX] to an empty tree should result
	 * in an empty tree
	 */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mas_lock(&mas);
	mas_store_gfp(&mas, NULL, GFP_KERNEL);
	MT_BUG_ON(mt, !mtree_empty(mt));
	mas_unlock(&mas);
	mtree_destroy(mt);

	/*
	 * Store NULL at any range to an empty tree should result in an empty
	 * tree
	 */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mas_lock(&mas);
	mas_set_range(&mas, 3, 10);
	mas_store_gfp(&mas, NULL, GFP_KERNEL);
	MT_BUG_ON(mt, !mtree_empty(mt));
	mas_unlock(&mas);
	mtree_destroy(mt);

	/*
	 * Store NULL at range [0, ULONG_MAX] to a single entry tree should
	 * result in an empty tree
	 */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mas_lock(&mas);
	mas_set(&mas, 0);
	mas_store_gfp(&mas, &mas, GFP_KERNEL);
	mas_set_range(&mas, 0, ULONG_MAX);
	mas_store_gfp(&mas, NULL, GFP_KERNEL);
	MT_BUG_ON(mt, !mtree_empty(mt));
	mas_unlock(&mas);
	mtree_destroy(mt);

	/*
	 * Store NULL at range [0, n] to a single entry tree should
	 * result in an empty tree
	 */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mas_lock(&mas);
	mas_set(&mas, 0);
	mas_store_gfp(&mas, &mas, GFP_KERNEL);
	mas_set_range(&mas, 0, 5);
	mas_store_gfp(&mas, NULL, GFP_KERNEL);
	MT_BUG_ON(mt, !mtree_empty(mt));
	mas_unlock(&mas);
	mtree_destroy(mt);

	/*
	 * Store NULL at range [m, n] where m > 0 to a single entry tree
	 * should still be a single entry tree
	 */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mas_lock(&mas);
	mas_set(&mas, 0);
	mas_store_gfp(&mas, &mas, GFP_KERNEL);
	mas_set_range(&mas, 2, 5);
	mas_store_gfp(&mas, NULL, GFP_KERNEL);
	MT_BUG_ON(mt, mtree_empty(mt));
//	MT_BUG_ON(mt, xa_is_node(mas_root(&mas)));
	mas_unlock(&mas);
	mtree_destroy(mt);

	/*
	 * Store NULL at range [0, ULONG_MAX] to a tree with node should
	 * result in an empty tree
	 */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mas_lock(&mas);
	mas_set_range(&mas, 1, 3);
	mas_store_gfp(&mas, &mas, GFP_KERNEL);
//	MT_BUG_ON(mt, !xa_is_node(mas_root(&mas)));
	mas_set_range(&mas, 0, ULONG_MAX);
	mas_store_gfp(&mas, NULL, GFP_KERNEL);
	MT_BUG_ON(mt, !mtree_empty(mt));
	mas_unlock(&mas);
	mtree_destroy(mt);
}

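/*
 * Check walks and stores on an empty or root-only tree, including entries
 * with low pointer bits set, and the ranges reported back by the ma_state.
 */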
static noinline void __init check_root_expand(struct maple_tree *mt)
{
	MA_STATE(mas, mt, 0, 0);
	void *ptr;


	mas_lock(&mas);
	mas_set(&mas, 3);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, mas.last != ULONG_MAX);

	ptr = &check_prev_entry;
	mas_set(&mas, 1);
	mas_store_gfp(&mas, ptr, GFP_KERNEL);

	mas_set(&mas, 0);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != NULL);

	mas_set(&mas, 1);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != &check_prev_entry);

	mas_set(&mas, 2);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != NULL);
	mas_unlock(&mas);
	mtree_destroy(mt);


	mt_init_flags(mt, 0);
	mas_lock(&mas);

	mas_set(&mas, 0);
	ptr = &check_prev_entry;
	mas_store_gfp(&mas, ptr, GFP_KERNEL);

	mas_set(&mas, 5);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, mas.index != 1);
	MT_BUG_ON(mt, mas.last != ULONG_MAX);

	mas_set_range(&mas, 0, 100);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != &check_prev_entry);
	MT_BUG_ON(mt, mas.last != 0);
	mas_unlock(&mas);
	mtree_destroy(mt);

	mt_init_flags(mt, 0);
	mas_lock(&mas);

	mas_set(&mas, 0);
	ptr = (void *)((unsigned long) check_prev_entry | 1UL);
	mas_store_gfp(&mas, ptr, GFP_KERNEL);
	ptr = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, (mas.index != 1) && (mas.last != ULONG_MAX));

	mas_set(&mas, 1);
	ptr = mas_prev(&mas, 0);
	MT_BUG_ON(mt, (mas.index != 0) && (mas.last != 0));
	MT_BUG_ON(mt, ptr != (void *)((unsigned long) check_prev_entry | 1UL));

	mas_unlock(&mas);

	mtree_destroy(mt);

	mt_init_flags(mt, 0);
	mas_lock(&mas);
	mas_set(&mas, 0);
	ptr = (void *)((unsigned long) check_prev_entry | 2UL);
	mas_store_gfp(&mas, ptr, GFP_KERNEL);
	ptr = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, (mas.index != ULONG_MAX) && (mas.last != ULONG_MAX));

	mas_set(&mas, 1);
	ptr = mas_prev(&mas, 0);
	MT_BUG_ON(mt, (mas.index != 0) && (mas.last != 0));
	MT_BUG_ON(mt, ptr != (void *)((unsigned long) check_prev_entry | 2UL));


	mas_unlock(&mas);
}

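/*
 * Store a pattern of entries intended to produce a node with a low entry
 * count, then validate the tree.
 */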
static noinline void __init check_deficient_node(struct maple_tree *mt)
{
	MA_STATE(mas, mt, 0, 0);
	int count;

	mas_lock(&mas);
	for (count = 0; count < 10; count++) {
		mas_set(&mas, count);
		mas_store_gfp(&mas, xa_mk_value(count), GFP_KERNEL);
	}

	for (count = 20; count < 39; count++) {
		mas_set(&mas, count);
		mas_store_gfp(&mas, xa_mk_value(count), GFP_KERNEL);
	}

	for (count = 10; count < 12; count++) {
		mas_set(&mas, count);
		mas_store_gfp(&mas, xa_mk_value(count), GFP_KERNEL);
	}
	mas_unlock(&mas);
	mt_validate(mt);
}

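/*
 * Erase entries from trees of singletons and verify that the freed slots
 * are found as one combined gap by mas_empty_area_rev(), including across
 * node boundaries and multiple levels.
 */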
static noinline void __init check_gap_combining(struct maple_tree *mt)
{
	struct maple_enode *mn1, *mn2;
	void *entry;
	unsigned long singletons = 100;
	static const unsigned long *seq100;
	static const unsigned long seq100_64[] = {
		/* 0-5 */
		74, 75, 76,
		50, 100, 2,

		/* 6-12 */
		44, 45, 46, 43,
		20, 50, 3,

		/* 13-20*/
		80, 81, 82,
		76, 2, 79, 85, 4,
	};

	static const unsigned long seq100_32[] = {
		/* 0-5 */
		61, 62, 63,
		50, 100, 2,

		/* 6-12 */
		31, 32, 33, 30,
		20, 50, 3,

		/* 13-20*/
		80, 81, 82,
		76, 2, 79, 85, 4,
	};

	static const unsigned long seq2000[] = {
		1152, 1151,
		1100, 1200, 2,
	};
	static const unsigned long seq400[] = {
		286, 318,
		256, 260, 266, 270, 275, 280, 290, 398,
		286, 310,
	};

	unsigned long index;

	MA_STATE(mas, mt, 0, 0);

	if (MAPLE_32BIT)
		seq100 = seq100_32;
	else
		seq100 = seq100_64;

	index = seq100[0];
	mas_set(&mas, index);
	MT_BUG_ON(mt, !mtree_empty(mt));
	check_seq(mt, singletons, false); /* create 100 singletons. */

	mt_set_non_kernel(1);
	mtree_test_erase(mt, seq100[2]);
	check_load(mt, seq100[2], NULL);
	mtree_test_erase(mt, seq100[1]);
	check_load(mt, seq100[1], NULL);

	rcu_read_lock();
	entry = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index));
	mn1 = mas.node;
	mas_next(&mas, ULONG_MAX);
	entry = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index + 4));
	mn2 = mas.node;
	MT_BUG_ON(mt, mn1 == mn2); /* test the test. */

	/*
	 * At this point, there is a gap of 2 at index + 1 between seq100[3] and
	 * seq100[4]. Search for the gap.
	 */
	mt_set_non_kernel(1);
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[3], seq100[4],
					     seq100[5]));
	MT_BUG_ON(mt, mas.index != index + 1);
	rcu_read_unlock();

	mtree_test_erase(mt, seq100[6]);
	check_load(mt, seq100[6], NULL);
	mtree_test_erase(mt, seq100[7]);
	check_load(mt, seq100[7], NULL);
	mtree_test_erase(mt, seq100[8]);
	index = seq100[9];

	rcu_read_lock();
	mas.index = index;
	mas.last = index;
	mas_reset(&mas);
	entry = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index));
	mn1 = mas.node;
	entry = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index + 4));
	mas_next(&mas, ULONG_MAX); /* go to the next entry. */
	mn2 = mas.node;
	MT_BUG_ON(mt, mn1 == mn2); /* test the next entry is in the next node. */

	/*
	 * At this point, there is a gap of 3 at seq100[6].  Find it by
	 * searching 20 - 50 for size 3.
	 */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[10], seq100[11],
					     seq100[12]));
	MT_BUG_ON(mt, mas.index != seq100[6]);
	rcu_read_unlock();

	mt_set_non_kernel(1);
	mtree_store(mt, seq100[13], NULL, GFP_KERNEL);
	check_load(mt, seq100[13], NULL);
	check_load(mt, seq100[14], xa_mk_value(seq100[14]));
	mtree_store(mt, seq100[14], NULL, GFP_KERNEL);
	check_load(mt, seq100[13], NULL);
	check_load(mt, seq100[14], NULL);

	mas_reset(&mas);
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[16], seq100[15],
					     seq100[17]));
	MT_BUG_ON(mt, mas.index != seq100[13]);
	mt_validate(mt);
	rcu_read_unlock();

	/*
	 * *DEPRECATED: no retries anymore* Test retry entry in the start of a
	 * gap.
	 */
	mt_set_non_kernel(2);
	mtree_test_store_range(mt, seq100[18], seq100[14], NULL);
	mtree_test_erase(mt, seq100[15]);
	mas_reset(&mas);
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[16], seq100[19],
					     seq100[20]));
	rcu_read_unlock();
	MT_BUG_ON(mt, mas.index != seq100[18]);
	mt_validate(mt);
	mtree_destroy(mt);

	/* seq 2000 tests are for multi-level tree gaps */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	check_seq(mt, 2000, false);
	mt_set_non_kernel(1);
	mtree_test_erase(mt, seq2000[0]);
	mtree_test_erase(mt, seq2000[1]);

	mt_set_non_kernel(2);
	mas_reset(&mas);
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq2000[2], seq2000[3],
					     seq2000[4]));
	MT_BUG_ON(mt, mas.index != seq2000[1]);
	rcu_read_unlock();
	mt_validate(mt);
	mtree_destroy(mt);

	/* seq 400 tests rebalancing over two levels. */
	mt_set_non_kernel(99);
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	check_seq(mt, 400, false);
	mtree_test_store_range(mt, seq400[0], seq400[1], NULL);
	mt_set_non_kernel(0);
	mtree_destroy(mt);

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	check_seq(mt, 400, false);
	mt_set_non_kernel(50);
	mtree_test_store_range(mt, seq400[2], seq400[9],
			       xa_mk_value(seq400[2]));
	mtree_test_store_range(mt, seq400[3], seq400[9],
			       xa_mk_value(seq400[3]));
	mtree_test_store_range(mt, seq400[4], seq400[9],
			       xa_mk_value(seq400[4]));
	mtree_test_store_range(mt, seq400[5], seq400[9],
			       xa_mk_value(seq400[5]));
	mtree_test_store_range(mt, seq400[0], seq400[9],
			       xa_mk_value(seq400[0]));
	mtree_test_store_range(mt, seq400[6], seq400[9],
			       xa_mk_value(seq400[6]));
	mtree_test_store_range(mt, seq400[7], seq400[9],
			       xa_mk_value(seq400[7]));
	mtree_test_store_range(mt, seq400[8], seq400[9],
			       xa_mk_value(seq400[8]));
	mtree_test_store_range(mt, seq400[10], seq400[11],
			       xa_mk_value(seq400[10]));
	mt_validate(mt);
	mt_set_non_kernel(0);
	mtree_destroy(mt);
}
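
/*
 * Store many small ranges, then overwrite a span crossing several nodes and
 * validate the result.
 */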
static noinline void __init check_node_overwrite(struct maple_tree *mt)
{
	int i, max = 4000;

	for (i = 0; i < max; i++)
		mtree_test_store_range(mt, i*100, i*100 + 50, xa_mk_value(i*100));

	mtree_test_store_range(mt, 319951, 367950, NULL);
	/*mt_dump(mt, mt_dump_dec); */
	mt_validate(mt);
}

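/*
 * The bench_*() functions below are compiled in only when the corresponding
 * BENCH_* define near the top of this file is uncommented.
 */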
120b1162 1799#if defined(BENCH_SLOT_STORE)
eaf9790d 1800static noinline void __init bench_slot_store(struct maple_tree *mt)
e15e06a8 1801{
120b1162 1802 int i, brk = 105, max = 1040, brk_start = 100, count = 20000000;
e15e06a8 1803
120b1162
LH
1804 for (i = 0; i < max; i += 10)
1805 mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
e15e06a8 1806
120b1162
LH
1807 for (i = 0; i < count; i++) {
1808 mtree_store_range(mt, brk, brk, NULL, GFP_KERNEL);
1809 mtree_store_range(mt, brk_start, brk, xa_mk_value(brk),
1810 GFP_KERNEL);
e15e06a8 1811 }
e15e06a8 1812}
120b1162 1813#endif
e15e06a8 1814
120b1162 1815#if defined(BENCH_NODE_STORE)
eaf9790d 1816static noinline void __init bench_node_store(struct maple_tree *mt)
e15e06a8 1817{
120b1162 1818 int i, overwrite = 76, max = 240, count = 20000000;
e15e06a8 1819
120b1162
LH
1820 for (i = 0; i < max; i += 10)
1821 mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
e15e06a8 1822
120b1162
LH
1823 for (i = 0; i < count; i++) {
1824 mtree_store_range(mt, overwrite, overwrite + 15,
1825 xa_mk_value(overwrite), GFP_KERNEL);
e15e06a8 1826
120b1162
LH
1827 overwrite += 5;
1828 if (overwrite >= 135)
1829 overwrite = 76;
e15e06a8
LH
1830 }
1831}
120b1162 1832#endif
e15e06a8 1833
120b1162 1834#if defined(BENCH_AWALK)
eaf9790d 1835static noinline void __init bench_awalk(struct maple_tree *mt)
e15e06a8 1836{
120b1162
LH
1837 int i, max = 2500, count = 50000000;
1838 MA_STATE(mas, mt, 1470, 1470);
e15e06a8 1839
120b1162
LH
1840 for (i = 0; i < max; i += 10)
1841 mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
e15e06a8 1842
120b1162 1843 mtree_store_range(mt, 1470, 1475, NULL, GFP_KERNEL);
e15e06a8 1844
120b1162
LH
1845 for (i = 0; i < count; i++) {
1846 mas_empty_area_rev(&mas, 0, 2000, 10);
1847 mas_reset(&mas);
e15e06a8 1848 }
120b1162
LH
1849}
1850#endif
1851#if defined(BENCH_WALK)
eaf9790d 1852static noinline void __init bench_walk(struct maple_tree *mt)
120b1162
LH
1853{
1854 int i, max = 2500, count = 550000000;
1855 MA_STATE(mas, mt, 1470, 1470);
e15e06a8 1856
120b1162
LH
1857 for (i = 0; i < max; i += 10)
1858 mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
e15e06a8 1859
120b1162
LH
1860 for (i = 0; i < count; i++) {
1861 mas_walk(&mas);
1862 mas_reset(&mas);
e15e06a8
LH
1863 }
1864
e15e06a8 1865}
120b1162 1866#endif
e15e06a8 1867
24662dec
LH
1868#if defined(BENCH_LOAD)
1869static noinline void __init bench_load(struct maple_tree *mt)
1870{
1871 int i, max = 2500, count = 550000000;
1872
1873 for (i = 0; i < max; i += 10)
1874 mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
1875
1876 for (i = 0; i < count; i++)
1877 mtree_load(mt, 1470);
1878}
1879#endif
1880
120b1162 1881#if defined(BENCH_MT_FOR_EACH)
eaf9790d 1882static noinline void __init bench_mt_for_each(struct maple_tree *mt)
e15e06a8 1883{
120b1162
LH
1884 int i, count = 1000000;
1885 unsigned long max = 2500, index = 0;
1886 void *entry;
e15e06a8 1887
120b1162
LH
1888 for (i = 0; i < max; i += 5)
1889 mtree_store_range(mt, i, i + 4, xa_mk_value(i), GFP_KERNEL);
1890
1891 for (i = 0; i < count; i++) {
1892 unsigned long j = 0;
e15e06a8 1893
120b1162
LH
1894 mt_for_each(mt, entry, index, max) {
1895 MT_BUG_ON(mt, entry != xa_mk_value(j));
1896 j += 5;
e15e06a8 1897 }
120b1162
LH
1898
1899 index = 0;
e15e06a8
LH
1900 }
1901
e15e06a8 1902}
120b1162 1903#endif
e15e06a8 1904
361c678b
LH
1905#if defined(BENCH_MAS_FOR_EACH)
1906static noinline void __init bench_mas_for_each(struct maple_tree *mt)
1907{
1908 int i, count = 1000000;
1909 unsigned long max = 2500;
1910 void *entry;
1911 MA_STATE(mas, mt, 0, 0);
1912
1913 for (i = 0; i < max; i += 5) {
1914 int gap = 4;
1915
1916 if (i % 30 == 0)
1917 gap = 3;
1918 mtree_store_range(mt, i, i + gap, xa_mk_value(i), GFP_KERNEL);
1919 }
1920
1921 rcu_read_lock();
1922 for (i = 0; i < count; i++) {
1923 unsigned long j = 0;
1924
1925 mas_for_each(&mas, entry, max) {
1926 MT_BUG_ON(mt, entry != xa_mk_value(j));
1927 j += 5;
1928 }
1929 mas_set(&mas, 0);
1930 }
1931 rcu_read_unlock();
1932
1933}
1934#endif
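/*
 * Editor's sketch contrasting the two iteration styles timed above
 * (helper name invented; assumes this file's headers): mt_for_each()
 * tracks its position in a caller-supplied index, while mas_for_each()
 * keeps it in the ma_state, whose index/last always describe the range
 * of the entry just returned.
 */
static noinline void __init example_iteration_styles(struct maple_tree *mt)
{
	unsigned long index = 0;
	void *entry;
	MA_STATE(mas, mt, 0, 0);

	/* External-index iteration up to 2500. */
	mt_for_each(mt, entry, index, 2500)
		pr_debug("mt_for_each: index %lu -> %p\n", index, entry);

	/* State-based iteration over the same span. */
	rcu_read_lock();
	mas_for_each(&mas, entry, 2500)
		pr_debug("mas_for_each: %lu-%lu -> %p\n",
			 mas.index, mas.last, entry);
	rcu_read_unlock();
}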
8c314f3b
LH
1935#if defined(BENCH_MAS_PREV)
1936static noinline void __init bench_mas_prev(struct maple_tree *mt)
1937{
1938 int i, count = 1000000;
1939 unsigned long max = 2500;
1940 void *entry;
1941 MA_STATE(mas, mt, 0, 0);
1942
1943 for (i = 0; i < max; i += 5) {
1944 int gap = 4;
1945
1946 if (i % 30 == 0)
1947 gap = 3;
1948 mtree_store_range(mt, i, i + gap, xa_mk_value(i), GFP_KERNEL);
1949 }
1950
1951 rcu_read_lock();
1952 for (i = 0; i < count; i++) {
1953 unsigned long j = 2495;
1954
1955 mas_set(&mas, ULONG_MAX);
1956 while ((entry = mas_prev(&mas, 0)) != NULL) {
1957 MT_BUG_ON(mt, entry != xa_mk_value(j));
1958 j -= 5;
1959 }
1960 }
1961 rcu_read_unlock();
361c678b 1962
8c314f3b
LH
1963}
1964#endif
120b1162 1965/* check_forking - simulate the kernel forking sequence with the tree. */
446e1867 1966static noinline void __init check_forking(void)
e15e06a8 1967{
446e1867
PZ
1968 struct maple_tree mt, newmt;
1969 int i, nr_entries = 134, ret;
120b1162 1970 void *val;
446e1867
PZ
1971 MA_STATE(mas, &mt, 0, 0);
1972 MA_STATE(newmas, &newmt, 0, 0);
1973 struct rw_semaphore mt_lock, newmt_lock;
099d7439 1974
446e1867 1975 init_rwsem(&mt_lock);
099d7439 1976 init_rwsem(&newmt_lock);
120b1162 1977
446e1867
PZ
1978 mt_init_flags(&mt, MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN);
1979 mt_set_external_lock(&mt, &mt_lock);
120b1162 1980
099d7439
LH
1981 mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN);
1982 mt_set_external_lock(&newmt, &newmt_lock);
446e1867
PZ
1983
1984 down_write(&mt_lock);
1985 for (i = 0; i <= nr_entries; i++) {
1986 mas_set_range(&mas, i*10, i*10 + 5);
1987 mas_store_gfp(&mas, xa_mk_value(i), GFP_KERNEL);
1988 }
1989
1990 down_write_nested(&newmt_lock, SINGLE_DEPTH_NESTING);
1991 ret = __mt_dup(&mt, &newmt, GFP_KERNEL);
1992 if (ret) {
120b1162
LH
1993 pr_err("OOM!");
1994 BUG_ON(1);
1995 }
446e1867
PZ
1996
1997 mas_set(&newmas, 0);
1998 mas_for_each(&newmas, val, ULONG_MAX)
120b1162 1999 mas_store(&newmas, val);
446e1867 2000
120b1162 2001 mas_destroy(&newmas);
446e1867 2002 mas_destroy(&mas);
120b1162 2003 mt_validate(&newmt);
099d7439 2004 __mt_destroy(&newmt);
446e1867 2005 __mt_destroy(&mt);
099d7439 2006 up_write(&newmt_lock);
446e1867 2007 up_write(&mt_lock);
e15e06a8
LH
2008}
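/*
 * Editor's sketch of the duplication-plus-fixup pattern that
 * check_forking() above validates (names invented; assumes the caller
 * already holds the source tree's external lock and that @new was
 * initialized with MT_FLAGS_LOCK_EXTERN and mt_set_external_lock()):
 * __mt_dup() copies the node structure, then a mas_for_each() pass
 * touches every entry - here it simply stores the same value back.
 */
static noinline void __init example_dup_with_fixup(struct maple_tree *old,
		struct maple_tree *new, struct rw_semaphore *new_lock)
{
	void *val;
	MA_STATE(newmas, new, 0, 0);

	down_write_nested(new_lock, SINGLE_DEPTH_NESTING);
	if (__mt_dup(old, new, GFP_KERNEL)) {
		up_write(new_lock);
		return;
	}

	mas_for_each(&newmas, val, ULONG_MAX)
		mas_store(&newmas, val);	/* per-entry "fixup" */

	mas_destroy(&newmas);
	up_write(new_lock);
}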
2009
eaf9790d 2010static noinline void __init check_iteration(struct maple_tree *mt)
5159d64b
LH
2011{
2012 int i, nr_entries = 125;
2013 void *val;
2014 MA_STATE(mas, mt, 0, 0);
2015
2016 for (i = 0; i <= nr_entries; i++)
2017 mtree_store_range(mt, i * 10, i * 10 + 9,
2018 xa_mk_value(i), GFP_KERNEL);
2019
2020 mt_set_non_kernel(99999);
2021
2022 i = 0;
2023 mas_lock(&mas);
2024 mas_for_each(&mas, val, 925) {
2025 MT_BUG_ON(mt, mas.index != i * 10);
2026 MT_BUG_ON(mt, mas.last != i * 10 + 9);
2027 /* Overwrite end of entry 92 */
2028 if (i == 92) {
2029 mas.index = 925;
2030 mas.last = 929;
2031 mas_store(&mas, val);
2032 }
2033 i++;
2034 }
2035 /* Ensure mas_find() gets the next value */
2036 val = mas_find(&mas, ULONG_MAX);
2037 MT_BUG_ON(mt, val != xa_mk_value(i));
2038
2039 mas_set(&mas, 0);
2040 i = 0;
2041 mas_for_each(&mas, val, 785) {
2042 MT_BUG_ON(mt, mas.index != i * 10);
2043 MT_BUG_ON(mt, mas.last != i * 10 + 9);
2044 /* Overwrite start of entry 78 */
2045 if (i == 78) {
2046 mas.index = 780;
2047 mas.last = 785;
2048 mas_store(&mas, val);
2049 } else {
2050 i++;
2051 }
2052 }
2053 val = mas_find(&mas, ULONG_MAX);
2054 MT_BUG_ON(mt, val != xa_mk_value(i));
2055
2056 mas_set(&mas, 0);
2057 i = 0;
2058 mas_for_each(&mas, val, 765) {
2059 MT_BUG_ON(mt, mas.index != i * 10);
2060 MT_BUG_ON(mt, mas.last != i * 10 + 9);
 2061 /* Overwrite start of entry 76 up to the loop limit and stop iterating */
2062 if (i == 76) {
2063 mas.index = 760;
2064 mas.last = 765;
2065 mas_store(&mas, val);
5159d64b
LH
2066 }
2067 i++;
2068 }
2069 /* Make sure the next find returns the one after 765, 766-769 */
2070 val = mas_find(&mas, ULONG_MAX);
2071 MT_BUG_ON(mt, val != xa_mk_value(76));
2072 mas_unlock(&mas);
2073 mas_destroy(&mas);
2074 mt_set_non_kernel(0);
2075}
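/*
 * Editor's sketch (invented helper, assuming the same 10-wide entries
 * stored by check_iteration() above): while iterating with
 * mas_for_each(), the current range can be narrowed by adjusting
 * mas.index/mas.last and overwritten with mas_store() without restarting
 * the walk; the next find continues from whatever remains of the entry.
 */
static noinline void __init example_store_mid_walk(struct maple_tree *mt)
{
	void *val;
	MA_STATE(mas, mt, 0, 0);

	mas_lock(&mas);
	mas_for_each(&mas, val, 925) {
		if (mas.index == 920) {
			/* Overwrite only the tail (925-929) of this entry. */
			mas.index = 925;
			mas.last = 929;
			mas_store(&mas, val);
		}
	}
	mas_unlock(&mas);
	mas_destroy(&mas);
}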
2076
eaf9790d 2077static noinline void __init check_mas_store_gfp(struct maple_tree *mt)
e15e06a8 2078{
e15e06a8 2079
120b1162
LH
2080 struct maple_tree newmt;
2081 int i, nr_entries = 135;
2082 void *val;
2083 MA_STATE(mas, mt, 0, 0);
2084 MA_STATE(newmas, mt, 0, 0);
e15e06a8 2085
120b1162
LH
2086 for (i = 0; i <= nr_entries; i++)
2087 mtree_store_range(mt, i*10, i*10 + 5,
2088 xa_mk_value(i), GFP_KERNEL);
e15e06a8 2089
120b1162
LH
2090 mt_set_non_kernel(99999);
2091 mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
2092 newmas.tree = &newmt;
2093 rcu_read_lock();
2094 mas_lock(&newmas);
2095 mas_reset(&newmas);
2096 mas_set(&mas, 0);
2097 mas_for_each(&mas, val, ULONG_MAX) {
2098 newmas.index = mas.index;
2099 newmas.last = mas.last;
2100 mas_store_gfp(&newmas, val, GFP_KERNEL);
e15e06a8 2101 }
120b1162
LH
2102 mas_unlock(&newmas);
2103 rcu_read_unlock();
2104 mt_validate(&newmt);
2105 mt_set_non_kernel(0);
2106 mtree_destroy(&newmt);
e15e06a8
LH
2107}
2108
120b1162 2109#if defined(BENCH_FORK)
446e1867 2110static noinline void __init bench_forking(void)
e15e06a8 2111{
446e1867
PZ
2112 struct maple_tree mt, newmt;
2113 int i, nr_entries = 134, nr_fork = 80000, ret;
120b1162 2114 void *val;
446e1867
PZ
2115 MA_STATE(mas, &mt, 0, 0);
2116 MA_STATE(newmas, &newmt, 0, 0);
2117 struct rw_semaphore mt_lock, newmt_lock;
099d7439 2118
446e1867 2119 init_rwsem(&mt_lock);
099d7439 2120 init_rwsem(&newmt_lock);
e15e06a8 2121
446e1867
PZ
2122 mt_init_flags(&mt, MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN);
2123 mt_set_external_lock(&mt, &mt_lock);
2124
2125 down_write(&mt_lock);
2126 for (i = 0; i <= nr_entries; i++) {
2127 mas_set_range(&mas, i*10, i*10 + 5);
2128 mas_store_gfp(&mas, xa_mk_value(i), GFP_KERNEL);
2129 }
e15e06a8 2130
120b1162 2131 for (i = 0; i < nr_fork; i++) {
446e1867
PZ
2132 mt_init_flags(&newmt,
2133 MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN);
2134 mt_set_external_lock(&newmt, &newmt_lock);
2135
2136 down_write_nested(&newmt_lock, SINGLE_DEPTH_NESTING);
2137 ret = __mt_dup(&mt, &newmt, GFP_KERNEL);
2138 if (ret) {
2139 pr_err("OOM!");
120b1162
LH
2140 BUG_ON(1);
2141 }
446e1867
PZ
2142
2143 mas_set(&newmas, 0);
2144 mas_for_each(&newmas, val, ULONG_MAX)
120b1162 2145 mas_store(&newmas, val);
446e1867 2146
120b1162 2147 mas_destroy(&newmas);
120b1162 2148 mt_validate(&newmt);
099d7439
LH
2149 __mt_destroy(&newmt);
2150 up_write(&newmt_lock);
e15e06a8 2151 }
446e1867
PZ
2152 mas_destroy(&mas);
2153 __mt_destroy(&mt);
2154 up_write(&mt_lock);
e15e06a8 2155}
120b1162 2156#endif
e15e06a8 2157
eaf9790d 2158static noinline void __init next_prev_test(struct maple_tree *mt)
e15e06a8 2159{
120b1162
LH
2160 int i, nr_entries;
2161 void *val;
2162 MA_STATE(mas, mt, 0, 0);
2163 struct maple_enode *mn;
eaf9790d
LH
2164 static const unsigned long *level2;
2165 static const unsigned long level2_64[] = { 707, 1000, 710, 715, 720,
2166 725};
2167 static const unsigned long level2_32[] = { 1747, 2000, 1750, 1755,
2168 1760, 1765};
7a93c71a 2169 unsigned long last_index;
e15e06a8 2170
120b1162
LH
2171 if (MAPLE_32BIT) {
2172 nr_entries = 500;
2173 level2 = level2_32;
7a93c71a 2174 last_index = 0x138e;
120b1162
LH
2175 } else {
2176 nr_entries = 200;
2177 level2 = level2_64;
7a93c71a 2178 last_index = 0x7d6;
120b1162 2179 }
e15e06a8 2180
120b1162
LH
2181 for (i = 0; i <= nr_entries; i++)
2182 mtree_store_range(mt, i*10, i*10 + 5,
2183 xa_mk_value(i), GFP_KERNEL);
e15e06a8 2184
120b1162
LH
2185 mas_lock(&mas);
2186 for (i = 0; i <= nr_entries / 2; i++) {
2187 mas_next(&mas, 1000);
2188 if (mas_is_none(&mas))
2189 break;
e15e06a8 2190
e15e06a8 2191 }
120b1162
LH
2192 mas_reset(&mas);
2193 mas_set(&mas, 0);
2194 i = 0;
2195 mas_for_each(&mas, val, 1000) {
2196 i++;
e15e06a8
LH
2197 }
2198
120b1162
LH
2199 mas_reset(&mas);
2200 mas_set(&mas, 0);
2201 i = 0;
2202 mas_for_each(&mas, val, 1000) {
2203 mas_pause(&mas);
2204 i++;
2205 }
e15e06a8 2206
120b1162
LH
2207 /*
 2208 * 680 - 685 = xa_mk_value(68)
 2209 * 686 - 689 = NULL
 2210 * 690 - 695 = xa_mk_value(69)
2211 * Check simple next/prev
2212 */
2213 mas_set(&mas, 686);
2214 val = mas_walk(&mas);
2215 MT_BUG_ON(mt, val != NULL);
e15e06a8 2216
120b1162
LH
2217 val = mas_next(&mas, 1000);
2218 MT_BUG_ON(mt, val != xa_mk_value(690 / 10));
2219 MT_BUG_ON(mt, mas.index != 690);
2220 MT_BUG_ON(mt, mas.last != 695);
e15e06a8 2221
120b1162
LH
2222 val = mas_prev(&mas, 0);
2223 MT_BUG_ON(mt, val != xa_mk_value(680 / 10));
2224 MT_BUG_ON(mt, mas.index != 680);
2225 MT_BUG_ON(mt, mas.last != 685);
e15e06a8 2226
120b1162
LH
2227 val = mas_next(&mas, 1000);
2228 MT_BUG_ON(mt, val != xa_mk_value(690 / 10));
2229 MT_BUG_ON(mt, mas.index != 690);
2230 MT_BUG_ON(mt, mas.last != 695);
e15e06a8 2231
120b1162
LH
2232 val = mas_next(&mas, 1000);
2233 MT_BUG_ON(mt, val != xa_mk_value(700 / 10));
2234 MT_BUG_ON(mt, mas.index != 700);
2235 MT_BUG_ON(mt, mas.last != 705);
e15e06a8 2236
120b1162
LH
2237 /* Check across node boundaries of the tree */
2238 mas_set(&mas, 70);
2239 val = mas_walk(&mas);
2240 MT_BUG_ON(mt, val != xa_mk_value(70 / 10));
2241 MT_BUG_ON(mt, mas.index != 70);
2242 MT_BUG_ON(mt, mas.last != 75);
e15e06a8 2243
120b1162
LH
2244 val = mas_next(&mas, 1000);
2245 MT_BUG_ON(mt, val != xa_mk_value(80 / 10));
2246 MT_BUG_ON(mt, mas.index != 80);
2247 MT_BUG_ON(mt, mas.last != 85);
e15e06a8 2248
120b1162
LH
2249 val = mas_prev(&mas, 70);
2250 MT_BUG_ON(mt, val != xa_mk_value(70 / 10));
2251 MT_BUG_ON(mt, mas.index != 70);
2252 MT_BUG_ON(mt, mas.last != 75);
e15e06a8 2253
120b1162
LH
2254 /* Check across two levels of the tree */
2255 mas_reset(&mas);
2256 mas_set(&mas, level2[0]);
2257 val = mas_walk(&mas);
2258 MT_BUG_ON(mt, val != NULL);
2259 val = mas_next(&mas, level2[1]);
2260 MT_BUG_ON(mt, val != xa_mk_value(level2[2] / 10));
2261 MT_BUG_ON(mt, mas.index != level2[2]);
2262 MT_BUG_ON(mt, mas.last != level2[3]);
2263 mn = mas.node;
e15e06a8 2264
120b1162
LH
2265 val = mas_next(&mas, level2[1]);
2266 MT_BUG_ON(mt, val != xa_mk_value(level2[4] / 10));
2267 MT_BUG_ON(mt, mas.index != level2[4]);
2268 MT_BUG_ON(mt, mas.last != level2[5]);
2269 MT_BUG_ON(mt, mn == mas.node);
e15e06a8 2270
120b1162
LH
2271 val = mas_prev(&mas, 0);
2272 MT_BUG_ON(mt, val != xa_mk_value(level2[2] / 10));
2273 MT_BUG_ON(mt, mas.index != level2[2]);
2274 MT_BUG_ON(mt, mas.last != level2[3]);
e15e06a8 2275
120b1162
LH
2276 /* Check running off the end and back on */
2277 mas_set(&mas, nr_entries * 10);
2278 val = mas_walk(&mas);
2279 MT_BUG_ON(mt, val != xa_mk_value(nr_entries));
2280 MT_BUG_ON(mt, mas.index != (nr_entries * 10));
2281 MT_BUG_ON(mt, mas.last != (nr_entries * 10 + 5));
e15e06a8 2282
120b1162
LH
2283 val = mas_next(&mas, ULONG_MAX);
2284 MT_BUG_ON(mt, val != NULL);
7a93c71a 2285 MT_BUG_ON(mt, mas.index != last_index);
120b1162 2286 MT_BUG_ON(mt, mas.last != ULONG_MAX);
e15e06a8 2287
120b1162
LH
2288 val = mas_prev(&mas, 0);
2289 MT_BUG_ON(mt, val != xa_mk_value(nr_entries));
2290 MT_BUG_ON(mt, mas.index != (nr_entries * 10));
2291 MT_BUG_ON(mt, mas.last != (nr_entries * 10 + 5));
e15e06a8 2292
120b1162
LH
2293 /* Check running off the start and back on */
2294 mas_reset(&mas);
2295 mas_set(&mas, 10);
2296 val = mas_walk(&mas);
2297 MT_BUG_ON(mt, val != xa_mk_value(1));
2298 MT_BUG_ON(mt, mas.index != 10);
2299 MT_BUG_ON(mt, mas.last != 15);
e15e06a8 2300
120b1162
LH
2301 val = mas_prev(&mas, 0);
2302 MT_BUG_ON(mt, val != xa_mk_value(0));
2303 MT_BUG_ON(mt, mas.index != 0);
2304 MT_BUG_ON(mt, mas.last != 5);
e15e06a8 2305
120b1162
LH
2306 val = mas_prev(&mas, 0);
2307 MT_BUG_ON(mt, val != NULL);
2308 MT_BUG_ON(mt, mas.index != 0);
eb2e817f 2309 MT_BUG_ON(mt, mas.last != 5);
067311d3 2310 MT_BUG_ON(mt, !mas_is_underflow(&mas));
e15e06a8 2311
120b1162
LH
2312 mas.index = 0;
2313 mas.last = 5;
2314 mas_store(&mas, NULL);
2315 mas_reset(&mas);
2316 mas_set(&mas, 10);
2317 mas_walk(&mas);
e15e06a8 2318
120b1162
LH
2319 val = mas_prev(&mas, 0);
2320 MT_BUG_ON(mt, val != NULL);
2321 MT_BUG_ON(mt, mas.index != 0);
eb2e817f 2322 MT_BUG_ON(mt, mas.last != 9);
120b1162 2323 mas_unlock(&mas);
e15e06a8 2324
120b1162 2325 mtree_destroy(mt);
e15e06a8 2326
120b1162
LH
2327 mt_init(mt);
2328 mtree_store_range(mt, 0, 0, xa_mk_value(0), GFP_KERNEL);
2329 mtree_store_range(mt, 5, 5, xa_mk_value(5), GFP_KERNEL);
e15e06a8 2330 rcu_read_lock();
120b1162
LH
2331 mas_set(&mas, 5);
2332 val = mas_prev(&mas, 4);
2333 MT_BUG_ON(mt, val != NULL);
e15e06a8 2334 rcu_read_unlock();
e15e06a8
LH
2335}
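/*
 * Editor's note: a compact illustration (invented helper) of the
 * mas_next()/mas_prev() behaviour asserted throughout next_prev_test():
 * a successful call updates mas.index/mas.last to the returned entry's
 * range, and stepping past the last entry returns NULL while the state
 * parks on the trailing gap, from where mas_prev() can resume.
 */
static noinline void __init example_next_prev(struct maple_tree *mt)
{
	void *val;
	MA_STATE(mas, mt, 0, 0);

	mtree_store_range(mt, 10, 15, xa_mk_value(1), GFP_KERNEL);
	mtree_store_range(mt, 20, 25, xa_mk_value(2), GFP_KERNEL);

	mas_lock(&mas);
	MT_BUG_ON(mt, mas_walk(&mas) != NULL);		/* 0-9 is a gap */

	val = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, val != xa_mk_value(1));
	MT_BUG_ON(mt, mas.index != 10 || mas.last != 15);

	val = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, val != xa_mk_value(2));

	MT_BUG_ON(mt, mas_next(&mas, ULONG_MAX) != NULL);   /* past the end */
	MT_BUG_ON(mt, mas_prev(&mas, 0) != xa_mk_value(2)); /* and back */
	mas_unlock(&mas);
}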
2336
e15e06a8 2337
e15e06a8
LH
2338
2339/* Test spanning writes that require balancing right sibling or right cousin */
eaf9790d 2340static noinline void __init check_spanning_relatives(struct maple_tree *mt)
e15e06a8
LH
2341{
2342
2343 unsigned long i, nr_entries = 1000;
2344
2345 for (i = 0; i <= nr_entries; i++)
2346 mtree_store_range(mt, i*10, i*10 + 5,
2347 xa_mk_value(i), GFP_KERNEL);
2348
2349
2350 mtree_store_range(mt, 9365, 9955, NULL, GFP_KERNEL);
2351}
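/*
 * Editor's sketch (invented helper) of the spanning case above in its
 * smallest form: a single store whose range covers many existing entries
 * spans node boundaries, which is what triggers the rebalancing of right
 * siblings/cousins being tested.
 */
static noinline void __init example_spanning_store(struct maple_tree *mt)
{
	unsigned long i;

	for (i = 0; i <= 100; i++)
		mtree_store_range(mt, i * 10, i * 10 + 5,
				  xa_mk_value(i), GFP_KERNEL);

	/* One call wipes 365-955, consuming roughly 60 existing entries. */
	mtree_store_range(mt, 365, 955, NULL, GFP_KERNEL);
	mt_validate(mt);
}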
2352
eaf9790d 2353static noinline void __init check_fuzzer(struct maple_tree *mt)
e15e06a8
LH
2354{
2355 /*
2356 * 1. Causes a spanning rebalance of a single root node.
2357 * Fixed by setting the correct limit in mast_cp_to_nodes() when the
2358 * entire right side is consumed.
2359 */
2360 mtree_test_insert(mt, 88, (void *)0xb1);
2361 mtree_test_insert(mt, 84, (void *)0xa9);
2362 mtree_test_insert(mt, 2, (void *)0x5);
2363 mtree_test_insert(mt, 4, (void *)0x9);
2364 mtree_test_insert(mt, 14, (void *)0x1d);
2365 mtree_test_insert(mt, 7, (void *)0xf);
2366 mtree_test_insert(mt, 12, (void *)0x19);
2367 mtree_test_insert(mt, 18, (void *)0x25);
2368 mtree_test_store_range(mt, 8, 18, (void *)0x11);
2369 mtree_destroy(mt);
2370
2371
2372 /*
2373 * 2. Cause a spanning rebalance of two nodes in root.
2374 * Fixed by setting mast->r->max correctly.
2375 */
2376 mt_init_flags(mt, 0);
2377 mtree_test_store(mt, 87, (void *)0xaf);
2378 mtree_test_store(mt, 0, (void *)0x1);
2379 mtree_test_load(mt, 4);
2380 mtree_test_insert(mt, 4, (void *)0x9);
2381 mtree_test_store(mt, 8, (void *)0x11);
2382 mtree_test_store(mt, 44, (void *)0x59);
2383 mtree_test_store(mt, 68, (void *)0x89);
2384 mtree_test_store(mt, 2, (void *)0x5);
2385 mtree_test_insert(mt, 43, (void *)0x57);
2386 mtree_test_insert(mt, 24, (void *)0x31);
2387 mtree_test_insert(mt, 844, (void *)0x699);
2388 mtree_test_store(mt, 84, (void *)0xa9);
2389 mtree_test_store(mt, 4, (void *)0x9);
2390 mtree_test_erase(mt, 4);
2391 mtree_test_load(mt, 5);
2392 mtree_test_erase(mt, 0);
2393 mtree_destroy(mt);
2394
2395 /*
2396 * 3. Cause a node overflow on copy
2397 * Fixed by using the correct check for node size in mas_wr_modify()
 2398 * Also discovered an issue with metadata setting.
2399 */
2400 mt_init_flags(mt, 0);
120b1162 2401 mtree_test_store_range(mt, 0, ULONG_MAX, (void *)0x1);
e15e06a8
LH
2402 mtree_test_store(mt, 4, (void *)0x9);
2403 mtree_test_erase(mt, 5);
2404 mtree_test_erase(mt, 0);
2405 mtree_test_erase(mt, 4);
2406 mtree_test_store(mt, 5, (void *)0xb);
2407 mtree_test_erase(mt, 5);
2408 mtree_test_store(mt, 5, (void *)0xb);
2409 mtree_test_erase(mt, 5);
2410 mtree_test_erase(mt, 4);
2411 mtree_test_store(mt, 4, (void *)0x9);
2412 mtree_test_store(mt, 444, (void *)0x379);
2413 mtree_test_store(mt, 0, (void *)0x1);
2414 mtree_test_load(mt, 0);
2415 mtree_test_store(mt, 5, (void *)0xb);
2416 mtree_test_erase(mt, 0);
2417 mtree_destroy(mt);
2418
2419 /*
 2420 * 4. Spanning store failure due to writing an incorrect pivot value at
 2421 * the last slot.
2422 * Fixed by setting mast->r->max correctly in mast_cp_to_nodes()
2423 *
2424 */
2425 mt_init_flags(mt, 0);
2426 mtree_test_insert(mt, 261, (void *)0x20b);
2427 mtree_test_store(mt, 516, (void *)0x409);
2428 mtree_test_store(mt, 6, (void *)0xd);
2429 mtree_test_insert(mt, 5, (void *)0xb);
2430 mtree_test_insert(mt, 1256, (void *)0x9d1);
2431 mtree_test_store(mt, 4, (void *)0x9);
2432 mtree_test_erase(mt, 1);
2433 mtree_test_store(mt, 56, (void *)0x71);
2434 mtree_test_insert(mt, 1, (void *)0x3);
2435 mtree_test_store(mt, 24, (void *)0x31);
2436 mtree_test_erase(mt, 1);
2437 mtree_test_insert(mt, 2263, (void *)0x11af);
2438 mtree_test_insert(mt, 446, (void *)0x37d);
2439 mtree_test_store_range(mt, 6, 45, (void *)0xd);
2440 mtree_test_store_range(mt, 3, 446, (void *)0x7);
2441 mtree_destroy(mt);
2442
2443 /*
2444 * 5. mas_wr_extend_null() may overflow slots.
2445 * Fix by checking against wr_mas->node_end.
2446 */
2447 mt_init_flags(mt, 0);
2448 mtree_test_store(mt, 48, (void *)0x61);
2449 mtree_test_store(mt, 3, (void *)0x7);
2450 mtree_test_load(mt, 0);
2451 mtree_test_store(mt, 88, (void *)0xb1);
2452 mtree_test_store(mt, 81, (void *)0xa3);
2453 mtree_test_insert(mt, 0, (void *)0x1);
2454 mtree_test_insert(mt, 8, (void *)0x11);
2455 mtree_test_insert(mt, 4, (void *)0x9);
2456 mtree_test_insert(mt, 2480, (void *)0x1361);
120b1162 2457 mtree_test_insert(mt, ULONG_MAX,
e15e06a8 2458 (void *)0xffffffffffffffff);
120b1162 2459 mtree_test_erase(mt, ULONG_MAX);
e15e06a8
LH
2460 mtree_destroy(mt);
2461
2462 /*
2463 * 6. When reusing a node with an implied pivot and the node is
 2464 * shrinking, old data would be left in the implied slot.
 2465 * Fixed by checking the last pivot against mas->max and clearing
2466 * accordingly. This only affected the left-most node as that node is
2467 * the only one allowed to end in NULL.
2468 */
2469 mt_init_flags(mt, 0);
2470 mtree_test_erase(mt, 3);
2471 mtree_test_insert(mt, 22, (void *)0x2d);
2472 mtree_test_insert(mt, 15, (void *)0x1f);
2473 mtree_test_load(mt, 2);
2474 mtree_test_insert(mt, 1, (void *)0x3);
2475 mtree_test_insert(mt, 1, (void *)0x3);
2476 mtree_test_insert(mt, 5, (void *)0xb);
2477 mtree_test_erase(mt, 1);
2478 mtree_test_insert(mt, 1, (void *)0x3);
2479 mtree_test_insert(mt, 4, (void *)0x9);
2480 mtree_test_insert(mt, 1, (void *)0x3);
2481 mtree_test_erase(mt, 1);
2482 mtree_test_insert(mt, 2, (void *)0x5);
2483 mtree_test_insert(mt, 1, (void *)0x3);
2484 mtree_test_erase(mt, 3);
2485 mtree_test_insert(mt, 22, (void *)0x2d);
2486 mtree_test_insert(mt, 15, (void *)0x1f);
2487 mtree_test_insert(mt, 2, (void *)0x5);
2488 mtree_test_insert(mt, 1, (void *)0x3);
2489 mtree_test_insert(mt, 8, (void *)0x11);
2490 mtree_test_load(mt, 2);
2491 mtree_test_insert(mt, 1, (void *)0x3);
2492 mtree_test_insert(mt, 1, (void *)0x3);
2493 mtree_test_store(mt, 1, (void *)0x3);
2494 mtree_test_insert(mt, 5, (void *)0xb);
2495 mtree_test_erase(mt, 1);
2496 mtree_test_insert(mt, 1, (void *)0x3);
2497 mtree_test_insert(mt, 4, (void *)0x9);
2498 mtree_test_insert(mt, 1, (void *)0x3);
2499 mtree_test_erase(mt, 1);
2500 mtree_test_insert(mt, 2, (void *)0x5);
2501 mtree_test_insert(mt, 1, (void *)0x3);
2502 mtree_test_erase(mt, 3);
2503 mtree_test_insert(mt, 22, (void *)0x2d);
2504 mtree_test_insert(mt, 15, (void *)0x1f);
2505 mtree_test_insert(mt, 2, (void *)0x5);
2506 mtree_test_insert(mt, 1, (void *)0x3);
2507 mtree_test_insert(mt, 8, (void *)0x11);
2508 mtree_test_insert(mt, 12, (void *)0x19);
2509 mtree_test_erase(mt, 1);
2510 mtree_test_store_range(mt, 4, 62, (void *)0x9);
2511 mtree_test_erase(mt, 62);
2512 mtree_test_store_range(mt, 1, 0, (void *)0x3);
2513 mtree_test_insert(mt, 11, (void *)0x17);
2514 mtree_test_insert(mt, 3, (void *)0x7);
2515 mtree_test_insert(mt, 3, (void *)0x7);
2516 mtree_test_store(mt, 62, (void *)0x7d);
2517 mtree_test_erase(mt, 62);
2518 mtree_test_store_range(mt, 1, 15, (void *)0x3);
2519 mtree_test_erase(mt, 1);
2520 mtree_test_insert(mt, 22, (void *)0x2d);
2521 mtree_test_insert(mt, 12, (void *)0x19);
2522 mtree_test_erase(mt, 1);
2523 mtree_test_insert(mt, 3, (void *)0x7);
2524 mtree_test_store(mt, 62, (void *)0x7d);
2525 mtree_test_erase(mt, 62);
2526 mtree_test_insert(mt, 122, (void *)0xf5);
2527 mtree_test_store(mt, 3, (void *)0x7);
2528 mtree_test_insert(mt, 0, (void *)0x1);
2529 mtree_test_store_range(mt, 0, 1, (void *)0x1);
2530 mtree_test_insert(mt, 85, (void *)0xab);
2531 mtree_test_insert(mt, 72, (void *)0x91);
2532 mtree_test_insert(mt, 81, (void *)0xa3);
2533 mtree_test_insert(mt, 726, (void *)0x5ad);
2534 mtree_test_insert(mt, 0, (void *)0x1);
2535 mtree_test_insert(mt, 1, (void *)0x3);
2536 mtree_test_store(mt, 51, (void *)0x67);
2537 mtree_test_insert(mt, 611, (void *)0x4c7);
2538 mtree_test_insert(mt, 485, (void *)0x3cb);
2539 mtree_test_insert(mt, 1, (void *)0x3);
2540 mtree_test_erase(mt, 1);
2541 mtree_test_insert(mt, 0, (void *)0x1);
2542 mtree_test_insert(mt, 1, (void *)0x3);
2543 mtree_test_insert_range(mt, 26, 1, (void *)0x35);
2544 mtree_test_load(mt, 1);
2545 mtree_test_store_range(mt, 1, 22, (void *)0x3);
2546 mtree_test_insert(mt, 1, (void *)0x3);
2547 mtree_test_erase(mt, 1);
2548 mtree_test_load(mt, 53);
2549 mtree_test_load(mt, 1);
2550 mtree_test_store_range(mt, 1, 1, (void *)0x3);
2551 mtree_test_insert(mt, 222, (void *)0x1bd);
2552 mtree_test_insert(mt, 485, (void *)0x3cb);
2553 mtree_test_insert(mt, 1, (void *)0x3);
2554 mtree_test_erase(mt, 1);
2555 mtree_test_load(mt, 0);
2556 mtree_test_insert(mt, 21, (void *)0x2b);
2557 mtree_test_insert(mt, 3, (void *)0x7);
2558 mtree_test_store(mt, 621, (void *)0x4db);
2559 mtree_test_insert(mt, 0, (void *)0x1);
2560 mtree_test_erase(mt, 5);
2561 mtree_test_insert(mt, 1, (void *)0x3);
2562 mtree_test_store(mt, 62, (void *)0x7d);
2563 mtree_test_erase(mt, 62);
2564 mtree_test_store_range(mt, 1, 0, (void *)0x3);
2565 mtree_test_insert(mt, 22, (void *)0x2d);
2566 mtree_test_insert(mt, 12, (void *)0x19);
2567 mtree_test_erase(mt, 1);
2568 mtree_test_insert(mt, 1, (void *)0x3);
2569 mtree_test_store_range(mt, 4, 62, (void *)0x9);
2570 mtree_test_erase(mt, 62);
2571 mtree_test_erase(mt, 1);
2572 mtree_test_load(mt, 1);
2573 mtree_test_store_range(mt, 1, 22, (void *)0x3);
2574 mtree_test_insert(mt, 1, (void *)0x3);
2575 mtree_test_erase(mt, 1);
2576 mtree_test_load(mt, 53);
2577 mtree_test_load(mt, 1);
2578 mtree_test_store_range(mt, 1, 1, (void *)0x3);
2579 mtree_test_insert(mt, 222, (void *)0x1bd);
2580 mtree_test_insert(mt, 485, (void *)0x3cb);
2581 mtree_test_insert(mt, 1, (void *)0x3);
2582 mtree_test_erase(mt, 1);
2583 mtree_test_insert(mt, 1, (void *)0x3);
2584 mtree_test_load(mt, 0);
2585 mtree_test_load(mt, 0);
2586 mtree_destroy(mt);
2587
2588 /*
 2589 * 7. The previous fix was incomplete; fix mas_reuse_node() clearing of old
 2590 * data by overwriting it first - that way the metadata is of no concern.
2591 */
2592 mt_init_flags(mt, 0);
2593 mtree_test_load(mt, 1);
2594 mtree_test_insert(mt, 102, (void *)0xcd);
2595 mtree_test_erase(mt, 2);
2596 mtree_test_erase(mt, 0);
2597 mtree_test_load(mt, 0);
2598 mtree_test_insert(mt, 4, (void *)0x9);
2599 mtree_test_insert(mt, 2, (void *)0x5);
2600 mtree_test_insert(mt, 110, (void *)0xdd);
2601 mtree_test_insert(mt, 1, (void *)0x3);
2602 mtree_test_insert_range(mt, 5, 0, (void *)0xb);
2603 mtree_test_erase(mt, 2);
2604 mtree_test_store(mt, 0, (void *)0x1);
2605 mtree_test_store(mt, 112, (void *)0xe1);
2606 mtree_test_insert(mt, 21, (void *)0x2b);
2607 mtree_test_store(mt, 1, (void *)0x3);
2608 mtree_test_insert_range(mt, 110, 2, (void *)0xdd);
2609 mtree_test_store(mt, 2, (void *)0x5);
2610 mtree_test_load(mt, 22);
2611 mtree_test_erase(mt, 2);
2612 mtree_test_store(mt, 210, (void *)0x1a5);
2613 mtree_test_store_range(mt, 0, 2, (void *)0x1);
2614 mtree_test_store(mt, 2, (void *)0x5);
2615 mtree_test_erase(mt, 2);
2616 mtree_test_erase(mt, 22);
2617 mtree_test_erase(mt, 1);
2618 mtree_test_erase(mt, 2);
2619 mtree_test_store(mt, 0, (void *)0x1);
2620 mtree_test_load(mt, 112);
2621 mtree_test_insert(mt, 2, (void *)0x5);
2622 mtree_test_erase(mt, 2);
2623 mtree_test_store(mt, 1, (void *)0x3);
2624 mtree_test_insert_range(mt, 1, 2, (void *)0x3);
2625 mtree_test_erase(mt, 0);
2626 mtree_test_erase(mt, 2);
2627 mtree_test_store(mt, 2, (void *)0x5);
2628 mtree_test_erase(mt, 0);
2629 mtree_test_erase(mt, 2);
2630 mtree_test_store(mt, 0, (void *)0x1);
2631 mtree_test_store(mt, 0, (void *)0x1);
2632 mtree_test_erase(mt, 2);
2633 mtree_test_store(mt, 2, (void *)0x5);
2634 mtree_test_erase(mt, 2);
2635 mtree_test_insert(mt, 2, (void *)0x5);
2636 mtree_test_insert_range(mt, 1, 2, (void *)0x3);
2637 mtree_test_erase(mt, 0);
2638 mtree_test_erase(mt, 2);
2639 mtree_test_store(mt, 0, (void *)0x1);
2640 mtree_test_load(mt, 112);
2641 mtree_test_store_range(mt, 110, 12, (void *)0xdd);
2642 mtree_test_store(mt, 2, (void *)0x5);
2643 mtree_test_load(mt, 110);
2644 mtree_test_insert_range(mt, 4, 71, (void *)0x9);
2645 mtree_test_load(mt, 2);
2646 mtree_test_store(mt, 2, (void *)0x5);
2647 mtree_test_insert_range(mt, 11, 22, (void *)0x17);
2648 mtree_test_erase(mt, 12);
2649 mtree_test_store(mt, 2, (void *)0x5);
2650 mtree_test_load(mt, 22);
2651 mtree_destroy(mt);
2652
2653
2654 /*
2655 * 8. When rebalancing or spanning_rebalance(), the max of the new node
2656 * may be set incorrectly to the final pivot and not the right max.
2657 * Fix by setting the left max to orig right max if the entire node is
2658 * consumed.
2659 */
2660 mt_init_flags(mt, 0);
2661 mtree_test_store(mt, 6, (void *)0xd);
2662 mtree_test_store(mt, 67, (void *)0x87);
2663 mtree_test_insert(mt, 15, (void *)0x1f);
2664 mtree_test_insert(mt, 6716, (void *)0x3479);
2665 mtree_test_store(mt, 61, (void *)0x7b);
2666 mtree_test_insert(mt, 13, (void *)0x1b);
2667 mtree_test_store(mt, 8, (void *)0x11);
2668 mtree_test_insert(mt, 1, (void *)0x3);
2669 mtree_test_load(mt, 0);
2670 mtree_test_erase(mt, 67167);
2671 mtree_test_insert_range(mt, 6, 7167, (void *)0xd);
2672 mtree_test_insert(mt, 6, (void *)0xd);
2673 mtree_test_erase(mt, 67);
2674 mtree_test_insert(mt, 1, (void *)0x3);
2675 mtree_test_erase(mt, 667167);
2676 mtree_test_insert(mt, 6, (void *)0xd);
2677 mtree_test_store(mt, 67, (void *)0x87);
2678 mtree_test_insert(mt, 5, (void *)0xb);
2679 mtree_test_erase(mt, 1);
2680 mtree_test_insert(mt, 6, (void *)0xd);
2681 mtree_test_erase(mt, 67);
2682 mtree_test_insert(mt, 15, (void *)0x1f);
2683 mtree_test_insert(mt, 67167, (void *)0x20cbf);
2684 mtree_test_insert(mt, 1, (void *)0x3);
2685 mtree_test_load(mt, 7);
2686 mtree_test_insert(mt, 16, (void *)0x21);
2687 mtree_test_insert(mt, 36, (void *)0x49);
2688 mtree_test_store(mt, 67, (void *)0x87);
2689 mtree_test_store(mt, 6, (void *)0xd);
2690 mtree_test_insert(mt, 367, (void *)0x2df);
2691 mtree_test_insert(mt, 115, (void *)0xe7);
2692 mtree_test_store(mt, 0, (void *)0x1);
2693 mtree_test_store_range(mt, 1, 3, (void *)0x3);
2694 mtree_test_store(mt, 1, (void *)0x3);
2695 mtree_test_erase(mt, 67167);
2696 mtree_test_insert_range(mt, 6, 47, (void *)0xd);
2697 mtree_test_store(mt, 1, (void *)0x3);
2698 mtree_test_insert_range(mt, 1, 67, (void *)0x3);
2699 mtree_test_load(mt, 67);
2700 mtree_test_insert(mt, 1, (void *)0x3);
2701 mtree_test_erase(mt, 67167);
2702 mtree_destroy(mt);
2703
2704 /*
 2705 * 9. A spanning store to the end of the data caused an invalid metadata
 2706 * length, which eventually resulted in a crash.
2707 * Fix by checking if there is a value in pivot before incrementing the
2708 * metadata end in mab_mas_cp(). To ensure this doesn't happen again,
2709 * abstract the two locations this happens into a function called
2710 * mas_leaf_set_meta().
2711 */
2712 mt_init_flags(mt, 0);
2713 mtree_test_insert(mt, 21, (void *)0x2b);
2714 mtree_test_insert(mt, 12, (void *)0x19);
2715 mtree_test_insert(mt, 6, (void *)0xd);
2716 mtree_test_insert(mt, 8, (void *)0x11);
2717 mtree_test_insert(mt, 2, (void *)0x5);
2718 mtree_test_insert(mt, 91, (void *)0xb7);
2719 mtree_test_insert(mt, 18, (void *)0x25);
2720 mtree_test_insert(mt, 81, (void *)0xa3);
2721 mtree_test_store_range(mt, 0, 128, (void *)0x1);
2722 mtree_test_store(mt, 1, (void *)0x3);
2723 mtree_test_erase(mt, 8);
2724 mtree_test_insert(mt, 11, (void *)0x17);
2725 mtree_test_insert(mt, 8, (void *)0x11);
2726 mtree_test_insert(mt, 21, (void *)0x2b);
2727 mtree_test_insert(mt, 2, (void *)0x5);
120b1162
LH
2728 mtree_test_insert(mt, ULONG_MAX - 10, (void *)0xffffffffffffffeb);
2729 mtree_test_erase(mt, ULONG_MAX - 10);
e15e06a8
LH
2730 mtree_test_store_range(mt, 0, 281, (void *)0x1);
2731 mtree_test_erase(mt, 2);
2732 mtree_test_insert(mt, 1211, (void *)0x977);
2733 mtree_test_insert(mt, 111, (void *)0xdf);
2734 mtree_test_insert(mt, 13, (void *)0x1b);
2735 mtree_test_insert(mt, 211, (void *)0x1a7);
2736 mtree_test_insert(mt, 11, (void *)0x17);
2737 mtree_test_insert(mt, 5, (void *)0xb);
2738 mtree_test_insert(mt, 1218, (void *)0x985);
2739 mtree_test_insert(mt, 61, (void *)0x7b);
2740 mtree_test_store(mt, 1, (void *)0x3);
2741 mtree_test_insert(mt, 121, (void *)0xf3);
2742 mtree_test_insert(mt, 8, (void *)0x11);
2743 mtree_test_insert(mt, 21, (void *)0x2b);
2744 mtree_test_insert(mt, 2, (void *)0x5);
120b1162
LH
2745 mtree_test_insert(mt, ULONG_MAX - 10, (void *)0xffffffffffffffeb);
2746 mtree_test_erase(mt, ULONG_MAX - 10);
e15e06a8 2747}
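/*
 * Editor's note on reading the reproducers above: the raw pointers are
 * xa_mk_value() encodings of the index being stored, i.e. (index << 1) | 1,
 * so (void *)0xb1 stored at index 88 is xa_mk_value(88) and (void *)0x11
 * is xa_mk_value(8).  The opening calls of case 1 therefore translate to
 * the following (helper name invented):
 */
static noinline void __init example_replay_case1(struct maple_tree *mt)
{
	mtree_test_insert(mt, 88, xa_mk_value(88));	   /* (void *)0xb1 */
	mtree_test_insert(mt, 84, xa_mk_value(84));	   /* (void *)0xa9 */
	mtree_test_store_range(mt, 8, 18, xa_mk_value(8)); /* (void *)0x11 */
	mt_validate(mt);
	mtree_destroy(mt);
}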
120b1162
LH
2748
2749/* duplicate the tree with a specific gap */
eaf9790d 2750static noinline void __init check_dup_gaps(struct maple_tree *mt,
e15e06a8
LH
2751 unsigned long nr_entries, bool zero_start,
2752 unsigned long gap)
2753{
2754 unsigned long i = 0;
2755 struct maple_tree newmt;
2756 int ret;
2757 void *tmp;
2758 MA_STATE(mas, mt, 0, 0);
2759 MA_STATE(newmas, &newmt, 0, 0);
099d7439
LH
2760 struct rw_semaphore newmt_lock;
2761
2762 init_rwsem(&newmt_lock);
2763 mt_set_external_lock(&newmt, &newmt_lock);
e15e06a8 2764
e15e06a8
LH
2765 if (!zero_start)
2766 i = 1;
2767
2768 mt_zero_nr_tallocated();
2769 for (; i <= nr_entries; i++)
2770 mtree_store_range(mt, i*10, (i+1)*10 - gap,
2771 xa_mk_value(i), GFP_KERNEL);
2772
099d7439 2773 mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN);
e15e06a8 2774 mt_set_non_kernel(99999);
099d7439 2775 down_write(&newmt_lock);
e15e06a8
LH
2776 ret = mas_expected_entries(&newmas, nr_entries);
2777 mt_set_non_kernel(0);
2778 MT_BUG_ON(mt, ret != 0);
2779
120b1162 2780 rcu_read_lock();
e15e06a8
LH
2781 mas_for_each(&mas, tmp, ULONG_MAX) {
2782 newmas.index = mas.index;
2783 newmas.last = mas.last;
2784 mas_store(&newmas, tmp);
2785 }
120b1162 2786 rcu_read_unlock();
e15e06a8 2787 mas_destroy(&newmas);
120b1162 2788
099d7439
LH
2789 __mt_destroy(&newmt);
2790 up_write(&newmt_lock);
e15e06a8
LH
2791}
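/*
 * Editor's sketch of the bulk-copy core used above (invented helper;
 * assumes the destination tree uses an external lock held by the caller,
 * as in check_dup_gaps()): mas_expected_entries() preallocates nodes for
 * the whole copy, mas_store() replays each source range, and
 * mas_destroy() frees whatever preallocation is left over.
 */
static noinline void __init example_bulk_copy(struct maple_tree *src,
		struct maple_tree *dst, unsigned long nr_entries)
{
	void *tmp;
	MA_STATE(mas, src, 0, 0);
	MA_STATE(newmas, dst, 0, 0);

	if (mas_expected_entries(&newmas, nr_entries))
		return;			/* preallocation failed */

	rcu_read_lock();
	mas_for_each(&mas, tmp, ULONG_MAX) {
		newmas.index = mas.index;
		newmas.last = mas.last;
		mas_store(&newmas, tmp);
	}
	rcu_read_unlock();

	mas_destroy(&newmas);
}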
2792
120b1162 2793/* Duplicate many sizes of trees. Mainly to test expected entry values */
eaf9790d 2794static noinline void __init check_dup(struct maple_tree *mt)
e15e06a8
LH
2795{
2796 int i;
120b1162 2797 int big_start = 100010;
e15e06a8
LH
2798
2799 /* Check with a value at zero */
2800 for (i = 10; i < 1000; i++) {
2801 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2802 check_dup_gaps(mt, i, true, 5);
2803 mtree_destroy(mt);
120b1162 2804 rcu_barrier();
e15e06a8
LH
2805 }
2806
120b1162
LH
2807 cond_resched();
2808 mt_cache_shrink();
e15e06a8
LH
2809 /* Check with a value at zero, no gap */
2810 for (i = 1000; i < 2000; i++) {
2811 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2812 check_dup_gaps(mt, i, true, 0);
2813 mtree_destroy(mt);
120b1162 2814 rcu_barrier();
e15e06a8
LH
2815 }
2816
120b1162
LH
2817 cond_resched();
2818 mt_cache_shrink();
e15e06a8 2819 /* Check with a value at zero and unreasonably large */
120b1162 2820 for (i = big_start; i < big_start + 10; i++) {
e15e06a8
LH
2821 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2822 check_dup_gaps(mt, i, true, 5);
2823 mtree_destroy(mt);
120b1162 2824 rcu_barrier();
e15e06a8
LH
2825 }
2826
120b1162
LH
2827 cond_resched();
2828 mt_cache_shrink();
e15e06a8
LH
 2829 /* Small to medium size not starting at zero */
2830 for (i = 200; i < 1000; i++) {
2831 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2832 check_dup_gaps(mt, i, false, 5);
2833 mtree_destroy(mt);
120b1162 2834 rcu_barrier();
e15e06a8
LH
2835 }
2836
120b1162
LH
2837 cond_resched();
2838 mt_cache_shrink();
e15e06a8 2839 /* Unreasonably large not starting at zero*/
120b1162 2840 for (i = big_start; i < big_start + 10; i++) {
e15e06a8
LH
2841 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2842 check_dup_gaps(mt, i, false, 5);
2843 mtree_destroy(mt);
120b1162
LH
2844 rcu_barrier();
2845 cond_resched();
2846 mt_cache_shrink();
e15e06a8
LH
2847 }
2848
2849 /* Check non-allocation tree not starting at zero */
2850 for (i = 1500; i < 3000; i++) {
2851 mt_init_flags(mt, 0);
2852 check_dup_gaps(mt, i, false, 5);
2853 mtree_destroy(mt);
120b1162
LH
2854 rcu_barrier();
2855 cond_resched();
2856 if (i % 2 == 0)
2857 mt_cache_shrink();
e15e06a8
LH
2858 }
2859
120b1162 2860 mt_cache_shrink();
e15e06a8
LH
2861 /* Check non-allocation tree starting at zero */
2862 for (i = 200; i < 1000; i++) {
2863 mt_init_flags(mt, 0);
2864 check_dup_gaps(mt, i, true, 5);
2865 mtree_destroy(mt);
120b1162
LH
2866 rcu_barrier();
2867 cond_resched();
e15e06a8
LH
2868 }
2869
120b1162 2870 mt_cache_shrink();
e15e06a8 2871 /* Unreasonably large */
120b1162 2872 for (i = big_start + 5; i < big_start + 10; i++) {
e15e06a8
LH
2873 mt_init_flags(mt, 0);
2874 check_dup_gaps(mt, i, true, 5);
2875 mtree_destroy(mt);
120b1162
LH
2876 rcu_barrier();
2877 mt_cache_shrink();
2878 cond_resched();
e15e06a8 2879 }
e15e06a8
LH
2880}
2881
eaf9790d 2882static noinline void __init check_bnode_min_spanning(struct maple_tree *mt)
c5651b31
LH
2883{
2884 int i = 50;
2885 MA_STATE(mas, mt, 0, 0);
2886
2887 mt_set_non_kernel(9999);
2888 mas_lock(&mas);
2889 do {
2890 mas_set_range(&mas, i*10, i*10+9);
2891 mas_store(&mas, check_bnode_min_spanning);
2892 } while (i--);
2893
2894 mas_set_range(&mas, 240, 509);
2895 mas_store(&mas, NULL);
2896 mas_unlock(&mas);
2897 mas_destroy(&mas);
2898 mt_set_non_kernel(0);
2899}
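/*
 * Editor's aside (invented helper): entries are not limited to
 * xa_mk_value() encodings - check_bnode_min_spanning() above stores a
 * function pointer.  Any ordinary kernel pointer can be stored against a
 * range and loaded back unchanged.
 */
static noinline void __init example_store_pointer(struct maple_tree *mt)
{
	static int payload;	/* stand-in for any kernel object */

	mtree_store_range(mt, 100, 199, &payload, GFP_KERNEL);
	MT_BUG_ON(mt, mtree_load(mt, 150) != &payload);
	mtree_erase(mt, 150);	/* removes the whole 100-199 entry */
	MT_BUG_ON(mt, mtree_load(mt, 150) != NULL);
}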
2900
eaf9790d 2901static noinline void __init check_empty_area_window(struct maple_tree *mt)
7327e811
LH
2902{
2903 unsigned long i, nr_entries = 20;
2904 MA_STATE(mas, mt, 0, 0);
2905
2906 for (i = 1; i <= nr_entries; i++)
2907 mtree_store_range(mt, i*10, i*10 + 9,
2908 xa_mk_value(i), GFP_KERNEL);
2909
2910 /* Create another hole besides the one at 0 */
2911 mtree_store_range(mt, 160, 169, NULL, GFP_KERNEL);
2912
2913 /* Check lower bounds that don't fit */
2914 rcu_read_lock();
2915 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 90, 10) != -EBUSY);
2916
2917 mas_reset(&mas);
2918 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 6, 90, 5) != -EBUSY);
2919
2920 /* Check lower bound that does fit */
2921 mas_reset(&mas);
2922 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 90, 5) != 0);
2923 MT_BUG_ON(mt, mas.index != 5);
2924 MT_BUG_ON(mt, mas.last != 9);
2925 rcu_read_unlock();
2926
2927 /* Check one gap that doesn't fit and one that does */
2928 rcu_read_lock();
2929 mas_reset(&mas);
2930 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 217, 9) != 0);
2931 MT_BUG_ON(mt, mas.index != 161);
2932 MT_BUG_ON(mt, mas.last != 169);
2933
2934 /* Check one gap that does fit above the min */
2935 mas_reset(&mas);
2936 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 218, 3) != 0);
2937 MT_BUG_ON(mt, mas.index != 216);
2938 MT_BUG_ON(mt, mas.last != 218);
2939
2940 /* Check size that doesn't fit any gap */
2941 mas_reset(&mas);
2942 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 218, 16) != -EBUSY);
2943
2944 /*
2945 * Check size that doesn't fit the lower end of the window but
2946 * does fit the gap
2947 */
2948 mas_reset(&mas);
2949 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 167, 200, 4) != -EBUSY);
2950
2951 /*
2952 * Check size that doesn't fit the upper end of the window but
2953 * does fit the gap
2954 */
2955 mas_reset(&mas);
2956 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 162, 4) != -EBUSY);
2957
2958 /* Check mas_empty_area forward */
2959 mas_reset(&mas);
2960 MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 9) != 0);
2961 MT_BUG_ON(mt, mas.index != 0);
2962 MT_BUG_ON(mt, mas.last != 8);
2963
2964 mas_reset(&mas);
2965 MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 4) != 0);
2966 MT_BUG_ON(mt, mas.index != 0);
2967 MT_BUG_ON(mt, mas.last != 3);
2968
2969 mas_reset(&mas);
2970 MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 11) != -EBUSY);
2971
2972 mas_reset(&mas);
2973 MT_BUG_ON(mt, mas_empty_area(&mas, 5, 100, 6) != -EBUSY);
2974
2975 mas_reset(&mas);
17e7436b 2976 MT_BUG_ON(mt, mas_empty_area(&mas, 0, 8, 10) != -EINVAL);
7327e811
LH
2977
2978 mas_reset(&mas);
2979 mas_empty_area(&mas, 100, 165, 3);
2980
2981 mas_reset(&mas);
2982 MT_BUG_ON(mt, mas_empty_area(&mas, 100, 163, 6) != -EBUSY);
2983 rcu_read_unlock();
2984}
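/*
 * Editor's sketch (invented helper) of the gap-search semantics asserted
 * above: on success mas.index/mas.last are set to a window of exactly the
 * requested size, forward searches settle on the lowest fitting gap and
 * reverse searches on the highest one, and -EBUSY is returned when no gap
 * in [min, max] is large enough.
 */
static noinline void __init example_empty_area(struct maple_tree *mt)
{
	MA_STATE(mas, mt, 0, 0);

	mtree_store_range(mt, 10, 19, xa_mk_value(1), GFP_KERNEL);
	mtree_store_range(mt, 30, 39, xa_mk_value(3), GFP_KERNEL);

	rcu_read_lock();
	/* Lowest gap of size 5 in [0, 100] is 0-4. */
	MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 5) != 0);
	MT_BUG_ON(mt, mas.index != 0 || mas.last != 4);

	/* Highest gap of size 5 in [0, 100] ends at the max, 96-100. */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 0, 100, 5) != 0);
	MT_BUG_ON(mt, mas.index != 96 || mas.last != 100);

	/* Nothing between the two entries is 15 slots wide. */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area(&mas, 10, 39, 15) != -EBUSY);
	rcu_read_unlock();
}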
2985
eaf9790d 2986static noinline void __init check_empty_area_fill(struct maple_tree *mt)
4bd6dded
LH
2987{
2988 const unsigned long max = 0x25D78000;
2989 unsigned long size;
2990 int loop, shift;
2991 MA_STATE(mas, mt, 0, 0);
2992
2993 mt_set_non_kernel(99999);
2994 for (shift = 12; shift <= 16; shift++) {
2995 loop = 5000;
2996 size = 1 << shift;
2997 while (loop--) {
2998 mas_set(&mas, 0);
2999 mas_lock(&mas);
3000 MT_BUG_ON(mt, mas_empty_area(&mas, 0, max, size) != 0);
3001 MT_BUG_ON(mt, mas.last != mas.index + size - 1);
3002 mas_store_gfp(&mas, (void *)size, GFP_KERNEL);
3003 mas_unlock(&mas);
3004 mas_reset(&mas);
3005 }
3006 }
3007
3008 /* No space left. */
3009 size = 0x1000;
3010 rcu_read_lock();
3011 MT_BUG_ON(mt, mas_empty_area(&mas, 0, max, size) != -EBUSY);
3012 rcu_read_unlock();
3013
3014 /* Fill a depth 3 node to the maximum */
3015 for (unsigned long i = 629440511; i <= 629440800; i += 6)
3016 mtree_store_range(mt, i, i + 5, (void *)i, GFP_KERNEL);
3017 /* Make space in the second-last depth 4 node */
3018 mtree_erase(mt, 631668735);
3019 /* Make space in the last depth 4 node */
3020 mtree_erase(mt, 629506047);
3021 mas_reset(&mas);
3022 /* Search from just after the gap in the second-last depth 4 */
3023 rcu_read_lock();
3024 MT_BUG_ON(mt, mas_empty_area(&mas, 629506048, 690000000, 0x5000) != 0);
3025 rcu_read_unlock();
3026 mt_set_non_kernel(0);
3027}
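/*
 * Editor's sketch of the allocate-like pattern check_empty_area_fill()
 * above drives in a loop (names illustrative): find a gap and store into
 * it while holding the same lock, so the window cannot be taken by a
 * concurrent writer between the search and the store.
 */
static noinline void __init example_find_and_fill(struct maple_tree *mt,
		unsigned long max, unsigned long size, void *entry)
{
	MA_STATE(mas, mt, 0, 0);

	mas_lock(&mas);
	if (mas_empty_area(&mas, 0, max, size) == 0) {
		/* mas.index/mas.last now describe the reserved window. */
		mas_store_gfp(&mas, entry, GFP_KERNEL);
	}
	mas_unlock(&mas);
}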
3028
eb2e817f
LH
3029/*
3030 * Check MAS_START, MAS_PAUSE, active (implied), and MAS_NONE transitions.
3031 *
3032 * The table below shows the single entry tree (0-0 pointer) and normal tree
3033 * with nodes.
3034 *
3035 * Function ENTRY Start Result index & last
3036 * ┬ ┬ ┬ ┬ ┬
3037 * │ │ │ │ └─ the final range
 3038 * │ │ │ └─ The maple state status after execution
 3039 * │ │ └─ The maple state status before execution
 3040 * │ └─ If the entry exists or does not exist (DNE)
3041 * └─ The function name
3042 *
3043 * Function ENTRY Start Result index & last
3044 * mas_next()
3045 * - after last
3046 * Single entry tree at 0-0
3047 * ------------------------
3048 * DNE MAS_START MAS_NONE 1 - oo
3049 * DNE MAS_PAUSE MAS_NONE 1 - oo
3050 * DNE MAS_ROOT MAS_NONE 1 - oo
3051 * when index = 0
3052 * DNE MAS_NONE MAS_ROOT 0
3053 * when index > 0
3054 * DNE MAS_NONE MAS_NONE 1 - oo
3055 *
3056 * Normal tree
3057 * -----------
3058 * exists MAS_START active range
3059 * DNE MAS_START active set to last range
3060 * exists MAS_PAUSE active range
3061 * DNE MAS_PAUSE active set to last range
3062 * exists MAS_NONE active range
3063 * exists active active range
3064 * DNE active active set to last range
a8091f03 3065 * ERANGE active MAS_OVERFLOW last range
eb2e817f
LH
3066 *
3067 * Function ENTRY Start Result index & last
3068 * mas_prev()
3069 * - before index
3070 * Single entry tree at 0-0
3071 * ------------------------
3072 * if index > 0
3073 * exists MAS_START MAS_ROOT 0
3074 * exists MAS_PAUSE MAS_ROOT 0
3075 * exists MAS_NONE MAS_ROOT 0
3076 *
3077 * if index == 0
3078 * DNE MAS_START MAS_NONE 0
3079 * DNE MAS_PAUSE MAS_NONE 0
3080 * DNE MAS_NONE MAS_NONE 0
3081 * DNE MAS_ROOT MAS_NONE 0
3082 *
3083 * Normal tree
3084 * -----------
3085 * exists MAS_START active range
3086 * DNE MAS_START active set to min
3087 * exists MAS_PAUSE active range
3088 * DNE MAS_PAUSE active set to min
3089 * exists MAS_NONE active range
3090 * DNE MAS_NONE MAS_NONE set to min
3091 * any MAS_ROOT MAS_NONE 0
3092 * exists active active range
3093 * DNE active active last range
a8091f03 3094 * ERANGE active MAS_UNDERFLOW last range
eb2e817f
LH
3095 *
3096 * Function ENTRY Start Result index & last
3097 * mas_find()
3098 * - at index or next
3099 * Single entry tree at 0-0
3100 * ------------------------
3101 * if index > 0
3102 * DNE MAS_START MAS_NONE 0
3103 * DNE MAS_PAUSE MAS_NONE 0
3104 * DNE MAS_ROOT MAS_NONE 0
a8091f03 3105 * DNE MAS_NONE MAS_NONE 1
eb2e817f
LH
3106 * if index == 0
3107 * exists MAS_START MAS_ROOT 0
3108 * exists MAS_PAUSE MAS_ROOT 0
3109 * exists MAS_NONE MAS_ROOT 0
3110 *
3111 * Normal tree
3112 * -----------
3113 * exists MAS_START active range
3114 * DNE MAS_START active set to max
3115 * exists MAS_PAUSE active range
3116 * DNE MAS_PAUSE active set to max
a8091f03 3117 * exists MAS_NONE active range (start at last)
eb2e817f
LH
3118 * exists active active range
3119 * DNE active active last range (max < last)
3120 *
3121 * Function ENTRY Start Result index & last
3122 * mas_find_rev()
3123 * - at index or before
3124 * Single entry tree at 0-0
3125 * ------------------------
3126 * if index > 0
3127 * exists MAS_START MAS_ROOT 0
3128 * exists MAS_PAUSE MAS_ROOT 0
3129 * exists MAS_NONE MAS_ROOT 0
3130 * if index == 0
3131 * DNE MAS_START MAS_NONE 0
3132 * DNE MAS_PAUSE MAS_NONE 0
3133 * DNE MAS_NONE MAS_NONE 0
3134 * DNE MAS_ROOT MAS_NONE 0
3135 *
3136 * Normal tree
3137 * -----------
3138 * exists MAS_START active range
3139 * DNE MAS_START active set to min
3140 * exists MAS_PAUSE active range
3141 * DNE MAS_PAUSE active set to min
a8091f03 3142 * exists MAS_NONE active range (start at index)
eb2e817f
LH
3143 * exists active active range
3144 * DNE active active last range (min > index)
3145 *
3146 * Function ENTRY Start Result index & last
3147 * mas_walk()
3148 * - Look up index
3149 * Single entry tree at 0-0
3150 * ------------------------
3151 * if index > 0
3152 * DNE MAS_START MAS_ROOT 1 - oo
3153 * DNE MAS_PAUSE MAS_ROOT 1 - oo
3154 * DNE MAS_NONE MAS_ROOT 1 - oo
3155 * DNE MAS_ROOT MAS_ROOT 1 - oo
3156 * if index == 0
3157 * exists MAS_START MAS_ROOT 0
3158 * exists MAS_PAUSE MAS_ROOT 0
3159 * exists MAS_NONE MAS_ROOT 0
3160 * exists MAS_ROOT MAS_ROOT 0
3161 *
3162 * Normal tree
3163 * -----------
3164 * exists MAS_START active range
3165 * DNE MAS_START active range of NULL
3166 * exists MAS_PAUSE active range
3167 * DNE MAS_PAUSE active range of NULL
3168 * exists MAS_NONE active range
3169 * DNE MAS_NONE active range of NULL
3170 * exists active active range
3171 * DNE active active range of NULL
3172 */
3173
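/*
 * Editor's note: the table above uses the historical MAS_* names; the
 * assertions below check the same states through mas.status and the
 * mas_is_*() helpers (ma_none, ma_root, ma_overflow, ma_underflow and
 * the active state).  As a reading key, the first mas_next() row of the
 * single-entry tree corresponds to:
 *
 *	entry = mas_next(&mas, ULONG_MAX);
 *	MT_BUG_ON(mt, entry != NULL);
 *	MT_BUG_ON(mt, mas.status != ma_none);
 *	MT_BUG_ON(mt, mas.index != 1 || mas.last != ULONG_MAX);
 */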
eb2e817f
LH
3174static noinline void __init check_state_handling(struct maple_tree *mt)
3175{
3176 MA_STATE(mas, mt, 0, 0);
3177 void *entry, *ptr = (void *) 0x1234500;
3178 void *ptr2 = &ptr;
3179 void *ptr3 = &ptr2;
3180
3181 /* Check MAS_ROOT First */
3182 mtree_store_range(mt, 0, 0, ptr, GFP_KERNEL);
3183
3184 mas_lock(&mas);
a8091f03 3185 /* prev: Start -> underflow*/
eb2e817f
LH
3186 entry = mas_prev(&mas, 0);
3187 MT_BUG_ON(mt, entry != NULL);
067311d3 3188 MT_BUG_ON(mt, mas.status != ma_underflow);
eb2e817f
LH
3189
3190 /* prev: Start -> root */
3191 mas_set(&mas, 10);
3192 entry = mas_prev(&mas, 0);
3193 MT_BUG_ON(mt, entry != ptr);
3194 MT_BUG_ON(mt, mas.index != 0);
3195 MT_BUG_ON(mt, mas.last != 0);
067311d3 3196 MT_BUG_ON(mt, mas.status != ma_root);
eb2e817f
LH
3197
3198 /* prev: pause -> root */
3199 mas_set(&mas, 10);
3200 mas_pause(&mas);
3201 entry = mas_prev(&mas, 0);
3202 MT_BUG_ON(mt, entry != ptr);
3203 MT_BUG_ON(mt, mas.index != 0);
3204 MT_BUG_ON(mt, mas.last != 0);
067311d3 3205 MT_BUG_ON(mt, mas.status != ma_root);
eb2e817f
LH
3206
3207 /* next: start -> none */
3208 mas_set(&mas, 0);
3209 entry = mas_next(&mas, ULONG_MAX);
3210 MT_BUG_ON(mt, mas.index != 1);
3211 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3212 MT_BUG_ON(mt, entry != NULL);
067311d3 3213 MT_BUG_ON(mt, mas.status != ma_none);
eb2e817f 3214
a8091f03 3215 /* next: start -> none*/
eb2e817f
LH
3216 mas_set(&mas, 10);
3217 entry = mas_next(&mas, ULONG_MAX);
3218 MT_BUG_ON(mt, mas.index != 1);
3219 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3220 MT_BUG_ON(mt, entry != NULL);
067311d3 3221 MT_BUG_ON(mt, mas.status != ma_none);
eb2e817f
LH
3222
3223 /* find: start -> root */
3224 mas_set(&mas, 0);
3225 entry = mas_find(&mas, ULONG_MAX);
3226 MT_BUG_ON(mt, entry != ptr);
3227 MT_BUG_ON(mt, mas.index != 0);
3228 MT_BUG_ON(mt, mas.last != 0);
067311d3 3229 MT_BUG_ON(mt, mas.status != ma_root);
eb2e817f
LH
3230
3231 /* find: root -> none */
3232 entry = mas_find(&mas, ULONG_MAX);
3233 MT_BUG_ON(mt, entry != NULL);
3234 MT_BUG_ON(mt, mas.index != 1);
3235 MT_BUG_ON(mt, mas.last != ULONG_MAX);
067311d3 3236 MT_BUG_ON(mt, mas.status != ma_none);
eb2e817f
LH
3237
3238 /* find: none -> none */
3239 entry = mas_find(&mas, ULONG_MAX);
3240 MT_BUG_ON(mt, entry != NULL);
3241 MT_BUG_ON(mt, mas.index != 1);
3242 MT_BUG_ON(mt, mas.last != ULONG_MAX);
067311d3 3243 MT_BUG_ON(mt, mas.status != ma_none);
eb2e817f
LH
3244
3245 /* find: start -> none */
3246 mas_set(&mas, 10);
3247 entry = mas_find(&mas, ULONG_MAX);
3248 MT_BUG_ON(mt, entry != NULL);
3249 MT_BUG_ON(mt, mas.index != 1);
3250 MT_BUG_ON(mt, mas.last != ULONG_MAX);
067311d3 3251 MT_BUG_ON(mt, mas.status != ma_none);
eb2e817f
LH
3252
3253 /* find_rev: none -> root */
3254 entry = mas_find_rev(&mas, 0);
3255 MT_BUG_ON(mt, entry != ptr);
3256 MT_BUG_ON(mt, mas.index != 0);
3257 MT_BUG_ON(mt, mas.last != 0);
067311d3 3258 MT_BUG_ON(mt, mas.status != ma_root);
eb2e817f
LH
3259
3260 /* find_rev: start -> root */
3261 mas_set(&mas, 0);
3262 entry = mas_find_rev(&mas, 0);
3263 MT_BUG_ON(mt, entry != ptr);
3264 MT_BUG_ON(mt, mas.index != 0);
3265 MT_BUG_ON(mt, mas.last != 0);
067311d3 3266 MT_BUG_ON(mt, mas.status != ma_root);
eb2e817f
LH
3267
3268 /* find_rev: root -> none */
3269 entry = mas_find_rev(&mas, 0);
3270 MT_BUG_ON(mt, entry != NULL);
3271 MT_BUG_ON(mt, mas.index != 0);
3272 MT_BUG_ON(mt, mas.last != 0);
067311d3 3273 MT_BUG_ON(mt, mas.status != ma_none);
eb2e817f
LH
3274
3275 /* find_rev: none -> none */
3276 entry = mas_find_rev(&mas, 0);
3277 MT_BUG_ON(mt, entry != NULL);
3278 MT_BUG_ON(mt, mas.index != 0);
3279 MT_BUG_ON(mt, mas.last != 0);
067311d3 3280 MT_BUG_ON(mt, mas.status != ma_none);
eb2e817f
LH
3281
3282 /* find_rev: start -> root */
3283 mas_set(&mas, 10);
3284 entry = mas_find_rev(&mas, 0);
3285 MT_BUG_ON(mt, entry != ptr);
3286 MT_BUG_ON(mt, mas.index != 0);
3287 MT_BUG_ON(mt, mas.last != 0);
067311d3 3288 MT_BUG_ON(mt, mas.status != ma_root);
eb2e817f
LH
3289
3290 /* walk: start -> none */
3291 mas_set(&mas, 10);
3292 entry = mas_walk(&mas);
3293 MT_BUG_ON(mt, entry != NULL);
3294 MT_BUG_ON(mt, mas.index != 1);
3295 MT_BUG_ON(mt, mas.last != ULONG_MAX);
067311d3 3296 MT_BUG_ON(mt, mas.status != ma_none);
eb2e817f
LH
3297
3298 /* walk: pause -> none*/
3299 mas_set(&mas, 10);
3300 mas_pause(&mas);
3301 entry = mas_walk(&mas);
3302 MT_BUG_ON(mt, entry != NULL);
3303 MT_BUG_ON(mt, mas.index != 1);
3304 MT_BUG_ON(mt, mas.last != ULONG_MAX);
067311d3 3305 MT_BUG_ON(mt, mas.status != ma_none);
eb2e817f
LH
3306
3307 /* walk: none -> none */
3308 mas.index = mas.last = 10;
3309 entry = mas_walk(&mas);
3310 MT_BUG_ON(mt, entry != NULL);
3311 MT_BUG_ON(mt, mas.index != 1);
3312 MT_BUG_ON(mt, mas.last != ULONG_MAX);
067311d3 3313 MT_BUG_ON(mt, mas.status != ma_none);
eb2e817f
LH
3314
3315 /* walk: none -> none */
3316 entry = mas_walk(&mas);
3317 MT_BUG_ON(mt, entry != NULL);
3318 MT_BUG_ON(mt, mas.index != 1);
3319 MT_BUG_ON(mt, mas.last != ULONG_MAX);
067311d3 3320 MT_BUG_ON(mt, mas.status != ma_none);
eb2e817f
LH
3321
3322 /* walk: start -> root */
3323 mas_set(&mas, 0);
3324 entry = mas_walk(&mas);
3325 MT_BUG_ON(mt, entry != ptr);
3326 MT_BUG_ON(mt, mas.index != 0);
3327 MT_BUG_ON(mt, mas.last != 0);
067311d3 3328 MT_BUG_ON(mt, mas.status != ma_root);
eb2e817f
LH
3329
3330 /* walk: pause -> root */
3331 mas_set(&mas, 0);
3332 mas_pause(&mas);
3333 entry = mas_walk(&mas);
3334 MT_BUG_ON(mt, entry != ptr);
3335 MT_BUG_ON(mt, mas.index != 0);
3336 MT_BUG_ON(mt, mas.last != 0);
067311d3 3337 MT_BUG_ON(mt, mas.status != ma_root);
eb2e817f
LH
3338
3339 /* walk: none -> root */
067311d3 3340 mas.status = ma_none;
eb2e817f
LH
3341 entry = mas_walk(&mas);
3342 MT_BUG_ON(mt, entry != ptr);
3343 MT_BUG_ON(mt, mas.index != 0);
3344 MT_BUG_ON(mt, mas.last != 0);
067311d3 3345 MT_BUG_ON(mt, mas.status != ma_root);
eb2e817f
LH
3346
3347 /* walk: root -> root */
3348 entry = mas_walk(&mas);
3349 MT_BUG_ON(mt, entry != ptr);
3350 MT_BUG_ON(mt, mas.index != 0);
3351 MT_BUG_ON(mt, mas.last != 0);
067311d3 3352 MT_BUG_ON(mt, mas.status != ma_root);
eb2e817f
LH
3353
3354 /* walk: root -> none */
3355 mas_set(&mas, 10);
3356 entry = mas_walk(&mas);
3357 MT_BUG_ON(mt, entry != NULL);
3358 MT_BUG_ON(mt, mas.index != 1);
3359 MT_BUG_ON(mt, mas.last != ULONG_MAX);
067311d3 3360 MT_BUG_ON(mt, mas.status != ma_none);
eb2e817f
LH
3361
3362 /* walk: none -> root */
3363 mas.index = mas.last = 0;
3364 entry = mas_walk(&mas);
3365 MT_BUG_ON(mt, entry != ptr);
3366 MT_BUG_ON(mt, mas.index != 0);
3367 MT_BUG_ON(mt, mas.last != 0);
067311d3 3368 MT_BUG_ON(mt, mas.status != ma_root);
eb2e817f
LH
3369
3370 mas_unlock(&mas);
3371
3372 /* Check when there is an actual node */
3373 mtree_store_range(mt, 0, 0, NULL, GFP_KERNEL);
3374 mtree_store_range(mt, 0x1000, 0x1500, ptr, GFP_KERNEL);
3375 mtree_store_range(mt, 0x2000, 0x2500, ptr2, GFP_KERNEL);
3376 mtree_store_range(mt, 0x3000, 0x3500, ptr3, GFP_KERNEL);
3377
3378 mas_lock(&mas);
3379
3380 /* next: start ->active */
3381 mas_set(&mas, 0);
3382 entry = mas_next(&mas, ULONG_MAX);
3383 MT_BUG_ON(mt, entry != ptr);
3384 MT_BUG_ON(mt, mas.index != 0x1000);
3385 MT_BUG_ON(mt, mas.last != 0x1500);
067311d3 3386 MT_BUG_ON(mt, !mas_is_active(&mas));
eb2e817f
LH
3387
3388 /* next: pause ->active */
3389 mas_set(&mas, 0);
3390 mas_pause(&mas);
3391 entry = mas_next(&mas, ULONG_MAX);
3392 MT_BUG_ON(mt, entry != ptr);
3393 MT_BUG_ON(mt, mas.index != 0x1000);
3394 MT_BUG_ON(mt, mas.last != 0x1500);
067311d3 3395 MT_BUG_ON(mt, !mas_is_active(&mas));
eb2e817f
LH
3396
3397 /* next: none ->active */
3398 mas.index = mas.last = 0;
3399 mas.offset = 0;
067311d3 3400 mas.status = ma_none;
eb2e817f
LH
3401 entry = mas_next(&mas, ULONG_MAX);
3402 MT_BUG_ON(mt, entry != ptr);
3403 MT_BUG_ON(mt, mas.index != 0x1000);
3404 MT_BUG_ON(mt, mas.last != 0x1500);
067311d3 3405 MT_BUG_ON(mt, !mas_is_active(&mas));
eb2e817f 3406
067311d3
LH
3407 /* next:active ->active (spanning limit) */
3408 entry = mas_next(&mas, 0x2100);
eb2e817f
LH
3409 MT_BUG_ON(mt, entry != ptr2);
3410 MT_BUG_ON(mt, mas.index != 0x2000);
3411 MT_BUG_ON(mt, mas.last != 0x2500);
067311d3 3412 MT_BUG_ON(mt, !mas_is_active(&mas));
eb2e817f 3413
067311d3 3414 /* next:active -> overflow (limit reached) beyond data */
eb2e817f
LH
3415 entry = mas_next(&mas, 0x2999);
3416 MT_BUG_ON(mt, entry != NULL);
3417 MT_BUG_ON(mt, mas.index != 0x2501);
3418 MT_BUG_ON(mt, mas.last != 0x2fff);
067311d3 3419 MT_BUG_ON(mt, !mas_is_overflow(&mas));
eb2e817f 3420
067311d3 3421 /* next:overflow -> active (limit changed) */
eb2e817f
LH
3422 entry = mas_next(&mas, ULONG_MAX);
3423 MT_BUG_ON(mt, entry != ptr3);
3424 MT_BUG_ON(mt, mas.index != 0x3000);
3425 MT_BUG_ON(mt, mas.last != 0x3500);
067311d3 3426 MT_BUG_ON(mt, !mas_is_active(&mas));
eb2e817f 3427
067311d3 3428 /* next:active -> overflow (limit reached) */
a8091f03
LH
3429 entry = mas_next(&mas, ULONG_MAX);
3430 MT_BUG_ON(mt, entry != NULL);
3431 MT_BUG_ON(mt, mas.index != 0x3501);
3432 MT_BUG_ON(mt, mas.last != ULONG_MAX);
067311d3 3433 MT_BUG_ON(mt, !mas_is_overflow(&mas));
a8091f03
LH
3434
3435 /* next:overflow -> overflow */
3436 entry = mas_next(&mas, ULONG_MAX);
3437 MT_BUG_ON(mt, entry != NULL);
3438 MT_BUG_ON(mt, mas.index != 0x3501);
3439 MT_BUG_ON(mt, mas.last != ULONG_MAX);
067311d3 3440 MT_BUG_ON(mt, !mas_is_overflow(&mas));
a8091f03
LH
3441
3442 /* prev:overflow -> active */
3443 entry = mas_prev(&mas, 0);
3444 MT_BUG_ON(mt, entry != ptr3);
3445 MT_BUG_ON(mt, mas.index != 0x3000);
3446 MT_BUG_ON(mt, mas.last != 0x3500);
067311d3 3447 MT_BUG_ON(mt, !mas_is_active(&mas));
eb2e817f
LH
3448
3449 /* next: none -> active, skip value at location */
3450 mas_set(&mas, 0);
3451 entry = mas_next(&mas, ULONG_MAX);
067311d3 3452 mas.status = ma_none;
eb2e817f
LH
3453 mas.offset = 0;
3454 entry = mas_next(&mas, ULONG_MAX);
3455 MT_BUG_ON(mt, entry != ptr2);
3456 MT_BUG_ON(mt, mas.index != 0x2000);
3457 MT_BUG_ON(mt, mas.last != 0x2500);
067311d3 3458 MT_BUG_ON(mt, !mas_is_active(&mas));
eb2e817f
LH
3459
3460 /* prev:active ->active */
3461 entry = mas_prev(&mas, 0);
3462 MT_BUG_ON(mt, entry != ptr);
3463 MT_BUG_ON(mt, mas.index != 0x1000);
3464 MT_BUG_ON(mt, mas.last != 0x1500);
067311d3 3465 MT_BUG_ON(mt, !mas_is_active(&mas));
eb2e817f 3466
067311d3
LH
3467 /* prev:active -> underflow (span limit) */
3468 mas_next(&mas, ULONG_MAX);
3469 entry = mas_prev(&mas, 0x1200);
3470 MT_BUG_ON(mt, entry != ptr);
3471 MT_BUG_ON(mt, mas.index != 0x1000);
3472 MT_BUG_ON(mt, mas.last != 0x1500);
3473 MT_BUG_ON(mt, !mas_is_active(&mas)); /* spanning limit */
3474 entry = mas_prev(&mas, 0x1200); /* underflow */
3475 MT_BUG_ON(mt, entry != NULL);
3476 MT_BUG_ON(mt, mas.index != 0x1000);
3477 MT_BUG_ON(mt, mas.last != 0x1500);
3478 MT_BUG_ON(mt, !mas_is_underflow(&mas));
3479
3480 /* prev:underflow -> underflow (lower limit) spanning end range */
a8091f03
LH
3481 entry = mas_prev(&mas, 0x0100);
3482 MT_BUG_ON(mt, entry != NULL);
3483 MT_BUG_ON(mt, mas.index != 0);
3484 MT_BUG_ON(mt, mas.last != 0x0FFF);
067311d3 3485 MT_BUG_ON(mt, !mas_is_underflow(&mas));
a8091f03 3486
067311d3 3487 /* prev:underflow -> underflow */
a8091f03
LH
3488 entry = mas_prev(&mas, 0);
3489 MT_BUG_ON(mt, entry != NULL);
3490 MT_BUG_ON(mt, mas.index != 0);
3491 MT_BUG_ON(mt, mas.last != 0x0FFF);
067311d3 3492 MT_BUG_ON(mt, !mas_is_underflow(&mas));
a8091f03
LH
3493
3494 /* prev:underflow -> underflow */
eb2e817f
LH
3495 entry = mas_prev(&mas, 0);
3496 MT_BUG_ON(mt, entry != NULL);
3497 MT_BUG_ON(mt, mas.index != 0);
3498 MT_BUG_ON(mt, mas.last != 0x0FFF);
067311d3 3499 MT_BUG_ON(mt, !mas_is_underflow(&mas));
a8091f03
LH
3500
	/* next:underflow -> active */
	entry = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != ptr);
	MT_BUG_ON(mt, mas.index != 0x1000);
	MT_BUG_ON(mt, mas.last != 0x1500);
	MT_BUG_ON(mt, !mas_is_active(&mas));

	/* prev:first value -> underflow */
	entry = mas_prev(&mas, 0x1000);
	MT_BUG_ON(mt, entry != NULL);
	MT_BUG_ON(mt, mas.index != 0x1000);
	MT_BUG_ON(mt, mas.last != 0x1500);
	MT_BUG_ON(mt, !mas_is_underflow(&mas));

	/* find:underflow -> first value */
	entry = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != ptr);
	MT_BUG_ON(mt, mas.index != 0x1000);
	MT_BUG_ON(mt, mas.last != 0x1500);
	MT_BUG_ON(mt, !mas_is_active(&mas));

	/* prev: pause -> active */
	mas_set(&mas, 0x3600);
	entry = mas_prev(&mas, 0);
	MT_BUG_ON(mt, entry != ptr3);
	mas_pause(&mas);
	entry = mas_prev(&mas, 0);
	MT_BUG_ON(mt, entry != ptr2);
	MT_BUG_ON(mt, mas.index != 0x2000);
	MT_BUG_ON(mt, mas.last != 0x2500);
	MT_BUG_ON(mt, !mas_is_active(&mas));

	/* prev:active -> underflow spanning min */
	entry = mas_prev(&mas, 0x1600);
	MT_BUG_ON(mt, entry != NULL);
	MT_BUG_ON(mt, mas.index != 0x1501);
	MT_BUG_ON(mt, mas.last != 0x1FFF);
	MT_BUG_ON(mt, !mas_is_underflow(&mas));

	/* prev: active -> active, continue */
	entry = mas_prev(&mas, 0);
	MT_BUG_ON(mt, entry != ptr);
	MT_BUG_ON(mt, mas.index != 0x1000);
	MT_BUG_ON(mt, mas.last != 0x1500);
	MT_BUG_ON(mt, !mas_is_active(&mas));

	/* find: start -> active */
	mas_set(&mas, 0);
	entry = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != ptr);
	MT_BUG_ON(mt, mas.index != 0x1000);
	MT_BUG_ON(mt, mas.last != 0x1500);
	MT_BUG_ON(mt, !mas_is_active(&mas));

	/* find: pause -> active */
	mas_set(&mas, 0);
	mas_pause(&mas);
	entry = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != ptr);
	MT_BUG_ON(mt, mas.index != 0x1000);
	MT_BUG_ON(mt, mas.last != 0x1500);
	MT_BUG_ON(mt, !mas_is_active(&mas));

	/* find: start -> active on value */
	mas_set(&mas, 0x1200);
	entry = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != ptr);
	MT_BUG_ON(mt, mas.index != 0x1000);
	MT_BUG_ON(mt, mas.last != 0x1500);
	MT_BUG_ON(mt, !mas_is_active(&mas));

	/* find:active -> active */
	entry = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != ptr2);
	MT_BUG_ON(mt, mas.index != 0x2000);
	MT_BUG_ON(mt, mas.last != 0x2500);
	MT_BUG_ON(mt, !mas_is_active(&mas));

	/* find:active -> active (NULL) */
	entry = mas_find(&mas, 0x2700);
	MT_BUG_ON(mt, entry != NULL);
	MT_BUG_ON(mt, mas.index != 0x2501);
	MT_BUG_ON(mt, mas.last != 0x2FFF);
	MAS_BUG_ON(&mas, !mas_is_active(&mas));

	/* find: overflow -> active */
	entry = mas_find(&mas, 0x5000);
	MT_BUG_ON(mt, entry != ptr3);
	MT_BUG_ON(mt, mas.index != 0x3000);
	MT_BUG_ON(mt, mas.last != 0x3500);
	MT_BUG_ON(mt, !mas_is_active(&mas));

	/* find:active -> active (NULL) end */
	entry = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != NULL);
	MT_BUG_ON(mt, mas.index != 0x3501);
	MT_BUG_ON(mt, mas.last != ULONG_MAX);
	MAS_BUG_ON(&mas, !mas_is_active(&mas));

	/* find_rev: active (END) -> active */
	entry = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, entry != ptr3);
	MT_BUG_ON(mt, mas.index != 0x3000);
	MT_BUG_ON(mt, mas.last != 0x3500);
	MT_BUG_ON(mt, !mas_is_active(&mas));

	/* find_rev:active -> active */
	entry = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, entry != ptr2);
	MT_BUG_ON(mt, mas.index != 0x2000);
	MT_BUG_ON(mt, mas.last != 0x2500);
	MT_BUG_ON(mt, !mas_is_active(&mas));

	/* find_rev: pause -> active */
	mas_pause(&mas);
	entry = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, entry != ptr);
	MT_BUG_ON(mt, mas.index != 0x1000);
	MT_BUG_ON(mt, mas.last != 0x1500);
	MT_BUG_ON(mt, !mas_is_active(&mas));

	/* find_rev:active -> underflow */
	entry = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, entry != NULL);
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, mas.last != 0x0FFF);
	MT_BUG_ON(mt, !mas_is_underflow(&mas));

	/* find_rev: start -> active */
	mas_set(&mas, 0x1200);
	entry = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, entry != ptr);
	MT_BUG_ON(mt, mas.index != 0x1000);
	MT_BUG_ON(mt, mas.last != 0x1500);
	MT_BUG_ON(mt, !mas_is_active(&mas));

	/* mas_walk start -> active */
	mas_set(&mas, 0x1200);
	entry = mas_walk(&mas);
	MT_BUG_ON(mt, entry != ptr);
	MT_BUG_ON(mt, mas.index != 0x1000);
	MT_BUG_ON(mt, mas.last != 0x1500);
	MT_BUG_ON(mt, !mas_is_active(&mas));

	/* mas_walk start -> active */
	mas_set(&mas, 0x1600);
	entry = mas_walk(&mas);
	MT_BUG_ON(mt, entry != NULL);
	MT_BUG_ON(mt, mas.index != 0x1501);
	MT_BUG_ON(mt, mas.last != 0x1fff);
	MT_BUG_ON(mt, !mas_is_active(&mas));

	/* mas_walk pause -> active */
	mas_set(&mas, 0x1200);
	mas_pause(&mas);
	entry = mas_walk(&mas);
	MT_BUG_ON(mt, entry != ptr);
	MT_BUG_ON(mt, mas.index != 0x1000);
	MT_BUG_ON(mt, mas.last != 0x1500);
	MT_BUG_ON(mt, !mas_is_active(&mas));

	/* mas_walk pause -> active */
	mas_set(&mas, 0x1600);
	mas_pause(&mas);
	entry = mas_walk(&mas);
	MT_BUG_ON(mt, entry != NULL);
	MT_BUG_ON(mt, mas.index != 0x1501);
	MT_BUG_ON(mt, mas.last != 0x1fff);
	MT_BUG_ON(mt, !mas_is_active(&mas));

	/* mas_walk none -> active */
	mas_set(&mas, 0x1200);
	mas.status = ma_none;
	entry = mas_walk(&mas);
	MT_BUG_ON(mt, entry != ptr);
	MT_BUG_ON(mt, mas.index != 0x1000);
	MT_BUG_ON(mt, mas.last != 0x1500);
	MT_BUG_ON(mt, !mas_is_active(&mas));

	/* mas_walk none -> active */
	mas_set(&mas, 0x1600);
	mas.status = ma_none;
	entry = mas_walk(&mas);
	MT_BUG_ON(mt, entry != NULL);
	MT_BUG_ON(mt, mas.index != 0x1501);
	MT_BUG_ON(mt, mas.last != 0x1fff);
	MT_BUG_ON(mt, !mas_is_active(&mas));

	/* mas_walk active -> active */
	mas.index = 0x1200;
	mas.last = 0x1200;
	mas.offset = 0;
	entry = mas_walk(&mas);
	MT_BUG_ON(mt, entry != ptr);
	MT_BUG_ON(mt, mas.index != 0x1000);
	MT_BUG_ON(mt, mas.last != 0x1500);
	MT_BUG_ON(mt, !mas_is_active(&mas));

	/* mas_walk active -> active */
	mas.index = 0x1600;
	mas.last = 0x1600;
	entry = mas_walk(&mas);
	MT_BUG_ON(mt, entry != NULL);
	MT_BUG_ON(mt, mas.index != 0x1501);
	MT_BUG_ON(mt, mas.last != 0x1fff);
	MT_BUG_ON(mt, !mas_is_active(&mas));

	mas_unlock(&mas);
}
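
/*
 * Illustrative sketch only, not wired into the test run: the state
 * transitions exercised by check_state_handling() above are what let the
 * usual locked iteration pattern below drop and retake the lock safely
 * via mas_pause(). The function name is a made-up example.
 */
static void __init __maybe_unused state_iteration_sketch(struct maple_tree *mt)
{
	unsigned long count = 0;
	void *entry;
	MA_STATE(mas, mt, 0, 0);

	mtree_lock(mt);
	mas_for_each(&mas, entry, ULONG_MAX) {
		if (entry)	/* stand-in for real work on the entry */
			count++;
		/* Pausing allows the lock to be dropped and retaken mid-walk. */
		mas_pause(&mas);
		mtree_unlock(mt);
		cond_resched();
		mtree_lock(mt);
	}
	mtree_unlock(mt);
	pr_info("state sketch walked %lu entries\n", count);
}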

static noinline void __init alloc_cyclic_testing(struct maple_tree *mt)
{
	unsigned long location;
	unsigned long next;
	int ret = 0;
	MA_STATE(mas, mt, 0, 0);

	next = 0;
	mtree_lock(mt);
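	/*
	 * Each pass below should hand out the next free index: the first
	 * allocation lands on the range minimum (2), so location == i + 2
	 * and the cyclic cursor advances to next == location + 1 == i + 3.
	 */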
	for (int i = 0; i < 100; i++) {
		mas_alloc_cyclic(&mas, &location, mt, 2, ULONG_MAX, &next, GFP_KERNEL);
		MAS_BUG_ON(&mas, i != location - 2);
		MAS_BUG_ON(&mas, mas.index != location);
		MAS_BUG_ON(&mas, mas.last != location);
		MAS_BUG_ON(&mas, i != next - 3);
	}

	mtree_unlock(mt);
	mtree_destroy(mt);
	next = 0;
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	for (int i = 0; i < 100; i++) {
		mtree_alloc_cyclic(mt, &location, mt, 2, ULONG_MAX, &next, GFP_KERNEL);
		MT_BUG_ON(mt, i != location - 2);
		MT_BUG_ON(mt, i != next - 3);
		MT_BUG_ON(mt, mtree_load(mt, location) != mt);
	}

	mtree_destroy(mt);

	/*
	 * An issue with the reverse search was reported at
	 * https://lore.kernel.org/all/20241216060600.287B4C4CED0@smtp.kernel.org/
	 * Exhausting the allocation area and forcing the search to wrap
	 * requires a mas_reset() in mas_alloc_cyclic().
	 */
	next = 0;
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	for (int i = 0; i < 1023; i++) {
		mtree_alloc_cyclic(mt, &location, mt, 2, 1024, &next, GFP_KERNEL);
		MT_BUG_ON(mt, i != location - 2);
		MT_BUG_ON(mt, i != next - 3);
		MT_BUG_ON(mt, mtree_load(mt, location) != mt);
	}
	mtree_erase(mt, 123);
	MT_BUG_ON(mt, mtree_load(mt, 123) != NULL);
	mtree_alloc_cyclic(mt, &location, mt, 2, 1024, &next, GFP_KERNEL);
	MT_BUG_ON(mt, 123 != location);
	MT_BUG_ON(mt, 124 != next);
	MT_BUG_ON(mt, mtree_load(mt, location) != mt);
	mtree_erase(mt, 100);
	mtree_alloc_cyclic(mt, &location, mt, 2, 1024, &next, GFP_KERNEL);
	MT_BUG_ON(mt, 100 != location);
	MT_BUG_ON(mt, 101 != next);
	MT_BUG_ON(mt, mtree_load(mt, location) != mt);
	mtree_destroy(mt);

	/* Overflow test */
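	/*
	 * Starting the cursor at ULONG_MAX - 1 leaves room for exactly two
	 * straightforward allocations; the third is expected to succeed by
	 * wrapping back to the bottom of the range and to report the wrap
	 * by returning 1 instead of 0.
	 */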
	next = ULONG_MAX - 1;
	ret = mtree_alloc_cyclic(mt, &location, mt, 2, ULONG_MAX, &next, GFP_KERNEL);
	MT_BUG_ON(mt, ret != 0);
	ret = mtree_alloc_cyclic(mt, &location, mt, 2, ULONG_MAX, &next, GFP_KERNEL);
	MT_BUG_ON(mt, ret != 0);
	ret = mtree_alloc_cyclic(mt, &location, mt, 2, ULONG_MAX, &next, GFP_KERNEL);
	MT_BUG_ON(mt, ret != 1);
}
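
/*
 * Illustrative sketch only, not called by the tests: a minimal consumer of
 * mtree_alloc_cyclic() handing out IDs from an MT_FLAGS_ALLOC_RANGE tree,
 * driven the same way as the checks above. The 1..1024 range, the function
 * name, and the static cursor are arbitrary example choices.
 */
static int __init __maybe_unused cyclic_id_sketch(struct maple_tree *mt,
		void *item, unsigned long *id)
{
	static unsigned long next_id;
	int ret;

	ret = mtree_alloc_cyclic(mt, id, item, 1, 1024, &next_id, GFP_KERNEL);
	if (ret < 0)
		return ret;	/* negative errno, e.g. no free index left */

	/* A return of 1 indicates the search wrapped (see the overflow test). */
	return 0;
}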

static DEFINE_MTREE(tree);
static int __init maple_tree_seed(void)
{
	unsigned long set[] = { 5015, 5014, 5017, 25, 1000,
				1001, 1002, 1003, 1005, 0,
				5003, 5002};
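	/*
	 * The indices above mix two clusters (1000..1005 and 5002..5017)
	 * with the outliers 0 and 25; the insert/load sequences below walk
	 * through them to exercise node splitting (see the "Should split"
	 * check further down).
	 */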
	void *ptr = &set;

	pr_info("\nTEST STARTING\n\n");

#if defined(BENCH_SLOT_STORE)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_slot_store(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_NODE_STORE)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_node_store(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_AWALK)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_awalk(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_WALK)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_walk(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_LOAD)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_load(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_FORK)
#define BENCH
	bench_forking();
	goto skip;
#endif
#if defined(BENCH_MT_FOR_EACH)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_mt_for_each(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_MAS_FOR_EACH)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_mas_for_each(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_MAS_PREV)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_mas_prev(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
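
	/*
	 * The benchmark selections above are meant to be enabled one at a
	 * time: defining any BENCH_* macro also defines BENCH and jumps
	 * straight to the skip: label near the end, bypassing the
	 * functional tests below.
	 */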

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_deficient_node(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_store_null(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_root_expand(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_iteration(&tree);
	mtree_destroy(&tree);

	check_forking();

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_mas_store_gfp(&tree);
	mtree_destroy(&tree);

	/* Test ranges (store and insert) */
	mt_init_flags(&tree, 0);
	check_ranges(&tree);
	mtree_destroy(&tree);

#if defined(CONFIG_64BIT)
	/* These tests have ranges outside of 4GB */
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_alloc_range(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_alloc_rev_range(&tree);
	mtree_destroy(&tree);
#endif

	mt_init_flags(&tree, 0);

	check_load(&tree, set[0], NULL); /* See if 5015 -> NULL */

	check_insert(&tree, set[9], &tree); /* Insert 0 */
	check_load(&tree, set[9], &tree); /* See if 0 -> &tree */
	check_load(&tree, set[0], NULL); /* See if 5015 -> NULL */

	check_insert(&tree, set[10], ptr); /* Insert 5003 */
	check_load(&tree, set[9], &tree); /* See if 0 -> &tree */
	check_load(&tree, set[11], NULL); /* See if 5002 -> NULL */
	check_load(&tree, set[10], ptr); /* See if 5003 -> ptr */

	/* Clear out the tree */
	mtree_destroy(&tree);

	/* Try to insert, insert a dup, and load back what was inserted. */
	mt_init_flags(&tree, 0);
	check_insert(&tree, set[0], &tree); /* Insert 5015 */
	check_dup_insert(&tree, set[0], &tree); /* Insert 5015 again */
	check_load(&tree, set[0], &tree); /* See if 5015 -> &tree */

	/*
	 * The second set of tests tries to load a value that doesn't exist,
	 * inserts a second value, then loads the value again.
	 */
	check_load(&tree, set[1], NULL); /* See if 5014 -> NULL */
	check_insert(&tree, set[1], ptr); /* insert 5014 -> ptr */
	check_load(&tree, set[1], ptr); /* See if 5014 -> ptr */
	check_load(&tree, set[0], &tree); /* See if 5015 -> &tree */
	/*
	 * Tree currently contains:
	 * 5014 -> ptr and 5015 -> &tree, with NULL everywhere else.
	 */
	check_insert(&tree, set[6], ptr); /* insert 1002 -> ptr */
	check_insert(&tree, set[7], &tree); /* insert 1003 -> &tree */

	check_load(&tree, set[0], &tree); /* See if 5015 -> &tree */
	check_load(&tree, set[1], ptr); /* See if 5014 -> ptr */
	check_load(&tree, set[6], ptr); /* See if 1002 -> ptr */
	check_load(&tree, set[7], &tree); /* See if 1003 -> &tree */

	/* Clear out the tree */
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	/* Test inserting into a NULL hole. */
	check_insert(&tree, set[5], ptr); /* insert 1001 -> ptr */
	check_insert(&tree, set[7], &tree); /* insert 1003 -> &tree */
	check_insert(&tree, set[6], ptr); /* insert 1002 -> ptr */
	check_load(&tree, set[5], ptr); /* See if 1001 -> ptr */
	check_load(&tree, set[6], ptr); /* See if 1002 -> ptr */
	check_load(&tree, set[7], &tree); /* See if 1003 -> &tree */

	/* Clear out the tree */
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	/*
	 * set[] = {5015, 5014, 5017, 25, 1000,
	 *	    1001, 1002, 1003, 1005, 0,
	 *	    5003, 5002};
	 */

	check_insert(&tree, set[0], ptr); /* 5015 */
	check_insert(&tree, set[1], &tree); /* 5014 */
	check_insert(&tree, set[2], ptr); /* 5017 */
	check_insert(&tree, set[3], &tree); /* 25 */
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_insert(&tree, set[4], ptr); /* 1000 < Should split. */
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree); /* 25 */
	check_load(&tree, set[4], ptr);
	check_insert(&tree, set[5], &tree); /* 1001 */
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_load(&tree, set[4], ptr);
	check_load(&tree, set[5], &tree);
	check_insert(&tree, set[6], ptr); /* 1002 */
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_load(&tree, set[4], ptr);
	check_load(&tree, set[5], &tree);
	check_load(&tree, set[6], ptr);
	check_insert(&tree, set[7], &tree); /* 1003 */
	check_load(&tree, set[0], ptr);
	check_insert(&tree, set[8], ptr); /* 1005 */

	check_insert(&tree, set[9], &tree); /* 0 */

	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_load(&tree, set[4], ptr);
	check_load(&tree, set[5], &tree);
	check_load(&tree, set[6], ptr);
	check_load(&tree, set[9], &tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	check_seq(&tree, 16, false);
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	check_seq(&tree, 1000, true);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_rev_seq(&tree, 1000, true);
	mtree_destroy(&tree);

	check_lower_bound_split(&tree);
	check_upper_bound_split(&tree);
	check_mid_split(&tree);

	mt_init_flags(&tree, 0);
	check_next_entry(&tree);
	check_find(&tree);
	check_find_2(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_prev_entry(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_gap_combining(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_node_overwrite(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	next_prev_test(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_spanning_relatives(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_rev_find(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	check_fuzzer(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_dup(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_bnode_min_spanning(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_empty_area_window(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_empty_area_fill(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_state_handling(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	alloc_cyclic_testing(&tree);
	mtree_destroy(&tree);

#if defined(BENCH)
skip:
#endif
	rcu_barrier();
	pr_info("maple_tree: %u of %u tests passed\n",
			atomic_read(&maple_tree_tests_passed),
			atomic_read(&maple_tree_tests_run));
	if (atomic_read(&maple_tree_tests_run) ==
	    atomic_read(&maple_tree_tests_passed))
		return 0;

	return -EINVAL;
}

static void __exit maple_tree_harvest(void)
{
}

module_init(maple_tree_seed);
module_exit(maple_tree_harvest);
MODULE_AUTHOR("Liam R. Howlett <Liam.Howlett@Oracle.com>");
MODULE_DESCRIPTION("maple tree API test module");
MODULE_LICENSE("GPL");