// SPDX-License-Identifier: GPL-2.0+
/*
 * test_maple_tree.c: Test the maple tree API
 * Copyright (c) 2018-2022 Oracle Corporation
 * Author: Liam R. Howlett <Liam.Howlett@Oracle.com>
 *
 * Any tests that only require the interface of the tree.
 */

#include <linux/maple_tree.h>
#include <linux/module.h>

#define MTREE_ALLOC_MAX 0x2000000000000Ul
#define CONFIG_MAPLE_SEARCH
#define MAPLE_32BIT (MAPLE_NODE_SLOTS > 31)

#ifndef CONFIG_DEBUG_MAPLE_TREE
#define mt_dump(mt, fmt)		do {} while (0)
#define mt_validate(mt)			do {} while (0)
#define mt_cache_shrink()		do {} while (0)
#define mas_dump(mas)			do {} while (0)
#define mas_wr_dump(mas)		do {} while (0)
atomic_t maple_tree_tests_run;
atomic_t maple_tree_tests_passed;
#undef MT_BUG_ON

#define MT_BUG_ON(__tree, __x) do {					\
	atomic_inc(&maple_tree_tests_run);				\
	if (__x) {							\
		pr_info("BUG at %s:%d (%u)\n",				\
			__func__, __LINE__, __x);			\
		pr_info("Pass: %u Run:%u\n",				\
			atomic_read(&maple_tree_tests_passed),		\
			atomic_read(&maple_tree_tests_run));		\
	} else {							\
		atomic_inc(&maple_tree_tests_passed);			\
	}								\
} while (0)
#endif

/* #define BENCH_SLOT_STORE */
/* #define BENCH_NODE_STORE */
/* #define BENCH_AWALK */
/* #define BENCH_WALK */
/* #define BENCH_MT_FOR_EACH */
/* #define BENCH_FORK */

#ifdef __KERNEL__
#define mt_set_non_kernel(x)		do {} while (0)
#define mt_zero_nr_tallocated(x)	do {} while (0)
#else
#define cond_resched()			do {} while (0)
#endif

static int __init mtree_insert_index(struct maple_tree *mt,
				     unsigned long index, gfp_t gfp)
{
	return mtree_insert(mt, index, xa_mk_value(index & LONG_MAX), gfp);
}

static void __init mtree_erase_index(struct maple_tree *mt, unsigned long index)
{
	MT_BUG_ON(mt, mtree_erase(mt, index) != xa_mk_value(index & LONG_MAX));
	MT_BUG_ON(mt, mtree_load(mt, index) != NULL);
}

static int __init mtree_test_insert(struct maple_tree *mt, unsigned long index,
				void *ptr)
{
	return mtree_insert(mt, index, ptr, GFP_KERNEL);
}

static int __init mtree_test_store_range(struct maple_tree *mt,
			unsigned long start, unsigned long end, void *ptr)
{
	return mtree_store_range(mt, start, end, ptr, GFP_KERNEL);
}

static int __init mtree_test_store(struct maple_tree *mt, unsigned long start,
				void *ptr)
{
	return mtree_test_store_range(mt, start, start, ptr);
}

static int __init mtree_test_insert_range(struct maple_tree *mt,
			unsigned long start, unsigned long end, void *ptr)
{
	return mtree_insert_range(mt, start, end, ptr, GFP_KERNEL);
}

static void __init *mtree_test_load(struct maple_tree *mt, unsigned long index)
{
	return mtree_load(mt, index);
}

static void __init *mtree_test_erase(struct maple_tree *mt, unsigned long index)
{
	return mtree_erase(mt, index);
}

#if defined(CONFIG_64BIT)
static noinline void __init check_mtree_alloc_range(struct maple_tree *mt,
		unsigned long start, unsigned long end, unsigned long size,
		unsigned long expected, int eret, void *ptr)
{

	unsigned long result = expected + 1;
	int ret;

	ret = mtree_alloc_range(mt, &result, ptr, size, start, end,
			GFP_KERNEL);
	MT_BUG_ON(mt, ret != eret);
	if (ret)
		return;

	MT_BUG_ON(mt, result != expected);
}

static noinline void __init check_mtree_alloc_rrange(struct maple_tree *mt,
		unsigned long start, unsigned long end, unsigned long size,
		unsigned long expected, int eret, void *ptr)
{

	unsigned long result = expected + 1;
	int ret;

	ret = mtree_alloc_rrange(mt, &result, ptr, size, start, end,
			GFP_KERNEL);
	MT_BUG_ON(mt, ret != eret);
	if (ret)
		return;

	MT_BUG_ON(mt, result != expected);
}
#endif

static noinline void __init check_load(struct maple_tree *mt,
				       unsigned long index, void *ptr)
{
	void *ret = mtree_test_load(mt, index);

	if (ret != ptr)
		pr_err("Load %lu returned %p expect %p\n", index, ret, ptr);
	MT_BUG_ON(mt, ret != ptr);
}

static noinline void __init check_store_range(struct maple_tree *mt,
		unsigned long start, unsigned long end, void *ptr, int expected)
{
	int ret = -EINVAL;
	unsigned long i;

	ret = mtree_test_store_range(mt, start, end, ptr);
	MT_BUG_ON(mt, ret != expected);

	if (ret)
		return;

	for (i = start; i <= end; i++)
		check_load(mt, i, ptr);
}

static noinline void __init check_insert_range(struct maple_tree *mt,
		unsigned long start, unsigned long end, void *ptr, int expected)
{
	int ret = -EINVAL;
	unsigned long i;

	ret = mtree_test_insert_range(mt, start, end, ptr);
	MT_BUG_ON(mt, ret != expected);

	if (ret)
		return;

	for (i = start; i <= end; i++)
		check_load(mt, i, ptr);
}

static noinline void __init check_insert(struct maple_tree *mt,
					 unsigned long index, void *ptr)
{
	int ret = -EINVAL;

	ret = mtree_test_insert(mt, index, ptr);
	MT_BUG_ON(mt, ret != 0);
}

static noinline void __init check_dup_insert(struct maple_tree *mt,
					     unsigned long index, void *ptr)
{
	int ret = -EINVAL;

	ret = mtree_test_insert(mt, index, ptr);
	MT_BUG_ON(mt, ret != -EEXIST);
}


static noinline void __init check_index_load(struct maple_tree *mt,
					     unsigned long index)
{
	return check_load(mt, index, xa_mk_value(index & LONG_MAX));
}

static inline __init int not_empty(struct maple_node *node)
{
	int i;

	if (node->parent)
		return 1;

	for (i = 0; i < ARRAY_SIZE(node->slot); i++)
		if (node->slot[i])
			return 1;

	return 0;
}

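/*
 * check_rev_seq()/check_seq() - Populate the tree one index at a time
 * (descending or ascending), verifying every previously inserted index still
 * loads correctly after each insert and that the tree reports a height.
 */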
static noinline void __init check_rev_seq(struct maple_tree *mt,
		unsigned long max, bool verbose)
{
	unsigned long i = max, j;

	MT_BUG_ON(mt, !mtree_empty(mt));

	mt_zero_nr_tallocated();
	while (i) {
		MT_BUG_ON(mt, mtree_insert_index(mt, i, GFP_KERNEL));
		for (j = i; j <= max; j++)
			check_index_load(mt, j);

		check_load(mt, i - 1, NULL);
		mt_set_in_rcu(mt);
		MT_BUG_ON(mt, !mt_height(mt));
		mt_clear_in_rcu(mt);
		MT_BUG_ON(mt, !mt_height(mt));
		i--;
	}
	check_load(mt, max + 1, NULL);

#ifndef __KERNEL__
	if (verbose) {
		rcu_barrier();
		mt_dump(mt, mt_dump_dec);
		pr_info(" %s test of 0-%lu %luK in %d active (%d total)\n",
			__func__, max, mt_get_alloc_size()/1024, mt_nr_allocated(),
			mt_nr_tallocated());
	}
#endif
}

static noinline void __init check_seq(struct maple_tree *mt, unsigned long max,
		bool verbose)
{
	unsigned long i, j;

	MT_BUG_ON(mt, !mtree_empty(mt));

	mt_zero_nr_tallocated();
	for (i = 0; i <= max; i++) {
		MT_BUG_ON(mt, mtree_insert_index(mt, i, GFP_KERNEL));
		for (j = 0; j <= i; j++)
			check_index_load(mt, j);

		if (i)
			MT_BUG_ON(mt, !mt_height(mt));
		check_load(mt, i + 1, NULL);
	}

#ifndef __KERNEL__
	if (verbose) {
		rcu_barrier();
		mt_dump(mt, mt_dump_dec);
		pr_info(" seq test of 0-%lu %luK in %d active (%d total)\n",
			max, mt_get_alloc_size()/1024, mt_nr_allocated(),
			mt_nr_tallocated());
	}
#endif
}

static noinline void __init check_lb_not_empty(struct maple_tree *mt)
{
	unsigned long i, j;
	unsigned long huge = 4000UL * 1000 * 1000;


	i = huge;
	while (i > 4096) {
		check_insert(mt, i, (void *) i);
		for (j = huge; j >= i; j /= 2) {
			check_load(mt, j-1, NULL);
			check_load(mt, j, (void *) j);
			check_load(mt, j+1, NULL);
		}
		i /= 2;
	}
	mtree_destroy(mt);
}

static noinline void __init check_lower_bound_split(struct maple_tree *mt)
{
	MT_BUG_ON(mt, !mtree_empty(mt));
	check_lb_not_empty(mt);
}

static noinline void __init check_upper_bound_split(struct maple_tree *mt)
{
	unsigned long i, j;
	unsigned long huge;

	MT_BUG_ON(mt, !mtree_empty(mt));

	if (MAPLE_32BIT)
		huge = 2147483647UL;
	else
		huge = 4000UL * 1000 * 1000;

	i = 4096;
	while (i < huge) {
		check_insert(mt, i, (void *) i);
		for (j = i; j >= huge; j *= 2) {
			check_load(mt, j-1, NULL);
			check_load(mt, j, (void *) j);
			check_load(mt, j+1, NULL);
		}
		i *= 2;
	}
	mtree_destroy(mt);
}

static noinline void __init check_mid_split(struct maple_tree *mt)
{
	unsigned long huge = 8000UL * 1000 * 1000;

	check_insert(mt, huge, (void *) huge);
	check_insert(mt, 0, xa_mk_value(0));
	check_lb_not_empty(mt);
}

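/*
 * check_rev_find() - Store 201 ranges of i*10 to i*10 + 5 and walk the tree
 * backwards with mas_find_rev(), checking hits, misses and the lower limit.
 */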
static noinline void __init check_rev_find(struct maple_tree *mt)
{
	int i, nr_entries = 200;
	void *val;
	MA_STATE(mas, mt, 0, 0);

	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 5,
				  xa_mk_value(i), GFP_KERNEL);

	rcu_read_lock();
	mas_set(&mas, 1000);
	val = mas_find_rev(&mas, 1000);
	MT_BUG_ON(mt, val != xa_mk_value(100));
	val = mas_find_rev(&mas, 1000);
	MT_BUG_ON(mt, val != NULL);

	mas_set(&mas, 999);
	val = mas_find_rev(&mas, 997);
	MT_BUG_ON(mt, val != NULL);

	mas_set(&mas, 1000);
	val = mas_find_rev(&mas, 900);
	MT_BUG_ON(mt, val != xa_mk_value(100));
	val = mas_find_rev(&mas, 900);
	MT_BUG_ON(mt, val != xa_mk_value(99));

	mas_set(&mas, 20);
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(2));
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(1));
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(0));
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != NULL);
	rcu_read_unlock();
}

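/*
 * check_find() - Exercise mas_find(), mas_for_each(), mas_pause() and
 * mt_for_each() over power-of-four indices, including a zero entry at 64
 * and an entry at ULONG_MAX, then find the last value via mas_prev() after
 * erasing the end entry.
 */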
static noinline void __init check_find(struct maple_tree *mt)
{
	unsigned long val = 0;
	unsigned long count;
	unsigned long max;
	unsigned long top;
	unsigned long last = 0, index = 0;
	void *entry, *entry2;

	MA_STATE(mas, mt, 0, 0);

	/* Insert 0. */
	MT_BUG_ON(mt, mtree_insert_index(mt, val++, GFP_KERNEL));

#if defined(CONFIG_64BIT)
	top = 4398046511104UL;
#else
	top = ULONG_MAX;
#endif

	if (MAPLE_32BIT) {
		count = 15;
	} else {
		count = 20;
	}

	for (int i = 0; i <= count; i++) {
		if (val != 64)
			MT_BUG_ON(mt, mtree_insert_index(mt, val, GFP_KERNEL));
		else
			MT_BUG_ON(mt, mtree_insert(mt, val,
				XA_ZERO_ENTRY, GFP_KERNEL));

		val <<= 2;
	}

	val = 0;
	mas_set(&mas, val);
	mas_lock(&mas);
	while ((entry = mas_find(&mas, 268435456)) != NULL) {
		if (val != 64)
			MT_BUG_ON(mt, xa_mk_value(val) != entry);
		else
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);

		val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
	}
	mas_unlock(&mas);

	val = 0;
	mas_set(&mas, val);
	mas_lock(&mas);
	mas_for_each(&mas, entry, ULONG_MAX) {
		if (val != 64)
			MT_BUG_ON(mt, xa_mk_value(val) != entry);
		else
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
		val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
	}
	mas_unlock(&mas);

	/* Test mas_pause */
	val = 0;
	mas_set(&mas, val);
	mas_lock(&mas);
	mas_for_each(&mas, entry, ULONG_MAX) {
		if (val != 64)
			MT_BUG_ON(mt, xa_mk_value(val) != entry);
		else
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
		val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;

		mas_pause(&mas);
		mas_unlock(&mas);
		mas_lock(&mas);
	}
	mas_unlock(&mas);

	val = 0;
	max = 300; /* A value big enough to include XA_ZERO_ENTRY at 64. */
	mt_for_each(mt, entry, index, max) {
		MT_BUG_ON(mt, xa_mk_value(val) != entry);
		val <<= 2;
		if (val == 64) /* Skip zero entry. */
			val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
	}

	val = 0;
	max = 0;
	index = 0;
	MT_BUG_ON(mt, mtree_insert_index(mt, ULONG_MAX, GFP_KERNEL));
	mt_for_each(mt, entry, index, ULONG_MAX) {
		if (val == top)
			MT_BUG_ON(mt, entry != xa_mk_value(LONG_MAX));
		else
			MT_BUG_ON(mt, xa_mk_value(val) != entry);

		/* Workaround for 32bit */
		if ((val << 2) < val)
			val = ULONG_MAX;
		else
			val <<= 2;

		if (val == 64) /* Skip zero entry. */
			val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
		max++;
		MT_BUG_ON(mt, max > 25);
	}
	mtree_erase_index(mt, ULONG_MAX);

	mas_reset(&mas);
	index = 17;
	entry = mt_find(mt, &index, 512);
	MT_BUG_ON(mt, xa_mk_value(256) != entry);

	mas_reset(&mas);
	index = 17;
	entry = mt_find(mt, &index, 20);
	MT_BUG_ON(mt, entry != NULL);


	/* Range check.. */
	/* Insert ULONG_MAX */
	MT_BUG_ON(mt, mtree_insert_index(mt, ULONG_MAX, GFP_KERNEL));

	val = 0;
	mas_set(&mas, 0);
	mas_lock(&mas);
	mas_for_each(&mas, entry, ULONG_MAX) {
		if (val == 64)
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
		else if (val == top)
			MT_BUG_ON(mt, entry != xa_mk_value(LONG_MAX));
		else
			MT_BUG_ON(mt, xa_mk_value(val) != entry);

		/* Workaround for 32bit */
		if ((val << 2) < val)
			val = ULONG_MAX;
		else
			val <<= 2;

		/* For zero check. */
		if (!val)
			val = 1;
		mas_pause(&mas);
		mas_unlock(&mas);
		mas_lock(&mas);
	}
	mas_unlock(&mas);

	mas_set(&mas, 1048576);
	mas_lock(&mas);
	entry = mas_find(&mas, 1048576);
	mas_unlock(&mas);
	MT_BUG_ON(mas.tree, entry == NULL);

	/*
	 * Find last value.
	 * 1. get the expected value, leveraging the existence of an end entry
	 * 2. delete end entry
	 * 3. find the last value but searching for ULONG_MAX and then using
	 * prev
	 */
	/* First, get the expected result. */
	mas_lock(&mas);
	mas_reset(&mas);
	mas.index = ULONG_MAX; /* start at max.. */
	entry = mas_find(&mas, ULONG_MAX);
	entry = mas_prev(&mas, 0);
	index = mas.index;
	last = mas.last;

	/* Erase the last entry. */
	mas_reset(&mas);
	mas.index = ULONG_MAX;
	mas.last = ULONG_MAX;
	mas_erase(&mas);

	/* Get the previous value from MAS_START */
	mas_reset(&mas);
	entry2 = mas_prev(&mas, 0);

	/* Check results. */
	MT_BUG_ON(mt, entry != entry2);
	MT_BUG_ON(mt, index != mas.index);
	MT_BUG_ON(mt, last != mas.last);


	mas.node = MAS_NONE;
	mas.index = ULONG_MAX;
	mas.last = ULONG_MAX;
	entry2 = mas_prev(&mas, 0);
	MT_BUG_ON(mt, entry != entry2);

	mas_set(&mas, 0);
	MT_BUG_ON(mt, mas_prev(&mas, 0) != NULL);

	mas_unlock(&mas);
	mtree_destroy(mt);
}

static noinline void __init check_find_2(struct maple_tree *mt)
{
	unsigned long i, j;
	void *entry;

	MA_STATE(mas, mt, 0, 0);
	rcu_read_lock();
	mas_for_each(&mas, entry, ULONG_MAX)
		MT_BUG_ON(mt, true);
	rcu_read_unlock();

	for (i = 0; i < 256; i++) {
		mtree_insert_index(mt, i, GFP_KERNEL);
		j = 0;
		mas_set(&mas, 0);
		rcu_read_lock();
		mas_for_each(&mas, entry, ULONG_MAX) {
			MT_BUG_ON(mt, entry != xa_mk_value(j));
			j++;
		}
		rcu_read_unlock();
		MT_BUG_ON(mt, j != i + 1);
	}

	for (i = 0; i < 256; i++) {
		mtree_erase_index(mt, i);
		j = i + 1;
		mas_set(&mas, 0);
		rcu_read_lock();
		mas_for_each(&mas, entry, ULONG_MAX) {
			if (xa_is_zero(entry))
				continue;

			MT_BUG_ON(mt, entry != xa_mk_value(j));
			j++;
		}
		rcu_read_unlock();
		MT_BUG_ON(mt, j != 256);
	}

	/*MT_BUG_ON(mt, !mtree_empty(mt)); */
}

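/*
 * The two allocation-range tests below replay a snapshot of /proc/self/maps
 * (see the tables inside) and verify mas_empty_area(), mas_empty_area_rev()
 * and the mtree_alloc_range()/mtree_alloc_rrange() wrappers against the known
 * holes in that layout.
 */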
#if defined(CONFIG_64BIT)
static noinline void __init check_alloc_rev_range(struct maple_tree *mt)
{
	/*
	 * Generated by:
	 * cat /proc/self/maps | awk '{print $1}'|
	 * awk -F "-" '{printf "0x%s, 0x%s, ", $1, $2}'
	 */

	static const unsigned long range[] = {
	/*      Inclusive     , Exclusive. */
		0x565234af2000, 0x565234af4000,
		0x565234af4000, 0x565234af9000,
		0x565234af9000, 0x565234afb000,
		0x565234afc000, 0x565234afd000,
		0x565234afd000, 0x565234afe000,
		0x565235def000, 0x565235e10000,
		0x7f36d4bfd000, 0x7f36d4ee2000,
		0x7f36d4ee2000, 0x7f36d4f04000,
		0x7f36d4f04000, 0x7f36d504c000,
		0x7f36d504c000, 0x7f36d5098000,
		0x7f36d5098000, 0x7f36d5099000,
		0x7f36d5099000, 0x7f36d509d000,
		0x7f36d509d000, 0x7f36d509f000,
		0x7f36d509f000, 0x7f36d50a5000,
		0x7f36d50b9000, 0x7f36d50db000,
		0x7f36d50db000, 0x7f36d50dc000,
		0x7f36d50dc000, 0x7f36d50fa000,
		0x7f36d50fa000, 0x7f36d5102000,
		0x7f36d5102000, 0x7f36d5103000,
		0x7f36d5103000, 0x7f36d5104000,
		0x7f36d5104000, 0x7f36d5105000,
		0x7fff5876b000, 0x7fff5878d000,
		0x7fff5878e000, 0x7fff58791000,
		0x7fff58791000, 0x7fff58793000,
	};

	static const unsigned long holes[] = {
		/*
		 * Note: start of hole is INCLUSIVE
		 *       end of hole is EXCLUSIVE
		 *       (opposite of the above table.)
		 * Start of hole, end of hole, size of hole (+1)
		 */
		0x565234afb000, 0x565234afc000, 0x1000,
		0x565234afe000, 0x565235def000, 0x12F1000,
		0x565235e10000, 0x7f36d4bfd000, 0x28E49EDED000,
	};

	/*
	 * req_range consists of 5 values.
	 * 1. min index
	 * 2. max index
	 * 3. size
	 * 4. number that should be returned.
	 * 5. return value
	 */
	static const unsigned long req_range[] = {
		0x565234af9000,		/* Min */
		0x7fff58791000,		/* Max */
		0x1000,			/* Size */
		0x7fff5878d << 12,	/* First rev hole of size 0x1000 */
		0,			/* Return value success. */

		0x0,			/* Min */
		0x565234AF0 << 12,	/* Max */
		0x3000,			/* Size */
		0x565234AEE << 12,	/* max - 3. */
		0,			/* Return value success. */

		0x0,			/* Min */
		-1,			/* Max */
		0x1000,			/* Size */
		562949953421311 << 12,	/* First rev hole of size 0x1000 */
		0,			/* Return value success. */

		0x0,			/* Min */
		0x7F36D5109 << 12,	/* Max */
		0x4000,			/* Size */
		0x7F36D5106 << 12,	/* First rev hole of size 0x4000 */
		0,			/* Return value success. */

		/* Ascend test. */
		0x0,
		34148798628 << 12,
		19 << 12,
		34148797418 << 12,
		0x0,

		/* Too big test. */
		0x0,
		18446744073709551615UL,
		562915594369134UL << 12,
		0x0,
		-EBUSY,

		/* Single space test. */
		34148798725 << 12,
		34148798725 << 12,
		1 << 12,
		34148798725 << 12,
		0,
	};

	int i, range_count = ARRAY_SIZE(range);
	int req_range_count = ARRAY_SIZE(req_range);
	unsigned long min = 0;

	MA_STATE(mas, mt, 0, 0);

	mtree_store_range(mt, MTREE_ALLOC_MAX, ULONG_MAX, XA_ZERO_ENTRY,
			  GFP_KERNEL);
#define DEBUG_REV_RANGE 0
	for (i = 0; i < range_count; i += 2) {
		/* Inclusive, Inclusive (with the -1) */

#if DEBUG_REV_RANGE
		pr_debug("\t%s: Insert %lu-%lu\n", __func__, range[i] >> 12,
				(range[i + 1] >> 12) - 1);
#endif
		check_insert_range(mt, range[i] >> 12, (range[i + 1] >> 12) - 1,
				xa_mk_value(range[i] >> 12), 0);
		mt_validate(mt);
	}


	mas_lock(&mas);
	for (i = 0; i < ARRAY_SIZE(holes); i += 3) {
#if DEBUG_REV_RANGE
		pr_debug("Search from %lu-%lu for gap %lu should be at %lu\n",
				min, holes[i+1]>>12, holes[i+2]>>12,
				holes[i] >> 12);
#endif
		MT_BUG_ON(mt, mas_empty_area_rev(&mas, min,
					holes[i+1] >> 12,
					holes[i+2] >> 12));
#if DEBUG_REV_RANGE
		pr_debug("Found %lu %lu\n", mas.index, mas.last);
		pr_debug("gap %lu %lu\n", (holes[i] >> 12),
				(holes[i+1] >> 12));
#endif
		MT_BUG_ON(mt, mas.last + 1 != (holes[i+1] >> 12));
		MT_BUG_ON(mt, mas.index != (holes[i+1] >> 12) - (holes[i+2] >> 12));
		min = holes[i+1] >> 12;
		mas_reset(&mas);
	}

	mas_unlock(&mas);
	for (i = 0; i < req_range_count; i += 5) {
#if DEBUG_REV_RANGE
		pr_debug("\tReverse request %d between %lu-%lu size %lu, should get %lu\n",
				i, req_range[i] >> 12,
				(req_range[i + 1] >> 12),
				req_range[i+2] >> 12,
				req_range[i+3] >> 12);
#endif
		check_mtree_alloc_rrange(mt,
				req_range[i]   >> 12, /* start */
				req_range[i+1] >> 12, /* end */
				req_range[i+2] >> 12, /* size */
				req_range[i+3] >> 12, /* expected address */
				req_range[i+4],       /* expected return */
				xa_mk_value(req_range[i] >> 12)); /* pointer */
		mt_validate(mt);
	}

	mt_set_non_kernel(1);
	mtree_erase(mt, 34148798727); /* create a deleted range. */
	mtree_erase(mt, 34148798725);
	check_mtree_alloc_rrange(mt, 0, 34359052173, 210253414,
			34148798725, 0, mt);

	mtree_destroy(mt);
}

static noinline void __init check_alloc_range(struct maple_tree *mt)
{
	/*
	 * Generated by:
	 * cat /proc/self/maps|awk '{print $1}'|
	 * awk -F "-" '{printf "0x%s, 0x%s, ", $1, $2}'
	 */

	static const unsigned long range[] = {
	/*      Inclusive     , Exclusive. */
		0x565234af2000, 0x565234af4000,
		0x565234af4000, 0x565234af9000,
		0x565234af9000, 0x565234afb000,
		0x565234afc000, 0x565234afd000,
		0x565234afd000, 0x565234afe000,
		0x565235def000, 0x565235e10000,
		0x7f36d4bfd000, 0x7f36d4ee2000,
		0x7f36d4ee2000, 0x7f36d4f04000,
		0x7f36d4f04000, 0x7f36d504c000,
		0x7f36d504c000, 0x7f36d5098000,
		0x7f36d5098000, 0x7f36d5099000,
		0x7f36d5099000, 0x7f36d509d000,
		0x7f36d509d000, 0x7f36d509f000,
		0x7f36d509f000, 0x7f36d50a5000,
		0x7f36d50b9000, 0x7f36d50db000,
		0x7f36d50db000, 0x7f36d50dc000,
		0x7f36d50dc000, 0x7f36d50fa000,
		0x7f36d50fa000, 0x7f36d5102000,
		0x7f36d5102000, 0x7f36d5103000,
		0x7f36d5103000, 0x7f36d5104000,
		0x7f36d5104000, 0x7f36d5105000,
		0x7fff5876b000, 0x7fff5878d000,
		0x7fff5878e000, 0x7fff58791000,
		0x7fff58791000, 0x7fff58793000,
	};
	static const unsigned long holes[] = {
		/* Start of hole, end of hole, size of hole (+1) */
		0x565234afb000, 0x565234afc000, 0x1000,
		0x565234afe000, 0x565235def000, 0x12F1000,
		0x565235e10000, 0x7f36d4bfd000, 0x28E49EDED000,
	};

	/*
	 * req_range consists of 5 values.
	 * 1. min index
	 * 2. max index
	 * 3. size
	 * 4. number that should be returned.
	 * 5. return value
	 */
	static const unsigned long req_range[] = {
		0x565234af9000,		/* Min */
		0x7fff58791000,		/* Max */
		0x1000,			/* Size */
		0x565234afb000,		/* First hole in our data of size 1000. */
		0,			/* Return value success. */

		0x0,			/* Min */
		0x7fff58791000,		/* Max */
		0x1F00,			/* Size */
		0x0,			/* First hole in our data of size 2000. */
		0,			/* Return value success. */

		/* Test ascend. */
		34148797436 << 12,	/* Min */
		0x7fff587AF000,		/* Max */
		0x3000,			/* Size */
		34148798629 << 12,	/* Expected location */
		0,			/* Return value success. */

		/* Test failing. */
		34148798623 << 12,	/* Min */
		34148798683 << 12,	/* Max */
		0x15000,		/* Size */
		0,			/* Expected location */
		-EBUSY,			/* Return value failed. */

		/* Test filling entire gap. */
		34148798623 << 12,	/* Min */
		0x7fff587AF000,		/* Max */
		0x10000,		/* Size */
		34148798632 << 12,	/* Expected location */
		0,			/* Return value success. */

		/* Test walking off the end of root. */
		0,			/* Min */
		-1,			/* Max */
		-1,			/* Size */
		0,			/* Expected location */
		-EBUSY,			/* Return value failure. */

		/* Test looking for too large a hole across entire range. */
		0,			/* Min */
		-1,			/* Max */
		4503599618982063UL << 12,	/* Size */
		34359052178 << 12,	/* Expected location */
		-EBUSY,			/* Return failure. */

		/* Test a single entry */
		34148798648 << 12,	/* Min */
		34148798648 << 12,	/* Max */
		4096,			/* Size of 1 */
		34148798648 << 12,	/* Location is the same as min/max */
		0,			/* Success */
	};
	int i, range_count = ARRAY_SIZE(range);
	int req_range_count = ARRAY_SIZE(req_range);
	unsigned long min = 0x565234af2000;
	MA_STATE(mas, mt, 0, 0);

	mtree_store_range(mt, MTREE_ALLOC_MAX, ULONG_MAX, XA_ZERO_ENTRY,
			  GFP_KERNEL);
	for (i = 0; i < range_count; i += 2) {
#define DEBUG_ALLOC_RANGE 0
#if DEBUG_ALLOC_RANGE
		pr_debug("\tInsert %lu-%lu\n", range[i] >> 12,
			 (range[i + 1] >> 12) - 1);
		mt_dump(mt, mt_dump_hex);
#endif
		check_insert_range(mt, range[i] >> 12, (range[i + 1] >> 12) - 1,
				xa_mk_value(range[i] >> 12), 0);
		mt_validate(mt);
	}



	mas_lock(&mas);
	for (i = 0; i < ARRAY_SIZE(holes); i += 3) {

#if DEBUG_ALLOC_RANGE
		pr_debug("\tGet empty %lu-%lu size %lu (%lx-%lx)\n", min >> 12,
			holes[i+1] >> 12, holes[i+2] >> 12,
			min, holes[i+1]);
#endif
		MT_BUG_ON(mt, mas_empty_area(&mas, min >> 12,
					holes[i+1] >> 12,
					holes[i+2] >> 12));
		MT_BUG_ON(mt, mas.index != holes[i] >> 12);
		min = holes[i+1];
		mas_reset(&mas);
	}
	mas_unlock(&mas);
	for (i = 0; i < req_range_count; i += 5) {
#if DEBUG_ALLOC_RANGE
		pr_debug("\tTest %d: %lu-%lu size %lu expected %lu (%lu-%lu)\n",
			 i/5, req_range[i] >> 12, req_range[i + 1] >> 12,
			 req_range[i + 2] >> 12, req_range[i + 3] >> 12,
			 req_range[i], req_range[i+1]);
#endif
		check_mtree_alloc_range(mt,
				req_range[i]   >> 12, /* start */
				req_range[i+1] >> 12, /* end */
				req_range[i+2] >> 12, /* size */
				req_range[i+3] >> 12, /* expected address */
				req_range[i+4],       /* expected return */
				xa_mk_value(req_range[i] >> 12)); /* pointer */
		mt_validate(mt);
#if DEBUG_ALLOC_RANGE
		mt_dump(mt, mt_dump_hex);
#endif
	}

	mtree_destroy(mt);
}
#endif

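/*
 * check_ranges() - Exercise overlapping range stores, overwrites across node
 * boundaries and levels, rebalancing around erased gaps, and splits that
 * propagate all the way up to the root.
 */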
static noinline void __init check_ranges(struct maple_tree *mt)
{
	int i, val, val2;
	static const unsigned long r[] = {
		10, 15,
		20, 25,
		17, 22, /* Overlaps previous range. */
		9, 1000, /* Huge. */
		100, 200,
		45, 168,
		118, 128,
	};

	MT_BUG_ON(mt, !mtree_empty(mt));
	check_insert_range(mt, r[0], r[1], xa_mk_value(r[0]), 0);
	check_insert_range(mt, r[2], r[3], xa_mk_value(r[2]), 0);
	check_insert_range(mt, r[4], r[5], xa_mk_value(r[4]), -EEXIST);
	MT_BUG_ON(mt, !mt_height(mt));
	/* Store */
	check_store_range(mt, r[4], r[5], xa_mk_value(r[4]), 0);
	check_store_range(mt, r[6], r[7], xa_mk_value(r[6]), 0);
	check_store_range(mt, r[8], r[9], xa_mk_value(r[8]), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);
	MT_BUG_ON(mt, mt_height(mt));

	check_seq(mt, 50, false);
	mt_set_non_kernel(4);
	check_store_range(mt, 5, 47, xa_mk_value(47), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	/* Create tree of 1-100 */
	check_seq(mt, 100, false);
	/* Store 45-168 */
	mt_set_non_kernel(10);
	check_store_range(mt, r[10], r[11], xa_mk_value(r[10]), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	/* Create tree of 1-200 */
	check_seq(mt, 200, false);
	/* Store 45-168 */
	check_store_range(mt, r[10], r[11], xa_mk_value(r[10]), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	check_seq(mt, 30, false);
	check_store_range(mt, 6, 18, xa_mk_value(6), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	/* Overwrite across multiple levels. */
	/* Create tree of 1-400 */
	check_seq(mt, 400, false);
	mt_set_non_kernel(50);
	/* Store 118-128 */
	check_store_range(mt, r[12], r[13], xa_mk_value(r[12]), 0);
	mt_set_non_kernel(50);
	mtree_test_erase(mt, 140);
	mtree_test_erase(mt, 141);
	mtree_test_erase(mt, 142);
	mtree_test_erase(mt, 143);
	mtree_test_erase(mt, 130);
	mtree_test_erase(mt, 131);
	mtree_test_erase(mt, 132);
	mtree_test_erase(mt, 133);
	mtree_test_erase(mt, 134);
	mtree_test_erase(mt, 135);
	check_load(mt, r[12], xa_mk_value(r[12]));
	check_load(mt, r[13], xa_mk_value(r[12]));
	check_load(mt, r[13] - 1, xa_mk_value(r[12]));
	check_load(mt, r[13] + 1, xa_mk_value(r[13] + 1));
	check_load(mt, 135, NULL);
	check_load(mt, 140, NULL);
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);



	/* Overwrite multiple levels at the end of the tree (slot 7) */
	mt_set_non_kernel(50);
	check_seq(mt, 400, false);
	check_store_range(mt, 353, 361, xa_mk_value(353), 0);
	check_store_range(mt, 347, 352, xa_mk_value(347), 0);

	check_load(mt, 346, xa_mk_value(346));
	for (i = 347; i <= 352; i++)
		check_load(mt, i, xa_mk_value(347));
	for (i = 353; i <= 361; i++)
		check_load(mt, i, xa_mk_value(353));
	check_load(mt, 362, xa_mk_value(362));
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_set_non_kernel(50);
	check_seq(mt, 400, false);
	check_store_range(mt, 352, 364, NULL, 0);
	check_store_range(mt, 351, 363, xa_mk_value(352), 0);
	check_load(mt, 350, xa_mk_value(350));
	check_load(mt, 351, xa_mk_value(352));
	for (i = 352; i <= 363; i++)
		check_load(mt, i, xa_mk_value(352));
	check_load(mt, 364, NULL);
	check_load(mt, 365, xa_mk_value(365));
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_set_non_kernel(5);
	check_seq(mt, 400, false);
	check_store_range(mt, 352, 364, NULL, 0);
	check_store_range(mt, 351, 364, xa_mk_value(352), 0);
	check_load(mt, 350, xa_mk_value(350));
	check_load(mt, 351, xa_mk_value(352));
	for (i = 352; i <= 364; i++)
		check_load(mt, i, xa_mk_value(352));
	check_load(mt, 365, xa_mk_value(365));
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);


	mt_set_non_kernel(50);
	check_seq(mt, 400, false);
	check_store_range(mt, 362, 367, xa_mk_value(362), 0);
	check_store_range(mt, 353, 361, xa_mk_value(353), 0);
	mt_set_non_kernel(0);
	mt_validate(mt);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);
	/*
	 * Interesting cases:
	 * 1. Overwrite the end of a node and end in the first entry of the
	 *    next node.
	 * 2. Split a single range
	 * 3. Overwrite the start of a range
	 * 4. Overwrite the end of a range
	 * 5. Overwrite the entire range
	 * 6. Overwrite a range that causes multiple parent nodes to be
	 *    combined
	 * 7. Overwrite a range that causes multiple parent nodes and part of
	 *    root to be combined
	 * 8. Overwrite the whole tree
	 * 9. Try to overwrite the zero entry of an alloc tree.
	 * 10. Write a range larger than a node's current pivot
	 */

	mt_set_non_kernel(50);
	for (i = 0; i <= 500; i++) {
		val = i*5;
		val2 = (i+1)*5;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 2400, 2400, xa_mk_value(2400), 0);
	check_store_range(mt, 2411, 2411, xa_mk_value(2411), 0);
	check_store_range(mt, 2412, 2412, xa_mk_value(2412), 0);
	check_store_range(mt, 2396, 2400, xa_mk_value(4052020), 0);
	check_store_range(mt, 2402, 2402, xa_mk_value(2402), 0);
	mtree_destroy(mt);
	mt_set_non_kernel(0);

	mt_set_non_kernel(50);
	for (i = 0; i <= 500; i++) {
		val = i*5;
		val2 = (i+1)*5;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 2422, 2422, xa_mk_value(2422), 0);
	check_store_range(mt, 2424, 2424, xa_mk_value(2424), 0);
	check_store_range(mt, 2425, 2425, xa_mk_value(2), 0);
	check_store_range(mt, 2460, 2470, NULL, 0);
	check_store_range(mt, 2435, 2460, xa_mk_value(2435), 0);
	check_store_range(mt, 2461, 2470, xa_mk_value(2461), 0);
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	/* Test rebalance gaps */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mt_set_non_kernel(50);
	for (i = 0; i <= 50; i++) {
		val = i*10;
		val2 = (i+1)*10;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 161, 161, xa_mk_value(161), 0);
	check_store_range(mt, 162, 162, xa_mk_value(162), 0);
	check_store_range(mt, 163, 163, xa_mk_value(163), 0);
	check_store_range(mt, 240, 249, NULL, 0);
	mtree_erase(mt, 200);
	mtree_erase(mt, 210);
	mtree_erase(mt, 220);
	mtree_erase(mt, 230);
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	for (i = 0; i <= 500; i++) {
		val = i*10;
		val2 = (i+1)*10;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 4600, 4959, xa_mk_value(1), 0);
	mt_validate(mt);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	for (i = 0; i <= 500; i++) {
		val = i*10;
		val2 = (i+1)*10;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 4811, 4811, xa_mk_value(4811), 0);
	check_store_range(mt, 4812, 4812, xa_mk_value(4812), 0);
	check_store_range(mt, 4861, 4861, xa_mk_value(4861), 0);
	check_store_range(mt, 4862, 4862, xa_mk_value(4862), 0);
	check_store_range(mt, 4842, 4849, NULL, 0);
	mt_validate(mt);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	for (i = 0; i <= 1300; i++) {
		val = i*10;
		val2 = (i+1)*10;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
		MT_BUG_ON(mt, mt_height(mt) >= 4);
	}
	/* Cause a 3 child split all the way up the tree. */
	for (i = 5; i < 215; i += 10)
		check_store_range(mt, 11450 + i, 11450 + i + 1, NULL, 0);
	for (i = 5; i < 65; i += 10)
		check_store_range(mt, 11770 + i, 11770 + i + 1, NULL, 0);

	MT_BUG_ON(mt, mt_height(mt) >= 4);
	for (i = 5; i < 45; i += 10)
		check_store_range(mt, 11700 + i, 11700 + i + 1, NULL, 0);
	if (!MAPLE_32BIT)
		MT_BUG_ON(mt, mt_height(mt) < 4);
	mtree_destroy(mt);


	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	for (i = 0; i <= 1200; i++) {
		val = i*10;
		val2 = (i+1)*10;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
		MT_BUG_ON(mt, mt_height(mt) >= 4);
	}
	/* Fill parents and leaves before split. */
	for (i = 5; i < 455; i += 10)
		check_store_range(mt, 7800 + i, 7800 + i + 1, NULL, 0);

	for (i = 1; i < 16; i++)
		check_store_range(mt, 8185 + i, 8185 + i + 1,
				  xa_mk_value(8185+i), 0);
	MT_BUG_ON(mt, mt_height(mt) >= 4);
	/* triple split across multiple levels. */
	check_store_range(mt, 8184, 8184, xa_mk_value(8184), 0);
	if (!MAPLE_32BIT)
		MT_BUG_ON(mt, mt_height(mt) != 4);
}

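/*
 * check_next_entry()/check_prev_entry() - Build small sequential trees and
 * verify that mas_next() and mas_prev() return entries in order, respect the
 * supplied limits, and return NULL once the limit is passed.
 */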
static noinline void __init check_next_entry(struct maple_tree *mt)
{
	void *entry = NULL;
	unsigned long limit = 30, i = 0;
	MA_STATE(mas, mt, i, i);

	MT_BUG_ON(mt, !mtree_empty(mt));

	check_seq(mt, limit, false);
	rcu_read_lock();

	/* Check the first one and get ma_state in the correct state. */
	MT_BUG_ON(mt, mas_walk(&mas) != xa_mk_value(i++));
	for ( ; i <= limit + 1; i++) {
		entry = mas_next(&mas, limit);
		if (i > limit)
			MT_BUG_ON(mt, entry != NULL);
		else
			MT_BUG_ON(mt, xa_mk_value(i) != entry);
	}
	rcu_read_unlock();
	mtree_destroy(mt);
}

static noinline void __init check_prev_entry(struct maple_tree *mt)
{
	unsigned long index = 16;
	void *value;
	int i;

	MA_STATE(mas, mt, index, index);

	MT_BUG_ON(mt, !mtree_empty(mt));
	check_seq(mt, 30, false);

	rcu_read_lock();
	value = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, value != xa_mk_value(index));
	value = mas_prev(&mas, 0);
	MT_BUG_ON(mt, value != xa_mk_value(index - 1));
	rcu_read_unlock();
	mtree_destroy(mt);

	/* Check limits on prev */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mas_lock(&mas);
	for (i = 0; i <= index; i++) {
		mas_set_range(&mas, i*10, i*10+5);
		mas_store_gfp(&mas, xa_mk_value(i), GFP_KERNEL);
	}

	mas_set(&mas, 20);
	value = mas_walk(&mas);
	MT_BUG_ON(mt, value != xa_mk_value(2));

	value = mas_prev(&mas, 19);
	MT_BUG_ON(mt, value != NULL);

	mas_set(&mas, 80);
	value = mas_walk(&mas);
	MT_BUG_ON(mt, value != xa_mk_value(8));

	value = mas_prev(&mas, 76);
	MT_BUG_ON(mt, value != NULL);

	mas_unlock(&mas);
}

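/*
 * check_root_expand() - Verify walking and iterating a root node, including
 * the reported index/last limits when the root holds NULL, a plain pointer,
 * or a tagged pointer entry.
 */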
static noinline void __init check_root_expand(struct maple_tree *mt)
{
	MA_STATE(mas, mt, 0, 0);
	void *ptr;


	mas_lock(&mas);
	mas_set(&mas, 3);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, mas.last != ULONG_MAX);

	ptr = &check_prev_entry;
	mas_set(&mas, 1);
	mas_store_gfp(&mas, ptr, GFP_KERNEL);

	mas_set(&mas, 0);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != NULL);

	mas_set(&mas, 1);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != &check_prev_entry);

	mas_set(&mas, 2);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != NULL);
	mas_unlock(&mas);
	mtree_destroy(mt);


	mt_init_flags(mt, 0);
	mas_lock(&mas);

	mas_set(&mas, 0);
	ptr = &check_prev_entry;
	mas_store_gfp(&mas, ptr, GFP_KERNEL);

	mas_set(&mas, 5);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, mas.index != 1);
	MT_BUG_ON(mt, mas.last != ULONG_MAX);

	mas_set_range(&mas, 0, 100);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != &check_prev_entry);
	MT_BUG_ON(mt, mas.last != 0);
	mas_unlock(&mas);
	mtree_destroy(mt);

	mt_init_flags(mt, 0);
	mas_lock(&mas);

	mas_set(&mas, 0);
	ptr = (void *)((unsigned long) check_prev_entry | 1UL);
	mas_store_gfp(&mas, ptr, GFP_KERNEL);
	ptr = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, (mas.index != 1) && (mas.last != ULONG_MAX));

	mas_set(&mas, 1);
	ptr = mas_prev(&mas, 0);
	MT_BUG_ON(mt, (mas.index != 0) && (mas.last != 0));
	MT_BUG_ON(mt, ptr != (void *)((unsigned long) check_prev_entry | 1UL));

	mas_unlock(&mas);

	mtree_destroy(mt);

	mt_init_flags(mt, 0);
	mas_lock(&mas);
	mas_set(&mas, 0);
	ptr = (void *)((unsigned long) check_prev_entry | 2UL);
	mas_store_gfp(&mas, ptr, GFP_KERNEL);
	ptr = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, (mas.index != ULONG_MAX) && (mas.last != ULONG_MAX));

	mas_set(&mas, 1);
	ptr = mas_prev(&mas, 0);
	MT_BUG_ON(mt, (mas.index != 0) && (mas.last != 0));
	MT_BUG_ON(mt, ptr != (void *)((unsigned long) check_prev_entry | 2UL));


	mas_unlock(&mas);
}

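/*
 * check_gap_combining() - Erase neighbouring entries and verify that the
 * resulting gaps are combined and found by mas_empty_area_rev() across node
 * boundaries, then repeat on multi-level and rebalancing trees.
 */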
static noinline void __init check_gap_combining(struct maple_tree *mt)
{
	struct maple_enode *mn1, *mn2;
	void *entry;
	unsigned long singletons = 100;
	static const unsigned long *seq100;
	static const unsigned long seq100_64[] = {
		/* 0-5 */
		74, 75, 76,
		50, 100, 2,

		/* 6-12 */
		44, 45, 46, 43,
		20, 50, 3,

		/* 13-20*/
		80, 81, 82,
		76, 2, 79, 85, 4,
	};

	static const unsigned long seq100_32[] = {
		/* 0-5 */
		61, 62, 63,
		50, 100, 2,

		/* 6-12 */
		31, 32, 33, 30,
		20, 50, 3,

		/* 13-20*/
		80, 81, 82,
		76, 2, 79, 85, 4,
	};

	static const unsigned long seq2000[] = {
		1152, 1151,
		1100, 1200, 2,
	};
	static const unsigned long seq400[] = {
		286, 318,
		256, 260, 266, 270, 275, 280, 290, 398,
		286, 310,
	};

	unsigned long index;

	MA_STATE(mas, mt, 0, 0);

	if (MAPLE_32BIT)
		seq100 = seq100_32;
	else
		seq100 = seq100_64;

	index = seq100[0];
	mas_set(&mas, index);
	MT_BUG_ON(mt, !mtree_empty(mt));
	check_seq(mt, singletons, false); /* create 100 singletons. */

	mt_set_non_kernel(1);
	mtree_test_erase(mt, seq100[2]);
	check_load(mt, seq100[2], NULL);
	mtree_test_erase(mt, seq100[1]);
	check_load(mt, seq100[1], NULL);

	rcu_read_lock();
	entry = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index));
	mn1 = mas.node;
	mas_next(&mas, ULONG_MAX);
	entry = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index + 4));
	mn2 = mas.node;
	MT_BUG_ON(mt, mn1 == mn2); /* test the test. */

	/*
	 * At this point, there is a gap of 2 at index + 1 between seq100[3] and
	 * seq100[4]. Search for the gap.
	 */
	mt_set_non_kernel(1);
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[3], seq100[4],
					     seq100[5]));
	MT_BUG_ON(mt, mas.index != index + 1);
	rcu_read_unlock();

	mtree_test_erase(mt, seq100[6]);
	check_load(mt, seq100[6], NULL);
	mtree_test_erase(mt, seq100[7]);
	check_load(mt, seq100[7], NULL);
	mtree_test_erase(mt, seq100[8]);
	index = seq100[9];

	rcu_read_lock();
	mas.index = index;
	mas.last = index;
	mas_reset(&mas);
	entry = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index));
	mn1 = mas.node;
	entry = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index + 4));
	mas_next(&mas, ULONG_MAX); /* go to the next entry. */
	mn2 = mas.node;
	MT_BUG_ON(mt, mn1 == mn2); /* test the next entry is in the next node. */

	/*
	 * At this point, there is a gap of 3 at seq100[6].  Find it by
	 * searching 20 - 50 for size 3.
	 */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[10], seq100[11],
					     seq100[12]));
	MT_BUG_ON(mt, mas.index != seq100[6]);
	rcu_read_unlock();

	mt_set_non_kernel(1);
	mtree_store(mt, seq100[13], NULL, GFP_KERNEL);
	check_load(mt, seq100[13], NULL);
	check_load(mt, seq100[14], xa_mk_value(seq100[14]));
	mtree_store(mt, seq100[14], NULL, GFP_KERNEL);
	check_load(mt, seq100[13], NULL);
	check_load(mt, seq100[14], NULL);

	mas_reset(&mas);
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[16], seq100[15],
					     seq100[17]));
	MT_BUG_ON(mt, mas.index != seq100[13]);
	mt_validate(mt);
	rcu_read_unlock();

	/*
	 * *DEPRECATED: no retries anymore* Test retry entry in the start of a
	 * gap.
	 */
	mt_set_non_kernel(2);
	mtree_test_store_range(mt, seq100[18], seq100[14], NULL);
	mtree_test_erase(mt, seq100[15]);
	mas_reset(&mas);
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[16], seq100[19],
					     seq100[20]));
	rcu_read_unlock();
	MT_BUG_ON(mt, mas.index != seq100[18]);
	mt_validate(mt);
	mtree_destroy(mt);

	/* seq 2000 tests are for multi-level tree gaps */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	check_seq(mt, 2000, false);
	mt_set_non_kernel(1);
	mtree_test_erase(mt, seq2000[0]);
	mtree_test_erase(mt, seq2000[1]);

	mt_set_non_kernel(2);
	mas_reset(&mas);
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq2000[2], seq2000[3],
					     seq2000[4]));
	MT_BUG_ON(mt, mas.index != seq2000[1]);
	rcu_read_unlock();
	mt_validate(mt);
	mtree_destroy(mt);

	/* seq 400 tests rebalancing over two levels. */
	mt_set_non_kernel(99);
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	check_seq(mt, 400, false);
	mtree_test_store_range(mt, seq400[0], seq400[1], NULL);
	mt_set_non_kernel(0);
	mtree_destroy(mt);

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	check_seq(mt, 400, false);
	mt_set_non_kernel(50);
	mtree_test_store_range(mt, seq400[2], seq400[9],
			       xa_mk_value(seq400[2]));
	mtree_test_store_range(mt, seq400[3], seq400[9],
			       xa_mk_value(seq400[3]));
	mtree_test_store_range(mt, seq400[4], seq400[9],
			       xa_mk_value(seq400[4]));
	mtree_test_store_range(mt, seq400[5], seq400[9],
			       xa_mk_value(seq400[5]));
	mtree_test_store_range(mt, seq400[0], seq400[9],
			       xa_mk_value(seq400[0]));
	mtree_test_store_range(mt, seq400[6], seq400[9],
			       xa_mk_value(seq400[6]));
	mtree_test_store_range(mt, seq400[7], seq400[9],
			       xa_mk_value(seq400[7]));
	mtree_test_store_range(mt, seq400[8], seq400[9],
			       xa_mk_value(seq400[8]));
	mtree_test_store_range(mt, seq400[10], seq400[11],
			       xa_mk_value(seq400[10]));
	mt_validate(mt);
	mt_set_non_kernel(0);
	mtree_destroy(mt);
}
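
/*
 * check_node_overwrite() - Store NULL over a span covering many existing
 * ranges so whole nodes are overwritten, then validate the tree.
 */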
static noinline void __init check_node_overwrite(struct maple_tree *mt)
{
	int i, max = 4000;

	for (i = 0; i < max; i++)
		mtree_test_store_range(mt, i*100, i*100 + 50, xa_mk_value(i*100));

	mtree_test_store_range(mt, 319951, 367950, NULL);
	/*mt_dump(mt, mt_dump_dec); */
	mt_validate(mt);
}

#if defined(BENCH_SLOT_STORE)
static noinline void __init bench_slot_store(struct maple_tree *mt)
{
	int i, brk = 105, max = 1040, brk_start = 100, count = 20000000;

	for (i = 0; i < max; i += 10)
		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);

	for (i = 0; i < count; i++) {
		mtree_store_range(mt, brk, brk, NULL, GFP_KERNEL);
		mtree_store_range(mt, brk_start, brk, xa_mk_value(brk),
				  GFP_KERNEL);
	}
}
#endif

#if defined(BENCH_NODE_STORE)
static noinline void __init bench_node_store(struct maple_tree *mt)
{
	int i, overwrite = 76, max = 240, count = 20000000;

	for (i = 0; i < max; i += 10)
		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);

	for (i = 0; i < count; i++) {
		mtree_store_range(mt, overwrite, overwrite + 15,
				  xa_mk_value(overwrite), GFP_KERNEL);

		overwrite += 5;
		if (overwrite >= 135)
			overwrite = 76;
	}
}
#endif

#if defined(BENCH_AWALK)
static noinline void __init bench_awalk(struct maple_tree *mt)
{
	int i, max = 2500, count = 50000000;
	MA_STATE(mas, mt, 1470, 1470);

	for (i = 0; i < max; i += 10)
		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);

	mtree_store_range(mt, 1470, 1475, NULL, GFP_KERNEL);

	for (i = 0; i < count; i++) {
		mas_empty_area_rev(&mas, 0, 2000, 10);
		mas_reset(&mas);
	}
}
#endif
#if defined(BENCH_WALK)
static noinline void __init bench_walk(struct maple_tree *mt)
{
	int i, max = 2500, count = 550000000;
	MA_STATE(mas, mt, 1470, 1470);

	for (i = 0; i < max; i += 10)
		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);

	for (i = 0; i < count; i++) {
		mas_walk(&mas);
		mas_reset(&mas);
	}

}
#endif

#if defined(BENCH_MT_FOR_EACH)
static noinline void __init bench_mt_for_each(struct maple_tree *mt)
{
	int i, count = 1000000;
	unsigned long max = 2500, index = 0;
	void *entry;

	for (i = 0; i < max; i += 5)
		mtree_store_range(mt, i, i + 4, xa_mk_value(i), GFP_KERNEL);

	for (i = 0; i < count; i++) {
		unsigned long j = 0;

		mt_for_each(mt, entry, index, max) {
			MT_BUG_ON(mt, entry != xa_mk_value(j));
			j += 5;
		}

		index = 0;
	}

}
#endif

/* check_forking - simulate the kernel forking sequence with the tree. */
static noinline void __init check_forking(struct maple_tree *mt)
{

	struct maple_tree newmt;
	int i, nr_entries = 134;
	void *val;
	MA_STATE(mas, mt, 0, 0);
	MA_STATE(newmas, mt, 0, 0);

	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 5,
				  xa_mk_value(i), GFP_KERNEL);

	mt_set_non_kernel(99999);
	mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
	newmas.tree = &newmt;
	mas_reset(&newmas);
	mas_reset(&mas);
	mas_lock(&newmas);
	mas.index = 0;
	mas.last = 0;
	if (mas_expected_entries(&newmas, nr_entries)) {
		pr_err("OOM!");
		BUG_ON(1);
	}
	rcu_read_lock();
	mas_for_each(&mas, val, ULONG_MAX) {
		newmas.index = mas.index;
		newmas.last = mas.last;
		mas_store(&newmas, val);
	}
	rcu_read_unlock();
	mas_destroy(&newmas);
	mas_unlock(&newmas);
	mt_validate(&newmt);
	mt_set_non_kernel(0);
	mtree_destroy(&newmt);
}

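/*
 * check_iteration() - Verify that storing over the current range during a
 * mas_for_each() iteration does not skip or repeat entries and that the
 * following mas_find() resumes at the expected value.
 */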
static noinline void __init check_iteration(struct maple_tree *mt)
{
	int i, nr_entries = 125;
	void *val;
	MA_STATE(mas, mt, 0, 0);

	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i * 10, i * 10 + 9,
				  xa_mk_value(i), GFP_KERNEL);

	mt_set_non_kernel(99999);

	i = 0;
	mas_lock(&mas);
	mas_for_each(&mas, val, 925) {
		MT_BUG_ON(mt, mas.index != i * 10);
		MT_BUG_ON(mt, mas.last != i * 10 + 9);
		/* Overwrite end of entry 92 */
		if (i == 92) {
			mas.index = 925;
			mas.last = 929;
			mas_store(&mas, val);
		}
		i++;
	}
	/* Ensure mas_find() gets the next value */
	val = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, val != xa_mk_value(i));

	mas_set(&mas, 0);
	i = 0;
	mas_for_each(&mas, val, 785) {
		MT_BUG_ON(mt, mas.index != i * 10);
		MT_BUG_ON(mt, mas.last != i * 10 + 9);
		/* Overwrite start of entry 78 */
		if (i == 78) {
			mas.index = 780;
			mas.last = 785;
			mas_store(&mas, val);
		} else {
			i++;
		}
	}
	val = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, val != xa_mk_value(i));

	mas_set(&mas, 0);
	i = 0;
	mas_for_each(&mas, val, 765) {
		MT_BUG_ON(mt, mas.index != i * 10);
		MT_BUG_ON(mt, mas.last != i * 10 + 9);
		/* Overwrite end of entry 76 and advance to the end */
		if (i == 76) {
			mas.index = 760;
			mas.last = 765;
			mas_store(&mas, val);
		}
		i++;
	}
	/* Make sure the next find returns the one after 765, 766-769 */
	val = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, val != xa_mk_value(76));
	mas_unlock(&mas);
	mas_destroy(&mas);
	mt_set_non_kernel(0);
}

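/*
 * check_mas_store_gfp() - Copy a populated tree into a fresh one using only
 * mas_store_gfp() and validate the result.
 */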
eaf9790d 1815static noinline void __init check_mas_store_gfp(struct maple_tree *mt)
e15e06a8 1816{
e15e06a8 1817
120b1162
LH
1818 struct maple_tree newmt;
1819 int i, nr_entries = 135;
1820 void *val;
1821 MA_STATE(mas, mt, 0, 0);
1822 MA_STATE(newmas, mt, 0, 0);
e15e06a8 1823
120b1162
LH
1824 for (i = 0; i <= nr_entries; i++)
1825 mtree_store_range(mt, i*10, i*10 + 5,
1826 xa_mk_value(i), GFP_KERNEL);
e15e06a8 1827
120b1162
LH
1828 mt_set_non_kernel(99999);
1829 mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
1830 newmas.tree = &newmt;
1831 rcu_read_lock();
1832 mas_lock(&newmas);
1833 mas_reset(&newmas);
1834 mas_set(&mas, 0);
1835 mas_for_each(&mas, val, ULONG_MAX) {
1836 newmas.index = mas.index;
1837 newmas.last = mas.last;
1838 mas_store_gfp(&newmas, val, GFP_KERNEL);
e15e06a8 1839 }
120b1162
LH
1840 mas_unlock(&newmas);
1841 rcu_read_unlock();
1842 mt_validate(&newmt);
1843 mt_set_non_kernel(0);
1844 mtree_destroy(&newmt);
e15e06a8
LH
1845}
1846
120b1162 1847#if defined(BENCH_FORK)
eaf9790d 1848static noinline void __init bench_forking(struct maple_tree *mt)
e15e06a8
LH
1849{
1850
120b1162
LH
1851 struct maple_tree newmt;
1852 int i, nr_entries = 134, nr_fork = 80000;
1853 void *val;
1854 MA_STATE(mas, mt, 0, 0);
1855 MA_STATE(newmas, mt, 0, 0);
1856
1857 for (i = 0; i <= nr_entries; i++)
1858 mtree_store_range(mt, i*10, i*10 + 5,
1859 xa_mk_value(i), GFP_KERNEL);
1860
1861 for (i = 0; i < nr_fork; i++) {
1862 mt_set_non_kernel(99999);
1863 mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
1864 newmas.tree = &newmt;
1865 mas_reset(&newmas);
1866 mas_reset(&mas);
1867 mas.index = 0;
1868 mas.last = 0;
1869 rcu_read_lock();
1870 mas_lock(&newmas);
1871 if (mas_expected_entries(&newmas, nr_entries)) {
1872 printk("OOM!");
1873 BUG_ON(1);
1874 }
1875 mas_for_each(&mas, val, ULONG_MAX) {
1876 newmas.index = mas.index;
1877 newmas.last = mas.last;
1878 mas_store(&newmas, val);
1879		}
1880 mas_destroy(&newmas);
1881 mas_unlock(&newmas);
1882 rcu_read_unlock();
1883 mt_validate(&newmt);
1884 mt_set_non_kernel(0);
1885 mtree_destroy(&newmt);
1886	}
1887}
1888#endif
1889
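/*
 * next_prev_test() - Exercise mas_next(), mas_prev() and mas_walk() across
 * slot, node and level boundaries, including walking off either end of the
 * tree and back on again.
 */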
1890static noinline void __init next_prev_test(struct maple_tree *mt)
1891{
1892 int i, nr_entries;
1893 void *val;
1894 MA_STATE(mas, mt, 0, 0);
1895 struct maple_enode *mn;
1896 static const unsigned long *level2;
1897 static const unsigned long level2_64[] = { 707, 1000, 710, 715, 720,
1898 725};
1899 static const unsigned long level2_32[] = { 1747, 2000, 1750, 1755,
1900 1760, 1765};
1901	unsigned long last_index;
1902
1903 if (MAPLE_32BIT) {
1904 nr_entries = 500;
1905 level2 = level2_32;
1906		last_index = 0x138e;
1907 } else {
1908 nr_entries = 200;
1909 level2 = level2_64;
1910		last_index = 0x7d6;
1911	}
1912
1913 for (i = 0; i <= nr_entries; i++)
1914 mtree_store_range(mt, i*10, i*10 + 5,
1915 xa_mk_value(i), GFP_KERNEL);
1916
1917 mas_lock(&mas);
1918 for (i = 0; i <= nr_entries / 2; i++) {
1919 mas_next(&mas, 1000);
1920 if (mas_is_none(&mas))
1921 break;
1922
1923	}
1924 mas_reset(&mas);
1925 mas_set(&mas, 0);
1926 i = 0;
1927 mas_for_each(&mas, val, 1000) {
1928 i++;
1929 }
1930
1931 mas_reset(&mas);
1932 mas_set(&mas, 0);
1933 i = 0;
1934 mas_for_each(&mas, val, 1000) {
1935 mas_pause(&mas);
1936 i++;
1937 }
1938
1939 /*
1940 * 680 - 685 = 0x61a00001930c
1941 * 686 - 689 = NULL;
1942 * 690 - 695 = 0x61a00001930c
1943 * Check simple next/prev
1944 */
1945 mas_set(&mas, 686);
1946 val = mas_walk(&mas);
1947 MT_BUG_ON(mt, val != NULL);
1948
1949 val = mas_next(&mas, 1000);
1950 MT_BUG_ON(mt, val != xa_mk_value(690 / 10));
1951 MT_BUG_ON(mt, mas.index != 690);
1952 MT_BUG_ON(mt, mas.last != 695);
1953
1954 val = mas_prev(&mas, 0);
1955 MT_BUG_ON(mt, val != xa_mk_value(680 / 10));
1956 MT_BUG_ON(mt, mas.index != 680);
1957 MT_BUG_ON(mt, mas.last != 685);
1958
1959 val = mas_next(&mas, 1000);
1960 MT_BUG_ON(mt, val != xa_mk_value(690 / 10));
1961 MT_BUG_ON(mt, mas.index != 690);
1962 MT_BUG_ON(mt, mas.last != 695);
1963
1964 val = mas_next(&mas, 1000);
1965 MT_BUG_ON(mt, val != xa_mk_value(700 / 10));
1966 MT_BUG_ON(mt, mas.index != 700);
1967 MT_BUG_ON(mt, mas.last != 705);
1968
1969 /* Check across node boundaries of the tree */
1970 mas_set(&mas, 70);
1971 val = mas_walk(&mas);
1972 MT_BUG_ON(mt, val != xa_mk_value(70 / 10));
1973 MT_BUG_ON(mt, mas.index != 70);
1974 MT_BUG_ON(mt, mas.last != 75);
1975
1976 val = mas_next(&mas, 1000);
1977 MT_BUG_ON(mt, val != xa_mk_value(80 / 10));
1978 MT_BUG_ON(mt, mas.index != 80);
1979 MT_BUG_ON(mt, mas.last != 85);
1980
1981 val = mas_prev(&mas, 70);
1982 MT_BUG_ON(mt, val != xa_mk_value(70 / 10));
1983 MT_BUG_ON(mt, mas.index != 70);
1984 MT_BUG_ON(mt, mas.last != 75);
1985
1986 /* Check across two levels of the tree */
1987 mas_reset(&mas);
1988 mas_set(&mas, level2[0]);
1989 val = mas_walk(&mas);
1990 MT_BUG_ON(mt, val != NULL);
1991 val = mas_next(&mas, level2[1]);
1992 MT_BUG_ON(mt, val != xa_mk_value(level2[2] / 10));
1993 MT_BUG_ON(mt, mas.index != level2[2]);
1994 MT_BUG_ON(mt, mas.last != level2[3]);
1995 mn = mas.node;
1996
1997 val = mas_next(&mas, level2[1]);
1998 MT_BUG_ON(mt, val != xa_mk_value(level2[4] / 10));
1999 MT_BUG_ON(mt, mas.index != level2[4]);
2000 MT_BUG_ON(mt, mas.last != level2[5]);
2001 MT_BUG_ON(mt, mn == mas.node);
2002
2003 val = mas_prev(&mas, 0);
2004 MT_BUG_ON(mt, val != xa_mk_value(level2[2] / 10));
2005 MT_BUG_ON(mt, mas.index != level2[2]);
2006 MT_BUG_ON(mt, mas.last != level2[3]);
2007
2008 /* Check running off the end and back on */
2009 mas_set(&mas, nr_entries * 10);
2010 val = mas_walk(&mas);
2011 MT_BUG_ON(mt, val != xa_mk_value(nr_entries));
2012 MT_BUG_ON(mt, mas.index != (nr_entries * 10));
2013 MT_BUG_ON(mt, mas.last != (nr_entries * 10 + 5));
2014
2015 val = mas_next(&mas, ULONG_MAX);
2016 MT_BUG_ON(mt, val != NULL);
2017	MT_BUG_ON(mt, mas.index != last_index);
2018	MT_BUG_ON(mt, mas.last != ULONG_MAX);
2019
2020 val = mas_prev(&mas, 0);
2021 MT_BUG_ON(mt, val != xa_mk_value(nr_entries));
2022 MT_BUG_ON(mt, mas.index != (nr_entries * 10));
2023 MT_BUG_ON(mt, mas.last != (nr_entries * 10 + 5));
2024
2025 /* Check running off the start and back on */
2026 mas_reset(&mas);
2027 mas_set(&mas, 10);
2028 val = mas_walk(&mas);
2029 MT_BUG_ON(mt, val != xa_mk_value(1));
2030 MT_BUG_ON(mt, mas.index != 10);
2031 MT_BUG_ON(mt, mas.last != 15);
2032
2033 val = mas_prev(&mas, 0);
2034 MT_BUG_ON(mt, val != xa_mk_value(0));
2035 MT_BUG_ON(mt, mas.index != 0);
2036 MT_BUG_ON(mt, mas.last != 5);
2037
2038 val = mas_prev(&mas, 0);
2039 MT_BUG_ON(mt, val != NULL);
2040 MT_BUG_ON(mt, mas.index != 0);
2041 MT_BUG_ON(mt, mas.last != 5);
2042 MT_BUG_ON(mt, mas.node != MAS_NONE);
2043
2044 mas.index = 0;
2045 mas.last = 5;
2046 mas_store(&mas, NULL);
2047 mas_reset(&mas);
2048 mas_set(&mas, 10);
2049 mas_walk(&mas);
2050
2051 val = mas_prev(&mas, 0);
2052 MT_BUG_ON(mt, val != NULL);
2053 MT_BUG_ON(mt, mas.index != 0);
2054	MT_BUG_ON(mt, mas.last != 9);
2055	mas_unlock(&mas);
2056
2057	mtree_destroy(mt);
2058
2059 mt_init(mt);
2060 mtree_store_range(mt, 0, 0, xa_mk_value(0), GFP_KERNEL);
2061 mtree_store_range(mt, 5, 5, xa_mk_value(5), GFP_KERNEL);
2062	rcu_read_lock();
2063 mas_set(&mas, 5);
2064 val = mas_prev(&mas, 4);
2065 MT_BUG_ON(mt, val != NULL);
2066	rcu_read_unlock();
2067}
2068
2069
2070
2071/* Test spanning writes that require balancing right sibling or right cousin */
2072static noinline void __init check_spanning_relatives(struct maple_tree *mt)
2073{
2074
2075 unsigned long i, nr_entries = 1000;
2076
2077 for (i = 0; i <= nr_entries; i++)
2078 mtree_store_range(mt, i*10, i*10 + 5,
2079 xa_mk_value(i), GFP_KERNEL);
2080
2081
2082 mtree_store_range(mt, 9365, 9955, NULL, GFP_KERNEL);
2083}
2084
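/*
 * check_fuzzer() - Replay operation sequences found by fuzzing.  Each
 * numbered block below records the bug the sequence originally triggered
 * and how it was fixed.
 */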
2085static noinline void __init check_fuzzer(struct maple_tree *mt)
2086{
2087 /*
2088 * 1. Causes a spanning rebalance of a single root node.
2089 * Fixed by setting the correct limit in mast_cp_to_nodes() when the
2090 * entire right side is consumed.
2091 */
2092 mtree_test_insert(mt, 88, (void *)0xb1);
2093 mtree_test_insert(mt, 84, (void *)0xa9);
2094 mtree_test_insert(mt, 2, (void *)0x5);
2095 mtree_test_insert(mt, 4, (void *)0x9);
2096 mtree_test_insert(mt, 14, (void *)0x1d);
2097 mtree_test_insert(mt, 7, (void *)0xf);
2098 mtree_test_insert(mt, 12, (void *)0x19);
2099 mtree_test_insert(mt, 18, (void *)0x25);
2100 mtree_test_store_range(mt, 8, 18, (void *)0x11);
2101 mtree_destroy(mt);
2102
2103
2104 /*
2105 * 2. Cause a spanning rebalance of two nodes in root.
2106 * Fixed by setting mast->r->max correctly.
2107 */
2108 mt_init_flags(mt, 0);
2109 mtree_test_store(mt, 87, (void *)0xaf);
2110 mtree_test_store(mt, 0, (void *)0x1);
2111 mtree_test_load(mt, 4);
2112 mtree_test_insert(mt, 4, (void *)0x9);
2113 mtree_test_store(mt, 8, (void *)0x11);
2114 mtree_test_store(mt, 44, (void *)0x59);
2115 mtree_test_store(mt, 68, (void *)0x89);
2116 mtree_test_store(mt, 2, (void *)0x5);
2117 mtree_test_insert(mt, 43, (void *)0x57);
2118 mtree_test_insert(mt, 24, (void *)0x31);
2119 mtree_test_insert(mt, 844, (void *)0x699);
2120 mtree_test_store(mt, 84, (void *)0xa9);
2121 mtree_test_store(mt, 4, (void *)0x9);
2122 mtree_test_erase(mt, 4);
2123 mtree_test_load(mt, 5);
2124 mtree_test_erase(mt, 0);
2125 mtree_destroy(mt);
2126
2127 /*
2128 * 3. Cause a node overflow on copy
2129 * Fixed by using the correct check for node size in mas_wr_modify()
2130 * Also discovered issue with metadata setting.
2131 */
2132 mt_init_flags(mt, 0);
2133	mtree_test_store_range(mt, 0, ULONG_MAX, (void *)0x1);
2134 mtree_test_store(mt, 4, (void *)0x9);
2135 mtree_test_erase(mt, 5);
2136 mtree_test_erase(mt, 0);
2137 mtree_test_erase(mt, 4);
2138 mtree_test_store(mt, 5, (void *)0xb);
2139 mtree_test_erase(mt, 5);
2140 mtree_test_store(mt, 5, (void *)0xb);
2141 mtree_test_erase(mt, 5);
2142 mtree_test_erase(mt, 4);
2143 mtree_test_store(mt, 4, (void *)0x9);
2144 mtree_test_store(mt, 444, (void *)0x379);
2145 mtree_test_store(mt, 0, (void *)0x1);
2146 mtree_test_load(mt, 0);
2147 mtree_test_store(mt, 5, (void *)0xb);
2148 mtree_test_erase(mt, 0);
2149 mtree_destroy(mt);
2150
2151 /*
2152	 * 4. Spanning store failure due to writing an incorrect pivot value at
2153	 * the last slot.
2154 * Fixed by setting mast->r->max correctly in mast_cp_to_nodes()
2155 *
2156 */
2157 mt_init_flags(mt, 0);
2158 mtree_test_insert(mt, 261, (void *)0x20b);
2159 mtree_test_store(mt, 516, (void *)0x409);
2160 mtree_test_store(mt, 6, (void *)0xd);
2161 mtree_test_insert(mt, 5, (void *)0xb);
2162 mtree_test_insert(mt, 1256, (void *)0x9d1);
2163 mtree_test_store(mt, 4, (void *)0x9);
2164 mtree_test_erase(mt, 1);
2165 mtree_test_store(mt, 56, (void *)0x71);
2166 mtree_test_insert(mt, 1, (void *)0x3);
2167 mtree_test_store(mt, 24, (void *)0x31);
2168 mtree_test_erase(mt, 1);
2169 mtree_test_insert(mt, 2263, (void *)0x11af);
2170 mtree_test_insert(mt, 446, (void *)0x37d);
2171 mtree_test_store_range(mt, 6, 45, (void *)0xd);
2172 mtree_test_store_range(mt, 3, 446, (void *)0x7);
2173 mtree_destroy(mt);
2174
2175 /*
2176 * 5. mas_wr_extend_null() may overflow slots.
2177 * Fix by checking against wr_mas->node_end.
2178 */
2179 mt_init_flags(mt, 0);
2180 mtree_test_store(mt, 48, (void *)0x61);
2181 mtree_test_store(mt, 3, (void *)0x7);
2182 mtree_test_load(mt, 0);
2183 mtree_test_store(mt, 88, (void *)0xb1);
2184 mtree_test_store(mt, 81, (void *)0xa3);
2185 mtree_test_insert(mt, 0, (void *)0x1);
2186 mtree_test_insert(mt, 8, (void *)0x11);
2187 mtree_test_insert(mt, 4, (void *)0x9);
2188 mtree_test_insert(mt, 2480, (void *)0x1361);
2189	mtree_test_insert(mt, ULONG_MAX,
2190			(void *)0xffffffffffffffff);
2191	mtree_test_erase(mt, ULONG_MAX);
2192 mtree_destroy(mt);
2193
2194 /*
2195 * 6. When reusing a node with an implied pivot and the node is
2196	 * shrinking, old data would be left in the implied slot.
2197	 * Fixed by checking the last pivot for the mas->max and clearing
2198	 * accordingly. This only affected the left-most node as that node is
2199 * the only one allowed to end in NULL.
2200 */
2201 mt_init_flags(mt, 0);
2202 mtree_test_erase(mt, 3);
2203 mtree_test_insert(mt, 22, (void *)0x2d);
2204 mtree_test_insert(mt, 15, (void *)0x1f);
2205 mtree_test_load(mt, 2);
2206 mtree_test_insert(mt, 1, (void *)0x3);
2207 mtree_test_insert(mt, 1, (void *)0x3);
2208 mtree_test_insert(mt, 5, (void *)0xb);
2209 mtree_test_erase(mt, 1);
2210 mtree_test_insert(mt, 1, (void *)0x3);
2211 mtree_test_insert(mt, 4, (void *)0x9);
2212 mtree_test_insert(mt, 1, (void *)0x3);
2213 mtree_test_erase(mt, 1);
2214 mtree_test_insert(mt, 2, (void *)0x5);
2215 mtree_test_insert(mt, 1, (void *)0x3);
2216 mtree_test_erase(mt, 3);
2217 mtree_test_insert(mt, 22, (void *)0x2d);
2218 mtree_test_insert(mt, 15, (void *)0x1f);
2219 mtree_test_insert(mt, 2, (void *)0x5);
2220 mtree_test_insert(mt, 1, (void *)0x3);
2221 mtree_test_insert(mt, 8, (void *)0x11);
2222 mtree_test_load(mt, 2);
2223 mtree_test_insert(mt, 1, (void *)0x3);
2224 mtree_test_insert(mt, 1, (void *)0x3);
2225 mtree_test_store(mt, 1, (void *)0x3);
2226 mtree_test_insert(mt, 5, (void *)0xb);
2227 mtree_test_erase(mt, 1);
2228 mtree_test_insert(mt, 1, (void *)0x3);
2229 mtree_test_insert(mt, 4, (void *)0x9);
2230 mtree_test_insert(mt, 1, (void *)0x3);
2231 mtree_test_erase(mt, 1);
2232 mtree_test_insert(mt, 2, (void *)0x5);
2233 mtree_test_insert(mt, 1, (void *)0x3);
2234 mtree_test_erase(mt, 3);
2235 mtree_test_insert(mt, 22, (void *)0x2d);
2236 mtree_test_insert(mt, 15, (void *)0x1f);
2237 mtree_test_insert(mt, 2, (void *)0x5);
2238 mtree_test_insert(mt, 1, (void *)0x3);
2239 mtree_test_insert(mt, 8, (void *)0x11);
2240 mtree_test_insert(mt, 12, (void *)0x19);
2241 mtree_test_erase(mt, 1);
2242 mtree_test_store_range(mt, 4, 62, (void *)0x9);
2243 mtree_test_erase(mt, 62);
2244 mtree_test_store_range(mt, 1, 0, (void *)0x3);
2245 mtree_test_insert(mt, 11, (void *)0x17);
2246 mtree_test_insert(mt, 3, (void *)0x7);
2247 mtree_test_insert(mt, 3, (void *)0x7);
2248 mtree_test_store(mt, 62, (void *)0x7d);
2249 mtree_test_erase(mt, 62);
2250 mtree_test_store_range(mt, 1, 15, (void *)0x3);
2251 mtree_test_erase(mt, 1);
2252 mtree_test_insert(mt, 22, (void *)0x2d);
2253 mtree_test_insert(mt, 12, (void *)0x19);
2254 mtree_test_erase(mt, 1);
2255 mtree_test_insert(mt, 3, (void *)0x7);
2256 mtree_test_store(mt, 62, (void *)0x7d);
2257 mtree_test_erase(mt, 62);
2258 mtree_test_insert(mt, 122, (void *)0xf5);
2259 mtree_test_store(mt, 3, (void *)0x7);
2260 mtree_test_insert(mt, 0, (void *)0x1);
2261 mtree_test_store_range(mt, 0, 1, (void *)0x1);
2262 mtree_test_insert(mt, 85, (void *)0xab);
2263 mtree_test_insert(mt, 72, (void *)0x91);
2264 mtree_test_insert(mt, 81, (void *)0xa3);
2265 mtree_test_insert(mt, 726, (void *)0x5ad);
2266 mtree_test_insert(mt, 0, (void *)0x1);
2267 mtree_test_insert(mt, 1, (void *)0x3);
2268 mtree_test_store(mt, 51, (void *)0x67);
2269 mtree_test_insert(mt, 611, (void *)0x4c7);
2270 mtree_test_insert(mt, 485, (void *)0x3cb);
2271 mtree_test_insert(mt, 1, (void *)0x3);
2272 mtree_test_erase(mt, 1);
2273 mtree_test_insert(mt, 0, (void *)0x1);
2274 mtree_test_insert(mt, 1, (void *)0x3);
2275 mtree_test_insert_range(mt, 26, 1, (void *)0x35);
2276 mtree_test_load(mt, 1);
2277 mtree_test_store_range(mt, 1, 22, (void *)0x3);
2278 mtree_test_insert(mt, 1, (void *)0x3);
2279 mtree_test_erase(mt, 1);
2280 mtree_test_load(mt, 53);
2281 mtree_test_load(mt, 1);
2282 mtree_test_store_range(mt, 1, 1, (void *)0x3);
2283 mtree_test_insert(mt, 222, (void *)0x1bd);
2284 mtree_test_insert(mt, 485, (void *)0x3cb);
2285 mtree_test_insert(mt, 1, (void *)0x3);
2286 mtree_test_erase(mt, 1);
2287 mtree_test_load(mt, 0);
2288 mtree_test_insert(mt, 21, (void *)0x2b);
2289 mtree_test_insert(mt, 3, (void *)0x7);
2290 mtree_test_store(mt, 621, (void *)0x4db);
2291 mtree_test_insert(mt, 0, (void *)0x1);
2292 mtree_test_erase(mt, 5);
2293 mtree_test_insert(mt, 1, (void *)0x3);
2294 mtree_test_store(mt, 62, (void *)0x7d);
2295 mtree_test_erase(mt, 62);
2296 mtree_test_store_range(mt, 1, 0, (void *)0x3);
2297 mtree_test_insert(mt, 22, (void *)0x2d);
2298 mtree_test_insert(mt, 12, (void *)0x19);
2299 mtree_test_erase(mt, 1);
2300 mtree_test_insert(mt, 1, (void *)0x3);
2301 mtree_test_store_range(mt, 4, 62, (void *)0x9);
2302 mtree_test_erase(mt, 62);
2303 mtree_test_erase(mt, 1);
2304 mtree_test_load(mt, 1);
2305 mtree_test_store_range(mt, 1, 22, (void *)0x3);
2306 mtree_test_insert(mt, 1, (void *)0x3);
2307 mtree_test_erase(mt, 1);
2308 mtree_test_load(mt, 53);
2309 mtree_test_load(mt, 1);
2310 mtree_test_store_range(mt, 1, 1, (void *)0x3);
2311 mtree_test_insert(mt, 222, (void *)0x1bd);
2312 mtree_test_insert(mt, 485, (void *)0x3cb);
2313 mtree_test_insert(mt, 1, (void *)0x3);
2314 mtree_test_erase(mt, 1);
2315 mtree_test_insert(mt, 1, (void *)0x3);
2316 mtree_test_load(mt, 0);
2317 mtree_test_load(mt, 0);
2318 mtree_destroy(mt);
2319
2320 /*
2321	 * 7. Previous fix was incomplete, fix mas_reuse_node() clearing of old
2322 * data by overwriting it first - that way metadata is of no concern.
2323 */
2324 mt_init_flags(mt, 0);
2325 mtree_test_load(mt, 1);
2326 mtree_test_insert(mt, 102, (void *)0xcd);
2327 mtree_test_erase(mt, 2);
2328 mtree_test_erase(mt, 0);
2329 mtree_test_load(mt, 0);
2330 mtree_test_insert(mt, 4, (void *)0x9);
2331 mtree_test_insert(mt, 2, (void *)0x5);
2332 mtree_test_insert(mt, 110, (void *)0xdd);
2333 mtree_test_insert(mt, 1, (void *)0x3);
2334 mtree_test_insert_range(mt, 5, 0, (void *)0xb);
2335 mtree_test_erase(mt, 2);
2336 mtree_test_store(mt, 0, (void *)0x1);
2337 mtree_test_store(mt, 112, (void *)0xe1);
2338 mtree_test_insert(mt, 21, (void *)0x2b);
2339 mtree_test_store(mt, 1, (void *)0x3);
2340 mtree_test_insert_range(mt, 110, 2, (void *)0xdd);
2341 mtree_test_store(mt, 2, (void *)0x5);
2342 mtree_test_load(mt, 22);
2343 mtree_test_erase(mt, 2);
2344 mtree_test_store(mt, 210, (void *)0x1a5);
2345 mtree_test_store_range(mt, 0, 2, (void *)0x1);
2346 mtree_test_store(mt, 2, (void *)0x5);
2347 mtree_test_erase(mt, 2);
2348 mtree_test_erase(mt, 22);
2349 mtree_test_erase(mt, 1);
2350 mtree_test_erase(mt, 2);
2351 mtree_test_store(mt, 0, (void *)0x1);
2352 mtree_test_load(mt, 112);
2353 mtree_test_insert(mt, 2, (void *)0x5);
2354 mtree_test_erase(mt, 2);
2355 mtree_test_store(mt, 1, (void *)0x3);
2356 mtree_test_insert_range(mt, 1, 2, (void *)0x3);
2357 mtree_test_erase(mt, 0);
2358 mtree_test_erase(mt, 2);
2359 mtree_test_store(mt, 2, (void *)0x5);
2360 mtree_test_erase(mt, 0);
2361 mtree_test_erase(mt, 2);
2362 mtree_test_store(mt, 0, (void *)0x1);
2363 mtree_test_store(mt, 0, (void *)0x1);
2364 mtree_test_erase(mt, 2);
2365 mtree_test_store(mt, 2, (void *)0x5);
2366 mtree_test_erase(mt, 2);
2367 mtree_test_insert(mt, 2, (void *)0x5);
2368 mtree_test_insert_range(mt, 1, 2, (void *)0x3);
2369 mtree_test_erase(mt, 0);
2370 mtree_test_erase(mt, 2);
2371 mtree_test_store(mt, 0, (void *)0x1);
2372 mtree_test_load(mt, 112);
2373 mtree_test_store_range(mt, 110, 12, (void *)0xdd);
2374 mtree_test_store(mt, 2, (void *)0x5);
2375 mtree_test_load(mt, 110);
2376 mtree_test_insert_range(mt, 4, 71, (void *)0x9);
2377 mtree_test_load(mt, 2);
2378 mtree_test_store(mt, 2, (void *)0x5);
2379 mtree_test_insert_range(mt, 11, 22, (void *)0x17);
2380 mtree_test_erase(mt, 12);
2381 mtree_test_store(mt, 2, (void *)0x5);
2382 mtree_test_load(mt, 22);
2383 mtree_destroy(mt);
2384
2385
2386 /*
2387 * 8. When rebalancing or spanning_rebalance(), the max of the new node
2388 * may be set incorrectly to the final pivot and not the right max.
2389 * Fix by setting the left max to orig right max if the entire node is
2390 * consumed.
2391 */
2392 mt_init_flags(mt, 0);
2393 mtree_test_store(mt, 6, (void *)0xd);
2394 mtree_test_store(mt, 67, (void *)0x87);
2395 mtree_test_insert(mt, 15, (void *)0x1f);
2396 mtree_test_insert(mt, 6716, (void *)0x3479);
2397 mtree_test_store(mt, 61, (void *)0x7b);
2398 mtree_test_insert(mt, 13, (void *)0x1b);
2399 mtree_test_store(mt, 8, (void *)0x11);
2400 mtree_test_insert(mt, 1, (void *)0x3);
2401 mtree_test_load(mt, 0);
2402 mtree_test_erase(mt, 67167);
2403 mtree_test_insert_range(mt, 6, 7167, (void *)0xd);
2404 mtree_test_insert(mt, 6, (void *)0xd);
2405 mtree_test_erase(mt, 67);
2406 mtree_test_insert(mt, 1, (void *)0x3);
2407 mtree_test_erase(mt, 667167);
2408 mtree_test_insert(mt, 6, (void *)0xd);
2409 mtree_test_store(mt, 67, (void *)0x87);
2410 mtree_test_insert(mt, 5, (void *)0xb);
2411 mtree_test_erase(mt, 1);
2412 mtree_test_insert(mt, 6, (void *)0xd);
2413 mtree_test_erase(mt, 67);
2414 mtree_test_insert(mt, 15, (void *)0x1f);
2415 mtree_test_insert(mt, 67167, (void *)0x20cbf);
2416 mtree_test_insert(mt, 1, (void *)0x3);
2417 mtree_test_load(mt, 7);
2418 mtree_test_insert(mt, 16, (void *)0x21);
2419 mtree_test_insert(mt, 36, (void *)0x49);
2420 mtree_test_store(mt, 67, (void *)0x87);
2421 mtree_test_store(mt, 6, (void *)0xd);
2422 mtree_test_insert(mt, 367, (void *)0x2df);
2423 mtree_test_insert(mt, 115, (void *)0xe7);
2424 mtree_test_store(mt, 0, (void *)0x1);
2425 mtree_test_store_range(mt, 1, 3, (void *)0x3);
2426 mtree_test_store(mt, 1, (void *)0x3);
2427 mtree_test_erase(mt, 67167);
2428 mtree_test_insert_range(mt, 6, 47, (void *)0xd);
2429 mtree_test_store(mt, 1, (void *)0x3);
2430 mtree_test_insert_range(mt, 1, 67, (void *)0x3);
2431 mtree_test_load(mt, 67);
2432 mtree_test_insert(mt, 1, (void *)0x3);
2433 mtree_test_erase(mt, 67167);
2434 mtree_destroy(mt);
2435
2436 /*
2437	 * 9. A spanning store to the end of the data caused an invalid metadata
2438	 * length, which eventually resulted in a crash.
2439 * Fix by checking if there is a value in pivot before incrementing the
2440 * metadata end in mab_mas_cp(). To ensure this doesn't happen again,
2441 * abstract the two locations this happens into a function called
2442 * mas_leaf_set_meta().
2443 */
2444 mt_init_flags(mt, 0);
2445 mtree_test_insert(mt, 21, (void *)0x2b);
2446 mtree_test_insert(mt, 12, (void *)0x19);
2447 mtree_test_insert(mt, 6, (void *)0xd);
2448 mtree_test_insert(mt, 8, (void *)0x11);
2449 mtree_test_insert(mt, 2, (void *)0x5);
2450 mtree_test_insert(mt, 91, (void *)0xb7);
2451 mtree_test_insert(mt, 18, (void *)0x25);
2452 mtree_test_insert(mt, 81, (void *)0xa3);
2453 mtree_test_store_range(mt, 0, 128, (void *)0x1);
2454 mtree_test_store(mt, 1, (void *)0x3);
2455 mtree_test_erase(mt, 8);
2456 mtree_test_insert(mt, 11, (void *)0x17);
2457 mtree_test_insert(mt, 8, (void *)0x11);
2458 mtree_test_insert(mt, 21, (void *)0x2b);
2459 mtree_test_insert(mt, 2, (void *)0x5);
2460 mtree_test_insert(mt, ULONG_MAX - 10, (void *)0xffffffffffffffeb);
2461 mtree_test_erase(mt, ULONG_MAX - 10);
2462 mtree_test_store_range(mt, 0, 281, (void *)0x1);
2463 mtree_test_erase(mt, 2);
2464 mtree_test_insert(mt, 1211, (void *)0x977);
2465 mtree_test_insert(mt, 111, (void *)0xdf);
2466 mtree_test_insert(mt, 13, (void *)0x1b);
2467 mtree_test_insert(mt, 211, (void *)0x1a7);
2468 mtree_test_insert(mt, 11, (void *)0x17);
2469 mtree_test_insert(mt, 5, (void *)0xb);
2470 mtree_test_insert(mt, 1218, (void *)0x985);
2471 mtree_test_insert(mt, 61, (void *)0x7b);
2472 mtree_test_store(mt, 1, (void *)0x3);
2473 mtree_test_insert(mt, 121, (void *)0xf3);
2474 mtree_test_insert(mt, 8, (void *)0x11);
2475 mtree_test_insert(mt, 21, (void *)0x2b);
2476 mtree_test_insert(mt, 2, (void *)0x5);
2477 mtree_test_insert(mt, ULONG_MAX - 10, (void *)0xffffffffffffffeb);
2478 mtree_test_erase(mt, ULONG_MAX - 10);
2479}
2480
2481/* duplicate the tree with a specific gap */
2482static noinline void __init check_dup_gaps(struct maple_tree *mt,
2483 unsigned long nr_entries, bool zero_start,
2484 unsigned long gap)
2485{
2486 unsigned long i = 0;
2487 struct maple_tree newmt;
2488 int ret;
2489 void *tmp;
2490 MA_STATE(mas, mt, 0, 0);
2491 MA_STATE(newmas, &newmt, 0, 0);
2492
2493 if (!zero_start)
2494 i = 1;
2495
2496 mt_zero_nr_tallocated();
2497 for (; i <= nr_entries; i++)
2498 mtree_store_range(mt, i*10, (i+1)*10 - gap,
2499 xa_mk_value(i), GFP_KERNEL);
2500
2501 mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
2502 mt_set_non_kernel(99999);
2503	mas_lock(&newmas);
2504 ret = mas_expected_entries(&newmas, nr_entries);
2505 mt_set_non_kernel(0);
2506 MT_BUG_ON(mt, ret != 0);
2507
2508	rcu_read_lock();
2509 mas_for_each(&mas, tmp, ULONG_MAX) {
2510 newmas.index = mas.index;
2511 newmas.last = mas.last;
2512 mas_store(&newmas, tmp);
2513 }
2514	rcu_read_unlock();
2515	mas_destroy(&newmas);
2516 mas_unlock(&newmas);
2517
2518 mtree_destroy(&newmt);
2519}
2520
2521/* Duplicate many sizes of trees. Mainly to test expected entry values */
2522static noinline void __init check_dup(struct maple_tree *mt)
2523{
2524 int i;
2525	int big_start = 100010;
2526
2527 /* Check with a value at zero */
2528 for (i = 10; i < 1000; i++) {
2529 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2530 check_dup_gaps(mt, i, true, 5);
2531 mtree_destroy(mt);
2532		rcu_barrier();
2533 }
2534
2535 cond_resched();
2536 mt_cache_shrink();
2537 /* Check with a value at zero, no gap */
2538 for (i = 1000; i < 2000; i++) {
2539 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2540 check_dup_gaps(mt, i, true, 0);
2541 mtree_destroy(mt);
2542		rcu_barrier();
2543 }
2544
2545 cond_resched();
2546 mt_cache_shrink();
e15e06a8 2547 /* Check with a value at zero and unreasonably large */
2548	for (i = big_start; i < big_start + 10; i++) {
2549 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2550 check_dup_gaps(mt, i, true, 5);
2551 mtree_destroy(mt);
2552		rcu_barrier();
2553 }
2554
2555 cond_resched();
2556 mt_cache_shrink();
2557	/* Small to medium size not starting at zero */
2558 for (i = 200; i < 1000; i++) {
2559 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2560 check_dup_gaps(mt, i, false, 5);
2561 mtree_destroy(mt);
2562		rcu_barrier();
2563 }
2564
2565 cond_resched();
2566 mt_cache_shrink();
e15e06a8 2567 /* Unreasonably large not starting at zero*/
120b1162 2568 for (i = big_start; i < big_start + 10; i++) {
e15e06a8
LH
2569 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2570 check_dup_gaps(mt, i, false, 5);
2571 mtree_destroy(mt);
2572 rcu_barrier();
2573 cond_resched();
2574 mt_cache_shrink();
2575 }
2576
2577 /* Check non-allocation tree not starting at zero */
2578 for (i = 1500; i < 3000; i++) {
2579 mt_init_flags(mt, 0);
2580 check_dup_gaps(mt, i, false, 5);
2581 mtree_destroy(mt);
2582 rcu_barrier();
2583 cond_resched();
2584 if (i % 2 == 0)
2585 mt_cache_shrink();
2586 }
2587
2588	mt_cache_shrink();
2589 /* Check non-allocation tree starting at zero */
2590 for (i = 200; i < 1000; i++) {
2591 mt_init_flags(mt, 0);
2592 check_dup_gaps(mt, i, true, 5);
2593 mtree_destroy(mt);
2594 rcu_barrier();
2595 cond_resched();
2596 }
2597
2598	mt_cache_shrink();
2599	/* Unreasonably large */
2600	for (i = big_start + 5; i < big_start + 10; i++) {
2601 mt_init_flags(mt, 0);
2602 check_dup_gaps(mt, i, true, 5);
2603 mtree_destroy(mt);
2604 rcu_barrier();
2605 mt_cache_shrink();
2606 cond_resched();
2607	}
2608}
2609
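/*
 * check_bnode_min_spanning() - Store 51 adjacent ranges covering 0-509,
 * then wipe 240-509 with a single spanning store of NULL.
 */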
2610static noinline void __init check_bnode_min_spanning(struct maple_tree *mt)
2611{
2612 int i = 50;
2613 MA_STATE(mas, mt, 0, 0);
2614
2615 mt_set_non_kernel(9999);
2616 mas_lock(&mas);
2617 do {
2618 mas_set_range(&mas, i*10, i*10+9);
2619 mas_store(&mas, check_bnode_min_spanning);
2620 } while (i--);
2621
2622 mas_set_range(&mas, 240, 509);
2623 mas_store(&mas, NULL);
2624 mas_unlock(&mas);
2625 mas_destroy(&mas);
2626 mt_set_non_kernel(0);
2627}
2628
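/*
 * check_empty_area_window() - Verify that mas_empty_area() and
 * mas_empty_area_rev() respect the requested min/max window and size,
 * returning -EBUSY when no gap fits and -EINVAL for an impossible request.
 */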
2629static noinline void __init check_empty_area_window(struct maple_tree *mt)
2630{
2631 unsigned long i, nr_entries = 20;
2632 MA_STATE(mas, mt, 0, 0);
2633
2634 for (i = 1; i <= nr_entries; i++)
2635 mtree_store_range(mt, i*10, i*10 + 9,
2636 xa_mk_value(i), GFP_KERNEL);
2637
2638 /* Create another hole besides the one at 0 */
2639 mtree_store_range(mt, 160, 169, NULL, GFP_KERNEL);
2640
2641 /* Check lower bounds that don't fit */
2642 rcu_read_lock();
2643 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 90, 10) != -EBUSY);
2644
2645 mas_reset(&mas);
2646 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 6, 90, 5) != -EBUSY);
2647
2648 /* Check lower bound that does fit */
2649 mas_reset(&mas);
2650 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 90, 5) != 0);
2651 MT_BUG_ON(mt, mas.index != 5);
2652 MT_BUG_ON(mt, mas.last != 9);
2653 rcu_read_unlock();
2654
2655 /* Check one gap that doesn't fit and one that does */
2656 rcu_read_lock();
2657 mas_reset(&mas);
2658 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 217, 9) != 0);
2659 MT_BUG_ON(mt, mas.index != 161);
2660 MT_BUG_ON(mt, mas.last != 169);
2661
2662 /* Check one gap that does fit above the min */
2663 mas_reset(&mas);
2664 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 218, 3) != 0);
2665 MT_BUG_ON(mt, mas.index != 216);
2666 MT_BUG_ON(mt, mas.last != 218);
2667
2668 /* Check size that doesn't fit any gap */
2669 mas_reset(&mas);
2670 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 218, 16) != -EBUSY);
2671
2672 /*
2673 * Check size that doesn't fit the lower end of the window but
2674 * does fit the gap
2675 */
2676 mas_reset(&mas);
2677 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 167, 200, 4) != -EBUSY);
2678
2679 /*
2680 * Check size that doesn't fit the upper end of the window but
2681 * does fit the gap
2682 */
2683 mas_reset(&mas);
2684 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 162, 4) != -EBUSY);
2685
2686 /* Check mas_empty_area forward */
2687 mas_reset(&mas);
2688 MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 9) != 0);
2689 MT_BUG_ON(mt, mas.index != 0);
2690 MT_BUG_ON(mt, mas.last != 8);
2691
2692 mas_reset(&mas);
2693 MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 4) != 0);
2694 MT_BUG_ON(mt, mas.index != 0);
2695 MT_BUG_ON(mt, mas.last != 3);
2696
2697 mas_reset(&mas);
2698 MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 11) != -EBUSY);
2699
2700 mas_reset(&mas);
2701 MT_BUG_ON(mt, mas_empty_area(&mas, 5, 100, 6) != -EBUSY);
2702
2703 mas_reset(&mas);
2704	MT_BUG_ON(mt, mas_empty_area(&mas, 0, 8, 10) != -EINVAL);
2705
2706 mas_reset(&mas);
2707 mas_empty_area(&mas, 100, 165, 3);
2708
2709 mas_reset(&mas);
2710 MT_BUG_ON(mt, mas_empty_area(&mas, 100, 163, 6) != -EBUSY);
2711 rcu_read_unlock();
2712}
2713
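/*
 * check_empty_area_fill() - Repeatedly find and fill gaps of 4K to 64K below
 * a fixed maximum, confirm a further search fails with -EBUSY, then check
 * gap searches that start just after a hole deep in the tree.
 */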
2714static noinline void __init check_empty_area_fill(struct maple_tree *mt)
2715{
2716 const unsigned long max = 0x25D78000;
2717 unsigned long size;
2718 int loop, shift;
2719 MA_STATE(mas, mt, 0, 0);
2720
2721 mt_set_non_kernel(99999);
2722 for (shift = 12; shift <= 16; shift++) {
2723 loop = 5000;
2724 size = 1 << shift;
2725 while (loop--) {
2726 mas_set(&mas, 0);
2727 mas_lock(&mas);
2728 MT_BUG_ON(mt, mas_empty_area(&mas, 0, max, size) != 0);
2729 MT_BUG_ON(mt, mas.last != mas.index + size - 1);
2730 mas_store_gfp(&mas, (void *)size, GFP_KERNEL);
2731 mas_unlock(&mas);
2732 mas_reset(&mas);
2733 }
2734 }
2735
2736 /* No space left. */
2737 size = 0x1000;
2738 rcu_read_lock();
2739 MT_BUG_ON(mt, mas_empty_area(&mas, 0, max, size) != -EBUSY);
2740 rcu_read_unlock();
2741
2742 /* Fill a depth 3 node to the maximum */
2743 for (unsigned long i = 629440511; i <= 629440800; i += 6)
2744 mtree_store_range(mt, i, i + 5, (void *)i, GFP_KERNEL);
2745 /* Make space in the second-last depth 4 node */
2746 mtree_erase(mt, 631668735);
2747 /* Make space in the last depth 4 node */
2748 mtree_erase(mt, 629506047);
2749 mas_reset(&mas);
2750 /* Search from just after the gap in the second-last depth 4 */
2751 rcu_read_lock();
2752 MT_BUG_ON(mt, mas_empty_area(&mas, 629506048, 690000000, 0x5000) != 0);
2753 rcu_read_unlock();
2754 mt_set_non_kernel(0);
2755}
2756
2757/*
2758 * Check MAS_START, MAS_PAUSE, active (implied), and MAS_NONE transitions.
2759 *
2760 * The table below shows the single entry tree (0-0 pointer) and normal tree
2761 * with nodes.
2762 *
2763 * Function ENTRY Start Result index & last
2764 * ┬ ┬ ┬ ┬ ┬
2765 * │ │ │ │ └─ the final range
2766 * │ │ │ └─ The node value after execution
2767 * │ │ └─ The node value before execution
2768 * │ └─ If the entry exists or does not exists (DNE)
2769 * └─ The function name
2770 *
2771 * Function ENTRY Start Result index & last
2772 * mas_next()
2773 * - after last
2774 * Single entry tree at 0-0
2775 * ------------------------
2776 * DNE MAS_START MAS_NONE 1 - oo
2777 * DNE MAS_PAUSE MAS_NONE 1 - oo
2778 * DNE MAS_ROOT MAS_NONE 1 - oo
2779 * when index = 0
2780 * DNE MAS_NONE MAS_ROOT 0
2781 * when index > 0
2782 * DNE MAS_NONE MAS_NONE 1 - oo
2783 *
2784 * Normal tree
2785 * -----------
2786 * exists MAS_START active range
2787 * DNE MAS_START active set to last range
2788 * exists MAS_PAUSE active range
2789 * DNE MAS_PAUSE active set to last range
2790 * exists MAS_NONE active range
2791 * exists active active range
2792 * DNE active active set to last range
2793 *
2794 * Function ENTRY Start Result index & last
2795 * mas_prev()
2796 * - before index
2797 * Single entry tree at 0-0
2798 * ------------------------
2799 * if index > 0
2800 * exists MAS_START MAS_ROOT 0
2801 * exists MAS_PAUSE MAS_ROOT 0
2802 * exists MAS_NONE MAS_ROOT 0
2803 *
2804 * if index == 0
2805 * DNE MAS_START MAS_NONE 0
2806 * DNE MAS_PAUSE MAS_NONE 0
2807 * DNE MAS_NONE MAS_NONE 0
2808 * DNE MAS_ROOT MAS_NONE 0
2809 *
2810 * Normal tree
2811 * -----------
2812 * exists MAS_START active range
2813 * DNE MAS_START active set to min
2814 * exists MAS_PAUSE active range
2815 * DNE MAS_PAUSE active set to min
2816 * exists MAS_NONE active range
2817 * DNE MAS_NONE MAS_NONE set to min
2818 * any MAS_ROOT MAS_NONE 0
2819 * exists active active range
2820 * DNE active active last range
2821 *
2822 * Function ENTRY Start Result index & last
2823 * mas_find()
2824 * - at index or next
2825 * Single entry tree at 0-0
2826 * ------------------------
2827 * if index > 0
2828 * DNE MAS_START MAS_NONE 0
2829 * DNE MAS_PAUSE MAS_NONE 0
2830 * DNE MAS_ROOT MAS_NONE 0
2831 * DNE MAS_NONE MAS_NONE 0
2832 * if index == 0
2833 * exists MAS_START MAS_ROOT 0
2834 * exists MAS_PAUSE MAS_ROOT 0
2835 * exists MAS_NONE MAS_ROOT 0
2836 *
2837 * Normal tree
2838 * -----------
2839 * exists MAS_START active range
2840 * DNE MAS_START active set to max
2841 * exists MAS_PAUSE active range
2842 * DNE MAS_PAUSE active set to max
2843 * exists MAS_NONE active range
2844 * exists active active range
2845 * DNE active active last range (max < last)
2846 *
2847 * Function ENTRY Start Result index & last
2848 * mas_find_rev()
2849 * - at index or before
2850 * Single entry tree at 0-0
2851 * ------------------------
2852 * if index > 0
2853 * exists MAS_START MAS_ROOT 0
2854 * exists MAS_PAUSE MAS_ROOT 0
2855 * exists MAS_NONE MAS_ROOT 0
2856 * if index == 0
2857 * DNE MAS_START MAS_NONE 0
2858 * DNE MAS_PAUSE MAS_NONE 0
2859 * DNE MAS_NONE MAS_NONE 0
2860 * DNE MAS_ROOT MAS_NONE 0
2861 *
2862 * Normal tree
2863 * -----------
2864 * exists MAS_START active range
2865 * DNE MAS_START active set to min
2866 * exists MAS_PAUSE active range
2867 * DNE MAS_PAUSE active set to min
2868 * exists MAS_NONE active range
2869 * exists active active range
2870 * DNE active active last range (min > index)
2871 *
2872 * Function ENTRY Start Result index & last
2873 * mas_walk()
2874 * - Look up index
2875 * Single entry tree at 0-0
2876 * ------------------------
2877 * if index > 0
2878 * DNE MAS_START MAS_ROOT 1 - oo
2879 * DNE MAS_PAUSE MAS_ROOT 1 - oo
2880 * DNE MAS_NONE MAS_ROOT 1 - oo
2881 * DNE MAS_ROOT MAS_ROOT 1 - oo
2882 * if index == 0
2883 * exists MAS_START MAS_ROOT 0
2884 * exists MAS_PAUSE MAS_ROOT 0
2885 * exists MAS_NONE MAS_ROOT 0
2886 * exists MAS_ROOT MAS_ROOT 0
2887 *
2888 * Normal tree
2889 * -----------
2890 * exists MAS_START active range
2891 * DNE MAS_START active range of NULL
2892 * exists MAS_PAUSE active range
2893 * DNE MAS_PAUSE active range of NULL
2894 * exists MAS_NONE active range
2895 * DNE MAS_NONE active range of NULL
2896 * exists active active range
2897 * DNE active active range of NULL
2898 */
2899
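/*
 * mas_active() - true when the maple state is not in one of the special
 * MAS_* states, i.e. the walk is positioned within the tree.
 */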
2900#define mas_active(x) (((x).node != MAS_ROOT) && \
2901 ((x).node != MAS_START) && \
2902 ((x).node != MAS_PAUSE) && \
2903 ((x).node != MAS_NONE))
2904static noinline void __init check_state_handling(struct maple_tree *mt)
2905{
2906 MA_STATE(mas, mt, 0, 0);
2907 void *entry, *ptr = (void *) 0x1234500;
2908 void *ptr2 = &ptr;
2909 void *ptr3 = &ptr2;
2910
2911 /* Check MAS_ROOT First */
2912 mtree_store_range(mt, 0, 0, ptr, GFP_KERNEL);
2913
2914 mas_lock(&mas);
2915 /* prev: Start -> none */
2916 entry = mas_prev(&mas, 0);
2917 MT_BUG_ON(mt, entry != NULL);
2918 MT_BUG_ON(mt, mas.node != MAS_NONE);
2919
2920 /* prev: Start -> root */
2921 mas_set(&mas, 10);
2922 entry = mas_prev(&mas, 0);
2923 MT_BUG_ON(mt, entry != ptr);
2924 MT_BUG_ON(mt, mas.index != 0);
2925 MT_BUG_ON(mt, mas.last != 0);
2926 MT_BUG_ON(mt, mas.node != MAS_ROOT);
2927
2928 /* prev: pause -> root */
2929 mas_set(&mas, 10);
2930 mas_pause(&mas);
2931 entry = mas_prev(&mas, 0);
2932 MT_BUG_ON(mt, entry != ptr);
2933 MT_BUG_ON(mt, mas.index != 0);
2934 MT_BUG_ON(mt, mas.last != 0);
2935 MT_BUG_ON(mt, mas.node != MAS_ROOT);
2936
2937 /* next: start -> none */
2938 mas_set(&mas, 0);
2939 entry = mas_next(&mas, ULONG_MAX);
2940 MT_BUG_ON(mt, mas.index != 1);
2941 MT_BUG_ON(mt, mas.last != ULONG_MAX);
2942 MT_BUG_ON(mt, entry != NULL);
2943 MT_BUG_ON(mt, mas.node != MAS_NONE);
2944
2945 /* next: start -> none */
2946 mas_set(&mas, 10);
2947 entry = mas_next(&mas, ULONG_MAX);
2948 MT_BUG_ON(mt, mas.index != 1);
2949 MT_BUG_ON(mt, mas.last != ULONG_MAX);
2950 MT_BUG_ON(mt, entry != NULL);
2951 MT_BUG_ON(mt, mas.node != MAS_NONE);
2952
2953 /* find: start -> root */
2954 mas_set(&mas, 0);
2955 entry = mas_find(&mas, ULONG_MAX);
2956 MT_BUG_ON(mt, entry != ptr);
2957 MT_BUG_ON(mt, mas.index != 0);
2958 MT_BUG_ON(mt, mas.last != 0);
2959 MT_BUG_ON(mt, mas.node != MAS_ROOT);
2960
2961 /* find: root -> none */
2962 entry = mas_find(&mas, ULONG_MAX);
2963 MT_BUG_ON(mt, entry != NULL);
2964 MT_BUG_ON(mt, mas.index != 1);
2965 MT_BUG_ON(mt, mas.last != ULONG_MAX);
2966 MT_BUG_ON(mt, mas.node != MAS_NONE);
2967
2968 /* find: none -> none */
2969 entry = mas_find(&mas, ULONG_MAX);
2970 MT_BUG_ON(mt, entry != NULL);
2971 MT_BUG_ON(mt, mas.index != 1);
2972 MT_BUG_ON(mt, mas.last != ULONG_MAX);
2973 MT_BUG_ON(mt, mas.node != MAS_NONE);
2974
2975 /* find: start -> none */
2976 mas_set(&mas, 10);
2977 entry = mas_find(&mas, ULONG_MAX);
2978 MT_BUG_ON(mt, entry != NULL);
2979 MT_BUG_ON(mt, mas.index != 1);
2980 MT_BUG_ON(mt, mas.last != ULONG_MAX);
2981 MT_BUG_ON(mt, mas.node != MAS_NONE);
2982
2983 /* find_rev: none -> root */
2984 entry = mas_find_rev(&mas, 0);
2985 MT_BUG_ON(mt, entry != ptr);
2986 MT_BUG_ON(mt, mas.index != 0);
2987 MT_BUG_ON(mt, mas.last != 0);
2988 MT_BUG_ON(mt, mas.node != MAS_ROOT);
2989
2990 /* find_rev: start -> root */
2991 mas_set(&mas, 0);
2992 entry = mas_find_rev(&mas, 0);
2993 MT_BUG_ON(mt, entry != ptr);
2994 MT_BUG_ON(mt, mas.index != 0);
2995 MT_BUG_ON(mt, mas.last != 0);
2996 MT_BUG_ON(mt, mas.node != MAS_ROOT);
2997
2998 /* find_rev: root -> none */
2999 entry = mas_find_rev(&mas, 0);
3000 MT_BUG_ON(mt, entry != NULL);
3001 MT_BUG_ON(mt, mas.index != 0);
3002 MT_BUG_ON(mt, mas.last != 0);
3003 MT_BUG_ON(mt, mas.node != MAS_NONE);
3004
3005 /* find_rev: none -> none */
3006 entry = mas_find_rev(&mas, 0);
3007 MT_BUG_ON(mt, entry != NULL);
3008 MT_BUG_ON(mt, mas.index != 0);
3009 MT_BUG_ON(mt, mas.last != 0);
3010 MT_BUG_ON(mt, mas.node != MAS_NONE);
3011
3012 /* find_rev: start -> root */
3013 mas_set(&mas, 10);
3014 entry = mas_find_rev(&mas, 0);
3015 MT_BUG_ON(mt, entry != ptr);
3016 MT_BUG_ON(mt, mas.index != 0);
3017 MT_BUG_ON(mt, mas.last != 0);
3018 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3019
3020 /* walk: start -> none */
3021 mas_set(&mas, 10);
3022 entry = mas_walk(&mas);
3023 MT_BUG_ON(mt, entry != NULL);
3024 MT_BUG_ON(mt, mas.index != 1);
3025 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3026 MT_BUG_ON(mt, mas.node != MAS_NONE);
3027
3028 /* walk: pause -> none*/
3029 mas_set(&mas, 10);
3030 mas_pause(&mas);
3031 entry = mas_walk(&mas);
3032 MT_BUG_ON(mt, entry != NULL);
3033 MT_BUG_ON(mt, mas.index != 1);
3034 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3035 MT_BUG_ON(mt, mas.node != MAS_NONE);
3036
3037 /* walk: none -> none */
3038 mas.index = mas.last = 10;
3039 entry = mas_walk(&mas);
3040 MT_BUG_ON(mt, entry != NULL);
3041 MT_BUG_ON(mt, mas.index != 1);
3042 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3043 MT_BUG_ON(mt, mas.node != MAS_NONE);
3044
3045 /* walk: none -> none */
3046 entry = mas_walk(&mas);
3047 MT_BUG_ON(mt, entry != NULL);
3048 MT_BUG_ON(mt, mas.index != 1);
3049 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3050 MT_BUG_ON(mt, mas.node != MAS_NONE);
3051
3052 /* walk: start -> root */
3053 mas_set(&mas, 0);
3054 entry = mas_walk(&mas);
3055 MT_BUG_ON(mt, entry != ptr);
3056 MT_BUG_ON(mt, mas.index != 0);
3057 MT_BUG_ON(mt, mas.last != 0);
3058 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3059
3060 /* walk: pause -> root */
3061 mas_set(&mas, 0);
3062 mas_pause(&mas);
3063 entry = mas_walk(&mas);
3064 MT_BUG_ON(mt, entry != ptr);
3065 MT_BUG_ON(mt, mas.index != 0);
3066 MT_BUG_ON(mt, mas.last != 0);
3067 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3068
3069 /* walk: none -> root */
3070 mas.node = MAS_NONE;
3071 entry = mas_walk(&mas);
3072 MT_BUG_ON(mt, entry != ptr);
3073 MT_BUG_ON(mt, mas.index != 0);
3074 MT_BUG_ON(mt, mas.last != 0);
3075 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3076
3077 /* walk: root -> root */
3078 entry = mas_walk(&mas);
3079 MT_BUG_ON(mt, entry != ptr);
3080 MT_BUG_ON(mt, mas.index != 0);
3081 MT_BUG_ON(mt, mas.last != 0);
3082 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3083
3084 /* walk: root -> none */
3085 mas_set(&mas, 10);
3086 entry = mas_walk(&mas);
3087 MT_BUG_ON(mt, entry != NULL);
3088 MT_BUG_ON(mt, mas.index != 1);
3089 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3090 MT_BUG_ON(mt, mas.node != MAS_NONE);
3091
3092 /* walk: none -> root */
3093 mas.index = mas.last = 0;
3094 entry = mas_walk(&mas);
3095 MT_BUG_ON(mt, entry != ptr);
3096 MT_BUG_ON(mt, mas.index != 0);
3097 MT_BUG_ON(mt, mas.last != 0);
3098 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3099
3100 mas_unlock(&mas);
3101
3102 /* Check when there is an actual node */
3103 mtree_store_range(mt, 0, 0, NULL, GFP_KERNEL);
3104 mtree_store_range(mt, 0x1000, 0x1500, ptr, GFP_KERNEL);
3105 mtree_store_range(mt, 0x2000, 0x2500, ptr2, GFP_KERNEL);
3106 mtree_store_range(mt, 0x3000, 0x3500, ptr3, GFP_KERNEL);
3107
3108 mas_lock(&mas);
3109
3110 /* next: start ->active */
3111 mas_set(&mas, 0);
3112 entry = mas_next(&mas, ULONG_MAX);
3113 MT_BUG_ON(mt, entry != ptr);
3114 MT_BUG_ON(mt, mas.index != 0x1000);
3115 MT_BUG_ON(mt, mas.last != 0x1500);
3116 MT_BUG_ON(mt, !mas_active(mas));
3117
3118 /* next: pause ->active */
3119 mas_set(&mas, 0);
3120 mas_pause(&mas);
3121 entry = mas_next(&mas, ULONG_MAX);
3122 MT_BUG_ON(mt, entry != ptr);
3123 MT_BUG_ON(mt, mas.index != 0x1000);
3124 MT_BUG_ON(mt, mas.last != 0x1500);
3125 MT_BUG_ON(mt, !mas_active(mas));
3126
3127 /* next: none ->active */
3128 mas.index = mas.last = 0;
3129 mas.offset = 0;
3130 mas.node = MAS_NONE;
3131 entry = mas_next(&mas, ULONG_MAX);
3132 MT_BUG_ON(mt, entry != ptr);
3133 MT_BUG_ON(mt, mas.index != 0x1000);
3134 MT_BUG_ON(mt, mas.last != 0x1500);
3135 MT_BUG_ON(mt, !mas_active(mas));
3136
3137 /* next:active ->active */
3138 entry = mas_next(&mas, ULONG_MAX);
3139 MT_BUG_ON(mt, entry != ptr2);
3140 MT_BUG_ON(mt, mas.index != 0x2000);
3141 MT_BUG_ON(mt, mas.last != 0x2500);
3142 MT_BUG_ON(mt, !mas_active(mas));
3143
3144 /* next:active -> active out of range*/
3145 entry = mas_next(&mas, 0x2999);
3146 MT_BUG_ON(mt, entry != NULL);
3147 MT_BUG_ON(mt, mas.index != 0x2501);
3148 MT_BUG_ON(mt, mas.last != 0x2fff);
3149 MT_BUG_ON(mt, !mas_active(mas));
3150
3151 /* Continue after out of range*/
3152 entry = mas_next(&mas, ULONG_MAX);
3153 MT_BUG_ON(mt, entry != ptr3);
3154 MT_BUG_ON(mt, mas.index != 0x3000);
3155 MT_BUG_ON(mt, mas.last != 0x3500);
3156 MT_BUG_ON(mt, !mas_active(mas));
3157
3158 /* next:active -> active out of range*/
3159 entry = mas_next(&mas, ULONG_MAX);
3160 MT_BUG_ON(mt, entry != NULL);
3161 MT_BUG_ON(mt, mas.index != 0x3501);
3162 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3163 MT_BUG_ON(mt, !mas_active(mas));
3164
3165 /* next: none -> active, skip value at location */
3166 mas_set(&mas, 0);
3167 entry = mas_next(&mas, ULONG_MAX);
3168 mas.node = MAS_NONE;
3169 mas.offset = 0;
3170 entry = mas_next(&mas, ULONG_MAX);
3171 MT_BUG_ON(mt, entry != ptr2);
3172 MT_BUG_ON(mt, mas.index != 0x2000);
3173 MT_BUG_ON(mt, mas.last != 0x2500);
3174 MT_BUG_ON(mt, !mas_active(mas));
3175
3176 /* prev:active ->active */
3177 entry = mas_prev(&mas, 0);
3178 MT_BUG_ON(mt, entry != ptr);
3179 MT_BUG_ON(mt, mas.index != 0x1000);
3180 MT_BUG_ON(mt, mas.last != 0x1500);
3181 MT_BUG_ON(mt, !mas_active(mas));
3182
3183 /* prev:active -> active out of range*/
3184 entry = mas_prev(&mas, 0);
3185 MT_BUG_ON(mt, entry != NULL);
3186 MT_BUG_ON(mt, mas.index != 0);
3187 MT_BUG_ON(mt, mas.last != 0x0FFF);
3188 MT_BUG_ON(mt, !mas_active(mas));
3189
3190 /* prev: pause ->active */
3191 mas_set(&mas, 0x3600);
3192 entry = mas_prev(&mas, 0);
3193 MT_BUG_ON(mt, entry != ptr3);
3194 mas_pause(&mas);
3195 entry = mas_prev(&mas, 0);
3196 MT_BUG_ON(mt, entry != ptr2);
3197 MT_BUG_ON(mt, mas.index != 0x2000);
3198 MT_BUG_ON(mt, mas.last != 0x2500);
3199 MT_BUG_ON(mt, !mas_active(mas));
3200
3201 /* prev:active -> active out of range*/
3202 entry = mas_prev(&mas, 0x1600);
3203 MT_BUG_ON(mt, entry != NULL);
3204 MT_BUG_ON(mt, mas.index != 0x1501);
3205 MT_BUG_ON(mt, mas.last != 0x1FFF);
3206 MT_BUG_ON(mt, !mas_active(mas));
3207
3208 /* prev: active ->active, continue*/
3209 entry = mas_prev(&mas, 0);
3210 MT_BUG_ON(mt, entry != ptr);
3211 MT_BUG_ON(mt, mas.index != 0x1000);
3212 MT_BUG_ON(mt, mas.last != 0x1500);
3213 MT_BUG_ON(mt, !mas_active(mas));
3214
3215 /* find: start ->active */
3216 mas_set(&mas, 0);
3217 entry = mas_find(&mas, ULONG_MAX);
3218 MT_BUG_ON(mt, entry != ptr);
3219 MT_BUG_ON(mt, mas.index != 0x1000);
3220 MT_BUG_ON(mt, mas.last != 0x1500);
3221 MT_BUG_ON(mt, !mas_active(mas));
3222
3223 /* find: pause ->active */
3224 mas_set(&mas, 0);
3225 mas_pause(&mas);
3226 entry = mas_find(&mas, ULONG_MAX);
3227 MT_BUG_ON(mt, entry != ptr);
3228 MT_BUG_ON(mt, mas.index != 0x1000);
3229 MT_BUG_ON(mt, mas.last != 0x1500);
3230 MT_BUG_ON(mt, !mas_active(mas));
3231
3232	/* find: start ->active on value */
3233 mas_set(&mas, 1200);
3234 entry = mas_find(&mas, ULONG_MAX);
3235 MT_BUG_ON(mt, entry != ptr);
3236 MT_BUG_ON(mt, mas.index != 0x1000);
3237 MT_BUG_ON(mt, mas.last != 0x1500);
3238 MT_BUG_ON(mt, !mas_active(mas));
3239
3240 /* find:active ->active */
3241 entry = mas_find(&mas, ULONG_MAX);
3242 MT_BUG_ON(mt, entry != ptr2);
3243 MT_BUG_ON(mt, mas.index != 0x2000);
3244 MT_BUG_ON(mt, mas.last != 0x2500);
3245 MT_BUG_ON(mt, !mas_active(mas));
3246
3247
3248 /* find:active -> active (NULL)*/
3249 entry = mas_find(&mas, 0x2700);
3250 MT_BUG_ON(mt, entry != NULL);
3251 MT_BUG_ON(mt, mas.index != 0x2501);
3252 MT_BUG_ON(mt, mas.last != 0x2FFF);
3253 MT_BUG_ON(mt, !mas_active(mas));
3254
3255 /* find: none ->active */
3256 entry = mas_find(&mas, 0x5000);
3257 MT_BUG_ON(mt, entry != ptr3);
3258 MT_BUG_ON(mt, mas.index != 0x3000);
3259 MT_BUG_ON(mt, mas.last != 0x3500);
3260 MT_BUG_ON(mt, !mas_active(mas));
3261
3262 /* find:active -> active (NULL) end*/
3263 entry = mas_find(&mas, ULONG_MAX);
3264 MT_BUG_ON(mt, entry != NULL);
3265 MT_BUG_ON(mt, mas.index != 0x3501);
3266 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3267 MT_BUG_ON(mt, !mas_active(mas));
3268
3269 /* find_rev: active (END) ->active */
3270 entry = mas_find_rev(&mas, 0);
3271 MT_BUG_ON(mt, entry != ptr3);
3272 MT_BUG_ON(mt, mas.index != 0x3000);
3273 MT_BUG_ON(mt, mas.last != 0x3500);
3274 MT_BUG_ON(mt, !mas_active(mas));
3275
3276 /* find_rev:active ->active */
3277 entry = mas_find_rev(&mas, 0);
3278 MT_BUG_ON(mt, entry != ptr2);
3279 MT_BUG_ON(mt, mas.index != 0x2000);
3280 MT_BUG_ON(mt, mas.last != 0x2500);
3281 MT_BUG_ON(mt, !mas_active(mas));
3282
3283 /* find_rev: pause ->active */
3284 mas_pause(&mas);
3285 entry = mas_find_rev(&mas, 0);
3286 MT_BUG_ON(mt, entry != ptr);
3287 MT_BUG_ON(mt, mas.index != 0x1000);
3288 MT_BUG_ON(mt, mas.last != 0x1500);
3289 MT_BUG_ON(mt, !mas_active(mas));
3290
3291 /* find_rev:active -> active */
3292 entry = mas_find_rev(&mas, 0);
3293 MT_BUG_ON(mt, entry != NULL);
3294 MT_BUG_ON(mt, mas.index != 0);
3295 MT_BUG_ON(mt, mas.last != 0x0FFF);
3296 MT_BUG_ON(mt, !mas_active(mas));
3297
3298 /* find_rev: start ->active */
3299 mas_set(&mas, 0x1200);
3300 entry = mas_find_rev(&mas, 0);
3301 MT_BUG_ON(mt, entry != ptr);
3302 MT_BUG_ON(mt, mas.index != 0x1000);
3303 MT_BUG_ON(mt, mas.last != 0x1500);
3304 MT_BUG_ON(mt, !mas_active(mas));
3305
3306 /* mas_walk start ->active */
3307 mas_set(&mas, 0x1200);
3308 entry = mas_walk(&mas);
3309 MT_BUG_ON(mt, entry != ptr);
3310 MT_BUG_ON(mt, mas.index != 0x1000);
3311 MT_BUG_ON(mt, mas.last != 0x1500);
3312 MT_BUG_ON(mt, !mas_active(mas));
3313
3314 /* mas_walk start ->active */
3315 mas_set(&mas, 0x1600);
3316 entry = mas_walk(&mas);
3317 MT_BUG_ON(mt, entry != NULL);
3318 MT_BUG_ON(mt, mas.index != 0x1501);
3319 MT_BUG_ON(mt, mas.last != 0x1fff);
3320 MT_BUG_ON(mt, !mas_active(mas));
3321
3322 /* mas_walk pause ->active */
3323 mas_set(&mas, 0x1200);
3324 mas_pause(&mas);
3325 entry = mas_walk(&mas);
3326 MT_BUG_ON(mt, entry != ptr);
3327 MT_BUG_ON(mt, mas.index != 0x1000);
3328 MT_BUG_ON(mt, mas.last != 0x1500);
3329 MT_BUG_ON(mt, !mas_active(mas));
3330
3331 /* mas_walk pause -> active */
3332 mas_set(&mas, 0x1600);
3333 mas_pause(&mas);
3334 entry = mas_walk(&mas);
3335 MT_BUG_ON(mt, entry != NULL);
3336 MT_BUG_ON(mt, mas.index != 0x1501);
3337 MT_BUG_ON(mt, mas.last != 0x1fff);
3338 MT_BUG_ON(mt, !mas_active(mas));
3339
3340 /* mas_walk none -> active */
3341 mas_set(&mas, 0x1200);
3342 mas.node = MAS_NONE;
3343 entry = mas_walk(&mas);
3344 MT_BUG_ON(mt, entry != ptr);
3345 MT_BUG_ON(mt, mas.index != 0x1000);
3346 MT_BUG_ON(mt, mas.last != 0x1500);
3347 MT_BUG_ON(mt, !mas_active(mas));
3348
3349 /* mas_walk none -> active */
3350 mas_set(&mas, 0x1600);
3351 mas.node = MAS_NONE;
3352 entry = mas_walk(&mas);
3353 MT_BUG_ON(mt, entry != NULL);
3354 MT_BUG_ON(mt, mas.index != 0x1501);
3355 MT_BUG_ON(mt, mas.last != 0x1fff);
3356 MT_BUG_ON(mt, !mas_active(mas));
3357
3358 /* mas_walk active -> active */
3359 mas.index = 0x1200;
3360 mas.last = 0x1200;
3361 mas.offset = 0;
3362 entry = mas_walk(&mas);
3363 MT_BUG_ON(mt, entry != ptr);
3364 MT_BUG_ON(mt, mas.index != 0x1000);
3365 MT_BUG_ON(mt, mas.last != 0x1500);
3366 MT_BUG_ON(mt, !mas_active(mas));
3367
3368 /* mas_walk active -> active */
3369 mas.index = 0x1600;
3370 mas.last = 0x1600;
3371 entry = mas_walk(&mas);
3372 MT_BUG_ON(mt, entry != NULL);
3373 MT_BUG_ON(mt, mas.index != 0x1501);
3374 MT_BUG_ON(mt, mas.last != 0x1fff);
3375 MT_BUG_ON(mt, !mas_active(mas));
3376
3377 mas_unlock(&mas);
3378}
3379
3380static DEFINE_MTREE(tree);
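/*
 * maple_tree_seed() - Module init: run each check_*() test (or a single
 * benchmark when one of the BENCH_* macros is defined) and report how many
 * tests passed.
 */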
3381static int __init maple_tree_seed(void)
3382{
3383 unsigned long set[] = { 5015, 5014, 5017, 25, 1000,
3384 1001, 1002, 1003, 1005, 0,
3385 5003, 5002};
3386 void *ptr = &set;
3387
3388 pr_info("\nTEST STARTING\n\n");
3389
3390 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3391 check_root_expand(&tree);
3392 mtree_destroy(&tree);
3393
3394#if defined(BENCH_SLOT_STORE)
3395#define BENCH
3396 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3397 bench_slot_store(&tree);
3398 mtree_destroy(&tree);
3399 goto skip;
3400#endif
3401#if defined(BENCH_NODE_STORE)
3402#define BENCH
3403 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3404 bench_node_store(&tree);
3405 mtree_destroy(&tree);
3406 goto skip;
3407#endif
3408#if defined(BENCH_AWALK)
3409#define BENCH
3410 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3411 bench_awalk(&tree);
3412 mtree_destroy(&tree);
3413 goto skip;
3414#endif
3415#if defined(BENCH_WALK)
3416#define BENCH
3417 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3418 bench_walk(&tree);
3419 mtree_destroy(&tree);
3420 goto skip;
3421#endif
3422#if defined(BENCH_FORK)
3423#define BENCH
3424 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3425 bench_forking(&tree);
3426 mtree_destroy(&tree);
3427 goto skip;
3428#endif
3429#if defined(BENCH_MT_FOR_EACH)
3430#define BENCH
3431 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3432 bench_mt_for_each(&tree);
3433 mtree_destroy(&tree);
3434 goto skip;
3435#endif
3436
3437 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3438 check_iteration(&tree);
3439 mtree_destroy(&tree);
3440
3441 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3442 check_forking(&tree);
3443 mtree_destroy(&tree);
3444
3445 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3446 check_mas_store_gfp(&tree);
3447 mtree_destroy(&tree);
3448
3449 /* Test ranges (store and insert) */
3450 mt_init_flags(&tree, 0);
3451 check_ranges(&tree);
3452 mtree_destroy(&tree);
3453
3454#if defined(CONFIG_64BIT)
3455	/* These tests have ranges outside of 4GB */
3456 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3457 check_alloc_range(&tree);
3458 mtree_destroy(&tree);
3459
3460 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3461 check_alloc_rev_range(&tree);
3462 mtree_destroy(&tree);
3463#endif
3464
3465 mt_init_flags(&tree, 0);
3466
3467 check_load(&tree, set[0], NULL); /* See if 5015 -> NULL */
3468
3469 check_insert(&tree, set[9], &tree); /* Insert 0 */
3470 check_load(&tree, set[9], &tree); /* See if 0 -> &tree */
3471 check_load(&tree, set[0], NULL); /* See if 5015 -> NULL */
3472
3473 check_insert(&tree, set[10], ptr); /* Insert 5003 */
3474 check_load(&tree, set[9], &tree); /* See if 0 -> &tree */
3475 check_load(&tree, set[11], NULL); /* See if 5002 -> NULL */
3476 check_load(&tree, set[10], ptr); /* See if 5003 -> ptr */
3477
3478 /* Clear out the tree */
3479 mtree_destroy(&tree);
3480
3481 /* Try to insert, insert a dup, and load back what was inserted. */
3482 mt_init_flags(&tree, 0);
3483 check_insert(&tree, set[0], &tree); /* Insert 5015 */
3484 check_dup_insert(&tree, set[0], &tree); /* Insert 5015 again */
3485 check_load(&tree, set[0], &tree); /* See if 5015 -> &tree */
3486
3487 /*
3488 * Second set of tests try to load a value that doesn't exist, inserts
3489 * a second value, then loads the value again
3490 */
3491 check_load(&tree, set[1], NULL); /* See if 5014 -> NULL */
3492 check_insert(&tree, set[1], ptr); /* insert 5014 -> ptr */
3493 check_load(&tree, set[1], ptr); /* See if 5014 -> ptr */
3494 check_load(&tree, set[0], &tree); /* See if 5015 -> &tree */
3495 /*
3496 * Tree currently contains:
3497 * p[0]: 14 -> (nil) p[1]: 15 -> ptr p[2]: 16 -> &tree p[3]: 0 -> (nil)
3498 */
3499 check_insert(&tree, set[6], ptr); /* insert 1002 -> ptr */
3500 check_insert(&tree, set[7], &tree); /* insert 1003 -> &tree */
3501
3502 check_load(&tree, set[0], &tree); /* See if 5015 -> &tree */
3503 check_load(&tree, set[1], ptr); /* See if 5014 -> ptr */
3504 check_load(&tree, set[6], ptr); /* See if 1002 -> ptr */
3505 check_load(&tree, set[7], &tree); /* 1003 = &tree ? */
3506
3507 /* Clear out tree */
3508 mtree_destroy(&tree);
3509
3510 mt_init_flags(&tree, 0);
3511 /* Test inserting into a NULL hole. */
3512 check_insert(&tree, set[5], ptr); /* insert 1001 -> ptr */
3513 check_insert(&tree, set[7], &tree); /* insert 1003 -> &tree */
3514 check_insert(&tree, set[6], ptr); /* insert 1002 -> ptr */
3515 check_load(&tree, set[5], ptr); /* See if 1001 -> ptr */
3516 check_load(&tree, set[6], ptr); /* See if 1002 -> ptr */
3517 check_load(&tree, set[7], &tree); /* See if 1003 -> &tree */
3518
3519 /* Clear out the tree */
3520 mtree_destroy(&tree);
3521
3522 mt_init_flags(&tree, 0);
3523 /*
3524 * set[] = {5015, 5014, 5017, 25, 1000,
3525 * 1001, 1002, 1003, 1005, 0,
3526 * 5003, 5002};
3527 */
3528
3529 check_insert(&tree, set[0], ptr); /* 5015 */
3530 check_insert(&tree, set[1], &tree); /* 5014 */
3531 check_insert(&tree, set[2], ptr); /* 5017 */
3532 check_insert(&tree, set[3], &tree); /* 25 */
3533 check_load(&tree, set[0], ptr);
3534 check_load(&tree, set[1], &tree);
3535 check_load(&tree, set[2], ptr);
3536 check_load(&tree, set[3], &tree);
3537 check_insert(&tree, set[4], ptr); /* 1000 < Should split. */
3538 check_load(&tree, set[0], ptr);
3539 check_load(&tree, set[1], &tree);
3540 check_load(&tree, set[2], ptr);
3541 check_load(&tree, set[3], &tree); /*25 */
3542 check_load(&tree, set[4], ptr);
3543 check_insert(&tree, set[5], &tree); /* 1001 */
3544 check_load(&tree, set[0], ptr);
3545 check_load(&tree, set[1], &tree);
3546 check_load(&tree, set[2], ptr);
3547 check_load(&tree, set[3], &tree);
3548 check_load(&tree, set[4], ptr);
3549 check_load(&tree, set[5], &tree);
3550 check_insert(&tree, set[6], ptr);
3551 check_load(&tree, set[0], ptr);
3552 check_load(&tree, set[1], &tree);
3553 check_load(&tree, set[2], ptr);
3554 check_load(&tree, set[3], &tree);
3555 check_load(&tree, set[4], ptr);
3556 check_load(&tree, set[5], &tree);
3557 check_load(&tree, set[6], ptr);
3558 check_insert(&tree, set[7], &tree);
3559 check_load(&tree, set[0], ptr);
3560 check_insert(&tree, set[8], ptr);
3561
3562 check_insert(&tree, set[9], &tree);
3563
3564 check_load(&tree, set[0], ptr);
3565 check_load(&tree, set[1], &tree);
3566 check_load(&tree, set[2], ptr);
3567 check_load(&tree, set[3], &tree);
3568 check_load(&tree, set[4], ptr);
3569 check_load(&tree, set[5], &tree);
3570 check_load(&tree, set[6], ptr);
3571 check_load(&tree, set[9], &tree);
3572 mtree_destroy(&tree);
3573
3574 mt_init_flags(&tree, 0);
3575 check_seq(&tree, 16, false);
3576 mtree_destroy(&tree);
3577
3578 mt_init_flags(&tree, 0);
3579 check_seq(&tree, 1000, true);
3580 mtree_destroy(&tree);
3581
3582 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3583 check_rev_seq(&tree, 1000, true);
3584 mtree_destroy(&tree);
3585
3586 check_lower_bound_split(&tree);
3587 check_upper_bound_split(&tree);
3588 check_mid_split(&tree);
3589
3590 mt_init_flags(&tree, 0);
3591 check_next_entry(&tree);
3592 check_find(&tree);
3593 check_find_2(&tree);
3594 mtree_destroy(&tree);
3595
3596 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3597 check_prev_entry(&tree);
3598 mtree_destroy(&tree);
3599
3600 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3601 check_gap_combining(&tree);
3602 mtree_destroy(&tree);
3603
3604 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3605 check_node_overwrite(&tree);
3606 mtree_destroy(&tree);
3607
3608 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3609 next_prev_test(&tree);
3610 mtree_destroy(&tree);
3611
3612 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3613 check_spanning_relatives(&tree);
3614 mtree_destroy(&tree);
3615
3616 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3617 check_rev_find(&tree);
3618 mtree_destroy(&tree);
3619
3620 mt_init_flags(&tree, 0);
3621 check_fuzzer(&tree);
3622 mtree_destroy(&tree);
3623
3624 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3625 check_dup(&tree);
3626 mtree_destroy(&tree);
3627
3628 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3629 check_bnode_min_spanning(&tree);
3630 mtree_destroy(&tree);
3631
3632 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3633 check_empty_area_window(&tree);
3634 mtree_destroy(&tree);
3635
3636 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3637 check_empty_area_fill(&tree);
3638 mtree_destroy(&tree);
3639
3640
3641 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3642 check_state_handling(&tree);
3643 mtree_destroy(&tree);
3644
3645#if defined(BENCH)
3646skip:
3647#endif
3648 rcu_barrier();
3649 pr_info("maple_tree: %u of %u tests passed\n",
3650 atomic_read(&maple_tree_tests_passed),
3651 atomic_read(&maple_tree_tests_run));
3652 if (atomic_read(&maple_tree_tests_run) ==
3653 atomic_read(&maple_tree_tests_passed))
3654 return 0;
3655
3656 return -EINVAL;
3657}
3658
3659static void __exit maple_tree_harvest(void)
3660{
3661
3662}
3663
3664module_init(maple_tree_seed);
3665module_exit(maple_tree_harvest);
3666MODULE_AUTHOR("Liam R. Howlett <Liam.Howlett@Oracle.com>");
3667MODULE_LICENSE("GPL");