maple_tree: add benchmarking for mas_for_each
lib/test_maple_tree.c
1// SPDX-License-Identifier: GPL-2.0+
2/*
3 * test_maple_tree.c: Test the maple tree API
120b1162 4 * Copyright (c) 2018-2022 Oracle Corporation
e15e06a8 5 * Author: Liam R. Howlett <Liam.Howlett@Oracle.com>
6 *
7 * Any tests that only require the interface of the tree.
8 */
9
10#include <linux/maple_tree.h>
11#include <linux/module.h>
12
13#define MTREE_ALLOC_MAX 0x2000000000000Ul
e15e06a8 14#define CONFIG_MAPLE_SEARCH
15#define MAPLE_32BIT (MAPLE_NODE_SLOTS > 31)
16
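/*
 * Without CONFIG_DEBUG_MAPLE_TREE the dump/validate helpers below compile
 * away and MT_BUG_ON() only counts and logs failures instead of calling BUG().
 */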
17#ifndef CONFIG_DEBUG_MAPLE_TREE
18#define mt_dump(mt, fmt) do {} while (0)
19#define mt_validate(mt) do {} while (0)
20#define mt_cache_shrink() do {} while (0)
21#define mas_dump(mas) do {} while (0)
22#define mas_wr_dump(mas) do {} while (0)
23atomic_t maple_tree_tests_run;
24atomic_t maple_tree_tests_passed;
25#undef MT_BUG_ON
26
27#define MT_BUG_ON(__tree, __x) do { \
28 atomic_inc(&maple_tree_tests_run); \
29 if (__x) { \
30 pr_info("BUG at %s:%d (%u)\n", \
31 __func__, __LINE__, __x); \
32 pr_info("Pass: %u Run:%u\n", \
33 atomic_read(&maple_tree_tests_passed), \
34 atomic_read(&maple_tree_tests_run)); \
35 } else { \
36 atomic_inc(&maple_tree_tests_passed); \
37 } \
38} while (0)
39#endif
40
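/*
 * Optional benchmarks: uncomment one of the defines below to compile in the
 * corresponding bench_*() routine.
 */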
41/* #define BENCH_SLOT_STORE */
42/* #define BENCH_NODE_STORE */
43/* #define BENCH_AWALK */
44/* #define BENCH_WALK */
45/* #define BENCH_MT_FOR_EACH */
46/* #define BENCH_FORK */
361c678b 47/* #define BENCH_MAS_FOR_EACH */
48
49#ifdef __KERNEL__
50#define mt_set_non_kernel(x) do {} while (0)
51#define mt_zero_nr_tallocated(x) do {} while (0)
52#else
53#define cond_resched() do {} while (0)
54#endif
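/* Test helpers: thin wrappers around the mtree_*() API used by the checks below. */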
55static int __init mtree_insert_index(struct maple_tree *mt,
56 unsigned long index, gfp_t gfp)
57{
58 return mtree_insert(mt, index, xa_mk_value(index & LONG_MAX), gfp);
59}
60
eaf9790d 61static void __init mtree_erase_index(struct maple_tree *mt, unsigned long index)
62{
63 MT_BUG_ON(mt, mtree_erase(mt, index) != xa_mk_value(index & LONG_MAX));
64 MT_BUG_ON(mt, mtree_load(mt, index) != NULL);
65}
66
eaf9790d 67static int __init mtree_test_insert(struct maple_tree *mt, unsigned long index,
68 void *ptr)
69{
70 return mtree_insert(mt, index, ptr, GFP_KERNEL);
71}
72
73static int __init mtree_test_store_range(struct maple_tree *mt,
74 unsigned long start, unsigned long end, void *ptr)
75{
76 return mtree_store_range(mt, start, end, ptr, GFP_KERNEL);
77}
78
eaf9790d 79static int __init mtree_test_store(struct maple_tree *mt, unsigned long start,
80 void *ptr)
81{
82 return mtree_test_store_range(mt, start, start, ptr);
83}
84
85static int __init mtree_test_insert_range(struct maple_tree *mt,
86 unsigned long start, unsigned long end, void *ptr)
87{
88 return mtree_insert_range(mt, start, end, ptr, GFP_KERNEL);
89}
90
eaf9790d 91static void __init *mtree_test_load(struct maple_tree *mt, unsigned long index)
92{
93 return mtree_load(mt, index);
94}
95
eaf9790d 96static void __init *mtree_test_erase(struct maple_tree *mt, unsigned long index)
97{
98 return mtree_erase(mt, index);
99}
100
120b1162 101#if defined(CONFIG_64BIT)
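/*
 * Request an allocation of @size within [start, end] and verify both the
 * return code and, on success, the resulting index.
 */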
eaf9790d 102static noinline void __init check_mtree_alloc_range(struct maple_tree *mt,
103 unsigned long start, unsigned long end, unsigned long size,
104 unsigned long expected, int eret, void *ptr)
105{
106
107 unsigned long result = expected + 1;
108 int ret;
109
110 ret = mtree_alloc_range(mt, &result, ptr, size, start, end,
111 GFP_KERNEL);
112 MT_BUG_ON(mt, ret != eret);
113 if (ret)
114 return;
115
116 MT_BUG_ON(mt, result != expected);
117}
118
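/* As above, but via mtree_alloc_rrange(), which searches for the gap top-down. */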
eaf9790d 119static noinline void __init check_mtree_alloc_rrange(struct maple_tree *mt,
120 unsigned long start, unsigned long end, unsigned long size,
121 unsigned long expected, int eret, void *ptr)
122{
123
124 unsigned long result = expected + 1;
125 int ret;
126
ba997212 127 ret = mtree_alloc_rrange(mt, &result, ptr, size, start, end,
128 GFP_KERNEL);
129 MT_BUG_ON(mt, ret != eret);
130 if (ret)
131 return;
132
133 MT_BUG_ON(mt, result != expected);
134}
120b1162 135#endif
e15e06a8 136
137static noinline void __init check_load(struct maple_tree *mt,
138 unsigned long index, void *ptr)
139{
140 void *ret = mtree_test_load(mt, index);
141
142 if (ret != ptr)
143 pr_err("Load %lu returned %p expect %p\n", index, ret, ptr);
144 MT_BUG_ON(mt, ret != ptr);
145}
146
eaf9790d 147static noinline void __init check_store_range(struct maple_tree *mt,
148 unsigned long start, unsigned long end, void *ptr, int expected)
149{
150 int ret = -EINVAL;
151 unsigned long i;
152
153 ret = mtree_test_store_range(mt, start, end, ptr);
154 MT_BUG_ON(mt, ret != expected);
155
156 if (ret)
157 return;
158
159 for (i = start; i <= end; i++)
160 check_load(mt, i, ptr);
161}
162
eaf9790d 163static noinline void __init check_insert_range(struct maple_tree *mt,
164 unsigned long start, unsigned long end, void *ptr, int expected)
165{
166 int ret = -EINVAL;
167 unsigned long i;
168
169 ret = mtree_test_insert_range(mt, start, end, ptr);
170 MT_BUG_ON(mt, ret != expected);
171
172 if (ret)
173 return;
174
175 for (i = start; i <= end; i++)
176 check_load(mt, i, ptr);
177}
178
179static noinline void __init check_insert(struct maple_tree *mt,
180 unsigned long index, void *ptr)
181{
182 int ret = -EINVAL;
183
184 ret = mtree_test_insert(mt, index, ptr);
185 MT_BUG_ON(mt, ret != 0);
186}
187
eaf9790d 188static noinline void __init check_dup_insert(struct maple_tree *mt,
189 unsigned long index, void *ptr)
190{
191 int ret = -EINVAL;
192
193 ret = mtree_test_insert(mt, index, ptr);
194 MT_BUG_ON(mt, ret != -EEXIST);
195}
196
197
198static noinline void __init check_index_load(struct maple_tree *mt,
199 unsigned long index)
200{
201 return check_load(mt, index, xa_mk_value(index & LONG_MAX));
202}
203
eaf9790d 204static inline __init int not_empty(struct maple_node *node)
205{
206 int i;
207
208 if (node->parent)
209 return 1;
210
211 for (i = 0; i < ARRAY_SIZE(node->slot); i++)
212 if (node->slot[i])
213 return 1;
214
215 return 0;
216}
217
e15e06a8 218
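/*
 * Build the tree by inserting singletons from max down to 1, verifying all
 * previously inserted indices after each insert.
 */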
219static noinline void __init check_rev_seq(struct maple_tree *mt,
220 unsigned long max, bool verbose)
221{
222 unsigned long i = max, j;
223
224 MT_BUG_ON(mt, !mtree_empty(mt));
225
226 mt_zero_nr_tallocated();
227 while (i) {
228 MT_BUG_ON(mt, mtree_insert_index(mt, i, GFP_KERNEL));
229 for (j = i; j <= max; j++)
230 check_index_load(mt, j);
231
232 check_load(mt, i - 1, NULL);
233 mt_set_in_rcu(mt);
234 MT_BUG_ON(mt, !mt_height(mt));
235 mt_clear_in_rcu(mt);
236 MT_BUG_ON(mt, !mt_height(mt));
237 i--;
238 }
239 check_load(mt, max + 1, NULL);
240
120b1162 241#ifndef __KERNEL__
242 if (verbose) {
243 rcu_barrier();
89f499f3 244 mt_dump(mt, mt_dump_dec);
245 pr_info(" %s test of 0-%lu %luK in %d active (%d total)\n",
246 __func__, max, mt_get_alloc_size()/1024, mt_nr_allocated(),
247 mt_nr_tallocated());
248 }
120b1162 249#endif
250}
251
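/*
 * Build the tree by inserting singletons from 0 up to max, verifying all
 * previously inserted indices after each insert.
 */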
eaf9790d 252static noinline void __init check_seq(struct maple_tree *mt, unsigned long max,
253 bool verbose)
254{
255 unsigned long i, j;
256
257 MT_BUG_ON(mt, !mtree_empty(mt));
258
259 mt_zero_nr_tallocated();
260 for (i = 0; i <= max; i++) {
261 MT_BUG_ON(mt, mtree_insert_index(mt, i, GFP_KERNEL));
262 for (j = 0; j <= i; j++)
263 check_index_load(mt, j);
264
265 if (i)
266 MT_BUG_ON(mt, !mt_height(mt));
267 check_load(mt, i + 1, NULL);
268 }
269
270#ifndef __KERNEL__
271 if (verbose) {
272 rcu_barrier();
89f499f3 273 mt_dump(mt, mt_dump_dec);
274 pr_info(" seq test of 0-%lu %luK in %d active (%d total)\n",
275 max, mt_get_alloc_size()/1024, mt_nr_allocated(),
276 mt_nr_tallocated());
277 }
120b1162 278#endif
279}
280
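/*
 * Insert indices from ~4 billion down to 4096, halving each step, and check
 * that each entry and its empty neighbours load correctly.
 */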
eaf9790d 281static noinline void __init check_lb_not_empty(struct maple_tree *mt)
282{
283 unsigned long i, j;
284 unsigned long huge = 4000UL * 1000 * 1000;
285
286
287 i = huge;
288 while (i > 4096) {
289 check_insert(mt, i, (void *) i);
290 for (j = huge; j >= i; j /= 2) {
291 check_load(mt, j-1, NULL);
292 check_load(mt, j, (void *) j);
293 check_load(mt, j+1, NULL);
294 }
295 i /= 2;
296 }
297 mtree_destroy(mt);
298}
299
eaf9790d 300static noinline void __init check_lower_bound_split(struct maple_tree *mt)
301{
302 MT_BUG_ON(mt, !mtree_empty(mt));
303 check_lb_not_empty(mt);
304}
305
eaf9790d 306static noinline void __init check_upper_bound_split(struct maple_tree *mt)
307{
308 unsigned long i, j;
120b1162 309 unsigned long huge;
310
311 MT_BUG_ON(mt, !mtree_empty(mt));
312
313 if (MAPLE_32BIT)
314 huge = 2147483647UL;
315 else
316 huge = 4000UL * 1000 * 1000;
317
318 i = 4096;
319 while (i < huge) {
320 check_insert(mt, i, (void *) i);
321 for (j = i; j >= huge; j *= 2) {
322 check_load(mt, j-1, NULL);
323 check_load(mt, j, (void *) j);
324 check_load(mt, j+1, NULL);
325 }
326 i *= 2;
327 }
328 mtree_destroy(mt);
329}
330
eaf9790d 331static noinline void __init check_mid_split(struct maple_tree *mt)
332{
333 unsigned long huge = 8000UL * 1000 * 1000;
334
335 check_insert(mt, huge, (void *) huge);
336 check_insert(mt, 0, xa_mk_value(0));
337 check_lb_not_empty(mt);
338}
339
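/*
 * Store ranges [i*10, i*10 + 5] and exercise mas_find_rev() with various
 * index and limit combinations.
 */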
eaf9790d 340static noinline void __init check_rev_find(struct maple_tree *mt)
341{
342 int i, nr_entries = 200;
343 void *val;
344 MA_STATE(mas, mt, 0, 0);
345
346 for (i = 0; i <= nr_entries; i++)
347 mtree_store_range(mt, i*10, i*10 + 5,
348 xa_mk_value(i), GFP_KERNEL);
349
120b1162 350 rcu_read_lock();
351 mas_set(&mas, 1000);
352 val = mas_find_rev(&mas, 1000);
353 MT_BUG_ON(mt, val != xa_mk_value(100));
354 val = mas_find_rev(&mas, 1000);
355 MT_BUG_ON(mt, val != NULL);
356
357 mas_set(&mas, 999);
358 val = mas_find_rev(&mas, 997);
359 MT_BUG_ON(mt, val != NULL);
360
361 mas_set(&mas, 1000);
362 val = mas_find_rev(&mas, 900);
363 MT_BUG_ON(mt, val != xa_mk_value(100));
364 val = mas_find_rev(&mas, 900);
365 MT_BUG_ON(mt, val != xa_mk_value(99));
366
367 mas_set(&mas, 20);
368 val = mas_find_rev(&mas, 0);
369 MT_BUG_ON(mt, val != xa_mk_value(2));
370 val = mas_find_rev(&mas, 0);
371 MT_BUG_ON(mt, val != xa_mk_value(1));
372 val = mas_find_rev(&mas, 0);
373 MT_BUG_ON(mt, val != xa_mk_value(0));
374 val = mas_find_rev(&mas, 0);
375 MT_BUG_ON(mt, val != NULL);
120b1162 376 rcu_read_unlock();
377}
378
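/*
 * Walk power-of-four indices (with one XA_ZERO_ENTRY at 64) through
 * mas_find(), mas_for_each(), mas_pause(), mt_for_each() and mas_prev().
 */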
eaf9790d 379static noinline void __init check_find(struct maple_tree *mt)
380{
381 unsigned long val = 0;
120b1162 382 unsigned long count;
e15e06a8 383 unsigned long max;
120b1162 384 unsigned long top;
385 unsigned long last = 0, index = 0;
386 void *entry, *entry2;
387
388 MA_STATE(mas, mt, 0, 0);
389
390 /* Insert 0. */
391 MT_BUG_ON(mt, mtree_insert_index(mt, val++, GFP_KERNEL));
392
393#if defined(CONFIG_64BIT)
394 top = 4398046511104UL;
395#else
396 top = ULONG_MAX;
397#endif
398
399 if (MAPLE_32BIT) {
400 count = 15;
401 } else {
402 count = 20;
403 }
404
405 for (int i = 0; i <= count; i++) {
406 if (val != 64)
407 MT_BUG_ON(mt, mtree_insert_index(mt, val, GFP_KERNEL));
408 else
409 MT_BUG_ON(mt, mtree_insert(mt, val,
410 XA_ZERO_ENTRY, GFP_KERNEL));
411
412 val <<= 2;
413 }
414
415 val = 0;
416 mas_set(&mas, val);
417 mas_lock(&mas);
418 while ((entry = mas_find(&mas, 268435456)) != NULL) {
419 if (val != 64)
420 MT_BUG_ON(mt, xa_mk_value(val) != entry);
421 else
422 MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
423
424 val <<= 2;
425 /* For zero check. */
426 if (!val)
427 val = 1;
428 }
429 mas_unlock(&mas);
430
431 val = 0;
432 mas_set(&mas, val);
433 mas_lock(&mas);
434 mas_for_each(&mas, entry, ULONG_MAX) {
435 if (val != 64)
436 MT_BUG_ON(mt, xa_mk_value(val) != entry);
437 else
438 MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
439 val <<= 2;
440 /* For zero check. */
441 if (!val)
442 val = 1;
443 }
444 mas_unlock(&mas);
445
446 /* Test mas_pause */
447 val = 0;
448 mas_set(&mas, val);
449 mas_lock(&mas);
450 mas_for_each(&mas, entry, ULONG_MAX) {
451 if (val != 64)
452 MT_BUG_ON(mt, xa_mk_value(val) != entry);
453 else
454 MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
455 val <<= 2;
456 /* For zero check. */
457 if (!val)
458 val = 1;
459
460 mas_pause(&mas);
461 mas_unlock(&mas);
462 mas_lock(&mas);
463 }
464 mas_unlock(&mas);
465
466 val = 0;
467 max = 300; /* A value big enough to include XA_ZERO_ENTRY at 64. */
468 mt_for_each(mt, entry, index, max) {
469 MT_BUG_ON(mt, xa_mk_value(val) != entry);
470 val <<= 2;
471 if (val == 64) /* Skip zero entry. */
472 val <<= 2;
473 /* For zero check. */
474 if (!val)
475 val = 1;
476 }
477
478 val = 0;
479 max = 0;
480 index = 0;
481 MT_BUG_ON(mt, mtree_insert_index(mt, ULONG_MAX, GFP_KERNEL));
482 mt_for_each(mt, entry, index, ULONG_MAX) {
483 if (val == top)
484 MT_BUG_ON(mt, entry != xa_mk_value(LONG_MAX));
485 else
486 MT_BUG_ON(mt, xa_mk_value(val) != entry);
487
488 /* Workaround for 32bit */
489 if ((val << 2) < val)
490 val = ULONG_MAX;
491 else
492 val <<= 2;
493
494 if (val == 64) /* Skip zero entry. */
495 val <<= 2;
496 /* For zero check. */
497 if (!val)
498 val = 1;
499 max++;
500 MT_BUG_ON(mt, max > 25);
501 }
502 mtree_erase_index(mt, ULONG_MAX);
503
504 mas_reset(&mas);
505 index = 17;
506 entry = mt_find(mt, &index, 512);
507 MT_BUG_ON(mt, xa_mk_value(256) != entry);
508
509 mas_reset(&mas);
510 index = 17;
511 entry = mt_find(mt, &index, 20);
512 MT_BUG_ON(mt, entry != NULL);
513
514
515 /* Range check.. */
516 /* Insert ULONG_MAX */
517 MT_BUG_ON(mt, mtree_insert_index(mt, ULONG_MAX, GFP_KERNEL));
518
519 val = 0;
520 mas_set(&mas, 0);
521 mas_lock(&mas);
522 mas_for_each(&mas, entry, ULONG_MAX) {
523 if (val == 64)
524 MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
525 else if (val == top)
526 MT_BUG_ON(mt, entry != xa_mk_value(LONG_MAX));
527 else
528 MT_BUG_ON(mt, xa_mk_value(val) != entry);
529
530 /* Workaround for 32bit */
531 if ((val << 2) < val)
532 val = ULONG_MAX;
533 else
534 val <<= 2;
535
536 /* For zero check. */
537 if (!val)
538 val = 1;
539 mas_pause(&mas);
540 mas_unlock(&mas);
541 mas_lock(&mas);
542 }
543 mas_unlock(&mas);
544
545 mas_set(&mas, 1048576);
546 mas_lock(&mas);
547 entry = mas_find(&mas, 1048576);
548 mas_unlock(&mas);
549 MT_BUG_ON(mas.tree, entry == NULL);
550
551 /*
552 * Find last value.
553 * 1. get the expected value, leveraging the existence of an end entry
554 * 2. delete end entry
 555 * 3. find the last value by searching for ULONG_MAX and then using
556 * prev
557 */
558 /* First, get the expected result. */
559 mas_lock(&mas);
560 mas_reset(&mas);
561 mas.index = ULONG_MAX; /* start at max.. */
562 entry = mas_find(&mas, ULONG_MAX);
563 entry = mas_prev(&mas, 0);
564 index = mas.index;
565 last = mas.last;
566
567 /* Erase the last entry. */
568 mas_reset(&mas);
569 mas.index = ULONG_MAX;
570 mas.last = ULONG_MAX;
571 mas_erase(&mas);
572
573 /* Get the previous value from MAS_START */
574 mas_reset(&mas);
575 entry2 = mas_prev(&mas, 0);
576
577 /* Check results. */
578 MT_BUG_ON(mt, entry != entry2);
579 MT_BUG_ON(mt, index != mas.index);
580 MT_BUG_ON(mt, last != mas.last);
581
582
583 mas.node = MAS_NONE;
584 mas.index = ULONG_MAX;
585 mas.last = ULONG_MAX;
586 entry2 = mas_prev(&mas, 0);
587 MT_BUG_ON(mt, entry != entry2);
588
589 mas_set(&mas, 0);
590 MT_BUG_ON(mt, mas_prev(&mas, 0) != NULL);
591
592 mas_unlock(&mas);
593 mtree_destroy(mt);
594}
595
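/*
 * Iterate with mas_for_each() while the tree grows to 256 entries and is
 * then erased back down, checking the values seen on every pass.
 */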
eaf9790d 596static noinline void __init check_find_2(struct maple_tree *mt)
597{
598 unsigned long i, j;
599 void *entry;
600
601 MA_STATE(mas, mt, 0, 0);
602 rcu_read_lock();
603 mas_for_each(&mas, entry, ULONG_MAX)
604 MT_BUG_ON(mt, true);
605 rcu_read_unlock();
606
607 for (i = 0; i < 256; i++) {
608 mtree_insert_index(mt, i, GFP_KERNEL);
609 j = 0;
610 mas_set(&mas, 0);
611 rcu_read_lock();
612 mas_for_each(&mas, entry, ULONG_MAX) {
613 MT_BUG_ON(mt, entry != xa_mk_value(j));
614 j++;
615 }
616 rcu_read_unlock();
617 MT_BUG_ON(mt, j != i + 1);
618 }
619
620 for (i = 0; i < 256; i++) {
621 mtree_erase_index(mt, i);
622 j = i + 1;
623 mas_set(&mas, 0);
624 rcu_read_lock();
625 mas_for_each(&mas, entry, ULONG_MAX) {
626 if (xa_is_zero(entry))
627 continue;
628
629 MT_BUG_ON(mt, entry != xa_mk_value(j));
630 j++;
631 }
632 rcu_read_unlock();
633 MT_BUG_ON(mt, j != 256);
634 }
635
636 /*MT_BUG_ON(mt, !mtree_empty(mt)); */
637}
638
e15e06a8 639
120b1162 640#if defined(CONFIG_64BIT)
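/*
 * Rebuild a captured process address-space layout (see the table below) and
 * verify mas_empty_area_rev() and mtree_alloc_rrange() find the expected holes.
 */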
eaf9790d 641static noinline void __init check_alloc_rev_range(struct maple_tree *mt)
e15e06a8 642{
e15e06a8 643 /*
644 * Generated by:
645 * cat /proc/self/maps | awk '{print $1}'|
646 * awk -F "-" '{printf "0x%s, 0x%s, ", $1, $2}'
e15e06a8 647 */
e15e06a8 648
eaf9790d 649 static const unsigned long range[] = {
650 /* Inclusive , Exclusive. */
651 0x565234af2000, 0x565234af4000,
652 0x565234af4000, 0x565234af9000,
653 0x565234af9000, 0x565234afb000,
654 0x565234afc000, 0x565234afd000,
655 0x565234afd000, 0x565234afe000,
656 0x565235def000, 0x565235e10000,
657 0x7f36d4bfd000, 0x7f36d4ee2000,
658 0x7f36d4ee2000, 0x7f36d4f04000,
659 0x7f36d4f04000, 0x7f36d504c000,
660 0x7f36d504c000, 0x7f36d5098000,
661 0x7f36d5098000, 0x7f36d5099000,
662 0x7f36d5099000, 0x7f36d509d000,
663 0x7f36d509d000, 0x7f36d509f000,
664 0x7f36d509f000, 0x7f36d50a5000,
665 0x7f36d50b9000, 0x7f36d50db000,
666 0x7f36d50db000, 0x7f36d50dc000,
667 0x7f36d50dc000, 0x7f36d50fa000,
668 0x7f36d50fa000, 0x7f36d5102000,
669 0x7f36d5102000, 0x7f36d5103000,
670 0x7f36d5103000, 0x7f36d5104000,
671 0x7f36d5104000, 0x7f36d5105000,
672 0x7fff5876b000, 0x7fff5878d000,
673 0x7fff5878e000, 0x7fff58791000,
674 0x7fff58791000, 0x7fff58793000,
675 };
e15e06a8 676
eaf9790d 677 static const unsigned long holes[] = {
678 /*
679 * Note: start of hole is INCLUSIVE
680 * end of hole is EXCLUSIVE
681 * (opposite of the above table.)
682 * Start of hole, end of hole, size of hole (+1)
683 */
684 0x565234afb000, 0x565234afc000, 0x1000,
685 0x565234afe000, 0x565235def000, 0x12F1000,
686 0x565235e10000, 0x7f36d4bfd000, 0x28E49EDED000,
687 };
e15e06a8 688
689 /*
 690 * req_range consists of 5 values.
691 * 1. min index
692 * 2. max index
693 * 3. size
694 * 4. number that should be returned.
695 * 5. return value
696 */
eaf9790d 697 static const unsigned long req_range[] = {
698 0x565234af9000, /* Min */
699 0x7fff58791000, /* Max */
700 0x1000, /* Size */
701 0x7fff5878d << 12, /* First rev hole of size 0x1000 */
702 0, /* Return value success. */
e15e06a8 703
120b1162 704 0x0, /* Min */
ba997212 705 0x565234AF0 << 12, /* Max */
706 0x3000, /* Size */
707 0x565234AEE << 12, /* max - 3. */
708 0, /* Return value success. */
e15e06a8 709
710 0x0, /* Min */
711 -1, /* Max */
712 0x1000, /* Size */
713 562949953421311 << 12,/* First rev hole of size 0x1000 */
714 0, /* Return value success. */
e15e06a8 715
120b1162 716 0x0, /* Min */
ba997212 717 0x7F36D5109 << 12, /* Max */
718 0x4000, /* Size */
719 0x7F36D5106 << 12, /* First rev hole of size 0x4000 */
720 0, /* Return value success. */
e15e06a8 721
722 /* Ascend test. */
723 0x0,
ba997212 724 34148798628 << 12,
725 19 << 12,
726 34148797418 << 12,
727 0x0,
e15e06a8 728
729 /* Too big test. */
730 0x0,
731 18446744073709551615UL,
732 562915594369134UL << 12,
733 0x0,
734 -EBUSY,
e15e06a8 735
736 /* Single space test. */
737 34148798725 << 12,
738 34148798725 << 12,
739 1 << 12,
740 34148798725 << 12,
741 0,
120b1162 742 };
e15e06a8 743
744 int i, range_count = ARRAY_SIZE(range);
745 int req_range_count = ARRAY_SIZE(req_range);
746 unsigned long min = 0;
e15e06a8 747
120b1162 748 MA_STATE(mas, mt, 0, 0);
e15e06a8 749
750 mtree_store_range(mt, MTREE_ALLOC_MAX, ULONG_MAX, XA_ZERO_ENTRY,
751 GFP_KERNEL);
752#define DEBUG_REV_RANGE 0
753 for (i = 0; i < range_count; i += 2) {
754 /* Inclusive, Inclusive (with the -1) */
e15e06a8 755
756#if DEBUG_REV_RANGE
757 pr_debug("\t%s: Insert %lu-%lu\n", __func__, range[i] >> 12,
758 (range[i + 1] >> 12) - 1);
759#endif
760 check_insert_range(mt, range[i] >> 12, (range[i + 1] >> 12) - 1,
761 xa_mk_value(range[i] >> 12), 0);
762 mt_validate(mt);
763 }
764
e15e06a8 765
766 mas_lock(&mas);
767 for (i = 0; i < ARRAY_SIZE(holes); i += 3) {
768#if DEBUG_REV_RANGE
769 pr_debug("Search from %lu-%lu for gap %lu should be at %lu\n",
770 min, holes[i+1]>>12, holes[i+2]>>12,
771 holes[i] >> 12);
772#endif
773 MT_BUG_ON(mt, mas_empty_area_rev(&mas, min,
774 holes[i+1] >> 12,
775 holes[i+2] >> 12));
776#if DEBUG_REV_RANGE
777 pr_debug("Found %lu %lu\n", mas.index, mas.last);
778 pr_debug("gap %lu %lu\n", (holes[i] >> 12),
779 (holes[i+1] >> 12));
780#endif
781 MT_BUG_ON(mt, mas.last + 1 != (holes[i+1] >> 12));
782 MT_BUG_ON(mt, mas.index != (holes[i+1] >> 12) - (holes[i+2] >> 12));
783 min = holes[i+1] >> 12;
784 mas_reset(&mas);
e15e06a8 785 }
e15e06a8 786
787 mas_unlock(&mas);
788 for (i = 0; i < req_range_count; i += 5) {
789#if DEBUG_REV_RANGE
790 pr_debug("\tReverse request %d between %lu-%lu size %lu, should get %lu\n",
791 i, req_range[i] >> 12,
792 (req_range[i + 1] >> 12),
793 req_range[i+2] >> 12,
794 req_range[i+3] >> 12);
795#endif
796 check_mtree_alloc_rrange(mt,
797 req_range[i] >> 12, /* start */
798 req_range[i+1] >> 12, /* end */
799 req_range[i+2] >> 12, /* size */
800 req_range[i+3] >> 12, /* expected address */
801 req_range[i+4], /* expected return */
802 xa_mk_value(req_range[i] >> 12)); /* pointer */
803 mt_validate(mt);
804 }
805
806 mt_set_non_kernel(1);
807 mtree_erase(mt, 34148798727); /* create a deleted range. */
ba997212 808 mtree_erase(mt, 34148798725);
809 check_mtree_alloc_rrange(mt, 0, 34359052173, 210253414,
810 34148798725, 0, mt);
e15e06a8 811
812 mtree_destroy(mt);
813}
e15e06a8 814
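/*
 * Forward counterpart of check_alloc_rev_range(): verify mas_empty_area()
 * and mtree_alloc_range() against the same address-space layout.
 */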
eaf9790d 815static noinline void __init check_alloc_range(struct maple_tree *mt)
e15e06a8 816{
817 /*
818 * Generated by:
819 * cat /proc/self/maps|awk '{print $1}'|
820 * awk -F "-" '{printf "0x%s, 0x%s, ", $1, $2}'
821 */
e15e06a8 822
eaf9790d 823 static const unsigned long range[] = {
824 /* Inclusive , Exclusive. */
825 0x565234af2000, 0x565234af4000,
826 0x565234af4000, 0x565234af9000,
827 0x565234af9000, 0x565234afb000,
828 0x565234afc000, 0x565234afd000,
829 0x565234afd000, 0x565234afe000,
830 0x565235def000, 0x565235e10000,
831 0x7f36d4bfd000, 0x7f36d4ee2000,
832 0x7f36d4ee2000, 0x7f36d4f04000,
833 0x7f36d4f04000, 0x7f36d504c000,
834 0x7f36d504c000, 0x7f36d5098000,
835 0x7f36d5098000, 0x7f36d5099000,
836 0x7f36d5099000, 0x7f36d509d000,
837 0x7f36d509d000, 0x7f36d509f000,
838 0x7f36d509f000, 0x7f36d50a5000,
839 0x7f36d50b9000, 0x7f36d50db000,
840 0x7f36d50db000, 0x7f36d50dc000,
841 0x7f36d50dc000, 0x7f36d50fa000,
842 0x7f36d50fa000, 0x7f36d5102000,
843 0x7f36d5102000, 0x7f36d5103000,
844 0x7f36d5103000, 0x7f36d5104000,
845 0x7f36d5104000, 0x7f36d5105000,
846 0x7fff5876b000, 0x7fff5878d000,
847 0x7fff5878e000, 0x7fff58791000,
848 0x7fff58791000, 0x7fff58793000,
849 };
eaf9790d 850 static const unsigned long holes[] = {
851 /* Start of hole, end of hole, size of hole (+1) */
852 0x565234afb000, 0x565234afc000, 0x1000,
853 0x565234afe000, 0x565235def000, 0x12F1000,
854 0x565235e10000, 0x7f36d4bfd000, 0x28E49EDED000,
855 };
e15e06a8 856
857 /*
 858 * req_range consists of 5 values.
859 * 1. min index
860 * 2. max index
861 * 3. size
862 * 4. number that should be returned.
863 * 5. return value
864 */
eaf9790d 865 static const unsigned long req_range[] = {
866 0x565234af9000, /* Min */
867 0x7fff58791000, /* Max */
868 0x1000, /* Size */
869 0x565234afb000, /* First hole in our data of size 1000. */
870 0, /* Return value success. */
e15e06a8 871
872 0x0, /* Min */
873 0x7fff58791000, /* Max */
874 0x1F00, /* Size */
875 0x0, /* First hole in our data of size 2000. */
876 0, /* Return value success. */
e15e06a8 877
878 /* Test ascend. */
879 34148797436 << 12, /* Min */
880 0x7fff587AF000, /* Max */
881 0x3000, /* Size */
882 34148798629 << 12, /* Expected location */
883 0, /* Return value success. */
e15e06a8 884
885 /* Test failing. */
886 34148798623 << 12, /* Min */
887 34148798683 << 12, /* Max */
888 0x15000, /* Size */
889 0, /* Expected location */
890 -EBUSY, /* Return value failed. */
e15e06a8 891
892 /* Test filling entire gap. */
893 34148798623 << 12, /* Min */
894 0x7fff587AF000, /* Max */
895 0x10000, /* Size */
896 34148798632 << 12, /* Expected location */
897 0, /* Return value success. */
e15e06a8 898
899 /* Test walking off the end of root. */
900 0, /* Min */
901 -1, /* Max */
902 -1, /* Size */
903 0, /* Expected location */
904 -EBUSY, /* Return value failure. */
e15e06a8 905
906 /* Test looking for too large a hole across entire range. */
907 0, /* Min */
908 -1, /* Max */
909 4503599618982063UL << 12, /* Size */
910 34359052178 << 12, /* Expected location */
911 -EBUSY, /* Return failure. */
912
913 /* Test a single entry */
914 34148798648 << 12, /* Min */
915 34148798648 << 12, /* Max */
916 4096, /* Size of 1 */
917 34148798648 << 12, /* Location is the same as min/max */
918 0, /* Success */
919 };
920 int i, range_count = ARRAY_SIZE(range);
921 int req_range_count = ARRAY_SIZE(req_range);
922 unsigned long min = 0x565234af2000;
923 MA_STATE(mas, mt, 0, 0);
924
925 mtree_store_range(mt, MTREE_ALLOC_MAX, ULONG_MAX, XA_ZERO_ENTRY,
926 GFP_KERNEL);
927 for (i = 0; i < range_count; i += 2) {
928#define DEBUG_ALLOC_RANGE 0
929#if DEBUG_ALLOC_RANGE
930 pr_debug("\tInsert %lu-%lu\n", range[i] >> 12,
931 (range[i + 1] >> 12) - 1);
89f499f3 932 mt_dump(mt, mt_dump_hex);
e15e06a8 933#endif
934 check_insert_range(mt, range[i] >> 12, (range[i + 1] >> 12) - 1,
935 xa_mk_value(range[i] >> 12), 0);
936 mt_validate(mt);
937 }
e15e06a8 938
e15e06a8 939
e15e06a8 940
941 mas_lock(&mas);
942 for (i = 0; i < ARRAY_SIZE(holes); i += 3) {
e15e06a8 943
944#if DEBUG_ALLOC_RANGE
945 pr_debug("\tGet empty %lu-%lu size %lu (%lx-%lx)\n", min >> 12,
946 holes[i+1] >> 12, holes[i+2] >> 12,
947 min, holes[i+1]);
948#endif
949 MT_BUG_ON(mt, mas_empty_area(&mas, min >> 12,
950 holes[i+1] >> 12,
951 holes[i+2] >> 12));
952 MT_BUG_ON(mt, mas.index != holes[i] >> 12);
953 min = holes[i+1];
e15e06a8 954 mas_reset(&mas);
955 }
956 mas_unlock(&mas);
957 for (i = 0; i < req_range_count; i += 5) {
958#if DEBUG_ALLOC_RANGE
959 pr_debug("\tTest %d: %lu-%lu size %lu expected %lu (%lu-%lu)\n",
960 i/5, req_range[i] >> 12, req_range[i + 1] >> 12,
961 req_range[i + 2] >> 12, req_range[i + 3] >> 12,
962 req_range[i], req_range[i+1]);
e15e06a8 963#endif
964 check_mtree_alloc_range(mt,
965 req_range[i] >> 12, /* start */
966 req_range[i+1] >> 12, /* end */
967 req_range[i+2] >> 12, /* size */
968 req_range[i+3] >> 12, /* expected address */
969 req_range[i+4], /* expected return */
970 xa_mk_value(req_range[i] >> 12)); /* pointer */
e15e06a8 971 mt_validate(mt);
120b1162 972#if DEBUG_ALLOC_RANGE
89f499f3 973 mt_dump(mt, mt_dump_hex);
e15e06a8 974#endif
e15e06a8 975 }
e15e06a8 976
977 mtree_destroy(mt);
978}
979#endif
e15e06a8 980
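/*
 * Range store/insert/overwrite tests: overlapping ranges, multi-level
 * overwrites, in-place appends, rebalancing and splits.
 */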
eaf9790d 981static noinline void __init check_ranges(struct maple_tree *mt)
e15e06a8 982{
120b1162 983 int i, val, val2;
eaf9790d 984 static const unsigned long r[] = {
985 10, 15,
986 20, 25,
987 17, 22, /* Overlaps previous range. */
988 9, 1000, /* Huge. */
989 100, 200,
990 45, 168,
991 118, 128,
992 };
e15e06a8 993
994 MT_BUG_ON(mt, !mtree_empty(mt));
995 check_insert_range(mt, r[0], r[1], xa_mk_value(r[0]), 0);
996 check_insert_range(mt, r[2], r[3], xa_mk_value(r[2]), 0);
997 check_insert_range(mt, r[4], r[5], xa_mk_value(r[4]), -EEXIST);
998 MT_BUG_ON(mt, !mt_height(mt));
999 /* Store */
1000 check_store_range(mt, r[4], r[5], xa_mk_value(r[4]), 0);
1001 check_store_range(mt, r[6], r[7], xa_mk_value(r[6]), 0);
1002 check_store_range(mt, r[8], r[9], xa_mk_value(r[8]), 0);
1003 MT_BUG_ON(mt, !mt_height(mt));
1004 mtree_destroy(mt);
1005 MT_BUG_ON(mt, mt_height(mt));
e15e06a8 1006
1007 check_seq(mt, 50, false);
1008 mt_set_non_kernel(4);
1009 check_store_range(mt, 5, 47, xa_mk_value(47), 0);
1010 MT_BUG_ON(mt, !mt_height(mt));
1011 mtree_destroy(mt);
e15e06a8 1012
1013 /* Create tree of 1-100 */
1014 check_seq(mt, 100, false);
1015 /* Store 45-168 */
1016 mt_set_non_kernel(10);
1017 check_store_range(mt, r[10], r[11], xa_mk_value(r[10]), 0);
1018 MT_BUG_ON(mt, !mt_height(mt));
1019 mtree_destroy(mt);
1020
1021 /* Create tree of 1-200 */
1022 check_seq(mt, 200, false);
1023 /* Store 45-168 */
1024 check_store_range(mt, r[10], r[11], xa_mk_value(r[10]), 0);
1025 MT_BUG_ON(mt, !mt_height(mt));
1026 mtree_destroy(mt);
1027
1028 check_seq(mt, 30, false);
1029 check_store_range(mt, 6, 18, xa_mk_value(6), 0);
1030 MT_BUG_ON(mt, !mt_height(mt));
1031 mtree_destroy(mt);
1032
1033 /* Overwrite across multiple levels. */
1034 /* Create tree of 1-400 */
1035 check_seq(mt, 400, false);
1036 mt_set_non_kernel(50);
1037 /* Store 118-128 */
1038 check_store_range(mt, r[12], r[13], xa_mk_value(r[12]), 0);
1039 mt_set_non_kernel(50);
1040 mtree_test_erase(mt, 140);
1041 mtree_test_erase(mt, 141);
1042 mtree_test_erase(mt, 142);
1043 mtree_test_erase(mt, 143);
1044 mtree_test_erase(mt, 130);
1045 mtree_test_erase(mt, 131);
1046 mtree_test_erase(mt, 132);
1047 mtree_test_erase(mt, 133);
1048 mtree_test_erase(mt, 134);
1049 mtree_test_erase(mt, 135);
1050 check_load(mt, r[12], xa_mk_value(r[12]));
1051 check_load(mt, r[13], xa_mk_value(r[12]));
1052 check_load(mt, r[13] - 1, xa_mk_value(r[12]));
1053 check_load(mt, r[13] + 1, xa_mk_value(r[13] + 1));
1054 check_load(mt, 135, NULL);
1055 check_load(mt, 140, NULL);
e15e06a8 1056 mt_set_non_kernel(0);
120b1162 1057 MT_BUG_ON(mt, !mt_height(mt));
1058 mtree_destroy(mt);
1059
e15e06a8 1060
e15e06a8 1061
1062 /* Overwrite multiple levels at the end of the tree (slot 7) */
1063 mt_set_non_kernel(50);
1064 check_seq(mt, 400, false);
1065 check_store_range(mt, 353, 361, xa_mk_value(353), 0);
1066 check_store_range(mt, 347, 352, xa_mk_value(347), 0);
e15e06a8 1067
1068 check_load(mt, 346, xa_mk_value(346));
1069 for (i = 347; i <= 352; i++)
1070 check_load(mt, i, xa_mk_value(347));
1071 for (i = 353; i <= 361; i++)
1072 check_load(mt, i, xa_mk_value(353));
1073 check_load(mt, 362, xa_mk_value(362));
1074 mt_set_non_kernel(0);
1075 MT_BUG_ON(mt, !mt_height(mt));
1076 mtree_destroy(mt);
1077
1078 mt_set_non_kernel(50);
1079 check_seq(mt, 400, false);
1080 check_store_range(mt, 352, 364, NULL, 0);
1081 check_store_range(mt, 351, 363, xa_mk_value(352), 0);
1082 check_load(mt, 350, xa_mk_value(350));
1083 check_load(mt, 351, xa_mk_value(352));
1084 for (i = 352; i <= 363; i++)
1085 check_load(mt, i, xa_mk_value(352));
1086 check_load(mt, 364, NULL);
1087 check_load(mt, 365, xa_mk_value(365));
1088 mt_set_non_kernel(0);
1089 MT_BUG_ON(mt, !mt_height(mt));
1090 mtree_destroy(mt);
1091
1092 mt_set_non_kernel(5);
1093 check_seq(mt, 400, false);
1094 check_store_range(mt, 352, 364, NULL, 0);
1095 check_store_range(mt, 351, 364, xa_mk_value(352), 0);
1096 check_load(mt, 350, xa_mk_value(350));
1097 check_load(mt, 351, xa_mk_value(352));
1098 for (i = 352; i <= 364; i++)
1099 check_load(mt, i, xa_mk_value(352));
1100 check_load(mt, 365, xa_mk_value(365));
1101 mt_set_non_kernel(0);
1102 MT_BUG_ON(mt, !mt_height(mt));
1103 mtree_destroy(mt);
1104
e15e06a8 1105
1106 mt_set_non_kernel(50);
1107 check_seq(mt, 400, false);
1108 check_store_range(mt, 362, 367, xa_mk_value(362), 0);
1109 check_store_range(mt, 353, 361, xa_mk_value(353), 0);
1110 mt_set_non_kernel(0);
1111 mt_validate(mt);
1112 MT_BUG_ON(mt, !mt_height(mt));
e15e06a8 1113 mtree_destroy(mt);
1114 /*
1115 * Interesting cases:
1116 * 1. Overwrite the end of a node and end in the first entry of the next
1117 * node.
1118 * 2. Split a single range
1119 * 3. Overwrite the start of a range
1120 * 4. Overwrite the end of a range
1121 * 5. Overwrite the entire range
1122 * 6. Overwrite a range that causes multiple parent nodes to be
1123 * combined
1124 * 7. Overwrite a range that causes multiple parent nodes and part of
1125 * root to be combined
1126 * 8. Overwrite the whole tree
1127 * 9. Try to overwrite the zero entry of an alloc tree.
1128 * 10. Write a range larger than a nodes current pivot
1129 */
e15e06a8 1130
1131 mt_set_non_kernel(50);
1132 for (i = 0; i <= 500; i++) {
1133 val = i*5;
1134 val2 = (i+1)*5;
1135 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1136 }
1137 check_store_range(mt, 2400, 2400, xa_mk_value(2400), 0);
1138 check_store_range(mt, 2411, 2411, xa_mk_value(2411), 0);
1139 check_store_range(mt, 2412, 2412, xa_mk_value(2412), 0);
1140 check_store_range(mt, 2396, 2400, xa_mk_value(4052020), 0);
1141 check_store_range(mt, 2402, 2402, xa_mk_value(2402), 0);
e15e06a8 1142 mtree_destroy(mt);
120b1162 1143 mt_set_non_kernel(0);
e15e06a8 1144
1145 mt_set_non_kernel(50);
1146 for (i = 0; i <= 500; i++) {
1147 val = i*5;
1148 val2 = (i+1)*5;
1149 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1150 }
1151 check_store_range(mt, 2422, 2422, xa_mk_value(2422), 0);
1152 check_store_range(mt, 2424, 2424, xa_mk_value(2424), 0);
1153 check_store_range(mt, 2425, 2425, xa_mk_value(2), 0);
1154 check_store_range(mt, 2460, 2470, NULL, 0);
1155 check_store_range(mt, 2435, 2460, xa_mk_value(2435), 0);
1156 check_store_range(mt, 2461, 2470, xa_mk_value(2461), 0);
e15e06a8 1157 mt_set_non_kernel(0);
120b1162 1158 MT_BUG_ON(mt, !mt_height(mt));
1159 mtree_destroy(mt);
1160
1161 /* Check in-place modifications */
1162 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1163 /* Append to the start of last range */
1164 mt_set_non_kernel(50);
1165 for (i = 0; i <= 500; i++) {
1166 val = i * 5 + 1;
1167 val2 = val + 4;
1168 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1169 }
1170
1171 /* Append to the last range without touching any boundaries */
1172 for (i = 0; i < 10; i++) {
1173 val = val2 + 5;
1174 val2 = val + 4;
1175 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1176 }
1177
1178 /* Append to the end of last range */
1179 val = val2;
1180 for (i = 0; i < 10; i++) {
1181 val += 5;
1182 MT_BUG_ON(mt, mtree_test_store_range(mt, val, ULONG_MAX,
1183 xa_mk_value(val)) != 0);
1184 }
1185
1186 /* Overwriting the range and over a part of the next range */
1187 for (i = 10; i < 30; i += 2) {
1188 val = i * 5 + 1;
1189 val2 = val + 5;
1190 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1191 }
1192
1193 /* Overwriting a part of the range and over the next range */
1194 for (i = 50; i < 70; i += 2) {
1195 val2 = i * 5;
1196 val = val2 - 5;
1197 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1198 }
1199
1200 /*
1201 * Expand the range, only partially overwriting the previous and
1202 * next ranges
1203 */
1204 for (i = 100; i < 130; i += 3) {
1205 val = i * 5 - 5;
1206 val2 = i * 5 + 1;
1207 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1208 }
1209
1210 /*
1211 * Expand the range, only partially overwriting the previous and
1212 * next ranges, in RCU mode
1213 */
1214 mt_set_in_rcu(mt);
1215 for (i = 150; i < 180; i += 3) {
1216 val = i * 5 - 5;
1217 val2 = i * 5 + 1;
1218 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1219 }
1220
1221 MT_BUG_ON(mt, !mt_height(mt));
1222 mt_validate(mt);
1223 mt_set_non_kernel(0);
1224 mtree_destroy(mt);
1225
120b1162 1226 /* Test rebalance gaps */
e15e06a8 1227 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1228 mt_set_non_kernel(50);
1229 for (i = 0; i <= 50; i++) {
1230 val = i*10;
1231 val2 = (i+1)*10;
1232 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1233 }
1234 check_store_range(mt, 161, 161, xa_mk_value(161), 0);
1235 check_store_range(mt, 162, 162, xa_mk_value(162), 0);
1236 check_store_range(mt, 163, 163, xa_mk_value(163), 0);
1237 check_store_range(mt, 240, 249, NULL, 0);
1238 mtree_erase(mt, 200);
1239 mtree_erase(mt, 210);
1240 mtree_erase(mt, 220);
1241 mtree_erase(mt, 230);
e15e06a8 1242 mt_set_non_kernel(0);
120b1162 1243 MT_BUG_ON(mt, !mt_height(mt));
1244 mtree_destroy(mt);
1245
e15e06a8 1246 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1247 for (i = 0; i <= 500; i++) {
1248 val = i*10;
1249 val2 = (i+1)*10;
1250 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1251 }
1252 check_store_range(mt, 4600, 4959, xa_mk_value(1), 0);
1253 mt_validate(mt);
1254 MT_BUG_ON(mt, !mt_height(mt));
1255 mtree_destroy(mt);
1256
1257 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1258 for (i = 0; i <= 500; i++) {
1259 val = i*10;
1260 val2 = (i+1)*10;
1261 check_store_range(mt, val, val2, xa_mk_value(val), 0);
e15e06a8 1262 }
1263 check_store_range(mt, 4811, 4811, xa_mk_value(4811), 0);
1264 check_store_range(mt, 4812, 4812, xa_mk_value(4812), 0);
1265 check_store_range(mt, 4861, 4861, xa_mk_value(4861), 0);
1266 check_store_range(mt, 4862, 4862, xa_mk_value(4862), 0);
1267 check_store_range(mt, 4842, 4849, NULL, 0);
1268 mt_validate(mt);
1269 MT_BUG_ON(mt, !mt_height(mt));
1270 mtree_destroy(mt);
1271
1272 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1273 for (i = 0; i <= 1300; i++) {
1274 val = i*10;
1275 val2 = (i+1)*10;
1276 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1277 MT_BUG_ON(mt, mt_height(mt) >= 4);
1278 }
1279 /* Cause a 3 child split all the way up the tree. */
1280 for (i = 5; i < 215; i += 10)
1281 check_store_range(mt, 11450 + i, 11450 + i + 1, NULL, 0);
1282 for (i = 5; i < 65; i += 10)
1283 check_store_range(mt, 11770 + i, 11770 + i + 1, NULL, 0);
e15e06a8 1284
1285 MT_BUG_ON(mt, mt_height(mt) >= 4);
1286 for (i = 5; i < 45; i += 10)
1287 check_store_range(mt, 11700 + i, 11700 + i + 1, NULL, 0);
1288 if (!MAPLE_32BIT)
1289 MT_BUG_ON(mt, mt_height(mt) < 4);
1290 mtree_destroy(mt);
1291
120b1162 1292
e15e06a8 1293 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1294 for (i = 0; i <= 1200; i++) {
1295 val = i*10;
1296 val2 = (i+1)*10;
1297 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1298 MT_BUG_ON(mt, mt_height(mt) >= 4);
e15e06a8 1299 }
1300 /* Fill parents and leaves before split. */
1301 for (i = 5; i < 455; i += 10)
1302 check_store_range(mt, 7800 + i, 7800 + i + 1, NULL, 0);
e15e06a8 1303
1304 for (i = 1; i < 16; i++)
1305 check_store_range(mt, 8185 + i, 8185 + i + 1,
1306 xa_mk_value(8185+i), 0);
1307 MT_BUG_ON(mt, mt_height(mt) >= 4);
1308 /* triple split across multiple levels. */
1309 check_store_range(mt, 8184, 8184, xa_mk_value(8184), 0);
1310 if (!MAPLE_32BIT)
1311 MT_BUG_ON(mt, mt_height(mt) != 4);
1312}
1313
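/* Verify mas_next() returns each sequential entry up to the limit and NULL past it. */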
eaf9790d 1314static noinline void __init check_next_entry(struct maple_tree *mt)
e15e06a8 1315{
1316 void *entry = NULL;
1317 unsigned long limit = 30, i = 0;
1318 MA_STATE(mas, mt, i, i);
e15e06a8 1319
120b1162 1320 MT_BUG_ON(mt, !mtree_empty(mt));
e15e06a8 1321
1322 check_seq(mt, limit, false);
1323 rcu_read_lock();
1324
1325 /* Check the first one and get ma_state in the correct state. */
1326 MT_BUG_ON(mt, mas_walk(&mas) != xa_mk_value(i++));
1327 for ( ; i <= limit + 1; i++) {
1328 entry = mas_next(&mas, limit);
1329 if (i > limit)
1330 MT_BUG_ON(mt, entry != NULL);
1331 else
1332 MT_BUG_ON(mt, xa_mk_value(i) != entry);
e15e06a8 1333 }
1334 rcu_read_unlock();
1335 mtree_destroy(mt);
e15e06a8 1336}
e15e06a8 1337
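/* Verify mas_prev() returns the preceding entry and respects its minimum limit. */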
eaf9790d 1338static noinline void __init check_prev_entry(struct maple_tree *mt)
e15e06a8 1339{
1340 unsigned long index = 16;
1341 void *value;
1342 int i;
e15e06a8 1343
120b1162 1344 MA_STATE(mas, mt, index, index);
e15e06a8 1345
1346 MT_BUG_ON(mt, !mtree_empty(mt));
1347 check_seq(mt, 30, false);
e15e06a8 1348
1349 rcu_read_lock();
1350 value = mas_find(&mas, ULONG_MAX);
1351 MT_BUG_ON(mt, value != xa_mk_value(index));
1352 value = mas_prev(&mas, 0);
1353 MT_BUG_ON(mt, value != xa_mk_value(index - 1));
1354 rcu_read_unlock();
1355 mtree_destroy(mt);
e15e06a8 1356
1357 /* Check limits on prev */
1358 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1359 mas_lock(&mas);
1360 for (i = 0; i <= index; i++) {
1361 mas_set_range(&mas, i*10, i*10+5);
1362 mas_store_gfp(&mas, xa_mk_value(i), GFP_KERNEL);
1363 }
e15e06a8 1364
1365 mas_set(&mas, 20);
1366 value = mas_walk(&mas);
1367 MT_BUG_ON(mt, value != xa_mk_value(2));
e15e06a8 1368
1369 value = mas_prev(&mas, 19);
1370 MT_BUG_ON(mt, value != NULL);
e15e06a8 1371
1372 mas_set(&mas, 80);
1373 value = mas_walk(&mas);
1374 MT_BUG_ON(mt, value != xa_mk_value(8));
e15e06a8 1375
1376 value = mas_prev(&mas, 76);
1377 MT_BUG_ON(mt, value != NULL);
e15e06a8 1378
120b1162 1379 mas_unlock(&mas);
e15e06a8 1380}
e15e06a8 1381
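/*
 * Walks, stores and prev/next on a root-only tree, including pointers with
 * low bits set, to exercise root expansion.
 */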
eaf9790d 1382static noinline void __init check_root_expand(struct maple_tree *mt)
e15e06a8 1383{
1384 MA_STATE(mas, mt, 0, 0);
1385 void *ptr;
e15e06a8 1386
e15e06a8 1387
1388 mas_lock(&mas);
1389 mas_set(&mas, 3);
1390 ptr = mas_walk(&mas);
eb2e817f 1391 MT_BUG_ON(mt, mas.index != 0);
1392 MT_BUG_ON(mt, ptr != NULL);
1393 MT_BUG_ON(mt, mas.index != 0);
1394 MT_BUG_ON(mt, mas.last != ULONG_MAX);
e15e06a8 1395
1396 ptr = &check_prev_entry;
1397 mas_set(&mas, 1);
1398 mas_store_gfp(&mas, ptr, GFP_KERNEL);
e15e06a8 1399
1400 mas_set(&mas, 0);
1401 ptr = mas_walk(&mas);
1402 MT_BUG_ON(mt, ptr != NULL);
e15e06a8 1403
1404 mas_set(&mas, 1);
1405 ptr = mas_walk(&mas);
1406 MT_BUG_ON(mt, ptr != &check_prev_entry);
e15e06a8 1407
1408 mas_set(&mas, 2);
1409 ptr = mas_walk(&mas);
1410 MT_BUG_ON(mt, ptr != NULL);
1411 mas_unlock(&mas);
1412 mtree_destroy(mt);
e15e06a8 1413
e15e06a8 1414
1415 mt_init_flags(mt, 0);
1416 mas_lock(&mas);
e15e06a8 1417
1418 mas_set(&mas, 0);
1419 ptr = &check_prev_entry;
1420 mas_store_gfp(&mas, ptr, GFP_KERNEL);
e15e06a8 1421
1422 mas_set(&mas, 5);
1423 ptr = mas_walk(&mas);
1424 MT_BUG_ON(mt, ptr != NULL);
1425 MT_BUG_ON(mt, mas.index != 1);
1426 MT_BUG_ON(mt, mas.last != ULONG_MAX);
e15e06a8 1427
1428 mas_set_range(&mas, 0, 100);
1429 ptr = mas_walk(&mas);
1430 MT_BUG_ON(mt, ptr != &check_prev_entry);
1431 MT_BUG_ON(mt, mas.last != 0);
1432 mas_unlock(&mas);
1433 mtree_destroy(mt);
e15e06a8 1434
1435 mt_init_flags(mt, 0);
1436 mas_lock(&mas);
1437
1438 mas_set(&mas, 0);
1439 ptr = (void *)((unsigned long) check_prev_entry | 1UL);
1440 mas_store_gfp(&mas, ptr, GFP_KERNEL);
1441 ptr = mas_next(&mas, ULONG_MAX);
1442 MT_BUG_ON(mt, ptr != NULL);
1443 MT_BUG_ON(mt, (mas.index != 1) && (mas.last != ULONG_MAX));
1444
1445 mas_set(&mas, 1);
1446 ptr = mas_prev(&mas, 0);
1447 MT_BUG_ON(mt, (mas.index != 0) && (mas.last != 0));
1448 MT_BUG_ON(mt, ptr != (void *)((unsigned long) check_prev_entry | 1UL));
e15e06a8 1449
120b1162 1450 mas_unlock(&mas);
e15e06a8 1451
120b1162 1452 mtree_destroy(mt);
e15e06a8 1453
1454 mt_init_flags(mt, 0);
1455 mas_lock(&mas);
1456 mas_set(&mas, 0);
1457 ptr = (void *)((unsigned long) check_prev_entry | 2UL);
1458 mas_store_gfp(&mas, ptr, GFP_KERNEL);
1459 ptr = mas_next(&mas, ULONG_MAX);
1460 MT_BUG_ON(mt, ptr != NULL);
eb2e817f 1461 MT_BUG_ON(mt, (mas.index != ULONG_MAX) && (mas.last != ULONG_MAX));
e15e06a8 1462
1463 mas_set(&mas, 1);
1464 ptr = mas_prev(&mas, 0);
1465 MT_BUG_ON(mt, (mas.index != 0) && (mas.last != 0));
1466 MT_BUG_ON(mt, ptr != (void *)((unsigned long) check_prev_entry | 2UL));
e15e06a8 1467
e15e06a8 1468
120b1162 1469 mas_unlock(&mas);
e15e06a8 1470}
e15e06a8 1471
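/*
 * Erase neighbouring entries and verify the combined gaps are tracked and
 * found by mas_empty_area_rev().
 */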
eaf9790d 1472static noinline void __init check_gap_combining(struct maple_tree *mt)
e15e06a8 1473{
1474 struct maple_enode *mn1, *mn2;
1475 void *entry;
1476 unsigned long singletons = 100;
1477 static const unsigned long *seq100;
1478 static const unsigned long seq100_64[] = {
1479 /* 0-5 */
1480 74, 75, 76,
1481 50, 100, 2,
e15e06a8 1482
1483 /* 6-12 */
1484 44, 45, 46, 43,
1485 20, 50, 3,
e15e06a8 1486
1487 /* 13-20*/
1488 80, 81, 82,
1489 76, 2, 79, 85, 4,
1490 };
e15e06a8 1491
eaf9790d 1492 static const unsigned long seq100_32[] = {
1493 /* 0-5 */
1494 61, 62, 63,
1495 50, 100, 2,
e15e06a8 1496
1497 /* 6-12 */
1498 31, 32, 33, 30,
1499 20, 50, 3,
e15e06a8 1500
1501 /* 13-20*/
1502 80, 81, 82,
1503 76, 2, 79, 85, 4,
1504 };
e15e06a8 1505
eaf9790d 1506 static const unsigned long seq2000[] = {
1507 1152, 1151,
1508 1100, 1200, 2,
1509 };
eaf9790d 1510 static const unsigned long seq400[] = {
1511 286, 318,
1512 256, 260, 266, 270, 275, 280, 290, 398,
1513 286, 310,
1514 };
e15e06a8 1515
120b1162 1516 unsigned long index;
e15e06a8 1517
120b1162 1518 MA_STATE(mas, mt, 0, 0);
e15e06a8 1519
1520 if (MAPLE_32BIT)
1521 seq100 = seq100_32;
1522 else
1523 seq100 = seq100_64;
e15e06a8 1524
1525 index = seq100[0];
1526 mas_set(&mas, index);
1527 MT_BUG_ON(mt, !mtree_empty(mt));
1528 check_seq(mt, singletons, false); /* create 100 singletons. */
e15e06a8 1529
1530 mt_set_non_kernel(1);
1531 mtree_test_erase(mt, seq100[2]);
1532 check_load(mt, seq100[2], NULL);
1533 mtree_test_erase(mt, seq100[1]);
1534 check_load(mt, seq100[1], NULL);
e15e06a8 1535
1536 rcu_read_lock();
1537 entry = mas_find(&mas, ULONG_MAX);
1538 MT_BUG_ON(mt, entry != xa_mk_value(index));
1539 mn1 = mas.node;
1540 mas_next(&mas, ULONG_MAX);
1541 entry = mas_next(&mas, ULONG_MAX);
1542 MT_BUG_ON(mt, entry != xa_mk_value(index + 4));
1543 mn2 = mas.node;
1544 MT_BUG_ON(mt, mn1 == mn2); /* test the test. */
e15e06a8 1545
1546 /*
1547 * At this point, there is a gap of 2 at index + 1 between seq100[3] and
1548 * seq100[4]. Search for the gap.
1549 */
1550 mt_set_non_kernel(1);
e15e06a8 1551 mas_reset(&mas);
1552 MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[3], seq100[4],
1553 seq100[5]));
1554 MT_BUG_ON(mt, mas.index != index + 1);
1555 rcu_read_unlock();
e15e06a8 1556
1557 mtree_test_erase(mt, seq100[6]);
1558 check_load(mt, seq100[6], NULL);
1559 mtree_test_erase(mt, seq100[7]);
1560 check_load(mt, seq100[7], NULL);
1561 mtree_test_erase(mt, seq100[8]);
1562 index = seq100[9];
e15e06a8 1563
1564 rcu_read_lock();
1565 mas.index = index;
1566 mas.last = index;
e15e06a8 1567 mas_reset(&mas);
1568 entry = mas_find(&mas, ULONG_MAX);
1569 MT_BUG_ON(mt, entry != xa_mk_value(index));
1570 mn1 = mas.node;
1571 entry = mas_next(&mas, ULONG_MAX);
1572 MT_BUG_ON(mt, entry != xa_mk_value(index + 4));
1573 mas_next(&mas, ULONG_MAX); /* go to the next entry. */
1574 mn2 = mas.node;
1575 MT_BUG_ON(mt, mn1 == mn2); /* test the next entry is in the next node. */
e15e06a8 1576
1577 /*
1578 * At this point, there is a gap of 3 at seq100[6]. Find it by
1579 * searching 20 - 50 for size 3.
1580 */
e15e06a8 1581 mas_reset(&mas);
1582 MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[10], seq100[11],
1583 seq100[12]));
1584 MT_BUG_ON(mt, mas.index != seq100[6]);
1585 rcu_read_unlock();
e15e06a8 1586
1587 mt_set_non_kernel(1);
1588 mtree_store(mt, seq100[13], NULL, GFP_KERNEL);
1589 check_load(mt, seq100[13], NULL);
1590 check_load(mt, seq100[14], xa_mk_value(seq100[14]));
1591 mtree_store(mt, seq100[14], NULL, GFP_KERNEL);
1592 check_load(mt, seq100[13], NULL);
1593 check_load(mt, seq100[14], NULL);
e15e06a8 1594
1595 mas_reset(&mas);
1596 rcu_read_lock();
1597 MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[16], seq100[15],
1598 seq100[17]));
1599 MT_BUG_ON(mt, mas.index != seq100[13]);
1600 mt_validate(mt);
1601 rcu_read_unlock();
e15e06a8 1602
1603 /*
1604 * *DEPRECATED: no retries anymore* Test retry entry in the start of a
1605 * gap.
1606 */
1607 mt_set_non_kernel(2);
1608 mtree_test_store_range(mt, seq100[18], seq100[14], NULL);
1609 mtree_test_erase(mt, seq100[15]);
e15e06a8 1610 mas_reset(&mas);
1611 rcu_read_lock();
1612 MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[16], seq100[19],
1613 seq100[20]));
1614 rcu_read_unlock();
1615 MT_BUG_ON(mt, mas.index != seq100[18]);
1616 mt_validate(mt);
1617 mtree_destroy(mt);
e15e06a8 1618
1619 /* seq 2000 tests are for multi-level tree gaps */
1620 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1621 check_seq(mt, 2000, false);
1622 mt_set_non_kernel(1);
1623 mtree_test_erase(mt, seq2000[0]);
1624 mtree_test_erase(mt, seq2000[1]);
e15e06a8 1625
1626 mt_set_non_kernel(2);
1627 mas_reset(&mas);
1628 rcu_read_lock();
1629 MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq2000[2], seq2000[3],
1630 seq2000[4]));
1631 MT_BUG_ON(mt, mas.index != seq2000[1]);
1632 rcu_read_unlock();
1633 mt_validate(mt);
1634 mtree_destroy(mt);
1635
1636 /* seq 400 tests rebalancing over two levels. */
1637 mt_set_non_kernel(99);
1638 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1639 check_seq(mt, 400, false);
1640 mtree_test_store_range(mt, seq400[0], seq400[1], NULL);
1641 mt_set_non_kernel(0);
1642 mtree_destroy(mt);
e15e06a8 1643
1644 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1645 check_seq(mt, 400, false);
1646 mt_set_non_kernel(50);
1647 mtree_test_store_range(mt, seq400[2], seq400[9],
1648 xa_mk_value(seq400[2]));
1649 mtree_test_store_range(mt, seq400[3], seq400[9],
1650 xa_mk_value(seq400[3]));
1651 mtree_test_store_range(mt, seq400[4], seq400[9],
1652 xa_mk_value(seq400[4]));
1653 mtree_test_store_range(mt, seq400[5], seq400[9],
1654 xa_mk_value(seq400[5]));
1655 mtree_test_store_range(mt, seq400[0], seq400[9],
1656 xa_mk_value(seq400[0]));
1657 mtree_test_store_range(mt, seq400[6], seq400[9],
1658 xa_mk_value(seq400[6]));
1659 mtree_test_store_range(mt, seq400[7], seq400[9],
1660 xa_mk_value(seq400[7]));
1661 mtree_test_store_range(mt, seq400[8], seq400[9],
1662 xa_mk_value(seq400[8]));
1663 mtree_test_store_range(mt, seq400[10], seq400[11],
1664 xa_mk_value(seq400[10]));
1665 mt_validate(mt);
1666 mt_set_non_kernel(0);
1667 mtree_destroy(mt);
1668}
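/* Store 4000 ranges, overwrite a large span with NULL and validate the tree. */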
eaf9790d 1669static noinline void __init check_node_overwrite(struct maple_tree *mt)
e15e06a8 1670{
120b1162 1671 int i, max = 4000;
e15e06a8 1672
1673 for (i = 0; i < max; i++)
1674 mtree_test_store_range(mt, i*100, i*100 + 50, xa_mk_value(i*100));
e15e06a8 1675
120b1162 1676 mtree_test_store_range(mt, 319951, 367950, NULL);
89f499f3 1677 /*mt_dump(mt, mt_dump_dec); */
120b1162 1678 mt_validate(mt);
1679}
1680
120b1162 1681#if defined(BENCH_SLOT_STORE)
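/* Repeatedly overwrite the same small range to time the slot-store path. */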
eaf9790d 1682static noinline void __init bench_slot_store(struct maple_tree *mt)
e15e06a8 1683{
120b1162 1684 int i, brk = 105, max = 1040, brk_start = 100, count = 20000000;
e15e06a8 1685
1686 for (i = 0; i < max; i += 10)
1687 mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
e15e06a8 1688
1689 for (i = 0; i < count; i++) {
1690 mtree_store_range(mt, brk, brk, NULL, GFP_KERNEL);
1691 mtree_store_range(mt, brk_start, brk, xa_mk_value(brk),
1692 GFP_KERNEL);
e15e06a8 1693 }
e15e06a8 1694}
120b1162 1695#endif
e15e06a8 1696
120b1162 1697#if defined(BENCH_NODE_STORE)
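/* Repeatedly rewrite a 16-index range that slides within a node to time node stores. */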
eaf9790d 1698static noinline void __init bench_node_store(struct maple_tree *mt)
e15e06a8 1699{
120b1162 1700 int i, overwrite = 76, max = 240, count = 20000000;
e15e06a8 1701
1702 for (i = 0; i < max; i += 10)
1703 mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
e15e06a8 1704
1705 for (i = 0; i < count; i++) {
1706 mtree_store_range(mt, overwrite, overwrite + 15,
1707 xa_mk_value(overwrite), GFP_KERNEL);
e15e06a8 1708
1709 overwrite += 5;
1710 if (overwrite >= 135)
1711 overwrite = 76;
1712 }
1713}
120b1162 1714#endif
e15e06a8 1715
120b1162 1716#if defined(BENCH_AWALK)
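/* Time mas_empty_area_rev() searching a populated tree for a 10-slot gap. */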
eaf9790d 1717static noinline void __init bench_awalk(struct maple_tree *mt)
e15e06a8 1718{
1719 int i, max = 2500, count = 50000000;
1720 MA_STATE(mas, mt, 1470, 1470);
e15e06a8 1721
1722 for (i = 0; i < max; i += 10)
1723 mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
e15e06a8 1724
120b1162 1725 mtree_store_range(mt, 1470, 1475, NULL, GFP_KERNEL);
e15e06a8 1726
1727 for (i = 0; i < count; i++) {
1728 mas_empty_area_rev(&mas, 0, 2000, 10);
1729 mas_reset(&mas);
e15e06a8 1730 }
1731}
1732#endif
1733#if defined(BENCH_WALK)
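/* Time mas_walk() to a fixed index, resetting the maple state each iteration. */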
eaf9790d 1734static noinline void __init bench_walk(struct maple_tree *mt)
1735{
1736 int i, max = 2500, count = 550000000;
1737 MA_STATE(mas, mt, 1470, 1470);
e15e06a8 1738
1739 for (i = 0; i < max; i += 10)
1740 mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
e15e06a8 1741
1742 for (i = 0; i < count; i++) {
1743 mas_walk(&mas);
1744 mas_reset(&mas);
1745 }
1746
e15e06a8 1747}
120b1162 1748#endif
e15e06a8 1749
120b1162 1750#if defined(BENCH_MT_FOR_EACH)
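/* Time mt_for_each() over roughly 500 entries, one million full passes. */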
eaf9790d 1751static noinline void __init bench_mt_for_each(struct maple_tree *mt)
e15e06a8 1752{
1753 int i, count = 1000000;
1754 unsigned long max = 2500, index = 0;
1755 void *entry;
e15e06a8 1756
1757 for (i = 0; i < max; i += 5)
1758 mtree_store_range(mt, i, i + 4, xa_mk_value(i), GFP_KERNEL);
1759
1760 for (i = 0; i < count; i++) {
1761 unsigned long j = 0;
e15e06a8 1762
1763 mt_for_each(mt, entry, index, max) {
1764 MT_BUG_ON(mt, entry != xa_mk_value(j));
1765 j += 5;
e15e06a8 1766 }
1767
1768 index = 0;
1769 }
1770
e15e06a8 1771}
120b1162 1772#endif
e15e06a8 1773
1774#if defined(BENCH_MAS_FOR_EACH)
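/* Added by this commit: time mas_for_each() over the same ~500 entries, one million full passes. */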
1775static noinline void __init bench_mas_for_each(struct maple_tree *mt)
1776{
1777 int i, count = 1000000;
1778 unsigned long max = 2500;
1779 void *entry;
1780 MA_STATE(mas, mt, 0, 0);
1781
1782 for (i = 0; i < max; i += 5) {
1783 int gap = 4;
1784
1785 if (i % 30 == 0)
1786 gap = 3;
1787 mtree_store_range(mt, i, i + gap, xa_mk_value(i), GFP_KERNEL);
1788 }
1789
1790 rcu_read_lock();
1791 for (i = 0; i < count; i++) {
1792 unsigned long j = 0;
1793
1794 mas_for_each(&mas, entry, max) {
1795 MT_BUG_ON(mt, entry != xa_mk_value(j));
1796 j += 5;
1797 }
1798 mas_set(&mas, 0);
1799 }
1800 rcu_read_unlock();
1801
1802}
1803#endif
1804
120b1162 1805/* check_forking - simulate the kernel forking sequence with the tree. */
eaf9790d 1806static noinline void __init check_forking(struct maple_tree *mt)
e15e06a8 1807{
e15e06a8 1808
1809 struct maple_tree newmt;
1810 int i, nr_entries = 134;
1811 void *val;
1812 MA_STATE(mas, mt, 0, 0);
1813 MA_STATE(newmas, mt, 0, 0);
1814
1815 for (i = 0; i <= nr_entries; i++)
1816 mtree_store_range(mt, i*10, i*10 + 5,
1817 xa_mk_value(i), GFP_KERNEL);
1818
1819 mt_set_non_kernel(99999);
1820 mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
1821 newmas.tree = &newmt;
1822 mas_reset(&newmas);
1823 mas_reset(&mas);
1824 mas_lock(&newmas);
1825 mas.index = 0;
1826 mas.last = 0;
1827 if (mas_expected_entries(&newmas, nr_entries)) {
1828 pr_err("OOM!");
1829 BUG_ON(1);
1830 }
1831 rcu_read_lock();
1832 mas_for_each(&mas, val, ULONG_MAX) {
1833 newmas.index = mas.index;
1834 newmas.last = mas.last;
1835 mas_store(&newmas, val);
e15e06a8 1836 }
1837 rcu_read_unlock();
1838 mas_destroy(&newmas);
1839 mas_unlock(&newmas);
1840 mt_validate(&newmt);
1841 mt_set_non_kernel(0);
1842 mtree_destroy(&newmt);
1843}
1844
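/*
 * Store new values from inside a mas_for_each() walk and check the iterator
 * continues from the expected next entry.
 */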
eaf9790d 1845static noinline void __init check_iteration(struct maple_tree *mt)
1846{
1847 int i, nr_entries = 125;
1848 void *val;
1849 MA_STATE(mas, mt, 0, 0);
1850
1851 for (i = 0; i <= nr_entries; i++)
1852 mtree_store_range(mt, i * 10, i * 10 + 9,
1853 xa_mk_value(i), GFP_KERNEL);
1854
1855 mt_set_non_kernel(99999);
1856
1857 i = 0;
1858 mas_lock(&mas);
1859 mas_for_each(&mas, val, 925) {
1860 MT_BUG_ON(mt, mas.index != i * 10);
1861 MT_BUG_ON(mt, mas.last != i * 10 + 9);
1862 /* Overwrite end of entry 92 */
1863 if (i == 92) {
1864 mas.index = 925;
1865 mas.last = 929;
1866 mas_store(&mas, val);
1867 }
1868 i++;
1869 }
1870 /* Ensure mas_find() gets the next value */
1871 val = mas_find(&mas, ULONG_MAX);
1872 MT_BUG_ON(mt, val != xa_mk_value(i));
1873
1874 mas_set(&mas, 0);
1875 i = 0;
1876 mas_for_each(&mas, val, 785) {
1877 MT_BUG_ON(mt, mas.index != i * 10);
1878 MT_BUG_ON(mt, mas.last != i * 10 + 9);
1879 /* Overwrite start of entry 78 */
1880 if (i == 78) {
1881 mas.index = 780;
1882 mas.last = 785;
1883 mas_store(&mas, val);
1884 } else {
1885 i++;
1886 }
1887 }
1888 val = mas_find(&mas, ULONG_MAX);
1889 MT_BUG_ON(mt, val != xa_mk_value(i));
1890
1891 mas_set(&mas, 0);
1892 i = 0;
1893 mas_for_each(&mas, val, 765) {
1894 MT_BUG_ON(mt, mas.index != i * 10);
1895 MT_BUG_ON(mt, mas.last != i * 10 + 9);
1896 /* Overwrite end of entry 76 and advance to the end */
1897 if (i == 76) {
1898 mas.index = 760;
1899 mas.last = 765;
1900 mas_store(&mas, val);
5159d64b
LH
1901 }
1902 i++;
1903 }
1904 /* Make sure the next find returns the range after 765, i.e. 766-769 */
1905 val = mas_find(&mas, ULONG_MAX);
1906 MT_BUG_ON(mt, val != xa_mk_value(76));
1907 mas_unlock(&mas);
1908 mas_destroy(&mas);
1909 mt_set_non_kernel(0);
1910}
1911
eaf9790d 1912static noinline void __init check_mas_store_gfp(struct maple_tree *mt)
e15e06a8 1913{
e15e06a8 1914
120b1162
LH
1915 struct maple_tree newmt;
1916 int i, nr_entries = 135;
1917 void *val;
1918 MA_STATE(mas, mt, 0, 0);
1919 MA_STATE(newmas, mt, 0, 0);
e15e06a8 1920
120b1162
LH
1921 for (i = 0; i <= nr_entries; i++)
1922 mtree_store_range(mt, i*10, i*10 + 5,
1923 xa_mk_value(i), GFP_KERNEL);
e15e06a8 1924
120b1162
LH
1925 mt_set_non_kernel(99999);
1926 mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
1927 newmas.tree = &newmt;
1928 rcu_read_lock();
1929 mas_lock(&newmas);
1930 mas_reset(&newmas);
1931 mas_set(&mas, 0);
1932 mas_for_each(&mas, val, ULONG_MAX) {
1933 newmas.index = mas.index;
1934 newmas.last = mas.last;
1935 mas_store_gfp(&newmas, val, GFP_KERNEL);
e15e06a8 1936 }
120b1162
LH
1937 mas_unlock(&newmas);
1938 rcu_read_unlock();
1939 mt_validate(&newmt);
1940 mt_set_non_kernel(0);
1941 mtree_destroy(&newmt);
e15e06a8
LH
1942}
1943
120b1162 1944#if defined(BENCH_FORK)
eaf9790d 1945static noinline void __init bench_forking(struct maple_tree *mt)
e15e06a8
LH
1946{
1947
120b1162
LH
1948 struct maple_tree newmt;
1949 int i, nr_entries = 134, nr_fork = 80000;
1950 void *val;
1951 MA_STATE(mas, mt, 0, 0);
1952 MA_STATE(newmas, mt, 0, 0);
e15e06a8 1953
120b1162
LH
1954 for (i = 0; i <= nr_entries; i++)
1955 mtree_store_range(mt, i*10, i*10 + 5,
1956 xa_mk_value(i), GFP_KERNEL);
e15e06a8 1957
120b1162
LH
1958 for (i = 0; i < nr_fork; i++) {
1959 mt_set_non_kernel(99999);
1960 mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
1961 newmas.tree = &newmt;
1962 mas_reset(&newmas);
1963 mas_reset(&mas);
1964 mas.index = 0;
1965 mas.last = 0;
1966 rcu_read_lock();
1967 mas_lock(&newmas);
1968 if (mas_expected_entries(&newmas, nr_entries)) {
1969 pr_err("OOM!");
1970 BUG_ON(1);
1971 }
1972 mas_for_each(&mas, val, ULONG_MAX) {
1973 newmas.index = mas.index;
1974 newmas.last = mas.last;
1975 mas_store(&newmas, val);
e15e06a8 1976 }
120b1162
LH
1977 mas_destroy(&newmas);
1978 mas_unlock(&newmas);
1979 rcu_read_unlock();
1980 mt_validate(&newmt);
1981 mt_set_non_kernel(0);
1982 mtree_destroy(&newmt);
e15e06a8 1983 }
e15e06a8 1984}
120b1162 1985#endif
e15e06a8 1986
eaf9790d 1987static noinline void __init next_prev_test(struct maple_tree *mt)
e15e06a8 1988{
120b1162
LH
1989 int i, nr_entries;
1990 void *val;
1991 MA_STATE(mas, mt, 0, 0);
1992 struct maple_enode *mn;
eaf9790d
LH
1993 static const unsigned long *level2;
1994 static const unsigned long level2_64[] = { 707, 1000, 710, 715, 720,
1995 725};
1996 static const unsigned long level2_32[] = { 1747, 2000, 1750, 1755,
1997 1760, 1765};
7a93c71a 1998 unsigned long last_index;
e15e06a8 1999
120b1162
LH
2000 if (MAPLE_32BIT) {
2001 nr_entries = 500;
2002 level2 = level2_32;
7a93c71a 2003 last_index = 0x138e;
120b1162
LH
2004 } else {
2005 nr_entries = 200;
2006 level2 = level2_64;
7a93c71a 2007 last_index = 0x7d6;
120b1162 2008 }
e15e06a8 2009
120b1162
LH
2010 for (i = 0; i <= nr_entries; i++)
2011 mtree_store_range(mt, i*10, i*10 + 5,
2012 xa_mk_value(i), GFP_KERNEL);
e15e06a8 2013
120b1162
LH
2014 mas_lock(&mas);
2015 for (i = 0; i <= nr_entries / 2; i++) {
2016 mas_next(&mas, 1000);
2017 if (mas_is_none(&mas))
2018 break;
e15e06a8 2019
e15e06a8 2020 }
120b1162
LH
2021 mas_reset(&mas);
2022 mas_set(&mas, 0);
2023 i = 0;
2024 mas_for_each(&mas, val, 1000) {
2025 i++;
e15e06a8
LH
2026 }
2027
120b1162
LH
2028 mas_reset(&mas);
2029 mas_set(&mas, 0);
2030 i = 0;
2031 mas_for_each(&mas, val, 1000) {
2032 mas_pause(&mas);
2033 i++;
2034 }
e15e06a8 2035
120b1162
LH
2036 /*
2037 * 680 - 685 = 0x61a00001930c
2038 * 686 - 689 = NULL;
2039 * 690 - 695 = 0x61a00001930c
2040 * Check simple next/prev
2041 */
2042 mas_set(&mas, 686);
2043 val = mas_walk(&mas);
2044 MT_BUG_ON(mt, val != NULL);
e15e06a8 2045
120b1162
LH
2046 val = mas_next(&mas, 1000);
2047 MT_BUG_ON(mt, val != xa_mk_value(690 / 10));
2048 MT_BUG_ON(mt, mas.index != 690);
2049 MT_BUG_ON(mt, mas.last != 695);
e15e06a8 2050
120b1162
LH
2051 val = mas_prev(&mas, 0);
2052 MT_BUG_ON(mt, val != xa_mk_value(680 / 10));
2053 MT_BUG_ON(mt, mas.index != 680);
2054 MT_BUG_ON(mt, mas.last != 685);
e15e06a8 2055
120b1162
LH
2056 val = mas_next(&mas, 1000);
2057 MT_BUG_ON(mt, val != xa_mk_value(690 / 10));
2058 MT_BUG_ON(mt, mas.index != 690);
2059 MT_BUG_ON(mt, mas.last != 695);
e15e06a8 2060
120b1162
LH
2061 val = mas_next(&mas, 1000);
2062 MT_BUG_ON(mt, val != xa_mk_value(700 / 10));
2063 MT_BUG_ON(mt, mas.index != 700);
2064 MT_BUG_ON(mt, mas.last != 705);
e15e06a8 2065
120b1162
LH
2066 /* Check across node boundaries of the tree */
2067 mas_set(&mas, 70);
2068 val = mas_walk(&mas);
2069 MT_BUG_ON(mt, val != xa_mk_value(70 / 10));
2070 MT_BUG_ON(mt, mas.index != 70);
2071 MT_BUG_ON(mt, mas.last != 75);
e15e06a8 2072
120b1162
LH
2073 val = mas_next(&mas, 1000);
2074 MT_BUG_ON(mt, val != xa_mk_value(80 / 10));
2075 MT_BUG_ON(mt, mas.index != 80);
2076 MT_BUG_ON(mt, mas.last != 85);
e15e06a8 2077
120b1162
LH
2078 val = mas_prev(&mas, 70);
2079 MT_BUG_ON(mt, val != xa_mk_value(70 / 10));
2080 MT_BUG_ON(mt, mas.index != 70);
2081 MT_BUG_ON(mt, mas.last != 75);
e15e06a8 2082
120b1162
LH
2083 /* Check across two levels of the tree */
2084 mas_reset(&mas);
2085 mas_set(&mas, level2[0]);
2086 val = mas_walk(&mas);
2087 MT_BUG_ON(mt, val != NULL);
2088 val = mas_next(&mas, level2[1]);
2089 MT_BUG_ON(mt, val != xa_mk_value(level2[2] / 10));
2090 MT_BUG_ON(mt, mas.index != level2[2]);
2091 MT_BUG_ON(mt, mas.last != level2[3]);
2092 mn = mas.node;
e15e06a8 2093
120b1162
LH
2094 val = mas_next(&mas, level2[1]);
2095 MT_BUG_ON(mt, val != xa_mk_value(level2[4] / 10));
2096 MT_BUG_ON(mt, mas.index != level2[4]);
2097 MT_BUG_ON(mt, mas.last != level2[5]);
2098 MT_BUG_ON(mt, mn == mas.node);
e15e06a8 2099
120b1162
LH
2100 val = mas_prev(&mas, 0);
2101 MT_BUG_ON(mt, val != xa_mk_value(level2[2] / 10));
2102 MT_BUG_ON(mt, mas.index != level2[2]);
2103 MT_BUG_ON(mt, mas.last != level2[3]);
e15e06a8 2104
120b1162
LH
2105 /* Check running off the end and back on */
2106 mas_set(&mas, nr_entries * 10);
2107 val = mas_walk(&mas);
2108 MT_BUG_ON(mt, val != xa_mk_value(nr_entries));
2109 MT_BUG_ON(mt, mas.index != (nr_entries * 10));
2110 MT_BUG_ON(mt, mas.last != (nr_entries * 10 + 5));
e15e06a8 2111
120b1162
LH
2112 val = mas_next(&mas, ULONG_MAX);
2113 MT_BUG_ON(mt, val != NULL);
7a93c71a 2114 MT_BUG_ON(mt, mas.index != last_index);
120b1162 2115 MT_BUG_ON(mt, mas.last != ULONG_MAX);
e15e06a8 2116
120b1162
LH
2117 val = mas_prev(&mas, 0);
2118 MT_BUG_ON(mt, val != xa_mk_value(nr_entries));
2119 MT_BUG_ON(mt, mas.index != (nr_entries * 10));
2120 MT_BUG_ON(mt, mas.last != (nr_entries * 10 + 5));
e15e06a8 2121
120b1162
LH
2122 /* Check running off the start and back on */
2123 mas_reset(&mas);
2124 mas_set(&mas, 10);
2125 val = mas_walk(&mas);
2126 MT_BUG_ON(mt, val != xa_mk_value(1));
2127 MT_BUG_ON(mt, mas.index != 10);
2128 MT_BUG_ON(mt, mas.last != 15);
e15e06a8 2129
120b1162
LH
2130 val = mas_prev(&mas, 0);
2131 MT_BUG_ON(mt, val != xa_mk_value(0));
2132 MT_BUG_ON(mt, mas.index != 0);
2133 MT_BUG_ON(mt, mas.last != 5);
e15e06a8 2134
120b1162
LH
2135 val = mas_prev(&mas, 0);
2136 MT_BUG_ON(mt, val != NULL);
2137 MT_BUG_ON(mt, mas.index != 0);
eb2e817f
LH
2138 MT_BUG_ON(mt, mas.last != 5);
2139 MT_BUG_ON(mt, mas.node != MAS_NONE);
e15e06a8 2140
120b1162
LH
2141 mas.index = 0;
2142 mas.last = 5;
2143 mas_store(&mas, NULL);
2144 mas_reset(&mas);
2145 mas_set(&mas, 10);
2146 mas_walk(&mas);
e15e06a8 2147
120b1162
LH
2148 val = mas_prev(&mas, 0);
2149 MT_BUG_ON(mt, val != NULL);
2150 MT_BUG_ON(mt, mas.index != 0);
eb2e817f 2151 MT_BUG_ON(mt, mas.last != 9);
120b1162 2152 mas_unlock(&mas);
e15e06a8 2153
120b1162 2154 mtree_destroy(mt);
e15e06a8 2155
120b1162
LH
2156 mt_init(mt);
2157 mtree_store_range(mt, 0, 0, xa_mk_value(0), GFP_KERNEL);
2158 mtree_store_range(mt, 5, 5, xa_mk_value(5), GFP_KERNEL);
e15e06a8 2159 rcu_read_lock();
120b1162
LH
2160 mas_set(&mas, 5);
2161 val = mas_prev(&mas, 4);
2162 MT_BUG_ON(mt, val != NULL);
e15e06a8 2163 rcu_read_unlock();
e15e06a8
LH
2164}
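
/*
 * Illustrative sketch only (not invoked by the tests): the lookup idiom the
 * checks above exercise.  Walk to @index and, if nothing is stored there,
 * fall back to the nearest populated range below it.  The helper name is
 * hypothetical and the tree is assumed to be populated elsewhere.
 */
static __maybe_unused void __init *example_find_at_or_below(struct maple_tree *mt,
							     unsigned long index)
{
	void *entry;
	MA_STATE(mas, mt, index, index);

	rcu_read_lock();
	entry = mas_walk(&mas);		/* range containing index, or NULL */
	if (!entry)
		entry = mas_prev(&mas, 0);	/* nearest range below index */
	rcu_read_unlock();

	return entry;
}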
2165
e15e06a8 2166
e15e06a8
LH
2167
2168/* Test spanning writes that require balancing right sibling or right cousin */
eaf9790d 2169static noinline void __init check_spanning_relatives(struct maple_tree *mt)
e15e06a8
LH
2170{
2171
2172 unsigned long i, nr_entries = 1000;
2173
2174 for (i = 0; i <= nr_entries; i++)
2175 mtree_store_range(mt, i*10, i*10 + 5,
2176 xa_mk_value(i), GFP_KERNEL);
2177
2178
2179 mtree_store_range(mt, 9365, 9955, NULL, GFP_KERNEL);
2180}
2181
eaf9790d 2182static noinline void __init check_fuzzer(struct maple_tree *mt)
e15e06a8
LH
2183{
2184 /*
2185 * 1. Causes a spanning rebalance of a single root node.
2186 * Fixed by setting the correct limit in mast_cp_to_nodes() when the
2187 * entire right side is consumed.
2188 */
2189 mtree_test_insert(mt, 88, (void *)0xb1);
2190 mtree_test_insert(mt, 84, (void *)0xa9);
2191 mtree_test_insert(mt, 2, (void *)0x5);
2192 mtree_test_insert(mt, 4, (void *)0x9);
2193 mtree_test_insert(mt, 14, (void *)0x1d);
2194 mtree_test_insert(mt, 7, (void *)0xf);
2195 mtree_test_insert(mt, 12, (void *)0x19);
2196 mtree_test_insert(mt, 18, (void *)0x25);
2197 mtree_test_store_range(mt, 8, 18, (void *)0x11);
2198 mtree_destroy(mt);
2199
2200
2201 /*
2202 * 2. Causes a spanning rebalance of two nodes in the root.
2203 * Fixed by setting mast->r->max correctly.
2204 */
2205 mt_init_flags(mt, 0);
2206 mtree_test_store(mt, 87, (void *)0xaf);
2207 mtree_test_store(mt, 0, (void *)0x1);
2208 mtree_test_load(mt, 4);
2209 mtree_test_insert(mt, 4, (void *)0x9);
2210 mtree_test_store(mt, 8, (void *)0x11);
2211 mtree_test_store(mt, 44, (void *)0x59);
2212 mtree_test_store(mt, 68, (void *)0x89);
2213 mtree_test_store(mt, 2, (void *)0x5);
2214 mtree_test_insert(mt, 43, (void *)0x57);
2215 mtree_test_insert(mt, 24, (void *)0x31);
2216 mtree_test_insert(mt, 844, (void *)0x699);
2217 mtree_test_store(mt, 84, (void *)0xa9);
2218 mtree_test_store(mt, 4, (void *)0x9);
2219 mtree_test_erase(mt, 4);
2220 mtree_test_load(mt, 5);
2221 mtree_test_erase(mt, 0);
2222 mtree_destroy(mt);
2223
2224 /*
2225 * 3. Causes a node overflow on copy.
2226 * Fixed by using the correct check for node size in mas_wr_modify().
2227 * Also discovered an issue with metadata setting.
2228 */
2229 mt_init_flags(mt, 0);
120b1162 2230 mtree_test_store_range(mt, 0, ULONG_MAX, (void *)0x1);
e15e06a8
LH
2231 mtree_test_store(mt, 4, (void *)0x9);
2232 mtree_test_erase(mt, 5);
2233 mtree_test_erase(mt, 0);
2234 mtree_test_erase(mt, 4);
2235 mtree_test_store(mt, 5, (void *)0xb);
2236 mtree_test_erase(mt, 5);
2237 mtree_test_store(mt, 5, (void *)0xb);
2238 mtree_test_erase(mt, 5);
2239 mtree_test_erase(mt, 4);
2240 mtree_test_store(mt, 4, (void *)0x9);
2241 mtree_test_store(mt, 444, (void *)0x379);
2242 mtree_test_store(mt, 0, (void *)0x1);
2243 mtree_test_load(mt, 0);
2244 mtree_test_store(mt, 5, (void *)0xb);
2245 mtree_test_erase(mt, 0);
2246 mtree_destroy(mt);
2247
2248 /*
2249 * 4. Spanning store failure due to writing an incorrect pivot value at
2250 * the last slot.
2251 * Fixed by setting mast->r->max correctly in mast_cp_to_nodes()
2252 *
2253 */
2254 mt_init_flags(mt, 0);
2255 mtree_test_insert(mt, 261, (void *)0x20b);
2256 mtree_test_store(mt, 516, (void *)0x409);
2257 mtree_test_store(mt, 6, (void *)0xd);
2258 mtree_test_insert(mt, 5, (void *)0xb);
2259 mtree_test_insert(mt, 1256, (void *)0x9d1);
2260 mtree_test_store(mt, 4, (void *)0x9);
2261 mtree_test_erase(mt, 1);
2262 mtree_test_store(mt, 56, (void *)0x71);
2263 mtree_test_insert(mt, 1, (void *)0x3);
2264 mtree_test_store(mt, 24, (void *)0x31);
2265 mtree_test_erase(mt, 1);
2266 mtree_test_insert(mt, 2263, (void *)0x11af);
2267 mtree_test_insert(mt, 446, (void *)0x37d);
2268 mtree_test_store_range(mt, 6, 45, (void *)0xd);
2269 mtree_test_store_range(mt, 3, 446, (void *)0x7);
2270 mtree_destroy(mt);
2271
2272 /*
2273 * 5. mas_wr_extend_null() may overflow slots.
2274 * Fix by checking against wr_mas->node_end.
2275 */
2276 mt_init_flags(mt, 0);
2277 mtree_test_store(mt, 48, (void *)0x61);
2278 mtree_test_store(mt, 3, (void *)0x7);
2279 mtree_test_load(mt, 0);
2280 mtree_test_store(mt, 88, (void *)0xb1);
2281 mtree_test_store(mt, 81, (void *)0xa3);
2282 mtree_test_insert(mt, 0, (void *)0x1);
2283 mtree_test_insert(mt, 8, (void *)0x11);
2284 mtree_test_insert(mt, 4, (void *)0x9);
2285 mtree_test_insert(mt, 2480, (void *)0x1361);
120b1162 2286 mtree_test_insert(mt, ULONG_MAX,
e15e06a8 2287 (void *)0xffffffffffffffff);
120b1162 2288 mtree_test_erase(mt, ULONG_MAX);
e15e06a8
LH
2289 mtree_destroy(mt);
2290
2291 /*
2292 * 6. When reusing a node with an implied pivot and the node is
2293 * shrinking, old data would be left in the implied slot
2294 * Fixed by checking the last pivot for the mas->max and clear
2295 * accordingly. This only affected the left-most node as that node is
2296 * the only one allowed to end in NULL.
2297 */
2298 mt_init_flags(mt, 0);
2299 mtree_test_erase(mt, 3);
2300 mtree_test_insert(mt, 22, (void *)0x2d);
2301 mtree_test_insert(mt, 15, (void *)0x1f);
2302 mtree_test_load(mt, 2);
2303 mtree_test_insert(mt, 1, (void *)0x3);
2304 mtree_test_insert(mt, 1, (void *)0x3);
2305 mtree_test_insert(mt, 5, (void *)0xb);
2306 mtree_test_erase(mt, 1);
2307 mtree_test_insert(mt, 1, (void *)0x3);
2308 mtree_test_insert(mt, 4, (void *)0x9);
2309 mtree_test_insert(mt, 1, (void *)0x3);
2310 mtree_test_erase(mt, 1);
2311 mtree_test_insert(mt, 2, (void *)0x5);
2312 mtree_test_insert(mt, 1, (void *)0x3);
2313 mtree_test_erase(mt, 3);
2314 mtree_test_insert(mt, 22, (void *)0x2d);
2315 mtree_test_insert(mt, 15, (void *)0x1f);
2316 mtree_test_insert(mt, 2, (void *)0x5);
2317 mtree_test_insert(mt, 1, (void *)0x3);
2318 mtree_test_insert(mt, 8, (void *)0x11);
2319 mtree_test_load(mt, 2);
2320 mtree_test_insert(mt, 1, (void *)0x3);
2321 mtree_test_insert(mt, 1, (void *)0x3);
2322 mtree_test_store(mt, 1, (void *)0x3);
2323 mtree_test_insert(mt, 5, (void *)0xb);
2324 mtree_test_erase(mt, 1);
2325 mtree_test_insert(mt, 1, (void *)0x3);
2326 mtree_test_insert(mt, 4, (void *)0x9);
2327 mtree_test_insert(mt, 1, (void *)0x3);
2328 mtree_test_erase(mt, 1);
2329 mtree_test_insert(mt, 2, (void *)0x5);
2330 mtree_test_insert(mt, 1, (void *)0x3);
2331 mtree_test_erase(mt, 3);
2332 mtree_test_insert(mt, 22, (void *)0x2d);
2333 mtree_test_insert(mt, 15, (void *)0x1f);
2334 mtree_test_insert(mt, 2, (void *)0x5);
2335 mtree_test_insert(mt, 1, (void *)0x3);
2336 mtree_test_insert(mt, 8, (void *)0x11);
2337 mtree_test_insert(mt, 12, (void *)0x19);
2338 mtree_test_erase(mt, 1);
2339 mtree_test_store_range(mt, 4, 62, (void *)0x9);
2340 mtree_test_erase(mt, 62);
2341 mtree_test_store_range(mt, 1, 0, (void *)0x3);
2342 mtree_test_insert(mt, 11, (void *)0x17);
2343 mtree_test_insert(mt, 3, (void *)0x7);
2344 mtree_test_insert(mt, 3, (void *)0x7);
2345 mtree_test_store(mt, 62, (void *)0x7d);
2346 mtree_test_erase(mt, 62);
2347 mtree_test_store_range(mt, 1, 15, (void *)0x3);
2348 mtree_test_erase(mt, 1);
2349 mtree_test_insert(mt, 22, (void *)0x2d);
2350 mtree_test_insert(mt, 12, (void *)0x19);
2351 mtree_test_erase(mt, 1);
2352 mtree_test_insert(mt, 3, (void *)0x7);
2353 mtree_test_store(mt, 62, (void *)0x7d);
2354 mtree_test_erase(mt, 62);
2355 mtree_test_insert(mt, 122, (void *)0xf5);
2356 mtree_test_store(mt, 3, (void *)0x7);
2357 mtree_test_insert(mt, 0, (void *)0x1);
2358 mtree_test_store_range(mt, 0, 1, (void *)0x1);
2359 mtree_test_insert(mt, 85, (void *)0xab);
2360 mtree_test_insert(mt, 72, (void *)0x91);
2361 mtree_test_insert(mt, 81, (void *)0xa3);
2362 mtree_test_insert(mt, 726, (void *)0x5ad);
2363 mtree_test_insert(mt, 0, (void *)0x1);
2364 mtree_test_insert(mt, 1, (void *)0x3);
2365 mtree_test_store(mt, 51, (void *)0x67);
2366 mtree_test_insert(mt, 611, (void *)0x4c7);
2367 mtree_test_insert(mt, 485, (void *)0x3cb);
2368 mtree_test_insert(mt, 1, (void *)0x3);
2369 mtree_test_erase(mt, 1);
2370 mtree_test_insert(mt, 0, (void *)0x1);
2371 mtree_test_insert(mt, 1, (void *)0x3);
2372 mtree_test_insert_range(mt, 26, 1, (void *)0x35);
2373 mtree_test_load(mt, 1);
2374 mtree_test_store_range(mt, 1, 22, (void *)0x3);
2375 mtree_test_insert(mt, 1, (void *)0x3);
2376 mtree_test_erase(mt, 1);
2377 mtree_test_load(mt, 53);
2378 mtree_test_load(mt, 1);
2379 mtree_test_store_range(mt, 1, 1, (void *)0x3);
2380 mtree_test_insert(mt, 222, (void *)0x1bd);
2381 mtree_test_insert(mt, 485, (void *)0x3cb);
2382 mtree_test_insert(mt, 1, (void *)0x3);
2383 mtree_test_erase(mt, 1);
2384 mtree_test_load(mt, 0);
2385 mtree_test_insert(mt, 21, (void *)0x2b);
2386 mtree_test_insert(mt, 3, (void *)0x7);
2387 mtree_test_store(mt, 621, (void *)0x4db);
2388 mtree_test_insert(mt, 0, (void *)0x1);
2389 mtree_test_erase(mt, 5);
2390 mtree_test_insert(mt, 1, (void *)0x3);
2391 mtree_test_store(mt, 62, (void *)0x7d);
2392 mtree_test_erase(mt, 62);
2393 mtree_test_store_range(mt, 1, 0, (void *)0x3);
2394 mtree_test_insert(mt, 22, (void *)0x2d);
2395 mtree_test_insert(mt, 12, (void *)0x19);
2396 mtree_test_erase(mt, 1);
2397 mtree_test_insert(mt, 1, (void *)0x3);
2398 mtree_test_store_range(mt, 4, 62, (void *)0x9);
2399 mtree_test_erase(mt, 62);
2400 mtree_test_erase(mt, 1);
2401 mtree_test_load(mt, 1);
2402 mtree_test_store_range(mt, 1, 22, (void *)0x3);
2403 mtree_test_insert(mt, 1, (void *)0x3);
2404 mtree_test_erase(mt, 1);
2405 mtree_test_load(mt, 53);
2406 mtree_test_load(mt, 1);
2407 mtree_test_store_range(mt, 1, 1, (void *)0x3);
2408 mtree_test_insert(mt, 222, (void *)0x1bd);
2409 mtree_test_insert(mt, 485, (void *)0x3cb);
2410 mtree_test_insert(mt, 1, (void *)0x3);
2411 mtree_test_erase(mt, 1);
2412 mtree_test_insert(mt, 1, (void *)0x3);
2413 mtree_test_load(mt, 0);
2414 mtree_test_load(mt, 0);
2415 mtree_destroy(mt);
2416
2417 /*
2418 * 7. Previous fix was incomplete; fix mas_reuse_node()'s clearing of old
2419 * data by overwriting it first, so that metadata is of no concern.
2420 */
2421 mt_init_flags(mt, 0);
2422 mtree_test_load(mt, 1);
2423 mtree_test_insert(mt, 102, (void *)0xcd);
2424 mtree_test_erase(mt, 2);
2425 mtree_test_erase(mt, 0);
2426 mtree_test_load(mt, 0);
2427 mtree_test_insert(mt, 4, (void *)0x9);
2428 mtree_test_insert(mt, 2, (void *)0x5);
2429 mtree_test_insert(mt, 110, (void *)0xdd);
2430 mtree_test_insert(mt, 1, (void *)0x3);
2431 mtree_test_insert_range(mt, 5, 0, (void *)0xb);
2432 mtree_test_erase(mt, 2);
2433 mtree_test_store(mt, 0, (void *)0x1);
2434 mtree_test_store(mt, 112, (void *)0xe1);
2435 mtree_test_insert(mt, 21, (void *)0x2b);
2436 mtree_test_store(mt, 1, (void *)0x3);
2437 mtree_test_insert_range(mt, 110, 2, (void *)0xdd);
2438 mtree_test_store(mt, 2, (void *)0x5);
2439 mtree_test_load(mt, 22);
2440 mtree_test_erase(mt, 2);
2441 mtree_test_store(mt, 210, (void *)0x1a5);
2442 mtree_test_store_range(mt, 0, 2, (void *)0x1);
2443 mtree_test_store(mt, 2, (void *)0x5);
2444 mtree_test_erase(mt, 2);
2445 mtree_test_erase(mt, 22);
2446 mtree_test_erase(mt, 1);
2447 mtree_test_erase(mt, 2);
2448 mtree_test_store(mt, 0, (void *)0x1);
2449 mtree_test_load(mt, 112);
2450 mtree_test_insert(mt, 2, (void *)0x5);
2451 mtree_test_erase(mt, 2);
2452 mtree_test_store(mt, 1, (void *)0x3);
2453 mtree_test_insert_range(mt, 1, 2, (void *)0x3);
2454 mtree_test_erase(mt, 0);
2455 mtree_test_erase(mt, 2);
2456 mtree_test_store(mt, 2, (void *)0x5);
2457 mtree_test_erase(mt, 0);
2458 mtree_test_erase(mt, 2);
2459 mtree_test_store(mt, 0, (void *)0x1);
2460 mtree_test_store(mt, 0, (void *)0x1);
2461 mtree_test_erase(mt, 2);
2462 mtree_test_store(mt, 2, (void *)0x5);
2463 mtree_test_erase(mt, 2);
2464 mtree_test_insert(mt, 2, (void *)0x5);
2465 mtree_test_insert_range(mt, 1, 2, (void *)0x3);
2466 mtree_test_erase(mt, 0);
2467 mtree_test_erase(mt, 2);
2468 mtree_test_store(mt, 0, (void *)0x1);
2469 mtree_test_load(mt, 112);
2470 mtree_test_store_range(mt, 110, 12, (void *)0xdd);
2471 mtree_test_store(mt, 2, (void *)0x5);
2472 mtree_test_load(mt, 110);
2473 mtree_test_insert_range(mt, 4, 71, (void *)0x9);
2474 mtree_test_load(mt, 2);
2475 mtree_test_store(mt, 2, (void *)0x5);
2476 mtree_test_insert_range(mt, 11, 22, (void *)0x17);
2477 mtree_test_erase(mt, 12);
2478 mtree_test_store(mt, 2, (void *)0x5);
2479 mtree_test_load(mt, 22);
2480 mtree_destroy(mt);
2481
2482
2483 /*
2484 * 8. When rebalancing or spanning_rebalance(), the max of the new node
2485 * may be set incorrectly to the final pivot and not the right node's max.
2486 * Fix by setting the left node's max to the original right node's max if
2487 * the entire node is consumed.
2488 */
2489 mt_init_flags(mt, 0);
2490 mtree_test_store(mt, 6, (void *)0xd);
2491 mtree_test_store(mt, 67, (void *)0x87);
2492 mtree_test_insert(mt, 15, (void *)0x1f);
2493 mtree_test_insert(mt, 6716, (void *)0x3479);
2494 mtree_test_store(mt, 61, (void *)0x7b);
2495 mtree_test_insert(mt, 13, (void *)0x1b);
2496 mtree_test_store(mt, 8, (void *)0x11);
2497 mtree_test_insert(mt, 1, (void *)0x3);
2498 mtree_test_load(mt, 0);
2499 mtree_test_erase(mt, 67167);
2500 mtree_test_insert_range(mt, 6, 7167, (void *)0xd);
2501 mtree_test_insert(mt, 6, (void *)0xd);
2502 mtree_test_erase(mt, 67);
2503 mtree_test_insert(mt, 1, (void *)0x3);
2504 mtree_test_erase(mt, 667167);
2505 mtree_test_insert(mt, 6, (void *)0xd);
2506 mtree_test_store(mt, 67, (void *)0x87);
2507 mtree_test_insert(mt, 5, (void *)0xb);
2508 mtree_test_erase(mt, 1);
2509 mtree_test_insert(mt, 6, (void *)0xd);
2510 mtree_test_erase(mt, 67);
2511 mtree_test_insert(mt, 15, (void *)0x1f);
2512 mtree_test_insert(mt, 67167, (void *)0x20cbf);
2513 mtree_test_insert(mt, 1, (void *)0x3);
2514 mtree_test_load(mt, 7);
2515 mtree_test_insert(mt, 16, (void *)0x21);
2516 mtree_test_insert(mt, 36, (void *)0x49);
2517 mtree_test_store(mt, 67, (void *)0x87);
2518 mtree_test_store(mt, 6, (void *)0xd);
2519 mtree_test_insert(mt, 367, (void *)0x2df);
2520 mtree_test_insert(mt, 115, (void *)0xe7);
2521 mtree_test_store(mt, 0, (void *)0x1);
2522 mtree_test_store_range(mt, 1, 3, (void *)0x3);
2523 mtree_test_store(mt, 1, (void *)0x3);
2524 mtree_test_erase(mt, 67167);
2525 mtree_test_insert_range(mt, 6, 47, (void *)0xd);
2526 mtree_test_store(mt, 1, (void *)0x3);
2527 mtree_test_insert_range(mt, 1, 67, (void *)0x3);
2528 mtree_test_load(mt, 67);
2529 mtree_test_insert(mt, 1, (void *)0x3);
2530 mtree_test_erase(mt, 67167);
2531 mtree_destroy(mt);
2532
2533 /*
2534 * 9. A spanning store to the end of data caused an invalid metadata
2535 * length, which eventually resulted in a crash.
2536 * Fix by checking if there is a value in the pivot before incrementing the
2537 * metadata end in mab_mas_cp(). To ensure this doesn't happen again,
2538 * abstract the two locations where this happens into a function called
2539 * mas_leaf_set_meta().
2540 */
2541 mt_init_flags(mt, 0);
2542 mtree_test_insert(mt, 21, (void *)0x2b);
2543 mtree_test_insert(mt, 12, (void *)0x19);
2544 mtree_test_insert(mt, 6, (void *)0xd);
2545 mtree_test_insert(mt, 8, (void *)0x11);
2546 mtree_test_insert(mt, 2, (void *)0x5);
2547 mtree_test_insert(mt, 91, (void *)0xb7);
2548 mtree_test_insert(mt, 18, (void *)0x25);
2549 mtree_test_insert(mt, 81, (void *)0xa3);
2550 mtree_test_store_range(mt, 0, 128, (void *)0x1);
2551 mtree_test_store(mt, 1, (void *)0x3);
2552 mtree_test_erase(mt, 8);
2553 mtree_test_insert(mt, 11, (void *)0x17);
2554 mtree_test_insert(mt, 8, (void *)0x11);
2555 mtree_test_insert(mt, 21, (void *)0x2b);
2556 mtree_test_insert(mt, 2, (void *)0x5);
120b1162
LH
2557 mtree_test_insert(mt, ULONG_MAX - 10, (void *)0xffffffffffffffeb);
2558 mtree_test_erase(mt, ULONG_MAX - 10);
e15e06a8
LH
2559 mtree_test_store_range(mt, 0, 281, (void *)0x1);
2560 mtree_test_erase(mt, 2);
2561 mtree_test_insert(mt, 1211, (void *)0x977);
2562 mtree_test_insert(mt, 111, (void *)0xdf);
2563 mtree_test_insert(mt, 13, (void *)0x1b);
2564 mtree_test_insert(mt, 211, (void *)0x1a7);
2565 mtree_test_insert(mt, 11, (void *)0x17);
2566 mtree_test_insert(mt, 5, (void *)0xb);
2567 mtree_test_insert(mt, 1218, (void *)0x985);
2568 mtree_test_insert(mt, 61, (void *)0x7b);
2569 mtree_test_store(mt, 1, (void *)0x3);
2570 mtree_test_insert(mt, 121, (void *)0xf3);
2571 mtree_test_insert(mt, 8, (void *)0x11);
2572 mtree_test_insert(mt, 21, (void *)0x2b);
2573 mtree_test_insert(mt, 2, (void *)0x5);
120b1162
LH
2574 mtree_test_insert(mt, ULONG_MAX - 10, (void *)0xffffffffffffffeb);
2575 mtree_test_erase(mt, ULONG_MAX - 10);
e15e06a8 2576}
120b1162
LH
2577
2578/* Duplicate the tree with a specific gap */
eaf9790d 2579static noinline void __init check_dup_gaps(struct maple_tree *mt,
e15e06a8
LH
2580 unsigned long nr_entries, bool zero_start,
2581 unsigned long gap)
2582{
2583 unsigned long i = 0;
2584 struct maple_tree newmt;
2585 int ret;
2586 void *tmp;
2587 MA_STATE(mas, mt, 0, 0);
2588 MA_STATE(newmas, &newmt, 0, 0);
2589
e15e06a8
LH
2590 if (!zero_start)
2591 i = 1;
2592
2593 mt_zero_nr_tallocated();
2594 for (; i <= nr_entries; i++)
2595 mtree_store_range(mt, i*10, (i+1)*10 - gap,
2596 xa_mk_value(i), GFP_KERNEL);
2597
2598 mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
2599 mt_set_non_kernel(99999);
120b1162 2600 mas_lock(&newmas);
e15e06a8
LH
2601 ret = mas_expected_entries(&newmas, nr_entries);
2602 mt_set_non_kernel(0);
2603 MT_BUG_ON(mt, ret != 0);
2604
120b1162 2605 rcu_read_lock();
e15e06a8
LH
2606 mas_for_each(&mas, tmp, ULONG_MAX) {
2607 newmas.index = mas.index;
2608 newmas.last = mas.last;
2609 mas_store(&newmas, tmp);
2610 }
120b1162 2611 rcu_read_unlock();
e15e06a8 2612 mas_destroy(&newmas);
120b1162
LH
2613 mas_unlock(&newmas);
2614
e15e06a8
LH
2615 mtree_destroy(&newmt);
2616}
2617
120b1162 2618/* Duplicate many sizes of trees. Mainly to test expected entry values */
eaf9790d 2619static noinline void __init check_dup(struct maple_tree *mt)
e15e06a8
LH
2620{
2621 int i;
120b1162 2622 int big_start = 100010;
e15e06a8
LH
2623
2624 /* Check with a value at zero */
2625 for (i = 10; i < 1000; i++) {
2626 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2627 check_dup_gaps(mt, i, true, 5);
2628 mtree_destroy(mt);
120b1162 2629 rcu_barrier();
e15e06a8
LH
2630 }
2631
120b1162
LH
2632 cond_resched();
2633 mt_cache_shrink();
e15e06a8
LH
2634 /* Check with a value at zero, no gap */
2635 for (i = 1000; i < 2000; i++) {
2636 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2637 check_dup_gaps(mt, i, true, 0);
2638 mtree_destroy(mt);
120b1162 2639 rcu_barrier();
e15e06a8
LH
2640 }
2641
120b1162
LH
2642 cond_resched();
2643 mt_cache_shrink();
e15e06a8 2644 /* Check with a value at zero and unreasonably large */
120b1162 2645 for (i = big_start; i < big_start + 10; i++) {
e15e06a8
LH
2646 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2647 check_dup_gaps(mt, i, true, 5);
2648 mtree_destroy(mt);
120b1162 2649 rcu_barrier();
e15e06a8
LH
2650 }
2651
120b1162
LH
2652 cond_resched();
2653 mt_cache_shrink();
e15e06a8
LH
2654 /* Small to medium size not starting at zero */
2655 for (i = 200; i < 1000; i++) {
2656 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2657 check_dup_gaps(mt, i, false, 5);
2658 mtree_destroy(mt);
120b1162 2659 rcu_barrier();
e15e06a8
LH
2660 }
2661
120b1162
LH
2662 cond_resched();
2663 mt_cache_shrink();
e15e06a8 2664 /* Unreasonably large not starting at zero*/
120b1162 2665 for (i = big_start; i < big_start + 10; i++) {
e15e06a8
LH
2666 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2667 check_dup_gaps(mt, i, false, 5);
2668 mtree_destroy(mt);
120b1162
LH
2669 rcu_barrier();
2670 cond_resched();
2671 mt_cache_shrink();
e15e06a8
LH
2672 }
2673
2674 /* Check non-allocation tree not starting at zero */
2675 for (i = 1500; i < 3000; i++) {
2676 mt_init_flags(mt, 0);
2677 check_dup_gaps(mt, i, false, 5);
2678 mtree_destroy(mt);
120b1162
LH
2679 rcu_barrier();
2680 cond_resched();
2681 if (i % 2 == 0)
2682 mt_cache_shrink();
e15e06a8
LH
2683 }
2684
120b1162 2685 mt_cache_shrink();
e15e06a8
LH
2686 /* Check non-allocation tree starting at zero */
2687 for (i = 200; i < 1000; i++) {
2688 mt_init_flags(mt, 0);
2689 check_dup_gaps(mt, i, true, 5);
2690 mtree_destroy(mt);
120b1162
LH
2691 rcu_barrier();
2692 cond_resched();
e15e06a8
LH
2693 }
2694
120b1162 2695 mt_cache_shrink();
e15e06a8 2696 /* Unreasonably large */
120b1162 2697 for (i = big_start + 5; i < big_start + 10; i++) {
e15e06a8
LH
2698 mt_init_flags(mt, 0);
2699 check_dup_gaps(mt, i, true, 5);
2700 mtree_destroy(mt);
120b1162
LH
2701 rcu_barrier();
2702 mt_cache_shrink();
2703 cond_resched();
e15e06a8 2704 }
e15e06a8
LH
2705}
2706
eaf9790d 2707static noinline void __init check_bnode_min_spanning(struct maple_tree *mt)
c5651b31
LH
2708{
2709 int i = 50;
2710 MA_STATE(mas, mt, 0, 0);
2711
2712 mt_set_non_kernel(9999);
2713 mas_lock(&mas);
2714 do {
2715 mas_set_range(&mas, i*10, i*10+9);
2716 mas_store(&mas, check_bnode_min_spanning);
2717 } while (i--);
2718
2719 mas_set_range(&mas, 240, 509);
2720 mas_store(&mas, NULL);
2721 mas_unlock(&mas);
2722 mas_destroy(&mas);
2723 mt_set_non_kernel(0);
2724}
2725
eaf9790d 2726static noinline void __init check_empty_area_window(struct maple_tree *mt)
7327e811
LH
2727{
2728 unsigned long i, nr_entries = 20;
2729 MA_STATE(mas, mt, 0, 0);
2730
2731 for (i = 1; i <= nr_entries; i++)
2732 mtree_store_range(mt, i*10, i*10 + 9,
2733 xa_mk_value(i), GFP_KERNEL);
2734
2735 /* Create another hole besides the one at 0 */
2736 mtree_store_range(mt, 160, 169, NULL, GFP_KERNEL);
2737
2738 /* Check lower bounds that don't fit */
2739 rcu_read_lock();
2740 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 90, 10) != -EBUSY);
2741
2742 mas_reset(&mas);
2743 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 6, 90, 5) != -EBUSY);
2744
2745 /* Check lower bound that does fit */
2746 mas_reset(&mas);
2747 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 90, 5) != 0);
2748 MT_BUG_ON(mt, mas.index != 5);
2749 MT_BUG_ON(mt, mas.last != 9);
2750 rcu_read_unlock();
2751
2752 /* Check one gap that doesn't fit and one that does */
2753 rcu_read_lock();
2754 mas_reset(&mas);
2755 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 217, 9) != 0);
2756 MT_BUG_ON(mt, mas.index != 161);
2757 MT_BUG_ON(mt, mas.last != 169);
2758
2759 /* Check one gap that does fit above the min */
2760 mas_reset(&mas);
2761 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 218, 3) != 0);
2762 MT_BUG_ON(mt, mas.index != 216);
2763 MT_BUG_ON(mt, mas.last != 218);
2764
2765 /* Check size that doesn't fit any gap */
2766 mas_reset(&mas);
2767 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 218, 16) != -EBUSY);
2768
2769 /*
2770 * Check size that doesn't fit the lower end of the window but
2771 * does fit the gap
2772 */
2773 mas_reset(&mas);
2774 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 167, 200, 4) != -EBUSY);
2775
2776 /*
2777 * Check size that doesn't fit the upper end of the window but
2778 * does fit the gap
2779 */
2780 mas_reset(&mas);
2781 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 162, 4) != -EBUSY);
2782
2783 /* Check mas_empty_area forward */
2784 mas_reset(&mas);
2785 MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 9) != 0);
2786 MT_BUG_ON(mt, mas.index != 0);
2787 MT_BUG_ON(mt, mas.last != 8);
2788
2789 mas_reset(&mas);
2790 MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 4) != 0);
2791 MT_BUG_ON(mt, mas.index != 0);
2792 MT_BUG_ON(mt, mas.last != 3);
2793
2794 mas_reset(&mas);
2795 MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 11) != -EBUSY);
2796
2797 mas_reset(&mas);
2798 MT_BUG_ON(mt, mas_empty_area(&mas, 5, 100, 6) != -EBUSY);
2799
2800 mas_reset(&mas);
17e7436b 2801 MT_BUG_ON(mt, mas_empty_area(&mas, 0, 8, 10) != -EINVAL);
7327e811
LH
2802
2803 mas_reset(&mas);
2804 mas_empty_area(&mas, 100, 165, 3);
2805
2806 mas_reset(&mas);
2807 MT_BUG_ON(mt, mas_empty_area(&mas, 100, 163, 6) != -EBUSY);
2808 rcu_read_unlock();
2809}
2810
eaf9790d 2811static noinline void __init check_empty_area_fill(struct maple_tree *mt)
4bd6dded
LH
2812{
2813 const unsigned long max = 0x25D78000;
2814 unsigned long size;
2815 int loop, shift;
2816 MA_STATE(mas, mt, 0, 0);
2817
2818 mt_set_non_kernel(99999);
2819 for (shift = 12; shift <= 16; shift++) {
2820 loop = 5000;
2821 size = 1 << shift;
2822 while (loop--) {
2823 mas_set(&mas, 0);
2824 mas_lock(&mas);
2825 MT_BUG_ON(mt, mas_empty_area(&mas, 0, max, size) != 0);
2826 MT_BUG_ON(mt, mas.last != mas.index + size - 1);
2827 mas_store_gfp(&mas, (void *)size, GFP_KERNEL);
2828 mas_unlock(&mas);
2829 mas_reset(&mas);
2830 }
2831 }
2832
2833 /* No space left. */
2834 size = 0x1000;
2835 rcu_read_lock();
2836 MT_BUG_ON(mt, mas_empty_area(&mas, 0, max, size) != -EBUSY);
2837 rcu_read_unlock();
2838
2839 /* Fill a depth 3 node to the maximum */
2840 for (unsigned long i = 629440511; i <= 629440800; i += 6)
2841 mtree_store_range(mt, i, i + 5, (void *)i, GFP_KERNEL);
2842 /* Make space in the second-last depth 4 node */
2843 mtree_erase(mt, 631668735);
2844 /* Make space in the last depth 4 node */
2845 mtree_erase(mt, 629506047);
2846 mas_reset(&mas);
2847 /* Search from just after the gap in the second-last depth 4 */
2848 rcu_read_lock();
2849 MT_BUG_ON(mt, mas_empty_area(&mas, 629506048, 690000000, 0x5000) != 0);
2850 rcu_read_unlock();
2851 mt_set_non_kernel(0);
2852}
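
/*
 * Illustrative sketch only (not invoked by the tests): the gap-search idiom
 * exercised by the two checks above.  Find a free range of @size in
 * [0, @max] and claim it; mas_empty_area() searches bottom-up, while
 * mas_empty_area_rev() would search top-down instead.  The helper name is
 * hypothetical.
 */
static __maybe_unused int __init example_claim_gap(struct maple_tree *mt,
		unsigned long max, unsigned long size, void *entry)
{
	int ret;
	MA_STATE(mas, mt, 0, 0);

	mas_lock(&mas);
	ret = mas_empty_area(&mas, 0, max, size);
	if (!ret)	/* mas now spans the found gap: index to index + size - 1 */
		ret = mas_store_gfp(&mas, entry, GFP_KERNEL);
	mas_unlock(&mas);

	return ret;
}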
2853
eb2e817f
LH
2854/*
2855 * Check MAS_START, MAS_PAUSE, active (implied), and MAS_NONE transitions.
2856 *
2857 * The table below shows the single entry tree (0-0 pointer) and normal tree
2858 * with nodes.
2859 *
2860 * Function ENTRY Start Result index & last
2861 * ┬ ┬ ┬ ┬ ┬
2862 * │ │ │ │ └─ the final range
2863 * │ │ │ └─ The node value after execution
2864 * │ │ └─ The node value before execution
2865 * │ └─ If the entry exists or does not exist (DNE)
2866 * └─ The function name
2867 *
2868 * Function ENTRY Start Result index & last
2869 * mas_next()
2870 * - after last
2871 * Single entry tree at 0-0
2872 * ------------------------
2873 * DNE MAS_START MAS_NONE 1 - oo
2874 * DNE MAS_PAUSE MAS_NONE 1 - oo
2875 * DNE MAS_ROOT MAS_NONE 1 - oo
2876 * when index = 0
2877 * DNE MAS_NONE MAS_ROOT 0
2878 * when index > 0
2879 * DNE MAS_NONE MAS_NONE 1 - oo
2880 *
2881 * Normal tree
2882 * -----------
2883 * exists MAS_START active range
2884 * DNE MAS_START active set to last range
2885 * exists MAS_PAUSE active range
2886 * DNE MAS_PAUSE active set to last range
2887 * exists MAS_NONE active range
2888 * exists active active range
2889 * DNE active active set to last range
2890 *
2891 * Function ENTRY Start Result index & last
2892 * mas_prev()
2893 * - before index
2894 * Single entry tree at 0-0
2895 * ------------------------
2896 * if index > 0
2897 * exists MAS_START MAS_ROOT 0
2898 * exists MAS_PAUSE MAS_ROOT 0
2899 * exists MAS_NONE MAS_ROOT 0
2900 *
2901 * if index == 0
2902 * DNE MAS_START MAS_NONE 0
2903 * DNE MAS_PAUSE MAS_NONE 0
2904 * DNE MAS_NONE MAS_NONE 0
2905 * DNE MAS_ROOT MAS_NONE 0
2906 *
2907 * Normal tree
2908 * -----------
2909 * exists MAS_START active range
2910 * DNE MAS_START active set to min
2911 * exists MAS_PAUSE active range
2912 * DNE MAS_PAUSE active set to min
2913 * exists MAS_NONE active range
2914 * DNE MAS_NONE MAS_NONE set to min
2915 * any MAS_ROOT MAS_NONE 0
2916 * exists active active range
2917 * DNE active active last range
2918 *
2919 * Function ENTRY Start Result index & last
2920 * mas_find()
2921 * - at index or next
2922 * Single entry tree at 0-0
2923 * ------------------------
2924 * if index > 0
2925 * DNE MAS_START MAS_NONE 0
2926 * DNE MAS_PAUSE MAS_NONE 0
2927 * DNE MAS_ROOT MAS_NONE 0
2928 * DNE MAS_NONE MAS_NONE 0
2929 * if index == 0
2930 * exists MAS_START MAS_ROOT 0
2931 * exists MAS_PAUSE MAS_ROOT 0
2932 * exists MAS_NONE MAS_ROOT 0
2933 *
2934 * Normal tree
2935 * -----------
2936 * exists MAS_START active range
2937 * DNE MAS_START active set to max
2938 * exists MAS_PAUSE active range
2939 * DNE MAS_PAUSE active set to max
2940 * exists MAS_NONE active range
2941 * exists active active range
2942 * DNE active active last range (max < last)
2943 *
2944 * Function ENTRY Start Result index & last
2945 * mas_find_rev()
2946 * - at index or before
2947 * Single entry tree at 0-0
2948 * ------------------------
2949 * if index > 0
2950 * exists MAS_START MAS_ROOT 0
2951 * exists MAS_PAUSE MAS_ROOT 0
2952 * exists MAS_NONE MAS_ROOT 0
2953 * if index == 0
2954 * DNE MAS_START MAS_NONE 0
2955 * DNE MAS_PAUSE MAS_NONE 0
2956 * DNE MAS_NONE MAS_NONE 0
2957 * DNE MAS_ROOT MAS_NONE 0
2958 *
2959 * Normal tree
2960 * -----------
2961 * exists MAS_START active range
2962 * DNE MAS_START active set to min
2963 * exists MAS_PAUSE active range
2964 * DNE MAS_PAUSE active set to min
2965 * exists MAS_NONE active range
2966 * exists active active range
2967 * DNE active active last range (min > index)
2968 *
2969 * Function ENTRY Start Result index & last
2970 * mas_walk()
2971 * - Look up index
2972 * Single entry tree at 0-0
2973 * ------------------------
2974 * if index > 0
2975 * DNE MAS_START MAS_ROOT 1 - oo
2976 * DNE MAS_PAUSE MAS_ROOT 1 - oo
2977 * DNE MAS_NONE MAS_ROOT 1 - oo
2978 * DNE MAS_ROOT MAS_ROOT 1 - oo
2979 * if index == 0
2980 * exists MAS_START MAS_ROOT 0
2981 * exists MAS_PAUSE MAS_ROOT 0
2982 * exists MAS_NONE MAS_ROOT 0
2983 * exists MAS_ROOT MAS_ROOT 0
2984 *
2985 * Normal tree
2986 * -----------
2987 * exists MAS_START active range
2988 * DNE MAS_START active range of NULL
2989 * exists MAS_PAUSE active range
2990 * DNE MAS_PAUSE active range of NULL
2991 * exists MAS_NONE active range
2992 * DNE MAS_NONE active range of NULL
2993 * exists active active range
2994 * DNE active active range of NULL
2995 */
2996
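/*
 * A minimal consumer sketch (illustrative only, not part of the test run)
 * of the transitions documented above: mas_for_each() drives mas_find(), so
 * each hit leaves the state active and the walk ends once mas_find() returns
 * NULL.  The helper name is hypothetical; the tree is populated elsewhere.
 */
static __maybe_unused unsigned long __init example_count_entries(struct maple_tree *mt)
{
	unsigned long nr = 0;
	void *entry;
	MA_STATE(mas, mt, 0, 0);	/* starts in MAS_START */

	mas_lock(&mas);
	mas_for_each(&mas, entry, ULONG_MAX)
		nr++;
	mas_unlock(&mas);

	return nr;
}
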
2997#define mas_active(x) (((x).node != MAS_ROOT) && \
2998 ((x).node != MAS_START) && \
2999 ((x).node != MAS_PAUSE) && \
3000 ((x).node != MAS_NONE))
3001static noinline void __init check_state_handling(struct maple_tree *mt)
3002{
3003 MA_STATE(mas, mt, 0, 0);
3004 void *entry, *ptr = (void *) 0x1234500;
3005 void *ptr2 = &ptr;
3006 void *ptr3 = &ptr2;
3007
3008 /* Check MAS_ROOT First */
3009 mtree_store_range(mt, 0, 0, ptr, GFP_KERNEL);
3010
3011 mas_lock(&mas);
3012 /* prev: Start -> none */
3013 entry = mas_prev(&mas, 0);
3014 MT_BUG_ON(mt, entry != NULL);
3015 MT_BUG_ON(mt, mas.node != MAS_NONE);
3016
3017 /* prev: Start -> root */
3018 mas_set(&mas, 10);
3019 entry = mas_prev(&mas, 0);
3020 MT_BUG_ON(mt, entry != ptr);
3021 MT_BUG_ON(mt, mas.index != 0);
3022 MT_BUG_ON(mt, mas.last != 0);
3023 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3024
3025 /* prev: pause -> root */
3026 mas_set(&mas, 10);
3027 mas_pause(&mas);
3028 entry = mas_prev(&mas, 0);
3029 MT_BUG_ON(mt, entry != ptr);
3030 MT_BUG_ON(mt, mas.index != 0);
3031 MT_BUG_ON(mt, mas.last != 0);
3032 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3033
3034 /* next: start -> none */
3035 mas_set(&mas, 0);
3036 entry = mas_next(&mas, ULONG_MAX);
3037 MT_BUG_ON(mt, mas.index != 1);
3038 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3039 MT_BUG_ON(mt, entry != NULL);
3040 MT_BUG_ON(mt, mas.node != MAS_NONE);
3041
3042 /* next: start -> none */
3043 mas_set(&mas, 10);
3044 entry = mas_next(&mas, ULONG_MAX);
3045 MT_BUG_ON(mt, mas.index != 1);
3046 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3047 MT_BUG_ON(mt, entry != NULL);
3048 MT_BUG_ON(mt, mas.node != MAS_NONE);
3049
3050 /* find: start -> root */
3051 mas_set(&mas, 0);
3052 entry = mas_find(&mas, ULONG_MAX);
3053 MT_BUG_ON(mt, entry != ptr);
3054 MT_BUG_ON(mt, mas.index != 0);
3055 MT_BUG_ON(mt, mas.last != 0);
3056 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3057
3058 /* find: root -> none */
3059 entry = mas_find(&mas, ULONG_MAX);
3060 MT_BUG_ON(mt, entry != NULL);
3061 MT_BUG_ON(mt, mas.index != 1);
3062 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3063 MT_BUG_ON(mt, mas.node != MAS_NONE);
3064
3065 /* find: none -> none */
3066 entry = mas_find(&mas, ULONG_MAX);
3067 MT_BUG_ON(mt, entry != NULL);
3068 MT_BUG_ON(mt, mas.index != 1);
3069 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3070 MT_BUG_ON(mt, mas.node != MAS_NONE);
3071
3072 /* find: start -> none */
3073 mas_set(&mas, 10);
3074 entry = mas_find(&mas, ULONG_MAX);
3075 MT_BUG_ON(mt, entry != NULL);
3076 MT_BUG_ON(mt, mas.index != 1);
3077 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3078 MT_BUG_ON(mt, mas.node != MAS_NONE);
3079
3080 /* find_rev: none -> root */
3081 entry = mas_find_rev(&mas, 0);
3082 MT_BUG_ON(mt, entry != ptr);
3083 MT_BUG_ON(mt, mas.index != 0);
3084 MT_BUG_ON(mt, mas.last != 0);
3085 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3086
3087 /* find_rev: start -> root */
3088 mas_set(&mas, 0);
3089 entry = mas_find_rev(&mas, 0);
3090 MT_BUG_ON(mt, entry != ptr);
3091 MT_BUG_ON(mt, mas.index != 0);
3092 MT_BUG_ON(mt, mas.last != 0);
3093 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3094
3095 /* find_rev: root -> none */
3096 entry = mas_find_rev(&mas, 0);
3097 MT_BUG_ON(mt, entry != NULL);
3098 MT_BUG_ON(mt, mas.index != 0);
3099 MT_BUG_ON(mt, mas.last != 0);
3100 MT_BUG_ON(mt, mas.node != MAS_NONE);
3101
3102 /* find_rev: none -> none */
3103 entry = mas_find_rev(&mas, 0);
3104 MT_BUG_ON(mt, entry != NULL);
3105 MT_BUG_ON(mt, mas.index != 0);
3106 MT_BUG_ON(mt, mas.last != 0);
3107 MT_BUG_ON(mt, mas.node != MAS_NONE);
3108
3109 /* find_rev: start -> root */
3110 mas_set(&mas, 10);
3111 entry = mas_find_rev(&mas, 0);
3112 MT_BUG_ON(mt, entry != ptr);
3113 MT_BUG_ON(mt, mas.index != 0);
3114 MT_BUG_ON(mt, mas.last != 0);
3115 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3116
3117 /* walk: start -> none */
3118 mas_set(&mas, 10);
3119 entry = mas_walk(&mas);
3120 MT_BUG_ON(mt, entry != NULL);
3121 MT_BUG_ON(mt, mas.index != 1);
3122 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3123 MT_BUG_ON(mt, mas.node != MAS_NONE);
3124
3125 /* walk: pause -> none */
3126 mas_set(&mas, 10);
3127 mas_pause(&mas);
3128 entry = mas_walk(&mas);
3129 MT_BUG_ON(mt, entry != NULL);
3130 MT_BUG_ON(mt, mas.index != 1);
3131 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3132 MT_BUG_ON(mt, mas.node != MAS_NONE);
3133
3134 /* walk: none -> none */
3135 mas.index = mas.last = 10;
3136 entry = mas_walk(&mas);
3137 MT_BUG_ON(mt, entry != NULL);
3138 MT_BUG_ON(mt, mas.index != 1);
3139 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3140 MT_BUG_ON(mt, mas.node != MAS_NONE);
3141
3142 /* walk: none -> none */
3143 entry = mas_walk(&mas);
3144 MT_BUG_ON(mt, entry != NULL);
3145 MT_BUG_ON(mt, mas.index != 1);
3146 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3147 MT_BUG_ON(mt, mas.node != MAS_NONE);
3148
3149 /* walk: start -> root */
3150 mas_set(&mas, 0);
3151 entry = mas_walk(&mas);
3152 MT_BUG_ON(mt, entry != ptr);
3153 MT_BUG_ON(mt, mas.index != 0);
3154 MT_BUG_ON(mt, mas.last != 0);
3155 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3156
3157 /* walk: pause -> root */
3158 mas_set(&mas, 0);
3159 mas_pause(&mas);
3160 entry = mas_walk(&mas);
3161 MT_BUG_ON(mt, entry != ptr);
3162 MT_BUG_ON(mt, mas.index != 0);
3163 MT_BUG_ON(mt, mas.last != 0);
3164 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3165
3166 /* walk: none -> root */
3167 mas.node = MAS_NONE;
3168 entry = mas_walk(&mas);
3169 MT_BUG_ON(mt, entry != ptr);
3170 MT_BUG_ON(mt, mas.index != 0);
3171 MT_BUG_ON(mt, mas.last != 0);
3172 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3173
3174 /* walk: root -> root */
3175 entry = mas_walk(&mas);
3176 MT_BUG_ON(mt, entry != ptr);
3177 MT_BUG_ON(mt, mas.index != 0);
3178 MT_BUG_ON(mt, mas.last != 0);
3179 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3180
3181 /* walk: root -> none */
3182 mas_set(&mas, 10);
3183 entry = mas_walk(&mas);
3184 MT_BUG_ON(mt, entry != NULL);
3185 MT_BUG_ON(mt, mas.index != 1);
3186 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3187 MT_BUG_ON(mt, mas.node != MAS_NONE);
3188
3189 /* walk: none -> root */
3190 mas.index = mas.last = 0;
3191 entry = mas_walk(&mas);
3192 MT_BUG_ON(mt, entry != ptr);
3193 MT_BUG_ON(mt, mas.index != 0);
3194 MT_BUG_ON(mt, mas.last != 0);
3195 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3196
3197 mas_unlock(&mas);
3198
3199 /* Check when there is an actual node */
3200 mtree_store_range(mt, 0, 0, NULL, GFP_KERNEL);
3201 mtree_store_range(mt, 0x1000, 0x1500, ptr, GFP_KERNEL);
3202 mtree_store_range(mt, 0x2000, 0x2500, ptr2, GFP_KERNEL);
3203 mtree_store_range(mt, 0x3000, 0x3500, ptr3, GFP_KERNEL);
3204
3205 mas_lock(&mas);
3206
3207 /* next: start ->active */
3208 mas_set(&mas, 0);
3209 entry = mas_next(&mas, ULONG_MAX);
3210 MT_BUG_ON(mt, entry != ptr);
3211 MT_BUG_ON(mt, mas.index != 0x1000);
3212 MT_BUG_ON(mt, mas.last != 0x1500);
3213 MT_BUG_ON(mt, !mas_active(mas));
3214
3215 /* next: pause ->active */
3216 mas_set(&mas, 0);
3217 mas_pause(&mas);
3218 entry = mas_next(&mas, ULONG_MAX);
3219 MT_BUG_ON(mt, entry != ptr);
3220 MT_BUG_ON(mt, mas.index != 0x1000);
3221 MT_BUG_ON(mt, mas.last != 0x1500);
3222 MT_BUG_ON(mt, !mas_active(mas));
3223
3224 /* next: none ->active */
3225 mas.index = mas.last = 0;
3226 mas.offset = 0;
3227 mas.node = MAS_NONE;
3228 entry = mas_next(&mas, ULONG_MAX);
3229 MT_BUG_ON(mt, entry != ptr);
3230 MT_BUG_ON(mt, mas.index != 0x1000);
3231 MT_BUG_ON(mt, mas.last != 0x1500);
3232 MT_BUG_ON(mt, !mas_active(mas));
3233
3234 /* next:active ->active */
3235 entry = mas_next(&mas, ULONG_MAX);
3236 MT_BUG_ON(mt, entry != ptr2);
3237 MT_BUG_ON(mt, mas.index != 0x2000);
3238 MT_BUG_ON(mt, mas.last != 0x2500);
3239 MT_BUG_ON(mt, !mas_active(mas));
3240
3241 /* next:active -> active out of range */
3242 entry = mas_next(&mas, 0x2999);
3243 MT_BUG_ON(mt, entry != NULL);
3244 MT_BUG_ON(mt, mas.index != 0x2501);
3245 MT_BUG_ON(mt, mas.last != 0x2fff);
3246 MT_BUG_ON(mt, !mas_active(mas));
3247
3248 /* Continue after out of range */
3249 entry = mas_next(&mas, ULONG_MAX);
3250 MT_BUG_ON(mt, entry != ptr3);
3251 MT_BUG_ON(mt, mas.index != 0x3000);
3252 MT_BUG_ON(mt, mas.last != 0x3500);
3253 MT_BUG_ON(mt, !mas_active(mas));
3254
3255 /* next:active -> active out of range */
3256 entry = mas_next(&mas, ULONG_MAX);
3257 MT_BUG_ON(mt, entry != NULL);
3258 MT_BUG_ON(mt, mas.index != 0x3501);
3259 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3260 MT_BUG_ON(mt, !mas_active(mas));
3261
3262 /* next: none -> active, skip value at location */
3263 mas_set(&mas, 0);
3264 entry = mas_next(&mas, ULONG_MAX);
3265 mas.node = MAS_NONE;
3266 mas.offset = 0;
3267 entry = mas_next(&mas, ULONG_MAX);
3268 MT_BUG_ON(mt, entry != ptr2);
3269 MT_BUG_ON(mt, mas.index != 0x2000);
3270 MT_BUG_ON(mt, mas.last != 0x2500);
3271 MT_BUG_ON(mt, !mas_active(mas));
3272
3273 /* prev:active ->active */
3274 entry = mas_prev(&mas, 0);
3275 MT_BUG_ON(mt, entry != ptr);
3276 MT_BUG_ON(mt, mas.index != 0x1000);
3277 MT_BUG_ON(mt, mas.last != 0x1500);
3278 MT_BUG_ON(mt, !mas_active(mas));
3279
3280 /* prev:active -> active out of range */
3281 entry = mas_prev(&mas, 0);
3282 MT_BUG_ON(mt, entry != NULL);
3283 MT_BUG_ON(mt, mas.index != 0);
3284 MT_BUG_ON(mt, mas.last != 0x0FFF);
3285 MT_BUG_ON(mt, !mas_active(mas));
3286
3287 /* prev: pause ->active */
3288 mas_set(&mas, 0x3600);
3289 entry = mas_prev(&mas, 0);
3290 MT_BUG_ON(mt, entry != ptr3);
3291 mas_pause(&mas);
3292 entry = mas_prev(&mas, 0);
3293 MT_BUG_ON(mt, entry != ptr2);
3294 MT_BUG_ON(mt, mas.index != 0x2000);
3295 MT_BUG_ON(mt, mas.last != 0x2500);
3296 MT_BUG_ON(mt, !mas_active(mas));
3297
3298 /* prev:active -> active out of range */
3299 entry = mas_prev(&mas, 0x1600);
3300 MT_BUG_ON(mt, entry != NULL);
3301 MT_BUG_ON(mt, mas.index != 0x1501);
3302 MT_BUG_ON(mt, mas.last != 0x1FFF);
3303 MT_BUG_ON(mt, !mas_active(mas));
3304
3305 /* prev: active ->active, continue */
3306 entry = mas_prev(&mas, 0);
3307 MT_BUG_ON(mt, entry != ptr);
3308 MT_BUG_ON(mt, mas.index != 0x1000);
3309 MT_BUG_ON(mt, mas.last != 0x1500);
3310 MT_BUG_ON(mt, !mas_active(mas));
3311
3312 /* find: start ->active */
3313 mas_set(&mas, 0);
3314 entry = mas_find(&mas, ULONG_MAX);
3315 MT_BUG_ON(mt, entry != ptr);
3316 MT_BUG_ON(mt, mas.index != 0x1000);
3317 MT_BUG_ON(mt, mas.last != 0x1500);
3318 MT_BUG_ON(mt, !mas_active(mas));
3319
3320 /* find: pause ->active */
3321 mas_set(&mas, 0);
3322 mas_pause(&mas);
3323 entry = mas_find(&mas, ULONG_MAX);
3324 MT_BUG_ON(mt, entry != ptr);
3325 MT_BUG_ON(mt, mas.index != 0x1000);
3326 MT_BUG_ON(mt, mas.last != 0x1500);
3327 MT_BUG_ON(mt, !mas_active(mas));
3328
3329 /* find: start ->active on value */
3330 mas_set(&mas, 1200);
3331 entry = mas_find(&mas, ULONG_MAX);
3332 MT_BUG_ON(mt, entry != ptr);
3333 MT_BUG_ON(mt, mas.index != 0x1000);
3334 MT_BUG_ON(mt, mas.last != 0x1500);
3335 MT_BUG_ON(mt, !mas_active(mas));
3336
3337 /* find:active ->active */
3338 entry = mas_find(&mas, ULONG_MAX);
3339 MT_BUG_ON(mt, entry != ptr2);
3340 MT_BUG_ON(mt, mas.index != 0x2000);
3341 MT_BUG_ON(mt, mas.last != 0x2500);
3342 MT_BUG_ON(mt, !mas_active(mas));
3343
3344
3345 /* find:active -> active (NULL) */
3346 entry = mas_find(&mas, 0x2700);
3347 MT_BUG_ON(mt, entry != NULL);
3348 MT_BUG_ON(mt, mas.index != 0x2501);
3349 MT_BUG_ON(mt, mas.last != 0x2FFF);
3350 MT_BUG_ON(mt, !mas_active(mas));
3351
3352 /* find: none ->active */
3353 entry = mas_find(&mas, 0x5000);
3354 MT_BUG_ON(mt, entry != ptr3);
3355 MT_BUG_ON(mt, mas.index != 0x3000);
3356 MT_BUG_ON(mt, mas.last != 0x3500);
3357 MT_BUG_ON(mt, !mas_active(mas));
3358
3359 /* find:active -> active (NULL) end*/
3360 entry = mas_find(&mas, ULONG_MAX);
3361 MT_BUG_ON(mt, entry != NULL);
3362 MT_BUG_ON(mt, mas.index != 0x3501);
3363 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3364 MT_BUG_ON(mt, !mas_active(mas));
3365
3366 /* find_rev: active (END) ->active */
3367 entry = mas_find_rev(&mas, 0);
3368 MT_BUG_ON(mt, entry != ptr3);
3369 MT_BUG_ON(mt, mas.index != 0x3000);
3370 MT_BUG_ON(mt, mas.last != 0x3500);
3371 MT_BUG_ON(mt, !mas_active(mas));
3372
3373 /* find_rev:active ->active */
3374 entry = mas_find_rev(&mas, 0);
3375 MT_BUG_ON(mt, entry != ptr2);
3376 MT_BUG_ON(mt, mas.index != 0x2000);
3377 MT_BUG_ON(mt, mas.last != 0x2500);
3378 MT_BUG_ON(mt, !mas_active(mas));
3379
3380 /* find_rev: pause ->active */
3381 mas_pause(&mas);
3382 entry = mas_find_rev(&mas, 0);
3383 MT_BUG_ON(mt, entry != ptr);
3384 MT_BUG_ON(mt, mas.index != 0x1000);
3385 MT_BUG_ON(mt, mas.last != 0x1500);
3386 MT_BUG_ON(mt, !mas_active(mas));
3387
3388 /* find_rev:active -> active */
3389 entry = mas_find_rev(&mas, 0);
3390 MT_BUG_ON(mt, entry != NULL);
3391 MT_BUG_ON(mt, mas.index != 0);
3392 MT_BUG_ON(mt, mas.last != 0x0FFF);
3393 MT_BUG_ON(mt, !mas_active(mas));
3394
3395 /* find_rev: start ->active */
3396 mas_set(&mas, 0x1200);
3397 entry = mas_find_rev(&mas, 0);
3398 MT_BUG_ON(mt, entry != ptr);
3399 MT_BUG_ON(mt, mas.index != 0x1000);
3400 MT_BUG_ON(mt, mas.last != 0x1500);
3401 MT_BUG_ON(mt, !mas_active(mas));
3402
3403 /* mas_walk start ->active */
3404 mas_set(&mas, 0x1200);
3405 entry = mas_walk(&mas);
3406 MT_BUG_ON(mt, entry != ptr);
3407 MT_BUG_ON(mt, mas.index != 0x1000);
3408 MT_BUG_ON(mt, mas.last != 0x1500);
3409 MT_BUG_ON(mt, !mas_active(mas));
3410
3411 /* mas_walk start ->active */
3412 mas_set(&mas, 0x1600);
3413 entry = mas_walk(&mas);
3414 MT_BUG_ON(mt, entry != NULL);
3415 MT_BUG_ON(mt, mas.index != 0x1501);
3416 MT_BUG_ON(mt, mas.last != 0x1fff);
3417 MT_BUG_ON(mt, !mas_active(mas));
3418
3419 /* mas_walk pause ->active */
3420 mas_set(&mas, 0x1200);
3421 mas_pause(&mas);
3422 entry = mas_walk(&mas);
3423 MT_BUG_ON(mt, entry != ptr);
3424 MT_BUG_ON(mt, mas.index != 0x1000);
3425 MT_BUG_ON(mt, mas.last != 0x1500);
3426 MT_BUG_ON(mt, !mas_active(mas));
3427
3428 /* mas_walk pause -> active */
3429 mas_set(&mas, 0x1600);
3430 mas_pause(&mas);
3431 entry = mas_walk(&mas);
3432 MT_BUG_ON(mt, entry != NULL);
3433 MT_BUG_ON(mt, mas.index != 0x1501);
3434 MT_BUG_ON(mt, mas.last != 0x1fff);
3435 MT_BUG_ON(mt, !mas_active(mas));
3436
3437 /* mas_walk none -> active */
3438 mas_set(&mas, 0x1200);
3439 mas.node = MAS_NONE;
3440 entry = mas_walk(&mas);
3441 MT_BUG_ON(mt, entry != ptr);
3442 MT_BUG_ON(mt, mas.index != 0x1000);
3443 MT_BUG_ON(mt, mas.last != 0x1500);
3444 MT_BUG_ON(mt, !mas_active(mas));
3445
3446 /* mas_walk none -> active */
3447 mas_set(&mas, 0x1600);
3448 mas.node = MAS_NONE;
3449 entry = mas_walk(&mas);
3450 MT_BUG_ON(mt, entry != NULL);
3451 MT_BUG_ON(mt, mas.index != 0x1501);
3452 MT_BUG_ON(mt, mas.last != 0x1fff);
3453 MT_BUG_ON(mt, !mas_active(mas));
3454
3455 /* mas_walk active -> active */
3456 mas.index = 0x1200;
3457 mas.last = 0x1200;
3458 mas.offset = 0;
3459 entry = mas_walk(&mas);
3460 MT_BUG_ON(mt, entry != ptr);
3461 MT_BUG_ON(mt, mas.index != 0x1000);
3462 MT_BUG_ON(mt, mas.last != 0x1500);
3463 MT_BUG_ON(mt, !mas_active(mas));
3464
3465 /* mas_walk active -> active */
3466 mas.index = 0x1600;
3467 mas.last = 0x1600;
3468 entry = mas_walk(&mas);
3469 MT_BUG_ON(mt, entry != NULL);
3470 MT_BUG_ON(mt, mas.index != 0x1501);
3471 MT_BUG_ON(mt, mas.last != 0x1fff);
3472 MT_BUG_ON(mt, !mas_active(mas));
3473
3474 mas_unlock(&mas);
3475}
3476
e15e06a8 3477static DEFINE_MTREE(tree);
eaf9790d 3478static int __init maple_tree_seed(void)
e15e06a8 3479{
eaf9790d
LH
3480 unsigned long set[] = { 5015, 5014, 5017, 25, 1000,
3481 1001, 1002, 1003, 1005, 0,
3482 5003, 5002};
e15e06a8
LH
3483 void *ptr = &set;
3484
3485 pr_info("\nTEST STARTING\n\n");
3486
3487 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3488 check_root_expand(&tree);
3489 mtree_destroy(&tree);
3490
3491#if defined(BENCH_SLOT_STORE)
3492#define BENCH
3493 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3494 bench_slot_store(&tree);
3495 mtree_destroy(&tree);
3496 goto skip;
3497#endif
3498#if defined(BENCH_NODE_STORE)
3499#define BENCH
3500 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3501 bench_node_store(&tree);
3502 mtree_destroy(&tree);
3503 goto skip;
3504#endif
3505#if defined(BENCH_AWALK)
3506#define BENCH
3507 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3508 bench_awalk(&tree);
3509 mtree_destroy(&tree);
3510 goto skip;
3511#endif
3512#if defined(BENCH_WALK)
3513#define BENCH
3514 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3515 bench_walk(&tree);
3516 mtree_destroy(&tree);
3517 goto skip;
3518#endif
3519#if defined(BENCH_FORK)
3520#define BENCH
3521 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3522 bench_forking(&tree);
3523 mtree_destroy(&tree);
3524 goto skip;
3525#endif
3526#if defined(BENCH_MT_FOR_EACH)
3527#define BENCH
3528 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3529 bench_mt_for_each(&tree);
3530 mtree_destroy(&tree);
3531 goto skip;
3532#endif
361c678b
LH
3533#if defined(BENCH_MAS_FOR_EACH)
3534#define BENCH
3535 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3536 bench_mas_for_each(&tree);
3537 mtree_destroy(&tree);
3538 goto skip;
3539#endif
e15e06a8 3540
5159d64b
LH
3541 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3542 check_iteration(&tree);
3543 mtree_destroy(&tree);
3544
e15e06a8
LH
3545 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3546 check_forking(&tree);
3547 mtree_destroy(&tree);
3548
3549 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3550 check_mas_store_gfp(&tree);
3551 mtree_destroy(&tree);
3552
3553 /* Test ranges (store and insert) */
3554 mt_init_flags(&tree, 0);
3555 check_ranges(&tree);
3556 mtree_destroy(&tree);
3557
120b1162
LH
3558#if defined(CONFIG_64BIT)
3559 /* These tests have ranges outside of 4GB */
e15e06a8
LH
3560 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3561 check_alloc_range(&tree);
3562 mtree_destroy(&tree);
3563
3564 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3565 check_alloc_rev_range(&tree);
3566 mtree_destroy(&tree);
120b1162 3567#endif
e15e06a8
LH

	mt_init_flags(&tree, 0);

	check_load(&tree, set[0], NULL);	/* See if 5015 -> NULL */

	check_insert(&tree, set[9], &tree);	/* Insert 0 */
	check_load(&tree, set[9], &tree);	/* See if 0 -> &tree */
	check_load(&tree, set[0], NULL);	/* See if 5015 -> NULL */

	check_insert(&tree, set[10], ptr);	/* Insert 5003 */
	check_load(&tree, set[9], &tree);	/* See if 0 -> &tree */
	check_load(&tree, set[11], NULL);	/* See if 5002 -> NULL */
	check_load(&tree, set[10], ptr);	/* See if 5003 -> ptr */

	/* Clear out the tree */
	mtree_destroy(&tree);

	/* Try to insert, insert a dup, and load back what was inserted. */
	mt_init_flags(&tree, 0);
	check_insert(&tree, set[0], &tree);	/* Insert 5015 */
	check_dup_insert(&tree, set[0], &tree);	/* Insert 5015 again */
	check_load(&tree, set[0], &tree);	/* See if 5015 -> &tree */

	/*
	 * The second set of tests tries to load a value that doesn't exist,
	 * inserts a second value, then loads the value again.
	 */
	check_load(&tree, set[1], NULL);	/* See if 5014 -> NULL */
	check_insert(&tree, set[1], ptr);	/* insert 5014 -> ptr */
	check_load(&tree, set[1], ptr);		/* See if 5014 -> ptr */
	check_load(&tree, set[0], &tree);	/* See if 5015 -> &tree */
	/*
	 * Tree currently contains:
	 * 5014 -> ptr, 5015 -> &tree; all other indices are NULL
	 */
	check_insert(&tree, set[6], ptr);	/* insert 1002 -> ptr */
	check_insert(&tree, set[7], &tree);	/* insert 1003 -> &tree */

	check_load(&tree, set[0], &tree);	/* See if 5015 -> &tree */
	check_load(&tree, set[1], ptr);		/* See if 5014 -> ptr */
	check_load(&tree, set[6], ptr);		/* See if 1002 -> ptr */
	check_load(&tree, set[7], &tree);	/* 1003 = &tree ? */

	/* Clear out tree */
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	/* Test inserting into a NULL hole. */
	check_insert(&tree, set[5], ptr);	/* insert 1001 -> ptr */
	check_insert(&tree, set[7], &tree);	/* insert 1003 -> &tree */
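	/* 1002 fills the NULL gap left between 1001 and 1003 */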
	check_insert(&tree, set[6], ptr);	/* insert 1002 -> ptr */
	check_load(&tree, set[5], ptr);		/* See if 1001 -> ptr */
	check_load(&tree, set[6], ptr);		/* See if 1002 -> ptr */
	check_load(&tree, set[7], &tree);	/* See if 1003 -> &tree */

	/* Clear out the tree */
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	/*
	 * set[] = {5015, 5014, 5017, 25, 1000,
	 *	    1001, 1002, 1003, 1005, 0,
	 *	    5003, 5002};
	 */

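	/*
	 * Insert the values above one at a time, re-checking every prior
	 * entry after each insert; the fifth insert (1000) is expected to
	 * force the first node split.
	 */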
	check_insert(&tree, set[0], ptr);	/* 5015 */
	check_insert(&tree, set[1], &tree);	/* 5014 */
	check_insert(&tree, set[2], ptr);	/* 5017 */
	check_insert(&tree, set[3], &tree);	/* 25 */
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_insert(&tree, set[4], ptr);	/* 1000 < Should split. */
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);	/* 25 */
	check_load(&tree, set[4], ptr);
	check_insert(&tree, set[5], &tree);	/* 1001 */
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_load(&tree, set[4], ptr);
	check_load(&tree, set[5], &tree);
	check_insert(&tree, set[6], ptr);
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_load(&tree, set[4], ptr);
	check_load(&tree, set[5], &tree);
	check_load(&tree, set[6], ptr);
	check_insert(&tree, set[7], &tree);
	check_load(&tree, set[0], ptr);
	check_insert(&tree, set[8], ptr);

	check_insert(&tree, set[9], &tree);

	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_load(&tree, set[4], ptr);
	check_load(&tree, set[5], &tree);
	check_load(&tree, set[6], ptr);
	check_load(&tree, set[9], &tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	check_seq(&tree, 16, false);
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	check_seq(&tree, 1000, true);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_rev_seq(&tree, 1000, true);
	mtree_destroy(&tree);

	check_lower_bound_split(&tree);
	check_upper_bound_split(&tree);
	check_mid_split(&tree);

	mt_init_flags(&tree, 0);
	check_next_entry(&tree);
	check_find(&tree);
	check_find_2(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_prev_entry(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_gap_combining(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_node_overwrite(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	next_prev_test(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_spanning_relatives(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_rev_find(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	check_fuzzer(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_dup(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_bnode_min_spanning(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_empty_area_window(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_empty_area_fill(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_state_handling(&tree);
	mtree_destroy(&tree);

#if defined(BENCH)
skip:
#endif
	rcu_barrier();
	pr_info("maple_tree: %u of %u tests passed\n",
			atomic_read(&maple_tree_tests_passed),
			atomic_read(&maple_tree_tests_run));
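	/* Any MT_BUG_ON() failure leaves run > passed; report it by failing the init call. */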
	if (atomic_read(&maple_tree_tests_run) ==
	    atomic_read(&maple_tree_tests_passed))
		return 0;

	return -EINVAL;
}

static void __exit maple_tree_harvest(void)
{

}

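/*
 * The suite runs from module_init(): at boot when built in, or at insmod
 * time when built as a module; the summary is printed to the kernel log
 * via pr_info() above.
 */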
module_init(maple_tree_seed);
module_exit(maple_tree_harvest);
MODULE_AUTHOR("Liam R. Howlett <Liam.Howlett@Oracle.com>");
MODULE_LICENSE("GPL");