// SPDX-License-Identifier: GPL-2.0-only
#include <linux/interval_tree.h>
#include <linux/interval_tree_generic.h>
#include <linux/compiler.h>
#include <linux/export.h>

#define START(node) ((node)->start)
#define LAST(node)  ((node)->last)

INTERVAL_TREE_DEFINE(struct interval_tree_node, rb,
		     unsigned long, __subtree_last,
		     START, LAST,, interval_tree)

EXPORT_SYMBOL_GPL(interval_tree_insert);
EXPORT_SYMBOL_GPL(interval_tree_remove);
EXPORT_SYMBOL_GPL(interval_tree_iter_first);
EXPORT_SYMBOL_GPL(interval_tree_iter_next);
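
/*
 * A minimal usage sketch (the helper and its data are hypothetical; the four
 * exported symbols above are the real API): intervals are stored as closed
 * [start, last] ranges in an rb_root_cached, added with interval_tree_insert()
 * and removed with interval_tree_remove(), and the iter_first()/iter_next()
 * pair walks every stored interval that overlaps a query range.
 */
static unsigned long __maybe_unused
interval_tree_demo(struct rb_root_cached *itree)
{
	static struct interval_tree_node a = { .start = 0x1000, .last = 0x1fff };
	static struct interval_tree_node b = { .start = 0x3000, .last = 0x3fff };
	struct interval_tree_node *node;
	unsigned long hits = 0;

	interval_tree_insert(&a, itree);
	interval_tree_insert(&b, itree);

	/* Visit every stored interval overlapping [0x1800, 0x37ff] */
	for (node = interval_tree_iter_first(itree, 0x1800, 0x37ff); node;
	     node = interval_tree_iter_next(node, 0x1800, 0x37ff))
		hits++;

	interval_tree_remove(&a, itree);
	interval_tree_remove(&b, itree);
	return hits;	/* 2: both intervals intersect the query range */
}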

#ifdef CONFIG_INTERVAL_TREE_SPAN_ITER
/*
 * Roll nodes[1] into nodes[0] by advancing nodes[1] to the end of a contiguous
 * span of nodes. This makes nodes[0]->last the end of that contiguous used span
 * of indexes that started at the original nodes[1]->start.
 *
 * If there is an interior hole, nodes[1] is now the first node starting the
 * next used span. A hole span is between nodes[0]->last and nodes[1]->start.
 *
 * If there is a trailing hole, nodes[1] is now NULL. A hole span is between
 * nodes[0]->last and last_index.
 *
 * If the contiguous used range spans to last_index, nodes[1] is set to NULL.
 */
static void
interval_tree_span_iter_next_gap(struct interval_tree_span_iter *state)
{
	struct interval_tree_node *cur = state->nodes[1];

	state->nodes[0] = cur;
	do {
		if (cur->last > state->nodes[0]->last)
			state->nodes[0] = cur;
		cur = interval_tree_iter_next(cur, state->first_index,
					      state->last_index);
	} while (cur && (state->nodes[0]->last >= cur->start ||
			 state->nodes[0]->last + 1 == cur->start));
	state->nodes[1] = cur;
}
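
/*
 * Worked example with hypothetical data: with stored intervals [0,4], [5,9]
 * and [20,29] and a query span of [0,99], starting from nodes[1] == [0,4] the
 * loop above rolls over [5,9] because 4 + 1 == 5, stops at [20,29], and ends
 * with nodes[0] == [5,9] (nodes[0]->last == 9 closes the contiguous used span
 * 0-9) and nodes[1] == [20,29], the first node of the next used span. The
 * interior hole reported between them is [10,19].
 */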

void interval_tree_span_iter_first(struct interval_tree_span_iter *iter,
				   struct rb_root_cached *itree,
				   unsigned long first_index,
				   unsigned long last_index)
{
	iter->first_index = first_index;
	iter->last_index = last_index;
	iter->nodes[0] = NULL;
	iter->nodes[1] =
		interval_tree_iter_first(itree, first_index, last_index);
	if (!iter->nodes[1]) {
		/* No nodes intersect the span, whole span is hole */
		iter->start_hole = first_index;
		iter->last_hole = last_index;
		iter->is_hole = 1;
		return;
	}
	if (iter->nodes[1]->start > first_index) {
		/* Leading hole on first iteration */
		iter->start_hole = first_index;
		iter->last_hole = iter->nodes[1]->start - 1;
		iter->is_hole = 1;
		interval_tree_span_iter_next_gap(iter);
		return;
	}

	/* Starting inside a used range */
	iter->start_used = first_index;
	iter->is_hole = 0;
	interval_tree_span_iter_next_gap(iter);
	iter->last_used = iter->nodes[0]->last;
	if (iter->last_used >= last_index) {
		iter->last_used = last_index;
		iter->nodes[0] = NULL;
		iter->nodes[1] = NULL;
	}
}
EXPORT_SYMBOL_GPL(interval_tree_span_iter_first);

void interval_tree_span_iter_next(struct interval_tree_span_iter *iter)
{
	if (!iter->nodes[0] && !iter->nodes[1]) {
		iter->is_hole = -1;
		return;
	}

	if (iter->is_hole) {
		iter->start_used = iter->last_hole + 1;
		iter->last_used = iter->nodes[0]->last;
		if (iter->last_used >= iter->last_index) {
			iter->last_used = iter->last_index;
			iter->nodes[0] = NULL;
			iter->nodes[1] = NULL;
		}
		iter->is_hole = 0;
		return;
	}

	if (!iter->nodes[1]) {
		/* Trailing hole */
		iter->start_hole = iter->nodes[0]->last + 1;
		iter->last_hole = iter->last_index;
		iter->nodes[0] = NULL;
		iter->is_hole = 1;
		return;
	}

	/* must have both nodes[0] and [1], interior hole */
	iter->start_hole = iter->nodes[0]->last + 1;
	iter->last_hole = iter->nodes[1]->start - 1;
	iter->is_hole = 1;
	interval_tree_span_iter_next_gap(iter);
}
EXPORT_SYMBOL_GPL(interval_tree_span_iter_next);
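
/*
 * A minimal sketch of a full walk (the helper and its out-parameters are
 * hypothetical): interval_tree_span_iter_first() primes the iterator and each
 * interval_tree_span_iter_next() yields the following span, alternating
 * between used ranges and holes, until is_hole becomes -1. Every index in
 * [first_index, last_index] is reported exactly once.
 */
static void __maybe_unused
interval_tree_demo_span_walk(struct rb_root_cached *itree,
			     unsigned long first_index,
			     unsigned long last_index,
			     unsigned long *used_total,
			     unsigned long *hole_total)
{
	struct interval_tree_span_iter span;

	*used_total = 0;
	*hole_total = 0;
	for (interval_tree_span_iter_first(&span, itree, first_index,
					   last_index);
	     span.is_hole != -1; interval_tree_span_iter_next(&span)) {
		if (span.is_hole)
			*hole_total += span.last_hole - span.start_hole + 1;
		else
			*used_total += span.last_used - span.start_used + 1;
	}
	/* used_total + hole_total == last_index - first_index + 1 */
}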

/*
 * Advance the iterator index to a specific position. The returned used/hole is
 * updated to start at new_index. This is faster than calling
 * interval_tree_span_iter_first() as it can avoid full searches in several
 * cases where the iterator is already set.
 */
void interval_tree_span_iter_advance(struct interval_tree_span_iter *iter,
				     struct rb_root_cached *itree,
				     unsigned long new_index)
{
	if (iter->is_hole == -1)
		return;

	iter->first_index = new_index;
	if (new_index > iter->last_index) {
		iter->is_hole = -1;
		return;
	}

	/* Rely on the union aliasing hole/used */
	if (iter->start_hole <= new_index && new_index <= iter->last_hole) {
		iter->start_hole = new_index;
		return;
	}
	if (new_index == iter->last_hole + 1)
		interval_tree_span_iter_next(iter);
	else
		interval_tree_span_iter_first(iter, itree, new_index,
					      iter->last_index);
}
EXPORT_SYMBOL_GPL(interval_tree_span_iter_advance);
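
/*
 * A hypothetical sketch of a chunked walk built on the advance helper: used
 * spans are consumed in pieces of at most 0x1000 indexes, and
 * interval_tree_span_iter_advance() restarts the iterator at the first index
 * that has not been consumed yet. The helper name and the chunk size are
 * illustrative, and the sketch assumes last_index is well below ULONG_MAX so
 * the index arithmetic cannot wrap.
 */
static void __maybe_unused
interval_tree_demo_span_chunks(struct rb_root_cached *itree,
			       unsigned long first_index,
			       unsigned long last_index)
{
	struct interval_tree_span_iter span;

	interval_tree_span_iter_first(&span, itree, first_index, last_index);
	while (span.is_hole != -1) {
		unsigned long next_index;

		if (span.is_hole) {
			/* Nothing to consume in a hole, go to the next span */
			interval_tree_span_iter_next(&span);
			continue;
		}

		/* Consume at most 0x1000 indexes of the current used span */
		next_index = span.start_used + 0x1000;
		if (next_index > span.last_used)
			next_index = span.last_used + 1;

		/* ... work on [span.start_used, next_index - 1] here ... */

		interval_tree_span_iter_advance(&span, itree, next_index);
	}
}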
#endif