2 #include <linux/atomic.h>
3 #include <linux/export.h>
4 #include <linux/generic-radix-tree.h>
6 #include <linux/kmemleak.h>
/*
 * __genradix_ptr(): look up a previously-allocated slot in the radix tree.
 *
 * NOTE(review): this extract is truncated (comment delimiters and braces are
 * missing around these lines); code left byte-identical, comments only.
 */
9 * Returns pointer to the specified byte @offset within @radix, or NULL if not
/* Exported thin wrapper; the real work is in the inlined lookup helper. */
12 void *__genradix_ptr(struct __genradix *radix, size_t offset)
14 return __genradix_ptr_inlined(radix, offset);
16 EXPORT_SYMBOL(__genradix_ptr);
/*
 * __genradix_ptr_alloc(): return a pointer to the byte @offset within @radix,
 * growing the tree and allocating interior/leaf nodes as needed.
 *
 * NOTE(review): this extract is truncated (braces, loop headers and several
 * statements are missing); code left byte-identical, comments only.
 */
19 * Returns pointer to the specified byte @offset within @radix, allocating it if
20 * necessary - newly allocated slots are always zeroed out:
22 void *__genradix_ptr_alloc(struct __genradix *radix, size_t offset,
23 struct genradix_node **preallocated,
/* Unlocked snapshot of the root; revalidated with cmpxchg below. */
26 struct genradix_root *v = READ_ONCE(radix->root);
27 struct genradix_node *n, *new_node = NULL;
/* Take ownership of a caller-preallocated node, if one was passed in. */
31 swap(new_node, *preallocated);
33 /* Increase tree depth if necessary: */
35 struct genradix_root *r = v, *new_root;
/* The root word encodes both the node pointer and the depth (low bits). */
37 n = genradix_root_to_node(r);
38 level = genradix_root_to_depth(r);
/* Tree is already deep enough to address @offset: stop growing. */
40 if (n && ilog2(offset) < genradix_depth_shift(level))
44 new_node = genradix_alloc_node(gfp_mask);
/* New root adopts the old tree as its first child: */
49 new_node->children[0] = n;
50 new_root = ((struct genradix_root *)
51 ((unsigned long) new_node | (n ? level + 1 : 0)));
/* Publish with release ordering so readers see the initialized node. */
53 if ((v = cmpxchg_release(&radix->root, r, new_root)) == r) {
/* Lost the race: detach the old tree so new_node can be reused/freed. */
57 new_node->children[0] = NULL;
/* Descend towards the leaf, allocating missing interior nodes. */
62 struct genradix_node **p =
63 &n->children[offset >> genradix_depth_shift(level)];
64 offset &= genradix_depth_size(level) - 1;
69 new_node = genradix_alloc_node(gfp_mask);
/* Install our node iff the slot is still empty; otherwise use the winner's. */
74 if (!(n = cmpxchg_release(p, NULL, new_node)))
/* Free any leftover preallocated or race-losing node. */
80 genradix_free_node(new_node);
/* n is now the leaf node backing @offset. */
82 return &n->data[offset];
/*
 * __genradix_iter_peek(): advance @iter forward to the next allocated slot
 * and return a pointer to it — presumably NULL when iteration is finished;
 * the early-return lines are truncated from this extract.
 *
 * NOTE(review): lines are missing in this extract; code left byte-identical.
 */
86 void *__genradix_iter_peek(struct genradix_iter *iter,
87 struct __genradix *radix,
90 struct genradix_root *r;
91 struct genradix_node *n;
/* offset == SIZE_MAX marks an exhausted/overflowed iterator. */
94 if (iter->offset == SIZE_MAX)
98 r = READ_ONCE(radix->root);
102 n = genradix_root_to_node(r);
103 level = genradix_root_to_depth(r);
/* Offset lies beyond what the current tree depth can address. */
105 if (ilog2(iter->offset) >= genradix_depth_shift(level))
/* Child index covering iter->offset at this level: */
111 i = (iter->offset >> genradix_depth_shift(level)) &
/* Skip over unallocated children, advancing offset/pos as we go. */
114 while (!n->children[i]) {
115 size_t objs_per_ptr = genradix_depth_size(level);
/* Advancing would overflow offset: mark the iterator done. */
117 if (iter->offset + objs_per_ptr < iter->offset) {
118 iter->offset = SIZE_MAX;
119 iter->pos = SIZE_MAX;
/* Jump to the start of the next child's subtree. */
124 iter->offset = round_down(iter->offset + objs_per_ptr,
126 iter->pos = (iter->offset >> GENRADIX_NODE_SHIFT) *
/* Ran off the end of this node's child array. */
128 if (i == GENRADIX_ARY)
/* Leaf reached: return the byte within the node's data page. */
135 return &n->data[iter->offset & (GENRADIX_NODE_SIZE - 1)];
/*
 * __genradix_iter_peek_prev(): like __genradix_iter_peek() but walks
 * backwards, returning the previous allocated object.
 *
 * NOTE(review): lines are missing in this extract; code left byte-identical.
 */
139 void *__genradix_iter_peek_prev(struct genradix_iter *iter,
140 struct __genradix *radix,
141 size_t objs_per_page,
142 size_t obj_size_plus_page_remainder)
144 struct genradix_root *r;
145 struct genradix_node *n;
/* offset == SIZE_MAX marks an exhausted iterator. */
148 if (iter->offset == SIZE_MAX)
152 r = READ_ONCE(radix->root);
156 n = genradix_root_to_node(r);
157 level = genradix_root_to_depth(r);
/* Clamp the iterator to the last offset the current tree can address. */
159 if (ilog2(iter->offset) >= genradix_depth_shift(level)) {
160 iter->offset = genradix_depth_size(level);
161 iter->pos = (iter->offset >> GENRADIX_NODE_SHIFT) * objs_per_page;
/* Step back by one object (size includes the per-page padding remainder). */
163 iter->offset -= obj_size_plus_page_remainder;
/* Child index covering iter->offset at this level: */
170 i = (iter->offset >> genradix_depth_shift(level)) &
/* Skip backwards over unallocated children. */
173 while (!n->children[i]) {
174 size_t objs_per_ptr = genradix_depth_size(level);
/* Snap to the start of this child's range, then back up one object. */
176 iter->offset = round_down(iter->offset, objs_per_ptr);
177 iter->pos = (iter->offset >> GENRADIX_NODE_SHIFT) * objs_per_page;
182 iter->offset -= obj_size_plus_page_remainder;
/* Leaf reached: return the byte within the node's data page. */
193 return &n->data[iter->offset & (GENRADIX_NODE_SIZE - 1)];
/*
 * genradix_free_recurse(): depth-first teardown — recursively free all
 * children of @n, then @n itself.
 *
 * NOTE(review): the level/NULL-child guard lines are truncated from this
 * extract; code left byte-identical, comments only.
 */
197 static void genradix_free_recurse(struct genradix_node *n, unsigned level)
202 for (i = 0; i < GENRADIX_ARY; i++)
204 genradix_free_recurse(n->children[i], level - 1);
207 genradix_free_node(n);
/*
 * __genradix_prealloc(): ensure every node backing the first @size bytes of
 * @radix is allocated, one GENRADIX_NODE_SIZE stride at a time.  The return
 * statements are truncated from this extract — presumably 0 on success and
 * an error code on allocation failure; verify against the full source.
 */
210 int __genradix_prealloc(struct __genradix *radix, size_t size,
215 for (offset = 0; offset < size; offset += GENRADIX_NODE_SIZE)
216 if (!__genradix_ptr_alloc(radix, offset, NULL, gfp_mask))
/*
 * __genradix_free(): tear down the whole tree.  xchg() atomically detaches
 * the root first, so the recursive free operates on a tree no longer
 * reachable through @radix.
 */
223 void __genradix_free(struct __genradix *radix)
225 struct genradix_root *r = xchg(&radix->root, NULL);
227 genradix_free_recurse(genradix_root_to_node(r),
228 genradix_root_to_depth(r));