xref: /openbmc/linux/lib/generic-radix-tree.c (revision d942e855)
1 
2 #include <linux/export.h>
3 #include <linux/generic-radix-tree.h>
4 #include <linux/gfp.h>
5 #include <linux/kmemleak.h>
6 
/* Number of child pointers that fit in one page-sized interior node: */
#define GENRADIX_ARY		(PAGE_SIZE / sizeof(struct genradix_node *))
#define GENRADIX_ARY_SHIFT	ilog2(GENRADIX_ARY)
9 
/*
 * Every node is exactly one page: depending on its depth in the tree it
 * is either an interior node holding GENRADIX_ARY child pointers, or a
 * leaf holding PAGE_SIZE bytes of user data.  The depth stashed in the
 * root pointer determines how each node is interpreted.
 */
struct genradix_node {
	union {
		/* Interior node: */
		struct genradix_node	*children[GENRADIX_ARY];

		/* Leaf: */
		u8			data[PAGE_SIZE];
	};
};
19 
genradix_depth_shift(unsigned depth)20 static inline int genradix_depth_shift(unsigned depth)
21 {
22 	return PAGE_SHIFT + GENRADIX_ARY_SHIFT * depth;
23 }
24 
25 /*
26  * Returns size (of data, in bytes) that a tree of a given depth holds:
27  */
genradix_depth_size(unsigned depth)28 static inline size_t genradix_depth_size(unsigned depth)
29 {
30 	return 1UL << genradix_depth_shift(depth);
31 }
32 
/* depth that's needed for a genradix that can address up to ULONG_MAX: */
#define GENRADIX_MAX_DEPTH	\
	DIV_ROUND_UP(BITS_PER_LONG - PAGE_SHIFT, GENRADIX_ARY_SHIFT)

/*
 * The depth is packed into the low bits of the root pointer; nodes are
 * page aligned, so those bits are otherwise always zero.  This mask
 * covers enough bits to hold any valid depth:
 */
#define GENRADIX_DEPTH_MASK				\
	((unsigned long) (roundup_pow_of_two(GENRADIX_MAX_DEPTH + 1) - 1))
39 
genradix_root_to_depth(struct genradix_root * r)40 static inline unsigned genradix_root_to_depth(struct genradix_root *r)
41 {
42 	return (unsigned long) r & GENRADIX_DEPTH_MASK;
43 }
44 
genradix_root_to_node(struct genradix_root * r)45 static inline struct genradix_node *genradix_root_to_node(struct genradix_root *r)
46 {
47 	return (void *) ((unsigned long) r & ~GENRADIX_DEPTH_MASK);
48 }
49 
50 /*
51  * Returns pointer to the specified byte @offset within @radix, or NULL if not
52  * allocated
53  */
__genradix_ptr(struct __genradix * radix,size_t offset)54 void *__genradix_ptr(struct __genradix *radix, size_t offset)
55 {
56 	struct genradix_root *r = READ_ONCE(radix->root);
57 	struct genradix_node *n = genradix_root_to_node(r);
58 	unsigned level		= genradix_root_to_depth(r);
59 
60 	if (ilog2(offset) >= genradix_depth_shift(level))
61 		return NULL;
62 
63 	while (1) {
64 		if (!n)
65 			return NULL;
66 		if (!level)
67 			break;
68 
69 		level--;
70 
71 		n = n->children[offset >> genradix_depth_shift(level)];
72 		offset &= genradix_depth_size(level) - 1;
73 	}
74 
75 	return &n->data[offset];
76 }
77 EXPORT_SYMBOL(__genradix_ptr);
78 
/* Allocate one zeroed, page-sized node; returns NULL on failure. */
static inline struct genradix_node *genradix_alloc_node(gfp_t gfp_mask)
{
	unsigned long page = __get_free_page(gfp_mask|__GFP_ZERO);
	struct genradix_node *node = (struct genradix_node *) page;

	/*
	 * We're using pages (not slab allocations) directly for kernel data
	 * structures, so we need to explicitly inform kmemleak of them in order
	 * to avoid false positive memory leak reports.
	 */
	kmemleak_alloc(node, PAGE_SIZE, 1, gfp_mask);
	return node;
}
93 
/* Release a node obtained from genradix_alloc_node(): */
static inline void genradix_free_node(struct genradix_node *n)
{
	/* Unregister from kmemleak before the page itself goes away: */
	kmemleak_free(n);
	free_page((unsigned long) n);
}
99 
/*
 * Returns pointer to the specified byte @offset within @radix, allocating it if
 * necessary - newly allocated slots are always zeroed out:
 */
void *__genradix_ptr_alloc(struct __genradix *radix, size_t offset,
			   gfp_t gfp_mask)
{
	struct genradix_root *v = READ_ONCE(radix->root);
	struct genradix_node *n, *new_node = NULL;
	unsigned level;

	/* Increase tree depth if necessary: */
	while (1) {
		struct genradix_root *r = v, *new_root;

		n	= genradix_root_to_node(r);
		level	= genradix_root_to_depth(r);

		/* Done growing once the tree can address @offset: */
		if (n && ilog2(offset) < genradix_depth_shift(level))
			break;

		if (!new_node) {
			new_node = genradix_alloc_node(gfp_mask);
			if (!new_node)
				return NULL;
		}

		/* New root takes the old tree (if any) as its first child: */
		new_node->children[0] = n;
		new_root = ((struct genradix_root *)
			    ((unsigned long) new_node | (n ? level + 1 : 0)));

		/*
		 * Publish the new root; release ordering makes the node's
		 * (zeroed) contents visible before the pointer itself.  On
		 * failure @v holds whoever else's root won, and we retry with
		 * the same preallocated node:
		 */
		if ((v = cmpxchg_release(&radix->root, r, new_root)) == r) {
			v = new_root;
			new_node = NULL;
		} else {
			new_node->children[0] = NULL;
		}
	}

	/* Walk down the tree, allocating any missing nodes along the path: */
	while (level--) {
		struct genradix_node **p =
			&n->children[offset >> genradix_depth_shift(level)];
		offset &= genradix_depth_size(level) - 1;

		n = READ_ONCE(*p);
		if (!n) {
			if (!new_node) {
				new_node = genradix_alloc_node(gfp_mask);
				if (!new_node)
					return NULL;
			}

			/*
			 * Install our node unless someone raced us to the
			 * slot; in that case use theirs and keep ours for the
			 * next level (or free it at the end):
			 */
			if (!(n = cmpxchg_release(p, NULL, new_node)))
				swap(n, new_node);
		}
	}

	/* Any preallocated node left over was never installed - free it: */
	if (new_node)
		genradix_free_node(new_node);

	return &n->data[offset];
}
EXPORT_SYMBOL(__genradix_ptr_alloc);
163 
/*
 * Advance @iter past unallocated regions to the next allocated slot at or
 * after its current position, returning a pointer to that byte within the
 * tree, or NULL if the end has been reached.
 */
void *__genradix_iter_peek(struct genradix_iter *iter,
			   struct __genradix *radix,
			   size_t objs_per_page)
{
	struct genradix_root *r;
	struct genradix_node *n;
	unsigned level, i;

	/* SIZE_MAX marks an iterator that already ran off the end: */
	if (iter->offset == SIZE_MAX)
		return NULL;

restart:
	r = READ_ONCE(radix->root);
	if (!r)
		return NULL;

	n	= genradix_root_to_node(r);
	level	= genradix_root_to_depth(r);

	/* Past everything a tree of this depth can address: */
	if (ilog2(iter->offset) >= genradix_depth_shift(level))
		return NULL;

	while (level) {
		level--;

		/* Child index for the current offset at this level: */
		i = (iter->offset >> genradix_depth_shift(level)) &
			(GENRADIX_ARY - 1);

		/* Skip over unallocated subtrees: */
		while (!n->children[i]) {
			size_t objs_per_ptr = genradix_depth_size(level);

			/* Advancing would overflow: iteration is done. */
			if (iter->offset + objs_per_ptr < iter->offset) {
				iter->offset	= SIZE_MAX;
				iter->pos	= SIZE_MAX;
				return NULL;
			}

			i++;
			iter->offset = round_down(iter->offset + objs_per_ptr,
						  objs_per_ptr);
			iter->pos = (iter->offset >> PAGE_SHIFT) *
				objs_per_page;
			/* Walked off this node - restart from the root: */
			if (i == GENRADIX_ARY)
				goto restart;
		}

		n = n->children[i];
	}

	return &n->data[iter->offset & (PAGE_SIZE - 1)];
}
EXPORT_SYMBOL(__genradix_iter_peek);
216 
genradix_free_recurse(struct genradix_node * n,unsigned level)217 static void genradix_free_recurse(struct genradix_node *n, unsigned level)
218 {
219 	if (level) {
220 		unsigned i;
221 
222 		for (i = 0; i < GENRADIX_ARY; i++)
223 			if (n->children[i])
224 				genradix_free_recurse(n->children[i], level - 1);
225 	}
226 
227 	genradix_free_node(n);
228 }
229 
/*
 * Ensure the first @size bytes of @radix are allocated; returns 0 on success,
 * -ENOMEM if any allocation fails.
 */
int __genradix_prealloc(struct __genradix *radix, size_t size,
			gfp_t gfp_mask)
{
	size_t offset = 0;

	while (offset < size) {
		if (!__genradix_ptr_alloc(radix, offset, gfp_mask))
			return -ENOMEM;
		offset += PAGE_SIZE;
	}

	return 0;
}
EXPORT_SYMBOL(__genradix_prealloc);
242 
__genradix_free(struct __genradix * radix)243 void __genradix_free(struct __genradix *radix)
244 {
245 	struct genradix_root *r = xchg(&radix->root, NULL);
246 
247 	genradix_free_recurse(genradix_root_to_node(r),
248 			      genradix_root_to_depth(r));
249 }
250 EXPORT_SYMBOL(__genradix_free);
251