1*fc8d29e2SArthur Grillo // SPDX-License-Identifier: GPL-2.0-only
2*fc8d29e2SArthur Grillo /*
3*fc8d29e2SArthur Grillo  * Test cases for the drm_mm range manager
4*fc8d29e2SArthur Grillo  *
5*fc8d29e2SArthur Grillo  * Copyright (c) 2022 Arthur Grillo <arthur.grillo@usp.br>
6*fc8d29e2SArthur Grillo  */
7*fc8d29e2SArthur Grillo 
8*fc8d29e2SArthur Grillo #include <kunit/test.h>
9*fc8d29e2SArthur Grillo 
10*fc8d29e2SArthur Grillo #include <linux/prime_numbers.h>
11*fc8d29e2SArthur Grillo #include <linux/slab.h>
12*fc8d29e2SArthur Grillo #include <linux/random.h>
13*fc8d29e2SArthur Grillo #include <linux/vmalloc.h>
14*fc8d29e2SArthur Grillo #include <linux/ktime.h>
15*fc8d29e2SArthur Grillo 
16*fc8d29e2SArthur Grillo #include <drm/drm_mm.h>
17*fc8d29e2SArthur Grillo 
18*fc8d29e2SArthur Grillo #include "../lib/drm_random.h"
19*fc8d29e2SArthur Grillo 
/* Seed for the pseudo-random orderings (drm_random_order() etc.);
 * NOTE(review): presumably set at module init — not visible in this chunk.
 */
static unsigned int random_seed;
/* Cap on the number of nodes/iterations a single subtest will use. */
static unsigned int max_iterations = 8192;
/* Cap on the primes used when sweeping over batch sizes in the subtests. */
static unsigned int max_prime = 128;
23*fc8d29e2SArthur Grillo 
/* Indices into insert_modes[] for the individual insertion strategies. */
enum {
	BEST,
	BOTTOMUP,
	TOPDOWN,
	EVICT,
};
30*fc8d29e2SArthur Grillo 
/*
 * Tables of drm_mm insertion modes under test. Each entry pairs a
 * human-readable name (used in failure messages) with the
 * drm_mm_insert_mode flag passed to drm_mm_insert_node_generic();
 * the empty {} entry acts as the iteration sentinel. insert_modes[]
 * is indexed by the enum above; evict_modes[] is the subset used by
 * the eviction tests.
 */
static const struct insert_mode {
	const char *name;
	enum drm_mm_insert_mode mode;
} insert_modes[] = {
	[BEST] = { "best", DRM_MM_INSERT_BEST },
	[BOTTOMUP] = { "bottom-up", DRM_MM_INSERT_LOW },
	[TOPDOWN] = { "top-down", DRM_MM_INSERT_HIGH },
	[EVICT] = { "evict", DRM_MM_INSERT_EVICT },
	{}
}, evict_modes[] = {
	{ "bottom-up", DRM_MM_INSERT_LOW },
	{ "top-down", DRM_MM_INSERT_HIGH },
	{}
};
45*fc8d29e2SArthur Grillo 
/*
 * Check that @mm contains no free space at all: the hole iterator must
 * visit nothing, and no allocated node may be followed by a hole.
 * Logs a KUnit failure and returns false otherwise.
 */
static bool assert_no_holes(struct kunit *test, const struct drm_mm *mm)
{
	struct drm_mm_node *hole;
	u64 hole_start, __always_unused hole_end;
	unsigned long count;

	count = 0;
	drm_mm_for_each_hole(hole, mm, hole_start, hole_end)
		count++;
	if (count) {
		KUNIT_FAIL(test,
			   "Expected to find no holes (after reserve), found %lu instead\n", count);
		return false;
	}

	/* Cross-check via the per-node view of trailing free space */
	drm_mm_for_each_node(hole, mm) {
		if (drm_mm_hole_follows(hole)) {
			KUNIT_FAIL(test, "Hole follows node, expected none!\n");
			return false;
		}
	}

	return true;
}
70*fc8d29e2SArthur Grillo 
/*
 * Check that @mm contains exactly one hole and that it spans
 * [@start, @end). A degenerate range (end <= start) is trivially
 * satisfied. All mismatches found are reported before returning false.
 */
static bool assert_one_hole(struct kunit *test, const struct drm_mm *mm, u64 start, u64 end)
{
	struct drm_mm_node *hole;
	u64 hole_start, hole_end;
	unsigned long count;
	bool ok = true;

	if (end <= start)
		return true;

	count = 0;
	drm_mm_for_each_hole(hole, mm, hole_start, hole_end) {
		if (start != hole_start || end != hole_end) {
			/* Only report the first mismatching hole */
			if (ok)
				KUNIT_FAIL(test,
					   "empty mm has incorrect hole, found (%llx, %llx), expect (%llx, %llx)\n",
					   hole_start, hole_end, start, end);
			ok = false;
		}
		count++;
	}
	if (count != 1) {
		KUNIT_FAIL(test, "Expected to find one hole, found %lu instead\n", count);
		ok = false;
	}

	return ok;
}
99*fc8d29e2SArthur Grillo 
/*
 * Check that @mm is completely filled by nodes of @size each, laid out
 * back-to-back from address 0 with no holes in between, and that a
 * range lookup over each node's extent finds exactly that node.
 * Logs a KUnit failure and returns false on the first violation.
 */
static bool assert_continuous(struct kunit *test, const struct drm_mm *mm, u64 size)
{
	struct drm_mm_node *node, *check, *found;
	unsigned long n;
	u64 addr;

	if (!assert_no_holes(test, mm))
		return false;

	/* Walk the node list; each node must start where the previous ended */
	n = 0;
	addr = 0;
	drm_mm_for_each_node(node, mm) {
		if (node->start != addr) {
			KUNIT_FAIL(test, "node[%ld] list out of order, expected %llx found %llx\n",
				   n, addr, node->start);
			return false;
		}

		if (node->size != size) {
			KUNIT_FAIL(test, "node[%ld].size incorrect, expected %llx, found %llx\n",
				   n, size, node->size);
			return false;
		}

		if (drm_mm_hole_follows(node)) {
			KUNIT_FAIL(test, "node[%ld] is followed by a hole!\n", n);
			return false;
		}

		/* A range search over this node's extent must yield it alone */
		found = NULL;
		drm_mm_for_each_node_in_range(check, mm, addr, addr + size) {
			if (node != check) {
				KUNIT_FAIL(test,
					   "lookup return wrong node, expected start %llx, found %llx\n",
					   node->start, check->start);
				return false;
			}
			found = check;
		}
		if (!found) {
			KUNIT_FAIL(test, "lookup failed for node %llx + %llx\n", addr, size);
			return false;
		}

		addr += size;
		n++;
	}

	return true;
}
150*fc8d29e2SArthur Grillo 
151*fc8d29e2SArthur Grillo static u64 misalignment(struct drm_mm_node *node, u64 alignment)
152*fc8d29e2SArthur Grillo {
153*fc8d29e2SArthur Grillo 	u64 rem;
154*fc8d29e2SArthur Grillo 
155*fc8d29e2SArthur Grillo 	if (!alignment)
156*fc8d29e2SArthur Grillo 		return 0;
157*fc8d29e2SArthur Grillo 
158*fc8d29e2SArthur Grillo 	div64_u64_rem(node->start, alignment, &rem);
159*fc8d29e2SArthur Grillo 	return rem;
160*fc8d29e2SArthur Grillo }
161*fc8d29e2SArthur Grillo 
/*
 * Check that @node is allocated from @mm and carries the requested
 * @size, @alignment and @color. Unlike the early-return helpers above,
 * every mismatch is reported before the combined verdict is returned.
 */
static bool assert_node(struct kunit *test, struct drm_mm_node *node, struct drm_mm *mm,
			u64 size, u64 alignment, unsigned long color)
{
	bool ok = true;

	if (!drm_mm_node_allocated(node) || node->mm != mm) {
		KUNIT_FAIL(test, "node not allocated\n");
		ok = false;
	}

	if (node->size != size) {
		KUNIT_FAIL(test, "node has wrong size, found %llu, expected %llu\n",
			   node->size, size);
		ok = false;
	}

	if (misalignment(node, alignment)) {
		KUNIT_FAIL(test,
			   "node is misaligned, start %llx rem %llu, expected alignment %llu\n",
			   node->start, misalignment(node, alignment), alignment);
		ok = false;
	}

	if (node->color != color) {
		KUNIT_FAIL(test, "node has wrong color, found %lu, expected %lu\n",
			   node->color, color);
		ok = false;
	}

	return ok;
}
193*fc8d29e2SArthur Grillo 
194*fc8d29e2SArthur Grillo static void igt_mm_init(struct kunit *test)
195*fc8d29e2SArthur Grillo {
196*fc8d29e2SArthur Grillo 	const unsigned int size = 4096;
197*fc8d29e2SArthur Grillo 	struct drm_mm mm;
198*fc8d29e2SArthur Grillo 	struct drm_mm_node tmp;
199*fc8d29e2SArthur Grillo 
200*fc8d29e2SArthur Grillo 	/* Start with some simple checks on initialising the struct drm_mm */
201*fc8d29e2SArthur Grillo 	memset(&mm, 0, sizeof(mm));
202*fc8d29e2SArthur Grillo 	KUNIT_ASSERT_FALSE_MSG(test, drm_mm_initialized(&mm),
203*fc8d29e2SArthur Grillo 			       "zeroed mm claims to be initialized\n");
204*fc8d29e2SArthur Grillo 
205*fc8d29e2SArthur Grillo 	memset(&mm, 0xff, sizeof(mm));
206*fc8d29e2SArthur Grillo 	drm_mm_init(&mm, 0, size);
207*fc8d29e2SArthur Grillo 	if (!drm_mm_initialized(&mm)) {
208*fc8d29e2SArthur Grillo 		KUNIT_FAIL(test, "mm claims not to be initialized\n");
209*fc8d29e2SArthur Grillo 		goto out;
210*fc8d29e2SArthur Grillo 	}
211*fc8d29e2SArthur Grillo 
212*fc8d29e2SArthur Grillo 	if (!drm_mm_clean(&mm)) {
213*fc8d29e2SArthur Grillo 		KUNIT_FAIL(test, "mm not empty on creation\n");
214*fc8d29e2SArthur Grillo 		goto out;
215*fc8d29e2SArthur Grillo 	}
216*fc8d29e2SArthur Grillo 
217*fc8d29e2SArthur Grillo 	/* After creation, it should all be one massive hole */
218*fc8d29e2SArthur Grillo 	if (!assert_one_hole(test, &mm, 0, size)) {
219*fc8d29e2SArthur Grillo 		KUNIT_FAIL(test, "");
220*fc8d29e2SArthur Grillo 		goto out;
221*fc8d29e2SArthur Grillo 	}
222*fc8d29e2SArthur Grillo 
223*fc8d29e2SArthur Grillo 	memset(&tmp, 0, sizeof(tmp));
224*fc8d29e2SArthur Grillo 	tmp.start = 0;
225*fc8d29e2SArthur Grillo 	tmp.size = size;
226*fc8d29e2SArthur Grillo 	if (drm_mm_reserve_node(&mm, &tmp)) {
227*fc8d29e2SArthur Grillo 		KUNIT_FAIL(test, "failed to reserve whole drm_mm\n");
228*fc8d29e2SArthur Grillo 		goto out;
229*fc8d29e2SArthur Grillo 	}
230*fc8d29e2SArthur Grillo 
231*fc8d29e2SArthur Grillo 	/* After filling the range entirely, there should be no holes */
232*fc8d29e2SArthur Grillo 	if (!assert_no_holes(test, &mm)) {
233*fc8d29e2SArthur Grillo 		KUNIT_FAIL(test, "");
234*fc8d29e2SArthur Grillo 		goto out;
235*fc8d29e2SArthur Grillo 	}
236*fc8d29e2SArthur Grillo 
237*fc8d29e2SArthur Grillo 	/* And then after emptying it again, the massive hole should be back */
238*fc8d29e2SArthur Grillo 	drm_mm_remove_node(&tmp);
239*fc8d29e2SArthur Grillo 	if (!assert_one_hole(test, &mm, 0, size)) {
240*fc8d29e2SArthur Grillo 		KUNIT_FAIL(test, "");
241*fc8d29e2SArthur Grillo 		goto out;
242*fc8d29e2SArthur Grillo 	}
243*fc8d29e2SArthur Grillo 
244*fc8d29e2SArthur Grillo out:
245*fc8d29e2SArthur Grillo 	drm_mm_takedown(&mm);
246*fc8d29e2SArthur Grillo }
247*fc8d29e2SArthur Grillo 
248*fc8d29e2SArthur Grillo static void igt_mm_debug(struct kunit *test)
249*fc8d29e2SArthur Grillo {
250*fc8d29e2SArthur Grillo 	struct drm_mm mm;
251*fc8d29e2SArthur Grillo 	struct drm_mm_node nodes[2];
252*fc8d29e2SArthur Grillo 
253*fc8d29e2SArthur Grillo 	/* Create a small drm_mm with a couple of nodes and a few holes, and
254*fc8d29e2SArthur Grillo 	 * check that the debug iterator doesn't explode over a trivial drm_mm.
255*fc8d29e2SArthur Grillo 	 */
256*fc8d29e2SArthur Grillo 
257*fc8d29e2SArthur Grillo 	drm_mm_init(&mm, 0, 4096);
258*fc8d29e2SArthur Grillo 
259*fc8d29e2SArthur Grillo 	memset(nodes, 0, sizeof(nodes));
260*fc8d29e2SArthur Grillo 	nodes[0].start = 512;
261*fc8d29e2SArthur Grillo 	nodes[0].size = 1024;
262*fc8d29e2SArthur Grillo 	KUNIT_ASSERT_FALSE_MSG(test, drm_mm_reserve_node(&mm, &nodes[0]),
263*fc8d29e2SArthur Grillo 			       "failed to reserve node[0] {start=%lld, size=%lld)\n",
264*fc8d29e2SArthur Grillo 			       nodes[0].start, nodes[0].size);
265*fc8d29e2SArthur Grillo 
266*fc8d29e2SArthur Grillo 	nodes[1].size = 1024;
267*fc8d29e2SArthur Grillo 	nodes[1].start = 4096 - 512 - nodes[1].size;
268*fc8d29e2SArthur Grillo 	KUNIT_ASSERT_FALSE_MSG(test, drm_mm_reserve_node(&mm, &nodes[1]),
269*fc8d29e2SArthur Grillo 			       "failed to reserve node[0] {start=%lld, size=%lld)\n",
270*fc8d29e2SArthur Grillo 			       nodes[0].start, nodes[0].size);
271*fc8d29e2SArthur Grillo }
272*fc8d29e2SArthur Grillo 
273*fc8d29e2SArthur Grillo static struct drm_mm_node *set_node(struct drm_mm_node *node,
274*fc8d29e2SArthur Grillo 				    u64 start, u64 size)
275*fc8d29e2SArthur Grillo {
276*fc8d29e2SArthur Grillo 	node->start = start;
277*fc8d29e2SArthur Grillo 	node->size = size;
278*fc8d29e2SArthur Grillo 	return node;
279*fc8d29e2SArthur Grillo }
280*fc8d29e2SArthur Grillo 
281*fc8d29e2SArthur Grillo static bool expect_reserve_fail(struct kunit *test, struct drm_mm *mm, struct drm_mm_node *node)
282*fc8d29e2SArthur Grillo {
283*fc8d29e2SArthur Grillo 	int err;
284*fc8d29e2SArthur Grillo 
285*fc8d29e2SArthur Grillo 	err = drm_mm_reserve_node(mm, node);
286*fc8d29e2SArthur Grillo 	if (likely(err == -ENOSPC))
287*fc8d29e2SArthur Grillo 		return true;
288*fc8d29e2SArthur Grillo 
289*fc8d29e2SArthur Grillo 	if (!err) {
290*fc8d29e2SArthur Grillo 		KUNIT_FAIL(test, "impossible reserve succeeded, node %llu + %llu\n",
291*fc8d29e2SArthur Grillo 			   node->start, node->size);
292*fc8d29e2SArthur Grillo 		drm_mm_remove_node(node);
293*fc8d29e2SArthur Grillo 	} else {
294*fc8d29e2SArthur Grillo 		KUNIT_FAIL(test,
295*fc8d29e2SArthur Grillo 			   "impossible reserve failed with wrong error %d [expected %d], node %llu + %llu\n",
296*fc8d29e2SArthur Grillo 		       err, -ENOSPC, node->start, node->size);
297*fc8d29e2SArthur Grillo 	}
298*fc8d29e2SArthur Grillo 	return false;
299*fc8d29e2SArthur Grillo }
300*fc8d29e2SArthur Grillo 
/*
 * Attempt a series of reservations that must all be invalid for an mm
 * of @count nodes of @size bytes — zero-sized, before/after the managed
 * range, straddling its boundaries — and check that each is rejected
 * with -ENOSPC via expect_reserve_fail().
 *
 * Note: the negative B() entries deliberately rely on u64 wrap-around,
 * probing ranges that wrap past the top of the address space.
 */
static bool check_reserve_boundaries(struct kunit *test, struct drm_mm *mm,
				     unsigned int count,
				     u64 size)
{
	const struct boundary {
		u64 start, size;
		const char *name;
	} boundaries[] = {
#define B(st, sz) { (st), (sz), "{ " #st ", " #sz "}" }
		B(0, 0),
		B(-size, 0),
		B(size, 0),
		B(size * count, 0),
		B(-size, size),
		B(-size, -size),
		B(-size, 2 * size),
		B(0, -size),
		B(size, -size),
		B(count * size, size),
		B(count * size, -size),
		B(count * size, count * size),
		B(count * size, -count * size),
		B(count * size, -(count + 1) * size),
		B((count + 1) * size, size),
		B((count + 1) * size, -size),
		B((count + 1) * size, -2 * size),
#undef B
	};
	struct drm_mm_node tmp = {};
	int n;

	for (n = 0; n < ARRAY_SIZE(boundaries); n++) {
		if (!expect_reserve_fail(test, mm, set_node(&tmp, boundaries[n].start,
							    boundaries[n].size))) {
			KUNIT_FAIL(test, "boundary[%d:%s] failed, count=%u, size=%lld\n",
				   n, boundaries[n].name, count, size);
			return false;
		}
	}

	return true;
}
343*fc8d29e2SArthur Grillo 
344*fc8d29e2SArthur Grillo static int __igt_reserve(struct kunit *test, unsigned int count, u64 size)
345*fc8d29e2SArthur Grillo {
346*fc8d29e2SArthur Grillo 	DRM_RND_STATE(prng, random_seed);
347*fc8d29e2SArthur Grillo 	struct drm_mm mm;
348*fc8d29e2SArthur Grillo 	struct drm_mm_node tmp, *nodes, *node, *next;
349*fc8d29e2SArthur Grillo 	unsigned int *order, n, m, o = 0;
350*fc8d29e2SArthur Grillo 	int ret, err;
351*fc8d29e2SArthur Grillo 
352*fc8d29e2SArthur Grillo 	/* For exercising drm_mm_reserve_node(struct kunit *test, ), we want to check that
353*fc8d29e2SArthur Grillo 	 * reservations outside of the drm_mm range are rejected, and to
354*fc8d29e2SArthur Grillo 	 * overlapping and otherwise already occupied ranges. Afterwards,
355*fc8d29e2SArthur Grillo 	 * the tree and nodes should be intact.
356*fc8d29e2SArthur Grillo 	 */
357*fc8d29e2SArthur Grillo 
358*fc8d29e2SArthur Grillo 	DRM_MM_BUG_ON(!count);
359*fc8d29e2SArthur Grillo 	DRM_MM_BUG_ON(!size);
360*fc8d29e2SArthur Grillo 
361*fc8d29e2SArthur Grillo 	ret = -ENOMEM;
362*fc8d29e2SArthur Grillo 	order = drm_random_order(count, &prng);
363*fc8d29e2SArthur Grillo 	if (!order)
364*fc8d29e2SArthur Grillo 		goto err;
365*fc8d29e2SArthur Grillo 
366*fc8d29e2SArthur Grillo 	nodes = vzalloc(array_size(count, sizeof(*nodes)));
367*fc8d29e2SArthur Grillo 	KUNIT_ASSERT_TRUE(test, nodes);
368*fc8d29e2SArthur Grillo 
369*fc8d29e2SArthur Grillo 	ret = -EINVAL;
370*fc8d29e2SArthur Grillo 	drm_mm_init(&mm, 0, count * size);
371*fc8d29e2SArthur Grillo 
372*fc8d29e2SArthur Grillo 	if (!check_reserve_boundaries(test, &mm, count, size))
373*fc8d29e2SArthur Grillo 		goto out;
374*fc8d29e2SArthur Grillo 
375*fc8d29e2SArthur Grillo 	for (n = 0; n < count; n++) {
376*fc8d29e2SArthur Grillo 		nodes[n].start = order[n] * size;
377*fc8d29e2SArthur Grillo 		nodes[n].size = size;
378*fc8d29e2SArthur Grillo 
379*fc8d29e2SArthur Grillo 		err = drm_mm_reserve_node(&mm, &nodes[n]);
380*fc8d29e2SArthur Grillo 		if (err) {
381*fc8d29e2SArthur Grillo 			KUNIT_FAIL(test, "reserve failed, step %d, start %llu\n",
382*fc8d29e2SArthur Grillo 				   n, nodes[n].start);
383*fc8d29e2SArthur Grillo 			ret = err;
384*fc8d29e2SArthur Grillo 			goto out;
385*fc8d29e2SArthur Grillo 		}
386*fc8d29e2SArthur Grillo 
387*fc8d29e2SArthur Grillo 		if (!drm_mm_node_allocated(&nodes[n])) {
388*fc8d29e2SArthur Grillo 			KUNIT_FAIL(test, "reserved node not allocated! step %d, start %llu\n",
389*fc8d29e2SArthur Grillo 				   n, nodes[n].start);
390*fc8d29e2SArthur Grillo 			goto out;
391*fc8d29e2SArthur Grillo 		}
392*fc8d29e2SArthur Grillo 
393*fc8d29e2SArthur Grillo 		if (!expect_reserve_fail(test, &mm, &nodes[n]))
394*fc8d29e2SArthur Grillo 			goto out;
395*fc8d29e2SArthur Grillo 	}
396*fc8d29e2SArthur Grillo 
397*fc8d29e2SArthur Grillo 	/* After random insertion the nodes should be in order */
398*fc8d29e2SArthur Grillo 	if (!assert_continuous(test, &mm, size))
399*fc8d29e2SArthur Grillo 		goto out;
400*fc8d29e2SArthur Grillo 
401*fc8d29e2SArthur Grillo 	/* Repeated use should then fail */
402*fc8d29e2SArthur Grillo 	drm_random_reorder(order, count, &prng);
403*fc8d29e2SArthur Grillo 	for (n = 0; n < count; n++) {
404*fc8d29e2SArthur Grillo 		if (!expect_reserve_fail(test, &mm, set_node(&tmp, order[n] * size, 1)))
405*fc8d29e2SArthur Grillo 			goto out;
406*fc8d29e2SArthur Grillo 
407*fc8d29e2SArthur Grillo 		/* Remove and reinsert should work */
408*fc8d29e2SArthur Grillo 		drm_mm_remove_node(&nodes[order[n]]);
409*fc8d29e2SArthur Grillo 		err = drm_mm_reserve_node(&mm, &nodes[order[n]]);
410*fc8d29e2SArthur Grillo 		if (err) {
411*fc8d29e2SArthur Grillo 			KUNIT_FAIL(test, "reserve failed, step %d, start %llu\n",
412*fc8d29e2SArthur Grillo 				   n, nodes[n].start);
413*fc8d29e2SArthur Grillo 			ret = err;
414*fc8d29e2SArthur Grillo 			goto out;
415*fc8d29e2SArthur Grillo 		}
416*fc8d29e2SArthur Grillo 	}
417*fc8d29e2SArthur Grillo 
418*fc8d29e2SArthur Grillo 	if (!assert_continuous(test, &mm, size))
419*fc8d29e2SArthur Grillo 		goto out;
420*fc8d29e2SArthur Grillo 
421*fc8d29e2SArthur Grillo 	/* Overlapping use should then fail */
422*fc8d29e2SArthur Grillo 	for (n = 0; n < count; n++) {
423*fc8d29e2SArthur Grillo 		if (!expect_reserve_fail(test, &mm, set_node(&tmp, 0, size * count)))
424*fc8d29e2SArthur Grillo 			goto out;
425*fc8d29e2SArthur Grillo 	}
426*fc8d29e2SArthur Grillo 	for (n = 0; n < count; n++) {
427*fc8d29e2SArthur Grillo 		if (!expect_reserve_fail(test, &mm, set_node(&tmp, size * n, size * (count - n))))
428*fc8d29e2SArthur Grillo 			goto out;
429*fc8d29e2SArthur Grillo 	}
430*fc8d29e2SArthur Grillo 
431*fc8d29e2SArthur Grillo 	/* Remove several, reinsert, check full */
432*fc8d29e2SArthur Grillo 	for_each_prime_number(n, min(max_prime, count)) {
433*fc8d29e2SArthur Grillo 		for (m = 0; m < n; m++) {
434*fc8d29e2SArthur Grillo 			node = &nodes[order[(o + m) % count]];
435*fc8d29e2SArthur Grillo 			drm_mm_remove_node(node);
436*fc8d29e2SArthur Grillo 		}
437*fc8d29e2SArthur Grillo 
438*fc8d29e2SArthur Grillo 		for (m = 0; m < n; m++) {
439*fc8d29e2SArthur Grillo 			node = &nodes[order[(o + m) % count]];
440*fc8d29e2SArthur Grillo 			err = drm_mm_reserve_node(&mm, node);
441*fc8d29e2SArthur Grillo 			if (err) {
442*fc8d29e2SArthur Grillo 				KUNIT_FAIL(test, "reserve failed, step %d/%d, start %llu\n",
443*fc8d29e2SArthur Grillo 					   m, n, node->start);
444*fc8d29e2SArthur Grillo 				ret = err;
445*fc8d29e2SArthur Grillo 				goto out;
446*fc8d29e2SArthur Grillo 			}
447*fc8d29e2SArthur Grillo 		}
448*fc8d29e2SArthur Grillo 
449*fc8d29e2SArthur Grillo 		o += n;
450*fc8d29e2SArthur Grillo 
451*fc8d29e2SArthur Grillo 		if (!assert_continuous(test, &mm, size))
452*fc8d29e2SArthur Grillo 			goto out;
453*fc8d29e2SArthur Grillo 	}
454*fc8d29e2SArthur Grillo 
455*fc8d29e2SArthur Grillo 	ret = 0;
456*fc8d29e2SArthur Grillo out:
457*fc8d29e2SArthur Grillo 	drm_mm_for_each_node_safe(node, next, &mm)
458*fc8d29e2SArthur Grillo 		drm_mm_remove_node(node);
459*fc8d29e2SArthur Grillo 	drm_mm_takedown(&mm);
460*fc8d29e2SArthur Grillo 	vfree(nodes);
461*fc8d29e2SArthur Grillo 	kfree(order);
462*fc8d29e2SArthur Grillo err:
463*fc8d29e2SArthur Grillo 	return ret;
464*fc8d29e2SArthur Grillo }
465*fc8d29e2SArthur Grillo 
/*
 * Sweep __igt_reserve() over node sizes one below, at, and one above
 * each power of two BIT_ULL(n) for prime n up to 54, to probe the
 * off-by-one boundaries of the reservation bookkeeping.
 */
static void igt_mm_reserve(struct kunit *test)
{
	const unsigned int count = min_t(unsigned int, BIT(10), max_iterations);
	int n;

	for_each_prime_number_from(n, 1, 54) {
		u64 size = BIT_ULL(n);

		KUNIT_ASSERT_FALSE(test, __igt_reserve(test, count, size - 1));
		KUNIT_ASSERT_FALSE(test, __igt_reserve(test, count, size));
		KUNIT_ASSERT_FALSE(test, __igt_reserve(test, count, size + 1));

		/* Each pass is slow; give the scheduler a chance between them */
		cond_resched();
	}
}
481*fc8d29e2SArthur Grillo 
482*fc8d29e2SArthur Grillo static bool expect_insert(struct kunit *test, struct drm_mm *mm,
483*fc8d29e2SArthur Grillo 			  struct drm_mm_node *node, u64 size, u64 alignment, unsigned long color,
484*fc8d29e2SArthur Grillo 			const struct insert_mode *mode)
485*fc8d29e2SArthur Grillo {
486*fc8d29e2SArthur Grillo 	int err;
487*fc8d29e2SArthur Grillo 
488*fc8d29e2SArthur Grillo 	err = drm_mm_insert_node_generic(mm, node,
489*fc8d29e2SArthur Grillo 					 size, alignment, color,
490*fc8d29e2SArthur Grillo 					 mode->mode);
491*fc8d29e2SArthur Grillo 	if (err) {
492*fc8d29e2SArthur Grillo 		KUNIT_FAIL(test,
493*fc8d29e2SArthur Grillo 			   "insert (size=%llu, alignment=%llu, color=%lu, mode=%s) failed with err=%d\n",
494*fc8d29e2SArthur Grillo 			   size, alignment, color, mode->name, err);
495*fc8d29e2SArthur Grillo 		return false;
496*fc8d29e2SArthur Grillo 	}
497*fc8d29e2SArthur Grillo 
498*fc8d29e2SArthur Grillo 	if (!assert_node(test, node, mm, size, alignment, color)) {
499*fc8d29e2SArthur Grillo 		drm_mm_remove_node(node);
500*fc8d29e2SArthur Grillo 		return false;
501*fc8d29e2SArthur Grillo 	}
502*fc8d29e2SArthur Grillo 
503*fc8d29e2SArthur Grillo 	return true;
504*fc8d29e2SArthur Grillo }
505*fc8d29e2SArthur Grillo 
506*fc8d29e2SArthur Grillo static bool expect_insert_fail(struct kunit *test, struct drm_mm *mm, u64 size)
507*fc8d29e2SArthur Grillo {
508*fc8d29e2SArthur Grillo 	struct drm_mm_node tmp = {};
509*fc8d29e2SArthur Grillo 	int err;
510*fc8d29e2SArthur Grillo 
511*fc8d29e2SArthur Grillo 	err = drm_mm_insert_node(mm, &tmp, size);
512*fc8d29e2SArthur Grillo 	if (likely(err == -ENOSPC))
513*fc8d29e2SArthur Grillo 		return true;
514*fc8d29e2SArthur Grillo 
515*fc8d29e2SArthur Grillo 	if (!err) {
516*fc8d29e2SArthur Grillo 		KUNIT_FAIL(test, "impossible insert succeeded, node %llu + %llu\n",
517*fc8d29e2SArthur Grillo 			   tmp.start, tmp.size);
518*fc8d29e2SArthur Grillo 		drm_mm_remove_node(&tmp);
519*fc8d29e2SArthur Grillo 	} else {
520*fc8d29e2SArthur Grillo 		KUNIT_FAIL(test,
521*fc8d29e2SArthur Grillo 			   "impossible insert failed with wrong error %d [expected %d], size %llu\n",
522*fc8d29e2SArthur Grillo 			   err, -ENOSPC, size);
523*fc8d29e2SArthur Grillo 	}
524*fc8d29e2SArthur Grillo 	return false;
525*fc8d29e2SArthur Grillo }
526*fc8d29e2SArthur Grillo 
/*
 * Fill a drm_mm of @count * @size with @count nodes of @size each, for
 * every insertion mode, and check it doesn't fail too early: the nodes
 * end up contiguous, a further insert fails with -ENOSPC, and removed
 * nodes can be reinserted into the holes they left. When @replace is
 * set, each node is first inserted via a temporary and then handed
 * over with drm_mm_replace_node(), checking the replacement inherits
 * the original's placement and parameters.
 *
 * Returns 0 on success or a negative errno; the caller asserts on it.
 */
static int __igt_insert(struct kunit *test, unsigned int count, u64 size, bool replace)
{
	DRM_RND_STATE(prng, random_seed);
	const struct insert_mode *mode;
	struct drm_mm mm;
	struct drm_mm_node *nodes, *node, *next;
	unsigned int *order, n, m, o = 0;
	int ret;

	/* Fill a range with lots of nodes, check it doesn't fail too early */

	DRM_MM_BUG_ON(!count);
	DRM_MM_BUG_ON(!size);

	ret = -ENOMEM;
	nodes = vmalloc(array_size(count, sizeof(*nodes)));
	KUNIT_ASSERT_TRUE(test, nodes);

	order = drm_random_order(count, &prng);
	if (!order)
		goto err_nodes;

	ret = -EINVAL;
	drm_mm_init(&mm, 0, count * size);

	for (mode = insert_modes; mode->name; mode++) {
		for (n = 0; n < count; n++) {
			struct drm_mm_node tmp;

			/* With @replace, insert into a stack temporary first */
			node = replace ? &tmp : &nodes[n];
			memset(node, 0, sizeof(*node));
			if (!expect_insert(test, &mm, node, size, 0, n, mode)) {
				KUNIT_FAIL(test, "%s insert failed, size %llu step %d\n",
					   mode->name, size, n);
				goto out;
			}

			if (replace) {
				drm_mm_replace_node(&tmp, &nodes[n]);
				if (drm_mm_node_allocated(&tmp)) {
					KUNIT_FAIL(test,
						   "replaced old-node still allocated! step %d\n",
						   n);
					goto out;
				}

				if (!assert_node(test, &nodes[n], &mm, size, 0, n)) {
					KUNIT_FAIL(test,
						   "replaced node did not inherit parameters, size %llu step %d\n",
						   size, n);
					goto out;
				}

				if (tmp.start != nodes[n].start) {
					KUNIT_FAIL(test,
						   "replaced node mismatch location expected [%llx + %llx], found [%llx + %llx]\n",
						   tmp.start, size, nodes[n].start, nodes[n].size);
					goto out;
				}
			}
		}

		/* After random insertion the nodes should be in order */
		if (!assert_continuous(test, &mm, size))
			goto out;

		/* Repeated use should then fail */
		if (!expect_insert_fail(test, &mm, size))
			goto out;

		/* Remove one and reinsert, as the only hole it should refill itself */
		for (n = 0; n < count; n++) {
			u64 addr = nodes[n].start;

			drm_mm_remove_node(&nodes[n]);
			if (!expect_insert(test, &mm, &nodes[n], size, 0, n, mode)) {
				KUNIT_FAIL(test, "%s reinsert failed, size %llu step %d\n",
					   mode->name, size, n);
				goto out;
			}

			if (nodes[n].start != addr) {
				KUNIT_FAIL(test,
					   "%s reinsert node moved, step %d, expected %llx, found %llx\n",
					   mode->name, n, addr, nodes[n].start);
				goto out;
			}

			if (!assert_continuous(test, &mm, size))
				goto out;
		}

		/* Remove several, reinsert, check full */
		for_each_prime_number(n, min(max_prime, count)) {
			for (m = 0; m < n; m++) {
				node = &nodes[order[(o + m) % count]];
				drm_mm_remove_node(node);
			}

			for (m = 0; m < n; m++) {
				node = &nodes[order[(o + m) % count]];
				if (!expect_insert(test, &mm, node, size, 0, n, mode)) {
					KUNIT_FAIL(test,
						   "%s multiple reinsert failed, size %llu step %d\n",
							   mode->name, size, n);
					goto out;
				}
			}

			o += n;

			if (!assert_continuous(test, &mm, size))
				goto out;

			if (!expect_insert_fail(test, &mm, size))
				goto out;
		}

		/* Empty the mm again before trying the next insertion mode */
		drm_mm_for_each_node_safe(node, next, &mm)
			drm_mm_remove_node(node);
		DRM_MM_BUG_ON(!drm_mm_clean(&mm));

		cond_resched();
	}

	ret = 0;
out:
	drm_mm_for_each_node_safe(node, next, &mm)
		drm_mm_remove_node(node);
	drm_mm_takedown(&mm);
	kfree(order);
err_nodes:
	vfree(nodes);
	return ret;
}
662*fc8d29e2SArthur Grillo 
663*fc8d29e2SArthur Grillo static void igt_mm_insert(struct kunit *test)
664*fc8d29e2SArthur Grillo {
665*fc8d29e2SArthur Grillo 	const unsigned int count = min_t(unsigned int, BIT(10), max_iterations);
666*fc8d29e2SArthur Grillo 	unsigned int n;
667*fc8d29e2SArthur Grillo 
668*fc8d29e2SArthur Grillo 	for_each_prime_number_from(n, 1, 54) {
669*fc8d29e2SArthur Grillo 		u64 size = BIT_ULL(n);
670*fc8d29e2SArthur Grillo 
671*fc8d29e2SArthur Grillo 		KUNIT_ASSERT_FALSE(test, __igt_insert(test, count, size - 1, false));
672*fc8d29e2SArthur Grillo 		KUNIT_ASSERT_FALSE(test, __igt_insert(test, count, size, false));
673*fc8d29e2SArthur Grillo 		KUNIT_ASSERT_FALSE(test, __igt_insert(test, count, size + 1, false));
674*fc8d29e2SArthur Grillo 
675*fc8d29e2SArthur Grillo 		cond_resched();
676*fc8d29e2SArthur Grillo 	}
677*fc8d29e2SArthur Grillo }
678*fc8d29e2SArthur Grillo 
679*fc8d29e2SArthur Grillo static void igt_mm_replace(struct kunit *test)
680*fc8d29e2SArthur Grillo {
681*fc8d29e2SArthur Grillo 	const unsigned int count = min_t(unsigned int, BIT(10), max_iterations);
682*fc8d29e2SArthur Grillo 	unsigned int n;
683*fc8d29e2SArthur Grillo 
684*fc8d29e2SArthur Grillo 	/* Reuse igt_insert to exercise replacement by inserting a dummy node,
685*fc8d29e2SArthur Grillo 	 * then replacing it with the intended node. We want to check that
686*fc8d29e2SArthur Grillo 	 * the tree is intact and all the information we need is carried
687*fc8d29e2SArthur Grillo 	 * across to the target node.
688*fc8d29e2SArthur Grillo 	 */
689*fc8d29e2SArthur Grillo 
690*fc8d29e2SArthur Grillo 	for_each_prime_number_from(n, 1, 54) {
691*fc8d29e2SArthur Grillo 		u64 size = BIT_ULL(n);
692*fc8d29e2SArthur Grillo 
693*fc8d29e2SArthur Grillo 		KUNIT_ASSERT_FALSE(test, __igt_insert(test, count, size - 1, true));
694*fc8d29e2SArthur Grillo 		KUNIT_ASSERT_FALSE(test, __igt_insert(test, count, size, true));
695*fc8d29e2SArthur Grillo 		KUNIT_ASSERT_FALSE(test, __igt_insert(test, count, size + 1, true));
696*fc8d29e2SArthur Grillo 
697*fc8d29e2SArthur Grillo 		cond_resched();
698*fc8d29e2SArthur Grillo 	}
699*fc8d29e2SArthur Grillo }
700*fc8d29e2SArthur Grillo 
701*fc8d29e2SArthur Grillo static bool expect_insert_in_range(struct kunit *test, struct drm_mm *mm, struct drm_mm_node *node,
702*fc8d29e2SArthur Grillo 				   u64 size, u64 alignment, unsigned long color,
703*fc8d29e2SArthur Grillo 				   u64 range_start, u64 range_end, const struct insert_mode *mode)
704*fc8d29e2SArthur Grillo {
705*fc8d29e2SArthur Grillo 	int err;
706*fc8d29e2SArthur Grillo 
707*fc8d29e2SArthur Grillo 	err = drm_mm_insert_node_in_range(mm, node,
708*fc8d29e2SArthur Grillo 					  size, alignment, color,
709*fc8d29e2SArthur Grillo 					  range_start, range_end,
710*fc8d29e2SArthur Grillo 					  mode->mode);
711*fc8d29e2SArthur Grillo 	if (err) {
712*fc8d29e2SArthur Grillo 		KUNIT_FAIL(test,
713*fc8d29e2SArthur Grillo 			   "insert (size=%llu, alignment=%llu, color=%lu, mode=%s) nto range [%llx, %llx] failed with err=%d\n",
714*fc8d29e2SArthur Grillo 				   size, alignment, color, mode->name,
715*fc8d29e2SArthur Grillo 				   range_start, range_end, err);
716*fc8d29e2SArthur Grillo 		return false;
717*fc8d29e2SArthur Grillo 	}
718*fc8d29e2SArthur Grillo 
719*fc8d29e2SArthur Grillo 	if (!assert_node(test, node, mm, size, alignment, color)) {
720*fc8d29e2SArthur Grillo 		drm_mm_remove_node(node);
721*fc8d29e2SArthur Grillo 		return false;
722*fc8d29e2SArthur Grillo 	}
723*fc8d29e2SArthur Grillo 
724*fc8d29e2SArthur Grillo 	return true;
725*fc8d29e2SArthur Grillo }
726*fc8d29e2SArthur Grillo 
727*fc8d29e2SArthur Grillo static bool expect_insert_in_range_fail(struct kunit *test, struct drm_mm *mm,
728*fc8d29e2SArthur Grillo 					u64 size, u64 range_start, u64 range_end)
729*fc8d29e2SArthur Grillo {
730*fc8d29e2SArthur Grillo 	struct drm_mm_node tmp = {};
731*fc8d29e2SArthur Grillo 	int err;
732*fc8d29e2SArthur Grillo 
733*fc8d29e2SArthur Grillo 	err = drm_mm_insert_node_in_range(mm, &tmp, size, 0, 0, range_start, range_end,
734*fc8d29e2SArthur Grillo 					  0);
735*fc8d29e2SArthur Grillo 	if (likely(err == -ENOSPC))
736*fc8d29e2SArthur Grillo 		return true;
737*fc8d29e2SArthur Grillo 
738*fc8d29e2SArthur Grillo 	if (!err) {
739*fc8d29e2SArthur Grillo 		KUNIT_FAIL(test,
740*fc8d29e2SArthur Grillo 			   "impossible insert succeeded, node %llx + %llu, range [%llx, %llx]\n",
741*fc8d29e2SArthur Grillo 				   tmp.start, tmp.size, range_start, range_end);
742*fc8d29e2SArthur Grillo 		drm_mm_remove_node(&tmp);
743*fc8d29e2SArthur Grillo 	} else {
744*fc8d29e2SArthur Grillo 		KUNIT_FAIL(test,
745*fc8d29e2SArthur Grillo 			   "impossible insert failed with wrong error %d [expected %d], size %llu, range [%llx, %llx]\n",
746*fc8d29e2SArthur Grillo 				   err, -ENOSPC, size, range_start, range_end);
747*fc8d29e2SArthur Grillo 	}
748*fc8d29e2SArthur Grillo 
749*fc8d29e2SArthur Grillo 	return false;
750*fc8d29e2SArthur Grillo }
751*fc8d29e2SArthur Grillo 
752*fc8d29e2SArthur Grillo static bool assert_contiguous_in_range(struct kunit *test, struct drm_mm *mm,
753*fc8d29e2SArthur Grillo 				       u64 size, u64 start, u64 end)
754*fc8d29e2SArthur Grillo {
755*fc8d29e2SArthur Grillo 	struct drm_mm_node *node;
756*fc8d29e2SArthur Grillo 	unsigned int n;
757*fc8d29e2SArthur Grillo 
758*fc8d29e2SArthur Grillo 	if (!expect_insert_in_range_fail(test, mm, size, start, end))
759*fc8d29e2SArthur Grillo 		return false;
760*fc8d29e2SArthur Grillo 
761*fc8d29e2SArthur Grillo 	n = div64_u64(start + size - 1, size);
762*fc8d29e2SArthur Grillo 	drm_mm_for_each_node(node, mm) {
763*fc8d29e2SArthur Grillo 		if (node->start < start || node->start + node->size > end) {
764*fc8d29e2SArthur Grillo 			KUNIT_FAIL(test,
765*fc8d29e2SArthur Grillo 				   "node %d out of range, address [%llx + %llu], range [%llx, %llx]\n",
766*fc8d29e2SArthur Grillo 					   n, node->start, node->start + node->size, start, end);
767*fc8d29e2SArthur Grillo 			return false;
768*fc8d29e2SArthur Grillo 		}
769*fc8d29e2SArthur Grillo 
770*fc8d29e2SArthur Grillo 		if (node->start != n * size) {
771*fc8d29e2SArthur Grillo 			KUNIT_FAIL(test, "node %d out of order, expected start %llx, found %llx\n",
772*fc8d29e2SArthur Grillo 				   n, n * size, node->start);
773*fc8d29e2SArthur Grillo 			return false;
774*fc8d29e2SArthur Grillo 		}
775*fc8d29e2SArthur Grillo 
776*fc8d29e2SArthur Grillo 		if (node->size != size) {
777*fc8d29e2SArthur Grillo 			KUNIT_FAIL(test, "node %d has wrong size, expected size %llx, found %llx\n",
778*fc8d29e2SArthur Grillo 				   n, size, node->size);
779*fc8d29e2SArthur Grillo 			return false;
780*fc8d29e2SArthur Grillo 		}
781*fc8d29e2SArthur Grillo 
782*fc8d29e2SArthur Grillo 		if (drm_mm_hole_follows(node) && drm_mm_hole_node_end(node) < end) {
783*fc8d29e2SArthur Grillo 			KUNIT_FAIL(test, "node %d is followed by a hole!\n", n);
784*fc8d29e2SArthur Grillo 			return false;
785*fc8d29e2SArthur Grillo 		}
786*fc8d29e2SArthur Grillo 
787*fc8d29e2SArthur Grillo 		n++;
788*fc8d29e2SArthur Grillo 	}
789*fc8d29e2SArthur Grillo 
790*fc8d29e2SArthur Grillo 	if (start > 0) {
791*fc8d29e2SArthur Grillo 		node = __drm_mm_interval_first(mm, 0, start - 1);
792*fc8d29e2SArthur Grillo 		if (drm_mm_node_allocated(node)) {
793*fc8d29e2SArthur Grillo 			KUNIT_FAIL(test, "node before start: node=%llx+%llu, start=%llx\n",
794*fc8d29e2SArthur Grillo 				   node->start, node->size, start);
795*fc8d29e2SArthur Grillo 			return false;
796*fc8d29e2SArthur Grillo 		}
797*fc8d29e2SArthur Grillo 	}
798*fc8d29e2SArthur Grillo 
799*fc8d29e2SArthur Grillo 	if (end < U64_MAX) {
800*fc8d29e2SArthur Grillo 		node = __drm_mm_interval_first(mm, end, U64_MAX);
801*fc8d29e2SArthur Grillo 		if (drm_mm_node_allocated(node)) {
802*fc8d29e2SArthur Grillo 			KUNIT_FAIL(test, "node after end: node=%llx+%llu, end=%llx\n",
803*fc8d29e2SArthur Grillo 				   node->start, node->size, end);
804*fc8d29e2SArthur Grillo 			return false;
805*fc8d29e2SArthur Grillo 		}
806*fc8d29e2SArthur Grillo 	}
807*fc8d29e2SArthur Grillo 
808*fc8d29e2SArthur Grillo 	return true;
809*fc8d29e2SArthur Grillo }
810*fc8d29e2SArthur Grillo 
811*fc8d29e2SArthur Grillo static int __igt_insert_range(struct kunit *test, unsigned int count, u64 size, u64 start, u64 end)
812*fc8d29e2SArthur Grillo {
813*fc8d29e2SArthur Grillo 	const struct insert_mode *mode;
814*fc8d29e2SArthur Grillo 	struct drm_mm mm;
815*fc8d29e2SArthur Grillo 	struct drm_mm_node *nodes, *node, *next;
816*fc8d29e2SArthur Grillo 	unsigned int n, start_n, end_n;
817*fc8d29e2SArthur Grillo 	int ret;
818*fc8d29e2SArthur Grillo 
819*fc8d29e2SArthur Grillo 	DRM_MM_BUG_ON(!count);
820*fc8d29e2SArthur Grillo 	DRM_MM_BUG_ON(!size);
821*fc8d29e2SArthur Grillo 	DRM_MM_BUG_ON(end <= start);
822*fc8d29e2SArthur Grillo 
823*fc8d29e2SArthur Grillo 	/* Very similar to __igt_insert(struct kunit *test, ), but now instead of populating the
824*fc8d29e2SArthur Grillo 	 * full range of the drm_mm, we try to fill a small portion of it.
825*fc8d29e2SArthur Grillo 	 */
826*fc8d29e2SArthur Grillo 
827*fc8d29e2SArthur Grillo 	ret = -ENOMEM;
828*fc8d29e2SArthur Grillo 	nodes = vzalloc(array_size(count, sizeof(*nodes)));
829*fc8d29e2SArthur Grillo 	KUNIT_ASSERT_TRUE(test, nodes);
830*fc8d29e2SArthur Grillo 
831*fc8d29e2SArthur Grillo 	ret = -EINVAL;
832*fc8d29e2SArthur Grillo 	drm_mm_init(&mm, 0, count * size);
833*fc8d29e2SArthur Grillo 
834*fc8d29e2SArthur Grillo 	start_n = div64_u64(start + size - 1, size);
835*fc8d29e2SArthur Grillo 	end_n = div64_u64(end - size, size);
836*fc8d29e2SArthur Grillo 
837*fc8d29e2SArthur Grillo 	for (mode = insert_modes; mode->name; mode++) {
838*fc8d29e2SArthur Grillo 		for (n = start_n; n <= end_n; n++) {
839*fc8d29e2SArthur Grillo 			if (!expect_insert_in_range(test, &mm, &nodes[n], size, size, n,
840*fc8d29e2SArthur Grillo 						    start, end, mode)) {
841*fc8d29e2SArthur Grillo 				KUNIT_FAIL(test,
842*fc8d29e2SArthur Grillo 					   "%s insert failed, size %llu, step %d [%d, %d], range [%llx, %llx]\n",
843*fc8d29e2SArthur Grillo 						   mode->name, size, n, start_n, end_n, start, end);
844*fc8d29e2SArthur Grillo 				goto out;
845*fc8d29e2SArthur Grillo 			}
846*fc8d29e2SArthur Grillo 		}
847*fc8d29e2SArthur Grillo 
848*fc8d29e2SArthur Grillo 		if (!assert_contiguous_in_range(test, &mm, size, start, end)) {
849*fc8d29e2SArthur Grillo 			KUNIT_FAIL(test,
850*fc8d29e2SArthur Grillo 				   "%s: range [%llx, %llx] not full after initialisation, size=%llu\n",
851*fc8d29e2SArthur Grillo 				   mode->name, start, end, size);
852*fc8d29e2SArthur Grillo 			goto out;
853*fc8d29e2SArthur Grillo 		}
854*fc8d29e2SArthur Grillo 
855*fc8d29e2SArthur Grillo 		/* Remove one and reinsert, it should refill itself */
856*fc8d29e2SArthur Grillo 		for (n = start_n; n <= end_n; n++) {
857*fc8d29e2SArthur Grillo 			u64 addr = nodes[n].start;
858*fc8d29e2SArthur Grillo 
859*fc8d29e2SArthur Grillo 			drm_mm_remove_node(&nodes[n]);
860*fc8d29e2SArthur Grillo 			if (!expect_insert_in_range(test, &mm, &nodes[n], size, size, n,
861*fc8d29e2SArthur Grillo 						    start, end, mode)) {
862*fc8d29e2SArthur Grillo 				KUNIT_FAIL(test, "%s reinsert failed, step %d\n", mode->name, n);
863*fc8d29e2SArthur Grillo 				goto out;
864*fc8d29e2SArthur Grillo 			}
865*fc8d29e2SArthur Grillo 
866*fc8d29e2SArthur Grillo 			if (nodes[n].start != addr) {
867*fc8d29e2SArthur Grillo 				KUNIT_FAIL(test,
868*fc8d29e2SArthur Grillo 					   "%s reinsert node moved, step %d, expected %llx, found %llx\n",
869*fc8d29e2SArthur Grillo 					   mode->name, n, addr, nodes[n].start);
870*fc8d29e2SArthur Grillo 				goto out;
871*fc8d29e2SArthur Grillo 			}
872*fc8d29e2SArthur Grillo 		}
873*fc8d29e2SArthur Grillo 
874*fc8d29e2SArthur Grillo 		if (!assert_contiguous_in_range(test, &mm, size, start, end)) {
875*fc8d29e2SArthur Grillo 			KUNIT_FAIL(test,
876*fc8d29e2SArthur Grillo 				   "%s: range [%llx, %llx] not full after reinsertion, size=%llu\n",
877*fc8d29e2SArthur Grillo 				   mode->name, start, end, size);
878*fc8d29e2SArthur Grillo 			goto out;
879*fc8d29e2SArthur Grillo 		}
880*fc8d29e2SArthur Grillo 
881*fc8d29e2SArthur Grillo 		drm_mm_for_each_node_safe(node, next, &mm)
882*fc8d29e2SArthur Grillo 			drm_mm_remove_node(node);
883*fc8d29e2SArthur Grillo 		DRM_MM_BUG_ON(!drm_mm_clean(&mm));
884*fc8d29e2SArthur Grillo 
885*fc8d29e2SArthur Grillo 		cond_resched();
886*fc8d29e2SArthur Grillo 	}
887*fc8d29e2SArthur Grillo 
888*fc8d29e2SArthur Grillo 	ret = 0;
889*fc8d29e2SArthur Grillo out:
890*fc8d29e2SArthur Grillo 	drm_mm_for_each_node_safe(node, next, &mm)
891*fc8d29e2SArthur Grillo 		drm_mm_remove_node(node);
892*fc8d29e2SArthur Grillo 	drm_mm_takedown(&mm);
893*fc8d29e2SArthur Grillo 	vfree(nodes);
894*fc8d29e2SArthur Grillo 	return ret;
895*fc8d29e2SArthur Grillo }
896*fc8d29e2SArthur Grillo 
897*fc8d29e2SArthur Grillo static int insert_outside_range(struct kunit *test)
898*fc8d29e2SArthur Grillo {
899*fc8d29e2SArthur Grillo 	struct drm_mm mm;
900*fc8d29e2SArthur Grillo 	const unsigned int start = 1024;
901*fc8d29e2SArthur Grillo 	const unsigned int end = 2048;
902*fc8d29e2SArthur Grillo 	const unsigned int size = end - start;
903*fc8d29e2SArthur Grillo 
904*fc8d29e2SArthur Grillo 	drm_mm_init(&mm, start, size);
905*fc8d29e2SArthur Grillo 
906*fc8d29e2SArthur Grillo 	if (!expect_insert_in_range_fail(test, &mm, 1, 0, start))
907*fc8d29e2SArthur Grillo 		return -EINVAL;
908*fc8d29e2SArthur Grillo 
909*fc8d29e2SArthur Grillo 	if (!expect_insert_in_range_fail(test, &mm, size,
910*fc8d29e2SArthur Grillo 					 start - size / 2, start + (size + 1) / 2))
911*fc8d29e2SArthur Grillo 		return -EINVAL;
912*fc8d29e2SArthur Grillo 
913*fc8d29e2SArthur Grillo 	if (!expect_insert_in_range_fail(test, &mm, size,
914*fc8d29e2SArthur Grillo 					 end - (size + 1) / 2, end + size / 2))
915*fc8d29e2SArthur Grillo 		return -EINVAL;
916*fc8d29e2SArthur Grillo 
917*fc8d29e2SArthur Grillo 	if (!expect_insert_in_range_fail(test, &mm, 1, end, end + size))
918*fc8d29e2SArthur Grillo 		return -EINVAL;
919*fc8d29e2SArthur Grillo 
920*fc8d29e2SArthur Grillo 	drm_mm_takedown(&mm);
921*fc8d29e2SArthur Grillo 	return 0;
922*fc8d29e2SArthur Grillo }
923*fc8d29e2SArthur Grillo 
924*fc8d29e2SArthur Grillo static void igt_mm_insert_range(struct kunit *test)
925*fc8d29e2SArthur Grillo {
926*fc8d29e2SArthur Grillo 	const unsigned int count = min_t(unsigned int, BIT(13), max_iterations);
927*fc8d29e2SArthur Grillo 	unsigned int n;
928*fc8d29e2SArthur Grillo 
929*fc8d29e2SArthur Grillo 	/* Check that requests outside the bounds of drm_mm are rejected. */
930*fc8d29e2SArthur Grillo 	KUNIT_ASSERT_FALSE(test, insert_outside_range(test));
931*fc8d29e2SArthur Grillo 
932*fc8d29e2SArthur Grillo 	for_each_prime_number_from(n, 1, 50) {
933*fc8d29e2SArthur Grillo 		const u64 size = BIT_ULL(n);
934*fc8d29e2SArthur Grillo 		const u64 max = count * size;
935*fc8d29e2SArthur Grillo 
936*fc8d29e2SArthur Grillo 		KUNIT_ASSERT_FALSE(test, __igt_insert_range(test, count, size, 0, max));
937*fc8d29e2SArthur Grillo 		KUNIT_ASSERT_FALSE(test, __igt_insert_range(test, count, size, 1, max));
938*fc8d29e2SArthur Grillo 		KUNIT_ASSERT_FALSE(test, __igt_insert_range(test, count, size, 0, max - 1));
939*fc8d29e2SArthur Grillo 		KUNIT_ASSERT_FALSE(test, __igt_insert_range(test, count, size, 0, max / 2));
940*fc8d29e2SArthur Grillo 		KUNIT_ASSERT_FALSE(test, __igt_insert_range(test, count, size, max / 2, max / 2));
941*fc8d29e2SArthur Grillo 		KUNIT_ASSERT_FALSE(test, __igt_insert_range(test, count, size,
942*fc8d29e2SArthur Grillo 							    max / 4 + 1, 3 * max / 4 - 1));
943*fc8d29e2SArthur Grillo 
944*fc8d29e2SArthur Grillo 		cond_resched();
945*fc8d29e2SArthur Grillo 	}
946*fc8d29e2SArthur Grillo }
947*fc8d29e2SArthur Grillo 
948*fc8d29e2SArthur Grillo static int prepare_igt_frag(struct kunit *test, struct drm_mm *mm,
949*fc8d29e2SArthur Grillo 			    struct drm_mm_node *nodes, unsigned int num_insert,
950*fc8d29e2SArthur Grillo 			    const struct insert_mode *mode)
951*fc8d29e2SArthur Grillo {
952*fc8d29e2SArthur Grillo 	unsigned int size = 4096;
953*fc8d29e2SArthur Grillo 	unsigned int i;
954*fc8d29e2SArthur Grillo 
955*fc8d29e2SArthur Grillo 	for (i = 0; i < num_insert; i++) {
956*fc8d29e2SArthur Grillo 		if (!expect_insert(test, mm, &nodes[i], size, 0, i, mode) != 0) {
957*fc8d29e2SArthur Grillo 			KUNIT_FAIL(test, "%s insert failed\n", mode->name);
958*fc8d29e2SArthur Grillo 			return -EINVAL;
959*fc8d29e2SArthur Grillo 		}
960*fc8d29e2SArthur Grillo 	}
961*fc8d29e2SArthur Grillo 
962*fc8d29e2SArthur Grillo 	/* introduce fragmentation by freeing every other node */
963*fc8d29e2SArthur Grillo 	for (i = 0; i < num_insert; i++) {
964*fc8d29e2SArthur Grillo 		if (i % 2 == 0)
965*fc8d29e2SArthur Grillo 			drm_mm_remove_node(&nodes[i]);
966*fc8d29e2SArthur Grillo 	}
967*fc8d29e2SArthur Grillo 
968*fc8d29e2SArthur Grillo 	return 0;
969*fc8d29e2SArthur Grillo }
970*fc8d29e2SArthur Grillo 
971*fc8d29e2SArthur Grillo static u64 get_insert_time(struct kunit *test, struct drm_mm *mm,
972*fc8d29e2SArthur Grillo 			   unsigned int num_insert, struct drm_mm_node *nodes,
973*fc8d29e2SArthur Grillo 			   const struct insert_mode *mode)
974*fc8d29e2SArthur Grillo {
975*fc8d29e2SArthur Grillo 	unsigned int size = 8192;
976*fc8d29e2SArthur Grillo 	ktime_t start;
977*fc8d29e2SArthur Grillo 	unsigned int i;
978*fc8d29e2SArthur Grillo 
979*fc8d29e2SArthur Grillo 	start = ktime_get();
980*fc8d29e2SArthur Grillo 	for (i = 0; i < num_insert; i++) {
981*fc8d29e2SArthur Grillo 		if (!expect_insert(test, mm, &nodes[i], size, 0, i, mode) != 0) {
982*fc8d29e2SArthur Grillo 			KUNIT_FAIL(test, "%s insert failed\n", mode->name);
983*fc8d29e2SArthur Grillo 			return 0;
984*fc8d29e2SArthur Grillo 		}
985*fc8d29e2SArthur Grillo 	}
986*fc8d29e2SArthur Grillo 
987*fc8d29e2SArthur Grillo 	return ktime_to_ns(ktime_sub(ktime_get(), start));
988*fc8d29e2SArthur Grillo }
989*fc8d29e2SArthur Grillo 
990*fc8d29e2SArthur Grillo static void igt_mm_frag(struct kunit *test)
991*fc8d29e2SArthur Grillo {
992*fc8d29e2SArthur Grillo 	struct drm_mm mm;
993*fc8d29e2SArthur Grillo 	const struct insert_mode *mode;
994*fc8d29e2SArthur Grillo 	struct drm_mm_node *nodes, *node, *next;
995*fc8d29e2SArthur Grillo 	unsigned int insert_size = 10000;
996*fc8d29e2SArthur Grillo 	unsigned int scale_factor = 4;
997*fc8d29e2SArthur Grillo 
998*fc8d29e2SArthur Grillo 	/* We need 4 * insert_size nodes to hold intermediate allocated
999*fc8d29e2SArthur Grillo 	 * drm_mm nodes.
1000*fc8d29e2SArthur Grillo 	 * 1 times for prepare_igt_frag(struct kunit *test, )
1001*fc8d29e2SArthur Grillo 	 * 1 times for get_insert_time(struct kunit *test, )
1002*fc8d29e2SArthur Grillo 	 * 2 times for get_insert_time(struct kunit *test, )
1003*fc8d29e2SArthur Grillo 	 */
1004*fc8d29e2SArthur Grillo 	nodes = vzalloc(array_size(insert_size * 4, sizeof(*nodes)));
1005*fc8d29e2SArthur Grillo 	KUNIT_ASSERT_TRUE(test, nodes);
1006*fc8d29e2SArthur Grillo 
1007*fc8d29e2SArthur Grillo 	/* For BOTTOMUP and TOPDOWN, we first fragment the
1008*fc8d29e2SArthur Grillo 	 * address space using prepare_igt_frag(struct kunit *test, ) and then try to verify
1009*fc8d29e2SArthur Grillo 	 * that insertions scale quadratically from 10k to 20k insertions
1010*fc8d29e2SArthur Grillo 	 */
1011*fc8d29e2SArthur Grillo 	drm_mm_init(&mm, 1, U64_MAX - 2);
1012*fc8d29e2SArthur Grillo 	for (mode = insert_modes; mode->name; mode++) {
1013*fc8d29e2SArthur Grillo 		u64 insert_time1, insert_time2;
1014*fc8d29e2SArthur Grillo 
1015*fc8d29e2SArthur Grillo 		if (mode->mode != DRM_MM_INSERT_LOW &&
1016*fc8d29e2SArthur Grillo 		    mode->mode != DRM_MM_INSERT_HIGH)
1017*fc8d29e2SArthur Grillo 			continue;
1018*fc8d29e2SArthur Grillo 
1019*fc8d29e2SArthur Grillo 		if (prepare_igt_frag(test, &mm, nodes, insert_size, mode))
1020*fc8d29e2SArthur Grillo 			goto err;
1021*fc8d29e2SArthur Grillo 
1022*fc8d29e2SArthur Grillo 		insert_time1 = get_insert_time(test, &mm, insert_size,
1023*fc8d29e2SArthur Grillo 					       nodes + insert_size, mode);
1024*fc8d29e2SArthur Grillo 		if (insert_time1 == 0)
1025*fc8d29e2SArthur Grillo 			goto err;
1026*fc8d29e2SArthur Grillo 
1027*fc8d29e2SArthur Grillo 		insert_time2 = get_insert_time(test, &mm, (insert_size * 2),
1028*fc8d29e2SArthur Grillo 					       nodes + insert_size * 2, mode);
1029*fc8d29e2SArthur Grillo 		if (insert_time2 == 0)
1030*fc8d29e2SArthur Grillo 			goto err;
1031*fc8d29e2SArthur Grillo 
1032*fc8d29e2SArthur Grillo 		kunit_info(test, "%s fragmented insert of %u and %u insertions took %llu and %llu nsecs\n",
1033*fc8d29e2SArthur Grillo 			   mode->name, insert_size, insert_size * 2, insert_time1, insert_time2);
1034*fc8d29e2SArthur Grillo 
1035*fc8d29e2SArthur Grillo 		if (insert_time2 > (scale_factor * insert_time1)) {
1036*fc8d29e2SArthur Grillo 			KUNIT_FAIL(test, "%s fragmented insert took %llu nsecs more\n",
1037*fc8d29e2SArthur Grillo 				   mode->name, insert_time2 - (scale_factor * insert_time1));
1038*fc8d29e2SArthur Grillo 			goto err;
1039*fc8d29e2SArthur Grillo 		}
1040*fc8d29e2SArthur Grillo 
1041*fc8d29e2SArthur Grillo 		drm_mm_for_each_node_safe(node, next, &mm)
1042*fc8d29e2SArthur Grillo 			drm_mm_remove_node(node);
1043*fc8d29e2SArthur Grillo 	}
1044*fc8d29e2SArthur Grillo 
1045*fc8d29e2SArthur Grillo err:
1046*fc8d29e2SArthur Grillo 	drm_mm_for_each_node_safe(node, next, &mm)
1047*fc8d29e2SArthur Grillo 		drm_mm_remove_node(node);
1048*fc8d29e2SArthur Grillo 	drm_mm_takedown(&mm);
1049*fc8d29e2SArthur Grillo 	vfree(nodes);
1050*fc8d29e2SArthur Grillo }
1051*fc8d29e2SArthur Grillo 
1052*fc8d29e2SArthur Grillo static void igt_mm_align(struct kunit *test)
1053*fc8d29e2SArthur Grillo {
1054*fc8d29e2SArthur Grillo 	const struct insert_mode *mode;
1055*fc8d29e2SArthur Grillo 	const unsigned int max_count = min(8192u, max_prime);
1056*fc8d29e2SArthur Grillo 	struct drm_mm mm;
1057*fc8d29e2SArthur Grillo 	struct drm_mm_node *nodes, *node, *next;
1058*fc8d29e2SArthur Grillo 	unsigned int prime;
1059*fc8d29e2SArthur Grillo 
1060*fc8d29e2SArthur Grillo 	/* For each of the possible insertion modes, we pick a few
1061*fc8d29e2SArthur Grillo 	 * arbitrary alignments and check that the inserted node
1062*fc8d29e2SArthur Grillo 	 * meets our requirements.
1063*fc8d29e2SArthur Grillo 	 */
1064*fc8d29e2SArthur Grillo 
1065*fc8d29e2SArthur Grillo 	nodes = vzalloc(array_size(max_count, sizeof(*nodes)));
1066*fc8d29e2SArthur Grillo 	KUNIT_ASSERT_TRUE(test, nodes);
1067*fc8d29e2SArthur Grillo 
1068*fc8d29e2SArthur Grillo 	drm_mm_init(&mm, 1, U64_MAX - 2);
1069*fc8d29e2SArthur Grillo 
1070*fc8d29e2SArthur Grillo 	for (mode = insert_modes; mode->name; mode++) {
1071*fc8d29e2SArthur Grillo 		unsigned int i = 0;
1072*fc8d29e2SArthur Grillo 
1073*fc8d29e2SArthur Grillo 		for_each_prime_number_from(prime, 1, max_count) {
1074*fc8d29e2SArthur Grillo 			u64 size = next_prime_number(prime);
1075*fc8d29e2SArthur Grillo 
1076*fc8d29e2SArthur Grillo 			if (!expect_insert(test, &mm, &nodes[i], size, prime, i, mode)) {
1077*fc8d29e2SArthur Grillo 				KUNIT_FAIL(test, "%s insert failed with alignment=%d",
1078*fc8d29e2SArthur Grillo 					   mode->name, prime);
1079*fc8d29e2SArthur Grillo 				goto out;
1080*fc8d29e2SArthur Grillo 			}
1081*fc8d29e2SArthur Grillo 
1082*fc8d29e2SArthur Grillo 			i++;
1083*fc8d29e2SArthur Grillo 		}
1084*fc8d29e2SArthur Grillo 
1085*fc8d29e2SArthur Grillo 		drm_mm_for_each_node_safe(node, next, &mm)
1086*fc8d29e2SArthur Grillo 			drm_mm_remove_node(node);
1087*fc8d29e2SArthur Grillo 		DRM_MM_BUG_ON(!drm_mm_clean(&mm));
1088*fc8d29e2SArthur Grillo 
1089*fc8d29e2SArthur Grillo 		cond_resched();
1090*fc8d29e2SArthur Grillo 	}
1091*fc8d29e2SArthur Grillo 
1092*fc8d29e2SArthur Grillo out:
1093*fc8d29e2SArthur Grillo 	drm_mm_for_each_node_safe(node, next, &mm)
1094*fc8d29e2SArthur Grillo 		drm_mm_remove_node(node);
1095*fc8d29e2SArthur Grillo 	drm_mm_takedown(&mm);
1096*fc8d29e2SArthur Grillo 	vfree(nodes);
1097*fc8d29e2SArthur Grillo }
1098*fc8d29e2SArthur Grillo 
1099*fc8d29e2SArthur Grillo static void igt_align_pot(struct kunit *test, int max)
1100*fc8d29e2SArthur Grillo {
1101*fc8d29e2SArthur Grillo 	struct drm_mm mm;
1102*fc8d29e2SArthur Grillo 	struct drm_mm_node *node, *next;
1103*fc8d29e2SArthur Grillo 	int bit;
1104*fc8d29e2SArthur Grillo 
1105*fc8d29e2SArthur Grillo 	/* Check that we can align to the full u64 address space */
1106*fc8d29e2SArthur Grillo 
1107*fc8d29e2SArthur Grillo 	drm_mm_init(&mm, 1, U64_MAX - 2);
1108*fc8d29e2SArthur Grillo 
1109*fc8d29e2SArthur Grillo 	for (bit = max - 1; bit; bit--) {
1110*fc8d29e2SArthur Grillo 		u64 align, size;
1111*fc8d29e2SArthur Grillo 
1112*fc8d29e2SArthur Grillo 		node = kzalloc(sizeof(*node), GFP_KERNEL);
1113*fc8d29e2SArthur Grillo 		if (!node) {
1114*fc8d29e2SArthur Grillo 			KUNIT_FAIL(test, "failed to allocate node");
1115*fc8d29e2SArthur Grillo 			goto out;
1116*fc8d29e2SArthur Grillo 		}
1117*fc8d29e2SArthur Grillo 
1118*fc8d29e2SArthur Grillo 		align = BIT_ULL(bit);
1119*fc8d29e2SArthur Grillo 		size = BIT_ULL(bit - 1) + 1;
1120*fc8d29e2SArthur Grillo 		if (!expect_insert(test, &mm, node, size, align, bit, &insert_modes[0])) {
1121*fc8d29e2SArthur Grillo 			KUNIT_FAIL(test, "insert failed with alignment=%llx [%d]", align, bit);
1122*fc8d29e2SArthur Grillo 			goto out;
1123*fc8d29e2SArthur Grillo 		}
1124*fc8d29e2SArthur Grillo 
1125*fc8d29e2SArthur Grillo 		cond_resched();
1126*fc8d29e2SArthur Grillo 	}
1127*fc8d29e2SArthur Grillo 
1128*fc8d29e2SArthur Grillo out:
1129*fc8d29e2SArthur Grillo 	drm_mm_for_each_node_safe(node, next, &mm) {
1130*fc8d29e2SArthur Grillo 		drm_mm_remove_node(node);
1131*fc8d29e2SArthur Grillo 		kfree(node);
1132*fc8d29e2SArthur Grillo 	}
1133*fc8d29e2SArthur Grillo 	drm_mm_takedown(&mm);
1134*fc8d29e2SArthur Grillo }
1135*fc8d29e2SArthur Grillo 
1136*fc8d29e2SArthur Grillo static void igt_mm_align32(struct kunit *test)
1137*fc8d29e2SArthur Grillo {
1138*fc8d29e2SArthur Grillo 	igt_align_pot(test, 32);
1139*fc8d29e2SArthur Grillo }
1140*fc8d29e2SArthur Grillo 
1141*fc8d29e2SArthur Grillo static void igt_mm_align64(struct kunit *test)
1142*fc8d29e2SArthur Grillo {
1143*fc8d29e2SArthur Grillo 	igt_align_pot(test, 64);
1144*fc8d29e2SArthur Grillo }
1145*fc8d29e2SArthur Grillo 
1146*fc8d29e2SArthur Grillo static void show_scan(struct kunit *test, const struct drm_mm_scan *scan)
1147*fc8d29e2SArthur Grillo {
1148*fc8d29e2SArthur Grillo 	kunit_info(test, "scan: hit [%llx, %llx], size=%lld, align=%lld, color=%ld\n",
1149*fc8d29e2SArthur Grillo 		   scan->hit_start, scan->hit_end, scan->size, scan->alignment, scan->color);
1150*fc8d29e2SArthur Grillo }
1151*fc8d29e2SArthur Grillo 
1152*fc8d29e2SArthur Grillo static void show_holes(struct kunit *test, const struct drm_mm *mm, int count)
1153*fc8d29e2SArthur Grillo {
1154*fc8d29e2SArthur Grillo 	u64 hole_start, hole_end;
1155*fc8d29e2SArthur Grillo 	struct drm_mm_node *hole;
1156*fc8d29e2SArthur Grillo 
1157*fc8d29e2SArthur Grillo 	drm_mm_for_each_hole(hole, mm, hole_start, hole_end) {
1158*fc8d29e2SArthur Grillo 		struct drm_mm_node *next = list_next_entry(hole, node_list);
1159*fc8d29e2SArthur Grillo 		const char *node1 = NULL, *node2 = NULL;
1160*fc8d29e2SArthur Grillo 
1161*fc8d29e2SArthur Grillo 		if (drm_mm_node_allocated(hole))
1162*fc8d29e2SArthur Grillo 			node1 = kasprintf(GFP_KERNEL, "[%llx + %lld, color=%ld], ",
1163*fc8d29e2SArthur Grillo 					  hole->start, hole->size, hole->color);
1164*fc8d29e2SArthur Grillo 
1165*fc8d29e2SArthur Grillo 		if (drm_mm_node_allocated(next))
1166*fc8d29e2SArthur Grillo 			node2 = kasprintf(GFP_KERNEL, ", [%llx + %lld, color=%ld]",
1167*fc8d29e2SArthur Grillo 					  next->start, next->size, next->color);
1168*fc8d29e2SArthur Grillo 
1169*fc8d29e2SArthur Grillo 		kunit_info(test, "%sHole [%llx - %llx, size %lld]%s\n", node1,
1170*fc8d29e2SArthur Grillo 			   hole_start, hole_end, hole_end - hole_start, node2);
1171*fc8d29e2SArthur Grillo 
1172*fc8d29e2SArthur Grillo 		kfree(node2);
1173*fc8d29e2SArthur Grillo 		kfree(node1);
1174*fc8d29e2SArthur Grillo 
1175*fc8d29e2SArthur Grillo 		if (!--count)
1176*fc8d29e2SArthur Grillo 			break;
1177*fc8d29e2SArthur Grillo 	}
1178*fc8d29e2SArthur Grillo }
1179*fc8d29e2SArthur Grillo 
1180*fc8d29e2SArthur Grillo struct evict_node {
1181*fc8d29e2SArthur Grillo 	struct drm_mm_node node;
1182*fc8d29e2SArthur Grillo 	struct list_head link;
1183*fc8d29e2SArthur Grillo };
1184*fc8d29e2SArthur Grillo 
1185*fc8d29e2SArthur Grillo static bool evict_nodes(struct kunit *test, struct drm_mm_scan *scan,
1186*fc8d29e2SArthur Grillo 			struct evict_node *nodes, unsigned int *order, unsigned int count,
1187*fc8d29e2SArthur Grillo 			bool use_color, struct list_head *evict_list)
1188*fc8d29e2SArthur Grillo {
1189*fc8d29e2SArthur Grillo 	struct evict_node *e, *en;
1190*fc8d29e2SArthur Grillo 	unsigned int i;
1191*fc8d29e2SArthur Grillo 
1192*fc8d29e2SArthur Grillo 	for (i = 0; i < count; i++) {
1193*fc8d29e2SArthur Grillo 		e = &nodes[order ? order[i] : i];
1194*fc8d29e2SArthur Grillo 		list_add(&e->link, evict_list);
1195*fc8d29e2SArthur Grillo 		if (drm_mm_scan_add_block(scan, &e->node))
1196*fc8d29e2SArthur Grillo 			break;
1197*fc8d29e2SArthur Grillo 	}
1198*fc8d29e2SArthur Grillo 	list_for_each_entry_safe(e, en, evict_list, link) {
1199*fc8d29e2SArthur Grillo 		if (!drm_mm_scan_remove_block(scan, &e->node))
1200*fc8d29e2SArthur Grillo 			list_del(&e->link);
1201*fc8d29e2SArthur Grillo 	}
1202*fc8d29e2SArthur Grillo 	if (list_empty(evict_list)) {
1203*fc8d29e2SArthur Grillo 		KUNIT_FAIL(test,
1204*fc8d29e2SArthur Grillo 			   "Failed to find eviction: size=%lld [avail=%d], align=%lld (color=%lu)\n",
1205*fc8d29e2SArthur Grillo 			   scan->size, count, scan->alignment, scan->color);
1206*fc8d29e2SArthur Grillo 		return false;
1207*fc8d29e2SArthur Grillo 	}
1208*fc8d29e2SArthur Grillo 
1209*fc8d29e2SArthur Grillo 	list_for_each_entry(e, evict_list, link)
1210*fc8d29e2SArthur Grillo 		drm_mm_remove_node(&e->node);
1211*fc8d29e2SArthur Grillo 
1212*fc8d29e2SArthur Grillo 	if (use_color) {
1213*fc8d29e2SArthur Grillo 		struct drm_mm_node *node;
1214*fc8d29e2SArthur Grillo 
1215*fc8d29e2SArthur Grillo 		while ((node = drm_mm_scan_color_evict(scan))) {
1216*fc8d29e2SArthur Grillo 			e = container_of(node, typeof(*e), node);
1217*fc8d29e2SArthur Grillo 			drm_mm_remove_node(&e->node);
1218*fc8d29e2SArthur Grillo 			list_add(&e->link, evict_list);
1219*fc8d29e2SArthur Grillo 		}
1220*fc8d29e2SArthur Grillo 	} else {
1221*fc8d29e2SArthur Grillo 		if (drm_mm_scan_color_evict(scan)) {
1222*fc8d29e2SArthur Grillo 			KUNIT_FAIL(test,
1223*fc8d29e2SArthur Grillo 				   "drm_mm_scan_color_evict unexpectedly reported overlapping nodes!\n");
1224*fc8d29e2SArthur Grillo 			return false;
1225*fc8d29e2SArthur Grillo 		}
1226*fc8d29e2SArthur Grillo 	}
1227*fc8d29e2SArthur Grillo 
1228*fc8d29e2SArthur Grillo 	return true;
1229*fc8d29e2SArthur Grillo }
1230*fc8d29e2SArthur Grillo 
1231*fc8d29e2SArthur Grillo static bool evict_nothing(struct kunit *test, struct drm_mm *mm,
1232*fc8d29e2SArthur Grillo 			  unsigned int total_size, struct evict_node *nodes)
1233*fc8d29e2SArthur Grillo {
1234*fc8d29e2SArthur Grillo 	struct drm_mm_scan scan;
1235*fc8d29e2SArthur Grillo 	LIST_HEAD(evict_list);
1236*fc8d29e2SArthur Grillo 	struct evict_node *e;
1237*fc8d29e2SArthur Grillo 	struct drm_mm_node *node;
1238*fc8d29e2SArthur Grillo 	unsigned int n;
1239*fc8d29e2SArthur Grillo 
1240*fc8d29e2SArthur Grillo 	drm_mm_scan_init(&scan, mm, 1, 0, 0, 0);
1241*fc8d29e2SArthur Grillo 	for (n = 0; n < total_size; n++) {
1242*fc8d29e2SArthur Grillo 		e = &nodes[n];
1243*fc8d29e2SArthur Grillo 		list_add(&e->link, &evict_list);
1244*fc8d29e2SArthur Grillo 		drm_mm_scan_add_block(&scan, &e->node);
1245*fc8d29e2SArthur Grillo 	}
1246*fc8d29e2SArthur Grillo 	list_for_each_entry(e, &evict_list, link)
1247*fc8d29e2SArthur Grillo 		drm_mm_scan_remove_block(&scan, &e->node);
1248*fc8d29e2SArthur Grillo 
1249*fc8d29e2SArthur Grillo 	for (n = 0; n < total_size; n++) {
1250*fc8d29e2SArthur Grillo 		e = &nodes[n];
1251*fc8d29e2SArthur Grillo 
1252*fc8d29e2SArthur Grillo 		if (!drm_mm_node_allocated(&e->node)) {
1253*fc8d29e2SArthur Grillo 			KUNIT_FAIL(test, "node[%d] no longer allocated!\n", n);
1254*fc8d29e2SArthur Grillo 			return false;
1255*fc8d29e2SArthur Grillo 		}
1256*fc8d29e2SArthur Grillo 
1257*fc8d29e2SArthur Grillo 		e->link.next = NULL;
1258*fc8d29e2SArthur Grillo 	}
1259*fc8d29e2SArthur Grillo 
1260*fc8d29e2SArthur Grillo 	drm_mm_for_each_node(node, mm) {
1261*fc8d29e2SArthur Grillo 		e = container_of(node, typeof(*e), node);
1262*fc8d29e2SArthur Grillo 		e->link.next = &e->link;
1263*fc8d29e2SArthur Grillo 	}
1264*fc8d29e2SArthur Grillo 
1265*fc8d29e2SArthur Grillo 	for (n = 0; n < total_size; n++) {
1266*fc8d29e2SArthur Grillo 		e = &nodes[n];
1267*fc8d29e2SArthur Grillo 
1268*fc8d29e2SArthur Grillo 		if (!e->link.next) {
1269*fc8d29e2SArthur Grillo 			KUNIT_FAIL(test, "node[%d] no longer connected!\n", n);
1270*fc8d29e2SArthur Grillo 			return false;
1271*fc8d29e2SArthur Grillo 		}
1272*fc8d29e2SArthur Grillo 	}
1273*fc8d29e2SArthur Grillo 
1274*fc8d29e2SArthur Grillo 	return assert_continuous(test, mm, nodes[0].node.size);
1275*fc8d29e2SArthur Grillo }
1276*fc8d29e2SArthur Grillo 
1277*fc8d29e2SArthur Grillo static bool evict_everything(struct kunit *test, struct drm_mm *mm,
1278*fc8d29e2SArthur Grillo 			     unsigned int total_size, struct evict_node *nodes)
1279*fc8d29e2SArthur Grillo {
1280*fc8d29e2SArthur Grillo 	struct drm_mm_scan scan;
1281*fc8d29e2SArthur Grillo 	LIST_HEAD(evict_list);
1282*fc8d29e2SArthur Grillo 	struct evict_node *e;
1283*fc8d29e2SArthur Grillo 	unsigned int n;
1284*fc8d29e2SArthur Grillo 	int err;
1285*fc8d29e2SArthur Grillo 
1286*fc8d29e2SArthur Grillo 	drm_mm_scan_init(&scan, mm, total_size, 0, 0, 0);
1287*fc8d29e2SArthur Grillo 	for (n = 0; n < total_size; n++) {
1288*fc8d29e2SArthur Grillo 		e = &nodes[n];
1289*fc8d29e2SArthur Grillo 		list_add(&e->link, &evict_list);
1290*fc8d29e2SArthur Grillo 		if (drm_mm_scan_add_block(&scan, &e->node))
1291*fc8d29e2SArthur Grillo 			break;
1292*fc8d29e2SArthur Grillo 	}
1293*fc8d29e2SArthur Grillo 
1294*fc8d29e2SArthur Grillo 	err = 0;
1295*fc8d29e2SArthur Grillo 	list_for_each_entry(e, &evict_list, link) {
1296*fc8d29e2SArthur Grillo 		if (!drm_mm_scan_remove_block(&scan, &e->node)) {
1297*fc8d29e2SArthur Grillo 			if (!err) {
1298*fc8d29e2SArthur Grillo 				KUNIT_FAIL(test, "Node %lld not marked for eviction!\n",
1299*fc8d29e2SArthur Grillo 					   e->node.start);
1300*fc8d29e2SArthur Grillo 				err = -EINVAL;
1301*fc8d29e2SArthur Grillo 			}
1302*fc8d29e2SArthur Grillo 		}
1303*fc8d29e2SArthur Grillo 	}
1304*fc8d29e2SArthur Grillo 	if (err)
1305*fc8d29e2SArthur Grillo 		return false;
1306*fc8d29e2SArthur Grillo 
1307*fc8d29e2SArthur Grillo 	list_for_each_entry(e, &evict_list, link)
1308*fc8d29e2SArthur Grillo 		drm_mm_remove_node(&e->node);
1309*fc8d29e2SArthur Grillo 
1310*fc8d29e2SArthur Grillo 	if (!assert_one_hole(test, mm, 0, total_size))
1311*fc8d29e2SArthur Grillo 		return false;
1312*fc8d29e2SArthur Grillo 
1313*fc8d29e2SArthur Grillo 	list_for_each_entry(e, &evict_list, link) {
1314*fc8d29e2SArthur Grillo 		err = drm_mm_reserve_node(mm, &e->node);
1315*fc8d29e2SArthur Grillo 		if (err) {
1316*fc8d29e2SArthur Grillo 			KUNIT_FAIL(test, "Failed to reinsert node after eviction: start=%llx\n",
1317*fc8d29e2SArthur Grillo 				   e->node.start);
1318*fc8d29e2SArthur Grillo 			return false;
1319*fc8d29e2SArthur Grillo 		}
1320*fc8d29e2SArthur Grillo 	}
1321*fc8d29e2SArthur Grillo 
1322*fc8d29e2SArthur Grillo 	return assert_continuous(test, mm, nodes[0].node.size);
1323*fc8d29e2SArthur Grillo }
1324*fc8d29e2SArthur Grillo 
1325*fc8d29e2SArthur Grillo static int evict_something(struct kunit *test, struct drm_mm *mm,
1326*fc8d29e2SArthur Grillo 			   u64 range_start, u64 range_end, struct evict_node *nodes,
1327*fc8d29e2SArthur Grillo 			   unsigned int *order, unsigned int count, unsigned int size,
1328*fc8d29e2SArthur Grillo 			   unsigned int alignment, const struct insert_mode *mode)
1329*fc8d29e2SArthur Grillo {
1330*fc8d29e2SArthur Grillo 	struct drm_mm_scan scan;
1331*fc8d29e2SArthur Grillo 	LIST_HEAD(evict_list);
1332*fc8d29e2SArthur Grillo 	struct evict_node *e;
1333*fc8d29e2SArthur Grillo 	struct drm_mm_node tmp;
1334*fc8d29e2SArthur Grillo 	int err;
1335*fc8d29e2SArthur Grillo 
1336*fc8d29e2SArthur Grillo 	drm_mm_scan_init_with_range(&scan, mm, size, alignment, 0, range_start,
1337*fc8d29e2SArthur Grillo 				    range_end, mode->mode);
1338*fc8d29e2SArthur Grillo 	if (!evict_nodes(test, &scan, nodes, order, count, false, &evict_list))
1339*fc8d29e2SArthur Grillo 		return -EINVAL;
1340*fc8d29e2SArthur Grillo 
1341*fc8d29e2SArthur Grillo 	memset(&tmp, 0, sizeof(tmp));
1342*fc8d29e2SArthur Grillo 	err = drm_mm_insert_node_generic(mm, &tmp, size, alignment, 0,
1343*fc8d29e2SArthur Grillo 					 DRM_MM_INSERT_EVICT);
1344*fc8d29e2SArthur Grillo 	if (err) {
1345*fc8d29e2SArthur Grillo 		KUNIT_FAIL(test, "Failed to insert into eviction hole: size=%d, align=%d\n",
1346*fc8d29e2SArthur Grillo 			   size, alignment);
1347*fc8d29e2SArthur Grillo 		show_scan(test, &scan);
1348*fc8d29e2SArthur Grillo 		show_holes(test, mm, 3);
1349*fc8d29e2SArthur Grillo 		return err;
1350*fc8d29e2SArthur Grillo 	}
1351*fc8d29e2SArthur Grillo 
1352*fc8d29e2SArthur Grillo 	if (tmp.start < range_start || tmp.start + tmp.size > range_end) {
1353*fc8d29e2SArthur Grillo 		KUNIT_FAIL(test,
1354*fc8d29e2SArthur Grillo 			   "Inserted [address=%llu + %llu] did not fit into the request range [%llu, %llu]\n",
1355*fc8d29e2SArthur Grillo 			   tmp.start, tmp.size, range_start, range_end);
1356*fc8d29e2SArthur Grillo 		err = -EINVAL;
1357*fc8d29e2SArthur Grillo 	}
1358*fc8d29e2SArthur Grillo 
1359*fc8d29e2SArthur Grillo 	if (!assert_node(test, &tmp, mm, size, alignment, 0) ||
1360*fc8d29e2SArthur Grillo 	    drm_mm_hole_follows(&tmp)) {
1361*fc8d29e2SArthur Grillo 		KUNIT_FAIL(test,
1362*fc8d29e2SArthur Grillo 			   "Inserted did not fill the eviction hole: size=%lld [%d], align=%d [rem=%lld], start=%llx, hole-follows?=%d\n",
1363*fc8d29e2SArthur Grillo 			   tmp.size, size, alignment, misalignment(&tmp, alignment),
1364*fc8d29e2SArthur Grillo 			   tmp.start, drm_mm_hole_follows(&tmp));
1365*fc8d29e2SArthur Grillo 		err = -EINVAL;
1366*fc8d29e2SArthur Grillo 	}
1367*fc8d29e2SArthur Grillo 
1368*fc8d29e2SArthur Grillo 	drm_mm_remove_node(&tmp);
1369*fc8d29e2SArthur Grillo 	if (err)
1370*fc8d29e2SArthur Grillo 		return err;
1371*fc8d29e2SArthur Grillo 
1372*fc8d29e2SArthur Grillo 	list_for_each_entry(e, &evict_list, link) {
1373*fc8d29e2SArthur Grillo 		err = drm_mm_reserve_node(mm, &e->node);
1374*fc8d29e2SArthur Grillo 		if (err) {
1375*fc8d29e2SArthur Grillo 			KUNIT_FAIL(test, "Failed to reinsert node after eviction: start=%llx\n",
1376*fc8d29e2SArthur Grillo 				   e->node.start);
1377*fc8d29e2SArthur Grillo 			return err;
1378*fc8d29e2SArthur Grillo 		}
1379*fc8d29e2SArthur Grillo 	}
1380*fc8d29e2SArthur Grillo 
1381*fc8d29e2SArthur Grillo 	if (!assert_continuous(test, mm, nodes[0].node.size)) {
1382*fc8d29e2SArthur Grillo 		KUNIT_FAIL(test, "range is no longer continuous\n");
1383*fc8d29e2SArthur Grillo 		return -EINVAL;
1384*fc8d29e2SArthur Grillo 	}
1385*fc8d29e2SArthur Grillo 
1386*fc8d29e2SArthur Grillo 	return 0;
1387*fc8d29e2SArthur Grillo }
1388*fc8d29e2SArthur Grillo 
1389*fc8d29e2SArthur Grillo static void igt_mm_evict(struct kunit *test)
1390*fc8d29e2SArthur Grillo {
1391*fc8d29e2SArthur Grillo 	DRM_RND_STATE(prng, random_seed);
1392*fc8d29e2SArthur Grillo 	const unsigned int size = 8192;
1393*fc8d29e2SArthur Grillo 	const struct insert_mode *mode;
1394*fc8d29e2SArthur Grillo 	struct drm_mm mm;
1395*fc8d29e2SArthur Grillo 	struct evict_node *nodes;
1396*fc8d29e2SArthur Grillo 	struct drm_mm_node *node, *next;
1397*fc8d29e2SArthur Grillo 	unsigned int *order, n;
1398*fc8d29e2SArthur Grillo 
1399*fc8d29e2SArthur Grillo 	/* Here we populate a full drm_mm and then try and insert a new node
1400*fc8d29e2SArthur Grillo 	 * by evicting other nodes in a random order. The drm_mm_scan should
1401*fc8d29e2SArthur Grillo 	 * pick the first matching hole it finds from the random list. We
1402*fc8d29e2SArthur Grillo 	 * repeat that for different allocation strategies, alignments and
1403*fc8d29e2SArthur Grillo 	 * sizes to try and stress the hole finder.
1404*fc8d29e2SArthur Grillo 	 */
1405*fc8d29e2SArthur Grillo 
1406*fc8d29e2SArthur Grillo 	nodes = vzalloc(array_size(size, sizeof(*nodes)));
1407*fc8d29e2SArthur Grillo 	KUNIT_ASSERT_TRUE(test, nodes);
1408*fc8d29e2SArthur Grillo 
1409*fc8d29e2SArthur Grillo 	order = drm_random_order(size, &prng);
1410*fc8d29e2SArthur Grillo 	if (!order)
1411*fc8d29e2SArthur Grillo 		goto err_nodes;
1412*fc8d29e2SArthur Grillo 
1413*fc8d29e2SArthur Grillo 	drm_mm_init(&mm, 0, size);
1414*fc8d29e2SArthur Grillo 	for (n = 0; n < size; n++) {
1415*fc8d29e2SArthur Grillo 		if (drm_mm_insert_node(&mm, &nodes[n].node, 1)) {
1416*fc8d29e2SArthur Grillo 			KUNIT_FAIL(test, "insert failed, step %d\n", n);
1417*fc8d29e2SArthur Grillo 			goto out;
1418*fc8d29e2SArthur Grillo 		}
1419*fc8d29e2SArthur Grillo 	}
1420*fc8d29e2SArthur Grillo 
1421*fc8d29e2SArthur Grillo 	/* First check that using the scanner doesn't break the mm */
1422*fc8d29e2SArthur Grillo 	if (!evict_nothing(test, &mm, size, nodes)) {
1423*fc8d29e2SArthur Grillo 		KUNIT_FAIL(test, "evict_nothing() failed\n");
1424*fc8d29e2SArthur Grillo 		goto out;
1425*fc8d29e2SArthur Grillo 	}
1426*fc8d29e2SArthur Grillo 	if (!evict_everything(test, &mm, size, nodes)) {
1427*fc8d29e2SArthur Grillo 		KUNIT_FAIL(test, "evict_everything() failed\n");
1428*fc8d29e2SArthur Grillo 		goto out;
1429*fc8d29e2SArthur Grillo 	}
1430*fc8d29e2SArthur Grillo 
1431*fc8d29e2SArthur Grillo 	for (mode = evict_modes; mode->name; mode++) {
1432*fc8d29e2SArthur Grillo 		for (n = 1; n <= size; n <<= 1) {
1433*fc8d29e2SArthur Grillo 			drm_random_reorder(order, size, &prng);
1434*fc8d29e2SArthur Grillo 			if (evict_something(test, &mm, 0, U64_MAX, nodes, order, size, n, 1,
1435*fc8d29e2SArthur Grillo 					    mode)) {
1436*fc8d29e2SArthur Grillo 				KUNIT_FAIL(test, "%s evict_something(size=%u) failed\n",
1437*fc8d29e2SArthur Grillo 					   mode->name, n);
1438*fc8d29e2SArthur Grillo 				goto out;
1439*fc8d29e2SArthur Grillo 			}
1440*fc8d29e2SArthur Grillo 		}
1441*fc8d29e2SArthur Grillo 
1442*fc8d29e2SArthur Grillo 		for (n = 1; n < size; n <<= 1) {
1443*fc8d29e2SArthur Grillo 			drm_random_reorder(order, size, &prng);
1444*fc8d29e2SArthur Grillo 			if (evict_something(test, &mm, 0, U64_MAX, nodes, order, size,
1445*fc8d29e2SArthur Grillo 					    size / 2, n, mode)) {
1446*fc8d29e2SArthur Grillo 				KUNIT_FAIL(test,
1447*fc8d29e2SArthur Grillo 					   "%s evict_something(size=%u, alignment=%u) failed\n",
1448*fc8d29e2SArthur Grillo 					   mode->name, size / 2, n);
1449*fc8d29e2SArthur Grillo 				goto out;
1450*fc8d29e2SArthur Grillo 			}
1451*fc8d29e2SArthur Grillo 		}
1452*fc8d29e2SArthur Grillo 
1453*fc8d29e2SArthur Grillo 		for_each_prime_number_from(n, 1, min(size, max_prime)) {
1454*fc8d29e2SArthur Grillo 			unsigned int nsize = (size - n + 1) / 2;
1455*fc8d29e2SArthur Grillo 
1456*fc8d29e2SArthur Grillo 			DRM_MM_BUG_ON(!nsize);
1457*fc8d29e2SArthur Grillo 
1458*fc8d29e2SArthur Grillo 			drm_random_reorder(order, size, &prng);
1459*fc8d29e2SArthur Grillo 			if (evict_something(test, &mm, 0, U64_MAX, nodes, order, size,
1460*fc8d29e2SArthur Grillo 					    nsize, n, mode)) {
1461*fc8d29e2SArthur Grillo 				KUNIT_FAIL(test,
1462*fc8d29e2SArthur Grillo 					   "%s evict_something(size=%u, alignment=%u) failed\n",
1463*fc8d29e2SArthur Grillo 					   mode->name, nsize, n);
1464*fc8d29e2SArthur Grillo 				goto out;
1465*fc8d29e2SArthur Grillo 			}
1466*fc8d29e2SArthur Grillo 		}
1467*fc8d29e2SArthur Grillo 
1468*fc8d29e2SArthur Grillo 		cond_resched();
1469*fc8d29e2SArthur Grillo 	}
1470*fc8d29e2SArthur Grillo 
1471*fc8d29e2SArthur Grillo out:
1472*fc8d29e2SArthur Grillo 	drm_mm_for_each_node_safe(node, next, &mm)
1473*fc8d29e2SArthur Grillo 		drm_mm_remove_node(node);
1474*fc8d29e2SArthur Grillo 	drm_mm_takedown(&mm);
1475*fc8d29e2SArthur Grillo 	kfree(order);
1476*fc8d29e2SArthur Grillo err_nodes:
1477*fc8d29e2SArthur Grillo 	vfree(nodes);
1478*fc8d29e2SArthur Grillo }
1479*fc8d29e2SArthur Grillo 
1480*fc8d29e2SArthur Grillo static void igt_mm_evict_range(struct kunit *test)
1481*fc8d29e2SArthur Grillo {
1482*fc8d29e2SArthur Grillo 	DRM_RND_STATE(prng, random_seed);
1483*fc8d29e2SArthur Grillo 	const unsigned int size = 8192;
1484*fc8d29e2SArthur Grillo 	const unsigned int range_size = size / 2;
1485*fc8d29e2SArthur Grillo 	const unsigned int range_start = size / 4;
1486*fc8d29e2SArthur Grillo 	const unsigned int range_end = range_start + range_size;
1487*fc8d29e2SArthur Grillo 	const struct insert_mode *mode;
1488*fc8d29e2SArthur Grillo 	struct drm_mm mm;
1489*fc8d29e2SArthur Grillo 	struct evict_node *nodes;
1490*fc8d29e2SArthur Grillo 	struct drm_mm_node *node, *next;
1491*fc8d29e2SArthur Grillo 	unsigned int *order, n;
1492*fc8d29e2SArthur Grillo 
1493*fc8d29e2SArthur Grillo 	/* Like igt_evict() but now we are limiting the search to a
1494*fc8d29e2SArthur Grillo 	 * small portion of the full drm_mm.
1495*fc8d29e2SArthur Grillo 	 */
1496*fc8d29e2SArthur Grillo 
1497*fc8d29e2SArthur Grillo 	nodes = vzalloc(array_size(size, sizeof(*nodes)));
1498*fc8d29e2SArthur Grillo 	KUNIT_ASSERT_TRUE(test, nodes);
1499*fc8d29e2SArthur Grillo 
1500*fc8d29e2SArthur Grillo 	order = drm_random_order(size, &prng);
1501*fc8d29e2SArthur Grillo 	if (!order)
1502*fc8d29e2SArthur Grillo 		goto err_nodes;
1503*fc8d29e2SArthur Grillo 
1504*fc8d29e2SArthur Grillo 	drm_mm_init(&mm, 0, size);
1505*fc8d29e2SArthur Grillo 	for (n = 0; n < size; n++) {
1506*fc8d29e2SArthur Grillo 		if (drm_mm_insert_node(&mm, &nodes[n].node, 1)) {
1507*fc8d29e2SArthur Grillo 			KUNIT_FAIL(test, "insert failed, step %d\n", n);
1508*fc8d29e2SArthur Grillo 			goto out;
1509*fc8d29e2SArthur Grillo 		}
1510*fc8d29e2SArthur Grillo 	}
1511*fc8d29e2SArthur Grillo 
1512*fc8d29e2SArthur Grillo 	for (mode = evict_modes; mode->name; mode++) {
1513*fc8d29e2SArthur Grillo 		for (n = 1; n <= range_size; n <<= 1) {
1514*fc8d29e2SArthur Grillo 			drm_random_reorder(order, size, &prng);
1515*fc8d29e2SArthur Grillo 			if (evict_something(test, &mm, range_start, range_end, nodes,
1516*fc8d29e2SArthur Grillo 					    order, size, n, 1, mode)) {
1517*fc8d29e2SArthur Grillo 				KUNIT_FAIL(test,
1518*fc8d29e2SArthur Grillo 					   "%s evict_something(size=%u) failed with range [%u, %u]\n",
1519*fc8d29e2SArthur Grillo 					   mode->name, n, range_start, range_end);
1520*fc8d29e2SArthur Grillo 				goto out;
1521*fc8d29e2SArthur Grillo 			}
1522*fc8d29e2SArthur Grillo 		}
1523*fc8d29e2SArthur Grillo 
1524*fc8d29e2SArthur Grillo 		for (n = 1; n <= range_size; n <<= 1) {
1525*fc8d29e2SArthur Grillo 			drm_random_reorder(order, size, &prng);
1526*fc8d29e2SArthur Grillo 			if (evict_something(test, &mm, range_start, range_end, nodes,
1527*fc8d29e2SArthur Grillo 					    order, size, range_size / 2, n, mode)) {
1528*fc8d29e2SArthur Grillo 				KUNIT_FAIL(test,
1529*fc8d29e2SArthur Grillo 					   "%s evict_something(size=%u, alignment=%u) failed with range [%u, %u]\n",
1530*fc8d29e2SArthur Grillo 					   mode->name, range_size / 2, n, range_start, range_end);
1531*fc8d29e2SArthur Grillo 				goto out;
1532*fc8d29e2SArthur Grillo 			}
1533*fc8d29e2SArthur Grillo 		}
1534*fc8d29e2SArthur Grillo 
1535*fc8d29e2SArthur Grillo 		for_each_prime_number_from(n, 1, min(range_size, max_prime)) {
1536*fc8d29e2SArthur Grillo 			unsigned int nsize = (range_size - n + 1) / 2;
1537*fc8d29e2SArthur Grillo 
1538*fc8d29e2SArthur Grillo 			DRM_MM_BUG_ON(!nsize);
1539*fc8d29e2SArthur Grillo 
1540*fc8d29e2SArthur Grillo 			drm_random_reorder(order, size, &prng);
1541*fc8d29e2SArthur Grillo 			if (evict_something(test, &mm, range_start, range_end, nodes,
1542*fc8d29e2SArthur Grillo 					    order, size, nsize, n, mode)) {
1543*fc8d29e2SArthur Grillo 				KUNIT_FAIL(test,
1544*fc8d29e2SArthur Grillo 					   "%s evict_something(size=%u, alignment=%u) failed with range [%u, %u]\n",
1545*fc8d29e2SArthur Grillo 					   mode->name, nsize, n, range_start, range_end);
1546*fc8d29e2SArthur Grillo 				goto out;
1547*fc8d29e2SArthur Grillo 			}
1548*fc8d29e2SArthur Grillo 		}
1549*fc8d29e2SArthur Grillo 
1550*fc8d29e2SArthur Grillo 		cond_resched();
1551*fc8d29e2SArthur Grillo 	}
1552*fc8d29e2SArthur Grillo 
1553*fc8d29e2SArthur Grillo out:
1554*fc8d29e2SArthur Grillo 	drm_mm_for_each_node_safe(node, next, &mm)
1555*fc8d29e2SArthur Grillo 		drm_mm_remove_node(node);
1556*fc8d29e2SArthur Grillo 	drm_mm_takedown(&mm);
1557*fc8d29e2SArthur Grillo 	kfree(order);
1558*fc8d29e2SArthur Grillo err_nodes:
1559*fc8d29e2SArthur Grillo 	vfree(nodes);
1560*fc8d29e2SArthur Grillo }
1561*fc8d29e2SArthur Grillo 
1562*fc8d29e2SArthur Grillo static unsigned int node_index(const struct drm_mm_node *node)
1563*fc8d29e2SArthur Grillo {
1564*fc8d29e2SArthur Grillo 	return div64_u64(node->start, node->size);
1565*fc8d29e2SArthur Grillo }
1566*fc8d29e2SArthur Grillo 
1567*fc8d29e2SArthur Grillo static void igt_mm_topdown(struct kunit *test)
1568*fc8d29e2SArthur Grillo {
1569*fc8d29e2SArthur Grillo 	const struct insert_mode *topdown = &insert_modes[TOPDOWN];
1570*fc8d29e2SArthur Grillo 
1571*fc8d29e2SArthur Grillo 	DRM_RND_STATE(prng, random_seed);
1572*fc8d29e2SArthur Grillo 	const unsigned int count = 8192;
1573*fc8d29e2SArthur Grillo 	unsigned int size;
1574*fc8d29e2SArthur Grillo 	unsigned long *bitmap;
1575*fc8d29e2SArthur Grillo 	struct drm_mm mm;
1576*fc8d29e2SArthur Grillo 	struct drm_mm_node *nodes, *node, *next;
1577*fc8d29e2SArthur Grillo 	unsigned int *order, n, m, o = 0;
1578*fc8d29e2SArthur Grillo 
1579*fc8d29e2SArthur Grillo 	/* When allocating top-down, we expect to be returned a node
1580*fc8d29e2SArthur Grillo 	 * from a suitable hole at the top of the drm_mm. We check that
1581*fc8d29e2SArthur Grillo 	 * the returned node does match the highest available slot.
1582*fc8d29e2SArthur Grillo 	 */
1583*fc8d29e2SArthur Grillo 
1584*fc8d29e2SArthur Grillo 	nodes = vzalloc(array_size(count, sizeof(*nodes)));
1585*fc8d29e2SArthur Grillo 	KUNIT_ASSERT_TRUE(test, nodes);
1586*fc8d29e2SArthur Grillo 
1587*fc8d29e2SArthur Grillo 	bitmap = bitmap_zalloc(count, GFP_KERNEL);
1588*fc8d29e2SArthur Grillo 	if (!bitmap)
1589*fc8d29e2SArthur Grillo 		goto err_nodes;
1590*fc8d29e2SArthur Grillo 
1591*fc8d29e2SArthur Grillo 	order = drm_random_order(count, &prng);
1592*fc8d29e2SArthur Grillo 	if (!order)
1593*fc8d29e2SArthur Grillo 		goto err_bitmap;
1594*fc8d29e2SArthur Grillo 
1595*fc8d29e2SArthur Grillo 	for (size = 1; size <= 64; size <<= 1) {
1596*fc8d29e2SArthur Grillo 		drm_mm_init(&mm, 0, size * count);
1597*fc8d29e2SArthur Grillo 		for (n = 0; n < count; n++) {
1598*fc8d29e2SArthur Grillo 			if (!expect_insert(test, &mm, &nodes[n], size, 0, n, topdown)) {
1599*fc8d29e2SArthur Grillo 				KUNIT_FAIL(test, "insert failed, size %u step %d\n", size, n);
1600*fc8d29e2SArthur Grillo 				goto out;
1601*fc8d29e2SArthur Grillo 			}
1602*fc8d29e2SArthur Grillo 
1603*fc8d29e2SArthur Grillo 			if (drm_mm_hole_follows(&nodes[n])) {
1604*fc8d29e2SArthur Grillo 				KUNIT_FAIL(test,
1605*fc8d29e2SArthur Grillo 					   "hole after topdown insert %d, start=%llx\n, size=%u",
1606*fc8d29e2SArthur Grillo 					   n, nodes[n].start, size);
1607*fc8d29e2SArthur Grillo 				goto out;
1608*fc8d29e2SArthur Grillo 			}
1609*fc8d29e2SArthur Grillo 
1610*fc8d29e2SArthur Grillo 			if (!assert_one_hole(test, &mm, 0, size * (count - n - 1)))
1611*fc8d29e2SArthur Grillo 				goto out;
1612*fc8d29e2SArthur Grillo 		}
1613*fc8d29e2SArthur Grillo 
1614*fc8d29e2SArthur Grillo 		if (!assert_continuous(test, &mm, size))
1615*fc8d29e2SArthur Grillo 			goto out;
1616*fc8d29e2SArthur Grillo 
1617*fc8d29e2SArthur Grillo 		drm_random_reorder(order, count, &prng);
1618*fc8d29e2SArthur Grillo 		for_each_prime_number_from(n, 1, min(count, max_prime)) {
1619*fc8d29e2SArthur Grillo 			for (m = 0; m < n; m++) {
1620*fc8d29e2SArthur Grillo 				node = &nodes[order[(o + m) % count]];
1621*fc8d29e2SArthur Grillo 				drm_mm_remove_node(node);
1622*fc8d29e2SArthur Grillo 				__set_bit(node_index(node), bitmap);
1623*fc8d29e2SArthur Grillo 			}
1624*fc8d29e2SArthur Grillo 
1625*fc8d29e2SArthur Grillo 			for (m = 0; m < n; m++) {
1626*fc8d29e2SArthur Grillo 				unsigned int last;
1627*fc8d29e2SArthur Grillo 
1628*fc8d29e2SArthur Grillo 				node = &nodes[order[(o + m) % count]];
1629*fc8d29e2SArthur Grillo 				if (!expect_insert(test, &mm, node, size, 0, 0, topdown)) {
1630*fc8d29e2SArthur Grillo 					KUNIT_FAIL(test, "insert failed, step %d/%d\n", m, n);
1631*fc8d29e2SArthur Grillo 					goto out;
1632*fc8d29e2SArthur Grillo 				}
1633*fc8d29e2SArthur Grillo 
1634*fc8d29e2SArthur Grillo 				if (drm_mm_hole_follows(node)) {
1635*fc8d29e2SArthur Grillo 					KUNIT_FAIL(test,
1636*fc8d29e2SArthur Grillo 						   "hole after topdown insert %d/%d, start=%llx\n",
1637*fc8d29e2SArthur Grillo 						   m, n, node->start);
1638*fc8d29e2SArthur Grillo 					goto out;
1639*fc8d29e2SArthur Grillo 				}
1640*fc8d29e2SArthur Grillo 
1641*fc8d29e2SArthur Grillo 				last = find_last_bit(bitmap, count);
1642*fc8d29e2SArthur Grillo 				if (node_index(node) != last) {
1643*fc8d29e2SArthur Grillo 					KUNIT_FAIL(test,
1644*fc8d29e2SArthur Grillo 						   "node %d/%d, size %d, not inserted into upmost hole, expected %d, found %d\n",
1645*fc8d29e2SArthur Grillo 						   m, n, size, last, node_index(node));
1646*fc8d29e2SArthur Grillo 					goto out;
1647*fc8d29e2SArthur Grillo 				}
1648*fc8d29e2SArthur Grillo 
1649*fc8d29e2SArthur Grillo 				__clear_bit(last, bitmap);
1650*fc8d29e2SArthur Grillo 			}
1651*fc8d29e2SArthur Grillo 
1652*fc8d29e2SArthur Grillo 			DRM_MM_BUG_ON(find_first_bit(bitmap, count) != count);
1653*fc8d29e2SArthur Grillo 
1654*fc8d29e2SArthur Grillo 			o += n;
1655*fc8d29e2SArthur Grillo 		}
1656*fc8d29e2SArthur Grillo 
1657*fc8d29e2SArthur Grillo 		drm_mm_for_each_node_safe(node, next, &mm)
1658*fc8d29e2SArthur Grillo 			drm_mm_remove_node(node);
1659*fc8d29e2SArthur Grillo 		DRM_MM_BUG_ON(!drm_mm_clean(&mm));
1660*fc8d29e2SArthur Grillo 		cond_resched();
1661*fc8d29e2SArthur Grillo 	}
1662*fc8d29e2SArthur Grillo 
1663*fc8d29e2SArthur Grillo out:
1664*fc8d29e2SArthur Grillo 	drm_mm_for_each_node_safe(node, next, &mm)
1665*fc8d29e2SArthur Grillo 		drm_mm_remove_node(node);
1666*fc8d29e2SArthur Grillo 	drm_mm_takedown(&mm);
1667*fc8d29e2SArthur Grillo 	kfree(order);
1668*fc8d29e2SArthur Grillo err_bitmap:
1669*fc8d29e2SArthur Grillo 	bitmap_free(bitmap);
1670*fc8d29e2SArthur Grillo err_nodes:
1671*fc8d29e2SArthur Grillo 	vfree(nodes);
1672*fc8d29e2SArthur Grillo }
1673*fc8d29e2SArthur Grillo 
1674*fc8d29e2SArthur Grillo static void igt_mm_bottomup(struct kunit *test)
1675*fc8d29e2SArthur Grillo {
1676*fc8d29e2SArthur Grillo 	const struct insert_mode *bottomup = &insert_modes[BOTTOMUP];
1677*fc8d29e2SArthur Grillo 
1678*fc8d29e2SArthur Grillo 	DRM_RND_STATE(prng, random_seed);
1679*fc8d29e2SArthur Grillo 	const unsigned int count = 8192;
1680*fc8d29e2SArthur Grillo 	unsigned int size;
1681*fc8d29e2SArthur Grillo 	unsigned long *bitmap;
1682*fc8d29e2SArthur Grillo 	struct drm_mm mm;
1683*fc8d29e2SArthur Grillo 	struct drm_mm_node *nodes, *node, *next;
1684*fc8d29e2SArthur Grillo 	unsigned int *order, n, m, o = 0;
1685*fc8d29e2SArthur Grillo 
1686*fc8d29e2SArthur Grillo 	/* Like igt_topdown, but instead of searching for the last hole,
1687*fc8d29e2SArthur Grillo 	 * we search for the first.
1688*fc8d29e2SArthur Grillo 	 */
1689*fc8d29e2SArthur Grillo 
1690*fc8d29e2SArthur Grillo 	nodes = vzalloc(array_size(count, sizeof(*nodes)));
1691*fc8d29e2SArthur Grillo 	KUNIT_ASSERT_TRUE(test, nodes);
1692*fc8d29e2SArthur Grillo 
1693*fc8d29e2SArthur Grillo 	bitmap = bitmap_zalloc(count, GFP_KERNEL);
1694*fc8d29e2SArthur Grillo 	if (!bitmap)
1695*fc8d29e2SArthur Grillo 		goto err_nodes;
1696*fc8d29e2SArthur Grillo 
1697*fc8d29e2SArthur Grillo 	order = drm_random_order(count, &prng);
1698*fc8d29e2SArthur Grillo 	if (!order)
1699*fc8d29e2SArthur Grillo 		goto err_bitmap;
1700*fc8d29e2SArthur Grillo 
1701*fc8d29e2SArthur Grillo 	for (size = 1; size <= 64; size <<= 1) {
1702*fc8d29e2SArthur Grillo 		drm_mm_init(&mm, 0, size * count);
1703*fc8d29e2SArthur Grillo 		for (n = 0; n < count; n++) {
1704*fc8d29e2SArthur Grillo 			if (!expect_insert(test, &mm, &nodes[n], size, 0, n, bottomup)) {
1705*fc8d29e2SArthur Grillo 				KUNIT_FAIL(test,
1706*fc8d29e2SArthur Grillo 					   "bottomup insert failed, size %u step %d\n", size, n);
1707*fc8d29e2SArthur Grillo 				goto out;
1708*fc8d29e2SArthur Grillo 			}
1709*fc8d29e2SArthur Grillo 
1710*fc8d29e2SArthur Grillo 			if (!assert_one_hole(test, &mm, size * (n + 1), size * count))
1711*fc8d29e2SArthur Grillo 				goto out;
1712*fc8d29e2SArthur Grillo 		}
1713*fc8d29e2SArthur Grillo 
1714*fc8d29e2SArthur Grillo 		if (!assert_continuous(test, &mm, size))
1715*fc8d29e2SArthur Grillo 			goto out;
1716*fc8d29e2SArthur Grillo 
1717*fc8d29e2SArthur Grillo 		drm_random_reorder(order, count, &prng);
1718*fc8d29e2SArthur Grillo 		for_each_prime_number_from(n, 1, min(count, max_prime)) {
1719*fc8d29e2SArthur Grillo 			for (m = 0; m < n; m++) {
1720*fc8d29e2SArthur Grillo 				node = &nodes[order[(o + m) % count]];
1721*fc8d29e2SArthur Grillo 				drm_mm_remove_node(node);
1722*fc8d29e2SArthur Grillo 				__set_bit(node_index(node), bitmap);
1723*fc8d29e2SArthur Grillo 			}
1724*fc8d29e2SArthur Grillo 
1725*fc8d29e2SArthur Grillo 			for (m = 0; m < n; m++) {
1726*fc8d29e2SArthur Grillo 				unsigned int first;
1727*fc8d29e2SArthur Grillo 
1728*fc8d29e2SArthur Grillo 				node = &nodes[order[(o + m) % count]];
1729*fc8d29e2SArthur Grillo 				if (!expect_insert(test, &mm, node, size, 0, 0, bottomup)) {
1730*fc8d29e2SArthur Grillo 					KUNIT_FAIL(test, "insert failed, step %d/%d\n", m, n);
1731*fc8d29e2SArthur Grillo 					goto out;
1732*fc8d29e2SArthur Grillo 				}
1733*fc8d29e2SArthur Grillo 
1734*fc8d29e2SArthur Grillo 				first = find_first_bit(bitmap, count);
1735*fc8d29e2SArthur Grillo 				if (node_index(node) != first) {
1736*fc8d29e2SArthur Grillo 					KUNIT_FAIL(test,
1737*fc8d29e2SArthur Grillo 						   "node %d/%d not inserted into bottom hole, expected %d, found %d\n",
1738*fc8d29e2SArthur Grillo 						   m, n, first, node_index(node));
1739*fc8d29e2SArthur Grillo 					goto out;
1740*fc8d29e2SArthur Grillo 				}
1741*fc8d29e2SArthur Grillo 				__clear_bit(first, bitmap);
1742*fc8d29e2SArthur Grillo 			}
1743*fc8d29e2SArthur Grillo 
1744*fc8d29e2SArthur Grillo 			DRM_MM_BUG_ON(find_first_bit(bitmap, count) != count);
1745*fc8d29e2SArthur Grillo 
1746*fc8d29e2SArthur Grillo 			o += n;
1747*fc8d29e2SArthur Grillo 		}
1748*fc8d29e2SArthur Grillo 
1749*fc8d29e2SArthur Grillo 		drm_mm_for_each_node_safe(node, next, &mm)
1750*fc8d29e2SArthur Grillo 			drm_mm_remove_node(node);
1751*fc8d29e2SArthur Grillo 		DRM_MM_BUG_ON(!drm_mm_clean(&mm));
1752*fc8d29e2SArthur Grillo 		cond_resched();
1753*fc8d29e2SArthur Grillo 	}
1754*fc8d29e2SArthur Grillo 
1755*fc8d29e2SArthur Grillo out:
1756*fc8d29e2SArthur Grillo 	drm_mm_for_each_node_safe(node, next, &mm)
1757*fc8d29e2SArthur Grillo 		drm_mm_remove_node(node);
1758*fc8d29e2SArthur Grillo 	drm_mm_takedown(&mm);
1759*fc8d29e2SArthur Grillo 	kfree(order);
1760*fc8d29e2SArthur Grillo err_bitmap:
1761*fc8d29e2SArthur Grillo 	bitmap_free(bitmap);
1762*fc8d29e2SArthur Grillo err_nodes:
1763*fc8d29e2SArthur Grillo 	vfree(nodes);
1764*fc8d29e2SArthur Grillo }
1765*fc8d29e2SArthur Grillo 
1766*fc8d29e2SArthur Grillo static void __igt_once(struct kunit *test, unsigned int mode)
1767*fc8d29e2SArthur Grillo {
1768*fc8d29e2SArthur Grillo 	struct drm_mm mm;
1769*fc8d29e2SArthur Grillo 	struct drm_mm_node rsvd_lo, rsvd_hi, node;
1770*fc8d29e2SArthur Grillo 
1771*fc8d29e2SArthur Grillo 	drm_mm_init(&mm, 0, 7);
1772*fc8d29e2SArthur Grillo 
1773*fc8d29e2SArthur Grillo 	memset(&rsvd_lo, 0, sizeof(rsvd_lo));
1774*fc8d29e2SArthur Grillo 	rsvd_lo.start = 1;
1775*fc8d29e2SArthur Grillo 	rsvd_lo.size = 1;
1776*fc8d29e2SArthur Grillo 	if (drm_mm_reserve_node(&mm, &rsvd_lo)) {
1777*fc8d29e2SArthur Grillo 		KUNIT_FAIL(test, "Could not reserve low node\n");
1778*fc8d29e2SArthur Grillo 		goto err;
1779*fc8d29e2SArthur Grillo 	}
1780*fc8d29e2SArthur Grillo 
1781*fc8d29e2SArthur Grillo 	memset(&rsvd_hi, 0, sizeof(rsvd_hi));
1782*fc8d29e2SArthur Grillo 	rsvd_hi.start = 5;
1783*fc8d29e2SArthur Grillo 	rsvd_hi.size = 1;
1784*fc8d29e2SArthur Grillo 	if (drm_mm_reserve_node(&mm, &rsvd_hi)) {
1785*fc8d29e2SArthur Grillo 		KUNIT_FAIL(test, "Could not reserve low node\n");
1786*fc8d29e2SArthur Grillo 		goto err_lo;
1787*fc8d29e2SArthur Grillo 	}
1788*fc8d29e2SArthur Grillo 
1789*fc8d29e2SArthur Grillo 	if (!drm_mm_hole_follows(&rsvd_lo) || !drm_mm_hole_follows(&rsvd_hi)) {
1790*fc8d29e2SArthur Grillo 		KUNIT_FAIL(test, "Expected a hole after lo and high nodes!\n");
1791*fc8d29e2SArthur Grillo 		goto err_hi;
1792*fc8d29e2SArthur Grillo 	}
1793*fc8d29e2SArthur Grillo 
1794*fc8d29e2SArthur Grillo 	memset(&node, 0, sizeof(node));
1795*fc8d29e2SArthur Grillo 	if (drm_mm_insert_node_generic(&mm, &node, 2, 0, 0, mode)) {
1796*fc8d29e2SArthur Grillo 		KUNIT_FAIL(test, "Could not insert the node into the available hole!\n");
1797*fc8d29e2SArthur Grillo 		goto err_hi;
1798*fc8d29e2SArthur Grillo 	}
1799*fc8d29e2SArthur Grillo 
1800*fc8d29e2SArthur Grillo 	drm_mm_remove_node(&node);
1801*fc8d29e2SArthur Grillo err_hi:
1802*fc8d29e2SArthur Grillo 	drm_mm_remove_node(&rsvd_hi);
1803*fc8d29e2SArthur Grillo err_lo:
1804*fc8d29e2SArthur Grillo 	drm_mm_remove_node(&rsvd_lo);
1805*fc8d29e2SArthur Grillo err:
1806*fc8d29e2SArthur Grillo 	drm_mm_takedown(&mm);
1807*fc8d29e2SArthur Grillo }
1808*fc8d29e2SArthur Grillo 
1809*fc8d29e2SArthur Grillo static void igt_mm_lowest(struct kunit *test)
1810*fc8d29e2SArthur Grillo {
1811*fc8d29e2SArthur Grillo 	__igt_once(test, DRM_MM_INSERT_LOW);
1812*fc8d29e2SArthur Grillo }
1813*fc8d29e2SArthur Grillo 
1814*fc8d29e2SArthur Grillo static void igt_mm_highest(struct kunit *test)
1815*fc8d29e2SArthur Grillo {
1816*fc8d29e2SArthur Grillo 	__igt_once(test, DRM_MM_INSERT_HIGH);
1817*fc8d29e2SArthur Grillo }
1818*fc8d29e2SArthur Grillo 
1819*fc8d29e2SArthur Grillo static void separate_adjacent_colors(const struct drm_mm_node *node,
1820*fc8d29e2SArthur Grillo 				     unsigned long color, u64 *start, u64 *end)
1821*fc8d29e2SArthur Grillo {
1822*fc8d29e2SArthur Grillo 	if (drm_mm_node_allocated(node) && node->color != color)
1823*fc8d29e2SArthur Grillo 		++*start;
1824*fc8d29e2SArthur Grillo 
1825*fc8d29e2SArthur Grillo 	node = list_next_entry(node, node_list);
1826*fc8d29e2SArthur Grillo 	if (drm_mm_node_allocated(node) && node->color != color)
1827*fc8d29e2SArthur Grillo 		--*end;
1828*fc8d29e2SArthur Grillo }
1829*fc8d29e2SArthur Grillo 
1830*fc8d29e2SArthur Grillo static bool colors_abutt(struct kunit *test, const struct drm_mm_node *node)
1831*fc8d29e2SArthur Grillo {
1832*fc8d29e2SArthur Grillo 	if (!drm_mm_hole_follows(node) &&
1833*fc8d29e2SArthur Grillo 	    drm_mm_node_allocated(list_next_entry(node, node_list))) {
1834*fc8d29e2SArthur Grillo 		KUNIT_FAIL(test, "colors abutt; %ld [%llx + %llx] is next to %ld [%llx + %llx]!\n",
1835*fc8d29e2SArthur Grillo 			   node->color, node->start, node->size,
1836*fc8d29e2SArthur Grillo 		       list_next_entry(node, node_list)->color,
1837*fc8d29e2SArthur Grillo 		       list_next_entry(node, node_list)->start,
1838*fc8d29e2SArthur Grillo 		       list_next_entry(node, node_list)->size);
1839*fc8d29e2SArthur Grillo 		return true;
1840*fc8d29e2SArthur Grillo 	}
1841*fc8d29e2SArthur Grillo 
1842*fc8d29e2SArthur Grillo 	return false;
1843*fc8d29e2SArthur Grillo }
1844*fc8d29e2SArthur Grillo 
1845*fc8d29e2SArthur Grillo static void igt_mm_color(struct kunit *test)
1846*fc8d29e2SArthur Grillo {
1847*fc8d29e2SArthur Grillo 	const unsigned int count = min(4096u, max_iterations);
1848*fc8d29e2SArthur Grillo 	const struct insert_mode *mode;
1849*fc8d29e2SArthur Grillo 	struct drm_mm mm;
1850*fc8d29e2SArthur Grillo 	struct drm_mm_node *node, *nn;
1851*fc8d29e2SArthur Grillo 	unsigned int n;
1852*fc8d29e2SArthur Grillo 
1853*fc8d29e2SArthur Grillo 	/* Color adjustment complicates everything. First we just check
1854*fc8d29e2SArthur Grillo 	 * that when we insert a node we apply any color_adjustment callback.
1855*fc8d29e2SArthur Grillo 	 * The callback we use should ensure that there is a gap between
1856*fc8d29e2SArthur Grillo 	 * any two nodes, and so after each insertion we check that those
1857*fc8d29e2SArthur Grillo 	 * holes are inserted and that they are preserved.
1858*fc8d29e2SArthur Grillo 	 */
1859*fc8d29e2SArthur Grillo 
1860*fc8d29e2SArthur Grillo 	drm_mm_init(&mm, 0, U64_MAX);
1861*fc8d29e2SArthur Grillo 
1862*fc8d29e2SArthur Grillo 	for (n = 1; n <= count; n++) {
1863*fc8d29e2SArthur Grillo 		node = kzalloc(sizeof(*node), GFP_KERNEL);
1864*fc8d29e2SArthur Grillo 		if (!node)
1865*fc8d29e2SArthur Grillo 			goto out;
1866*fc8d29e2SArthur Grillo 
1867*fc8d29e2SArthur Grillo 		if (!expect_insert(test, &mm, node, n, 0, n, &insert_modes[0])) {
1868*fc8d29e2SArthur Grillo 			KUNIT_FAIL(test, "insert failed, step %d\n", n);
1869*fc8d29e2SArthur Grillo 			kfree(node);
1870*fc8d29e2SArthur Grillo 			goto out;
1871*fc8d29e2SArthur Grillo 		}
1872*fc8d29e2SArthur Grillo 	}
1873*fc8d29e2SArthur Grillo 
1874*fc8d29e2SArthur Grillo 	drm_mm_for_each_node_safe(node, nn, &mm) {
1875*fc8d29e2SArthur Grillo 		if (node->color != node->size) {
1876*fc8d29e2SArthur Grillo 			KUNIT_FAIL(test, "invalid color stored: expected %lld, found %ld\n",
1877*fc8d29e2SArthur Grillo 				   node->size, node->color);
1878*fc8d29e2SArthur Grillo 
1879*fc8d29e2SArthur Grillo 			goto out;
1880*fc8d29e2SArthur Grillo 		}
1881*fc8d29e2SArthur Grillo 
1882*fc8d29e2SArthur Grillo 		drm_mm_remove_node(node);
1883*fc8d29e2SArthur Grillo 		kfree(node);
1884*fc8d29e2SArthur Grillo 	}
1885*fc8d29e2SArthur Grillo 
1886*fc8d29e2SArthur Grillo 	/* Now, let's start experimenting with applying a color callback */
1887*fc8d29e2SArthur Grillo 	mm.color_adjust = separate_adjacent_colors;
1888*fc8d29e2SArthur Grillo 	for (mode = insert_modes; mode->name; mode++) {
1889*fc8d29e2SArthur Grillo 		u64 last;
1890*fc8d29e2SArthur Grillo 
1891*fc8d29e2SArthur Grillo 		node = kzalloc(sizeof(*node), GFP_KERNEL);
1892*fc8d29e2SArthur Grillo 		if (!node)
1893*fc8d29e2SArthur Grillo 			goto out;
1894*fc8d29e2SArthur Grillo 
1895*fc8d29e2SArthur Grillo 		node->size = 1 + 2 * count;
1896*fc8d29e2SArthur Grillo 		node->color = node->size;
1897*fc8d29e2SArthur Grillo 
1898*fc8d29e2SArthur Grillo 		if (drm_mm_reserve_node(&mm, node)) {
1899*fc8d29e2SArthur Grillo 			KUNIT_FAIL(test, "initial reserve failed!\n");
1900*fc8d29e2SArthur Grillo 			goto out;
1901*fc8d29e2SArthur Grillo 		}
1902*fc8d29e2SArthur Grillo 
1903*fc8d29e2SArthur Grillo 		last = node->start + node->size;
1904*fc8d29e2SArthur Grillo 
1905*fc8d29e2SArthur Grillo 		for (n = 1; n <= count; n++) {
1906*fc8d29e2SArthur Grillo 			int rem;
1907*fc8d29e2SArthur Grillo 
1908*fc8d29e2SArthur Grillo 			node = kzalloc(sizeof(*node), GFP_KERNEL);
1909*fc8d29e2SArthur Grillo 			if (!node)
1910*fc8d29e2SArthur Grillo 				goto out;
1911*fc8d29e2SArthur Grillo 
1912*fc8d29e2SArthur Grillo 			node->start = last;
1913*fc8d29e2SArthur Grillo 			node->size = n + count;
1914*fc8d29e2SArthur Grillo 			node->color = node->size;
1915*fc8d29e2SArthur Grillo 
1916*fc8d29e2SArthur Grillo 			if (drm_mm_reserve_node(&mm, node) != -ENOSPC) {
1917*fc8d29e2SArthur Grillo 				KUNIT_FAIL(test, "reserve %d did not report color overlap!", n);
1918*fc8d29e2SArthur Grillo 				goto out;
1919*fc8d29e2SArthur Grillo 			}
1920*fc8d29e2SArthur Grillo 
1921*fc8d29e2SArthur Grillo 			node->start += n + 1;
1922*fc8d29e2SArthur Grillo 			rem = misalignment(node, n + count);
1923*fc8d29e2SArthur Grillo 			node->start += n + count - rem;
1924*fc8d29e2SArthur Grillo 
1925*fc8d29e2SArthur Grillo 			if (drm_mm_reserve_node(&mm, node)) {
1926*fc8d29e2SArthur Grillo 				KUNIT_FAIL(test, "reserve %d failed", n);
1927*fc8d29e2SArthur Grillo 				goto out;
1928*fc8d29e2SArthur Grillo 			}
1929*fc8d29e2SArthur Grillo 
1930*fc8d29e2SArthur Grillo 			last = node->start + node->size;
1931*fc8d29e2SArthur Grillo 		}
1932*fc8d29e2SArthur Grillo 
1933*fc8d29e2SArthur Grillo 		for (n = 1; n <= count; n++) {
1934*fc8d29e2SArthur Grillo 			node = kzalloc(sizeof(*node), GFP_KERNEL);
1935*fc8d29e2SArthur Grillo 			if (!node)
1936*fc8d29e2SArthur Grillo 				goto out;
1937*fc8d29e2SArthur Grillo 
1938*fc8d29e2SArthur Grillo 			if (!expect_insert(test, &mm, node, n, n, n, mode)) {
1939*fc8d29e2SArthur Grillo 				KUNIT_FAIL(test, "%s insert failed, step %d\n", mode->name, n);
1940*fc8d29e2SArthur Grillo 				kfree(node);
1941*fc8d29e2SArthur Grillo 				goto out;
1942*fc8d29e2SArthur Grillo 			}
1943*fc8d29e2SArthur Grillo 		}
1944*fc8d29e2SArthur Grillo 
1945*fc8d29e2SArthur Grillo 		drm_mm_for_each_node_safe(node, nn, &mm) {
1946*fc8d29e2SArthur Grillo 			u64 rem;
1947*fc8d29e2SArthur Grillo 
1948*fc8d29e2SArthur Grillo 			if (node->color != node->size) {
1949*fc8d29e2SArthur Grillo 				KUNIT_FAIL(test,
1950*fc8d29e2SArthur Grillo 					   "%s invalid color stored: expected %lld, found %ld\n",
1951*fc8d29e2SArthur Grillo 					   mode->name, node->size, node->color);
1952*fc8d29e2SArthur Grillo 
1953*fc8d29e2SArthur Grillo 				goto out;
1954*fc8d29e2SArthur Grillo 			}
1955*fc8d29e2SArthur Grillo 
1956*fc8d29e2SArthur Grillo 			if (colors_abutt(test, node))
1957*fc8d29e2SArthur Grillo 				goto out;
1958*fc8d29e2SArthur Grillo 
1959*fc8d29e2SArthur Grillo 			div64_u64_rem(node->start, node->size, &rem);
1960*fc8d29e2SArthur Grillo 			if (rem) {
1961*fc8d29e2SArthur Grillo 				KUNIT_FAIL(test,
1962*fc8d29e2SArthur Grillo 					   "%s colored node misaligned, start=%llx expected alignment=%lld [rem=%lld]\n",
1963*fc8d29e2SArthur Grillo 					   mode->name, node->start, node->size, rem);
1964*fc8d29e2SArthur Grillo 				goto out;
1965*fc8d29e2SArthur Grillo 			}
1966*fc8d29e2SArthur Grillo 
1967*fc8d29e2SArthur Grillo 			drm_mm_remove_node(node);
1968*fc8d29e2SArthur Grillo 			kfree(node);
1969*fc8d29e2SArthur Grillo 		}
1970*fc8d29e2SArthur Grillo 
1971*fc8d29e2SArthur Grillo 		cond_resched();
1972*fc8d29e2SArthur Grillo 	}
1973*fc8d29e2SArthur Grillo 
1974*fc8d29e2SArthur Grillo out:
1975*fc8d29e2SArthur Grillo 	drm_mm_for_each_node_safe(node, nn, &mm) {
1976*fc8d29e2SArthur Grillo 		drm_mm_remove_node(node);
1977*fc8d29e2SArthur Grillo 		kfree(node);
1978*fc8d29e2SArthur Grillo 	}
1979*fc8d29e2SArthur Grillo 	drm_mm_takedown(&mm);
1980*fc8d29e2SArthur Grillo }
1981*fc8d29e2SArthur Grillo 
1982*fc8d29e2SArthur Grillo static int evict_color(struct kunit *test, struct drm_mm *mm, u64 range_start,
1983*fc8d29e2SArthur Grillo 		       u64 range_end, struct evict_node *nodes, unsigned int *order,
1984*fc8d29e2SArthur Grillo 		unsigned int count, unsigned int size, unsigned int alignment,
1985*fc8d29e2SArthur Grillo 		unsigned long color, const struct insert_mode *mode)
1986*fc8d29e2SArthur Grillo {
1987*fc8d29e2SArthur Grillo 	struct drm_mm_scan scan;
1988*fc8d29e2SArthur Grillo 	LIST_HEAD(evict_list);
1989*fc8d29e2SArthur Grillo 	struct evict_node *e;
1990*fc8d29e2SArthur Grillo 	struct drm_mm_node tmp;
1991*fc8d29e2SArthur Grillo 	int err;
1992*fc8d29e2SArthur Grillo 
1993*fc8d29e2SArthur Grillo 	drm_mm_scan_init_with_range(&scan, mm, size, alignment, color, range_start,
1994*fc8d29e2SArthur Grillo 				    range_end, mode->mode);
1995*fc8d29e2SArthur Grillo 	if (!evict_nodes(test, &scan, nodes, order, count, true, &evict_list))
1996*fc8d29e2SArthur Grillo 		return -EINVAL;
1997*fc8d29e2SArthur Grillo 
1998*fc8d29e2SArthur Grillo 	memset(&tmp, 0, sizeof(tmp));
1999*fc8d29e2SArthur Grillo 	err = drm_mm_insert_node_generic(mm, &tmp, size, alignment, color,
2000*fc8d29e2SArthur Grillo 					 DRM_MM_INSERT_EVICT);
2001*fc8d29e2SArthur Grillo 	if (err) {
2002*fc8d29e2SArthur Grillo 		KUNIT_FAIL(test,
2003*fc8d29e2SArthur Grillo 			   "Failed to insert into eviction hole: size=%d, align=%d, color=%lu, err=%d\n",
2004*fc8d29e2SArthur Grillo 			   size, alignment, color, err);
2005*fc8d29e2SArthur Grillo 		show_scan(test, &scan);
2006*fc8d29e2SArthur Grillo 		show_holes(test, mm, 3);
2007*fc8d29e2SArthur Grillo 		return err;
2008*fc8d29e2SArthur Grillo 	}
2009*fc8d29e2SArthur Grillo 
2010*fc8d29e2SArthur Grillo 	if (tmp.start < range_start || tmp.start + tmp.size > range_end) {
2011*fc8d29e2SArthur Grillo 		KUNIT_FAIL(test,
2012*fc8d29e2SArthur Grillo 			   "Inserted [address=%llu + %llu] did not fit into the request range [%llu, %llu]\n",
2013*fc8d29e2SArthur Grillo 			   tmp.start, tmp.size, range_start, range_end);
2014*fc8d29e2SArthur Grillo 		err = -EINVAL;
2015*fc8d29e2SArthur Grillo 	}
2016*fc8d29e2SArthur Grillo 
2017*fc8d29e2SArthur Grillo 	if (colors_abutt(test, &tmp))
2018*fc8d29e2SArthur Grillo 		err = -EINVAL;
2019*fc8d29e2SArthur Grillo 
2020*fc8d29e2SArthur Grillo 	if (!assert_node(test, &tmp, mm, size, alignment, color)) {
2021*fc8d29e2SArthur Grillo 		KUNIT_FAIL(test,
2022*fc8d29e2SArthur Grillo 			   "Inserted did not fit the eviction hole: size=%lld [%d], align=%d [rem=%lld], start=%llx\n",
2023*fc8d29e2SArthur Grillo 			   tmp.size, size, alignment, misalignment(&tmp, alignment), tmp.start);
2024*fc8d29e2SArthur Grillo 		err = -EINVAL;
2025*fc8d29e2SArthur Grillo 	}
2026*fc8d29e2SArthur Grillo 
2027*fc8d29e2SArthur Grillo 	drm_mm_remove_node(&tmp);
2028*fc8d29e2SArthur Grillo 	if (err)
2029*fc8d29e2SArthur Grillo 		return err;
2030*fc8d29e2SArthur Grillo 
2031*fc8d29e2SArthur Grillo 	list_for_each_entry(e, &evict_list, link) {
2032*fc8d29e2SArthur Grillo 		err = drm_mm_reserve_node(mm, &e->node);
2033*fc8d29e2SArthur Grillo 		if (err) {
2034*fc8d29e2SArthur Grillo 			KUNIT_FAIL(test, "Failed to reinsert node after eviction: start=%llx\n",
2035*fc8d29e2SArthur Grillo 				   e->node.start);
2036*fc8d29e2SArthur Grillo 			return err;
2037*fc8d29e2SArthur Grillo 		}
2038*fc8d29e2SArthur Grillo 	}
2039*fc8d29e2SArthur Grillo 
2040*fc8d29e2SArthur Grillo 	cond_resched();
2041*fc8d29e2SArthur Grillo 	return 0;
2042*fc8d29e2SArthur Grillo }
2043*fc8d29e2SArthur Grillo 
2044*fc8d29e2SArthur Grillo static void igt_mm_color_evict(struct kunit *test)
2045*fc8d29e2SArthur Grillo {
2046*fc8d29e2SArthur Grillo 	DRM_RND_STATE(prng, random_seed);
2047*fc8d29e2SArthur Grillo 	const unsigned int total_size = min(8192u, max_iterations);
2048*fc8d29e2SArthur Grillo 	const struct insert_mode *mode;
2049*fc8d29e2SArthur Grillo 	unsigned long color = 0;
2050*fc8d29e2SArthur Grillo 	struct drm_mm mm;
2051*fc8d29e2SArthur Grillo 	struct evict_node *nodes;
2052*fc8d29e2SArthur Grillo 	struct drm_mm_node *node, *next;
2053*fc8d29e2SArthur Grillo 	unsigned int *order, n;
2054*fc8d29e2SArthur Grillo 
2055*fc8d29e2SArthur Grillo 	/* Check that the drm_mm_scan also honours color adjustment when
2056*fc8d29e2SArthur Grillo 	 * choosing its victims to create a hole. Our color_adjust does not
2057*fc8d29e2SArthur Grillo 	 * allow two nodes to be placed together without an intervening hole
2058*fc8d29e2SArthur Grillo 	 * enlarging the set of victims that must be evicted.
2059*fc8d29e2SArthur Grillo 	 */
2060*fc8d29e2SArthur Grillo 
2061*fc8d29e2SArthur Grillo 	nodes = vzalloc(array_size(total_size, sizeof(*nodes)));
2062*fc8d29e2SArthur Grillo 	KUNIT_ASSERT_TRUE(test, nodes);
2063*fc8d29e2SArthur Grillo 
2064*fc8d29e2SArthur Grillo 	order = drm_random_order(total_size, &prng);
2065*fc8d29e2SArthur Grillo 	if (!order)
2066*fc8d29e2SArthur Grillo 		goto err_nodes;
2067*fc8d29e2SArthur Grillo 
2068*fc8d29e2SArthur Grillo 	drm_mm_init(&mm, 0, 2 * total_size - 1);
2069*fc8d29e2SArthur Grillo 	mm.color_adjust = separate_adjacent_colors;
2070*fc8d29e2SArthur Grillo 	for (n = 0; n < total_size; n++) {
2071*fc8d29e2SArthur Grillo 		if (!expect_insert(test, &mm, &nodes[n].node,
2072*fc8d29e2SArthur Grillo 				   1, 0, color++,
2073*fc8d29e2SArthur Grillo 				   &insert_modes[0])) {
2074*fc8d29e2SArthur Grillo 			KUNIT_FAIL(test, "insert failed, step %d\n", n);
2075*fc8d29e2SArthur Grillo 			goto out;
2076*fc8d29e2SArthur Grillo 		}
2077*fc8d29e2SArthur Grillo 	}
2078*fc8d29e2SArthur Grillo 
2079*fc8d29e2SArthur Grillo 	for (mode = evict_modes; mode->name; mode++) {
2080*fc8d29e2SArthur Grillo 		for (n = 1; n <= total_size; n <<= 1) {
2081*fc8d29e2SArthur Grillo 			drm_random_reorder(order, total_size, &prng);
2082*fc8d29e2SArthur Grillo 			if (evict_color(test, &mm, 0, U64_MAX, nodes, order, total_size,
2083*fc8d29e2SArthur Grillo 					n, 1, color++, mode)) {
2084*fc8d29e2SArthur Grillo 				KUNIT_FAIL(test, "%s evict_color(size=%u) failed\n", mode->name, n);
2085*fc8d29e2SArthur Grillo 				goto out;
2086*fc8d29e2SArthur Grillo 			}
2087*fc8d29e2SArthur Grillo 		}
2088*fc8d29e2SArthur Grillo 
2089*fc8d29e2SArthur Grillo 		for (n = 1; n < total_size; n <<= 1) {
2090*fc8d29e2SArthur Grillo 			drm_random_reorder(order, total_size, &prng);
2091*fc8d29e2SArthur Grillo 			if (evict_color(test, &mm, 0, U64_MAX, nodes, order, total_size,
2092*fc8d29e2SArthur Grillo 					total_size / 2, n, color++, mode)) {
2093*fc8d29e2SArthur Grillo 				KUNIT_FAIL(test, "%s evict_color(size=%u, alignment=%u) failed\n",
2094*fc8d29e2SArthur Grillo 					   mode->name, total_size / 2, n);
2095*fc8d29e2SArthur Grillo 				goto out;
2096*fc8d29e2SArthur Grillo 			}
2097*fc8d29e2SArthur Grillo 		}
2098*fc8d29e2SArthur Grillo 
2099*fc8d29e2SArthur Grillo 		for_each_prime_number_from(n, 1, min(total_size, max_prime)) {
2100*fc8d29e2SArthur Grillo 			unsigned int nsize = (total_size - n + 1) / 2;
2101*fc8d29e2SArthur Grillo 
2102*fc8d29e2SArthur Grillo 			DRM_MM_BUG_ON(!nsize);
2103*fc8d29e2SArthur Grillo 
2104*fc8d29e2SArthur Grillo 			drm_random_reorder(order, total_size, &prng);
2105*fc8d29e2SArthur Grillo 			if (evict_color(test, &mm, 0, U64_MAX, nodes, order, total_size,
2106*fc8d29e2SArthur Grillo 					nsize, n, color++, mode)) {
2107*fc8d29e2SArthur Grillo 				KUNIT_FAIL(test, "%s evict_color(size=%u, alignment=%u) failed\n",
2108*fc8d29e2SArthur Grillo 					   mode->name, nsize, n);
2109*fc8d29e2SArthur Grillo 				goto out;
2110*fc8d29e2SArthur Grillo 			}
2111*fc8d29e2SArthur Grillo 		}
2112*fc8d29e2SArthur Grillo 
2113*fc8d29e2SArthur Grillo 		cond_resched();
2114*fc8d29e2SArthur Grillo 	}
2115*fc8d29e2SArthur Grillo 
2116*fc8d29e2SArthur Grillo out:
2117*fc8d29e2SArthur Grillo 	drm_mm_for_each_node_safe(node, next, &mm)
2118*fc8d29e2SArthur Grillo 		drm_mm_remove_node(node);
2119*fc8d29e2SArthur Grillo 	drm_mm_takedown(&mm);
2120*fc8d29e2SArthur Grillo 	kfree(order);
2121*fc8d29e2SArthur Grillo err_nodes:
2122*fc8d29e2SArthur Grillo 	vfree(nodes);
2123*fc8d29e2SArthur Grillo }
2124*fc8d29e2SArthur Grillo 
2125*fc8d29e2SArthur Grillo static void igt_mm_color_evict_range(struct kunit *test)
2126*fc8d29e2SArthur Grillo {
2127*fc8d29e2SArthur Grillo 	DRM_RND_STATE(prng, random_seed);
2128*fc8d29e2SArthur Grillo 	const unsigned int total_size = 8192;
2129*fc8d29e2SArthur Grillo 	const unsigned int range_size = total_size / 2;
2130*fc8d29e2SArthur Grillo 	const unsigned int range_start = total_size / 4;
2131*fc8d29e2SArthur Grillo 	const unsigned int range_end = range_start + range_size;
2132*fc8d29e2SArthur Grillo 	const struct insert_mode *mode;
2133*fc8d29e2SArthur Grillo 	unsigned long color = 0;
2134*fc8d29e2SArthur Grillo 	struct drm_mm mm;
2135*fc8d29e2SArthur Grillo 	struct evict_node *nodes;
2136*fc8d29e2SArthur Grillo 	struct drm_mm_node *node, *next;
2137*fc8d29e2SArthur Grillo 	unsigned int *order, n;
2138*fc8d29e2SArthur Grillo 
2139*fc8d29e2SArthur Grillo 	/* Like igt_color_evict(), but limited to small portion of the full
2140*fc8d29e2SArthur Grillo 	 * drm_mm range.
2141*fc8d29e2SArthur Grillo 	 */
2142*fc8d29e2SArthur Grillo 
2143*fc8d29e2SArthur Grillo 	nodes = vzalloc(array_size(total_size, sizeof(*nodes)));
2144*fc8d29e2SArthur Grillo 	KUNIT_ASSERT_TRUE(test, nodes);
2145*fc8d29e2SArthur Grillo 
2146*fc8d29e2SArthur Grillo 	order = drm_random_order(total_size, &prng);
2147*fc8d29e2SArthur Grillo 	if (!order)
2148*fc8d29e2SArthur Grillo 		goto err_nodes;
2149*fc8d29e2SArthur Grillo 
2150*fc8d29e2SArthur Grillo 	drm_mm_init(&mm, 0, 2 * total_size - 1);
2151*fc8d29e2SArthur Grillo 	mm.color_adjust = separate_adjacent_colors;
2152*fc8d29e2SArthur Grillo 	for (n = 0; n < total_size; n++) {
2153*fc8d29e2SArthur Grillo 		if (!expect_insert(test, &mm, &nodes[n].node,
2154*fc8d29e2SArthur Grillo 				   1, 0, color++,
2155*fc8d29e2SArthur Grillo 				   &insert_modes[0])) {
2156*fc8d29e2SArthur Grillo 			KUNIT_FAIL(test, "insert failed, step %d\n", n);
2157*fc8d29e2SArthur Grillo 			goto out;
2158*fc8d29e2SArthur Grillo 		}
2159*fc8d29e2SArthur Grillo 	}
2160*fc8d29e2SArthur Grillo 
2161*fc8d29e2SArthur Grillo 	for (mode = evict_modes; mode->name; mode++) {
2162*fc8d29e2SArthur Grillo 		for (n = 1; n <= range_size; n <<= 1) {
2163*fc8d29e2SArthur Grillo 			drm_random_reorder(order, range_size, &prng);
2164*fc8d29e2SArthur Grillo 			if (evict_color(test, &mm, range_start, range_end, nodes, order,
2165*fc8d29e2SArthur Grillo 					total_size, n, 1, color++, mode)) {
2166*fc8d29e2SArthur Grillo 				KUNIT_FAIL(test,
2167*fc8d29e2SArthur Grillo 					   "%s evict_color(size=%u) failed for range [%x, %x]\n",
2168*fc8d29e2SArthur Grillo 						mode->name, n, range_start, range_end);
2169*fc8d29e2SArthur Grillo 				goto out;
2170*fc8d29e2SArthur Grillo 			}
2171*fc8d29e2SArthur Grillo 		}
2172*fc8d29e2SArthur Grillo 
2173*fc8d29e2SArthur Grillo 		for (n = 1; n < range_size; n <<= 1) {
2174*fc8d29e2SArthur Grillo 			drm_random_reorder(order, total_size, &prng);
2175*fc8d29e2SArthur Grillo 			if (evict_color(test, &mm, range_start, range_end, nodes, order,
2176*fc8d29e2SArthur Grillo 					total_size, range_size / 2, n, color++, mode)) {
2177*fc8d29e2SArthur Grillo 				KUNIT_FAIL(test,
2178*fc8d29e2SArthur Grillo 					   "%s evict_color(size=%u, alignment=%u) failed for range [%x, %x]\n",
2179*fc8d29e2SArthur Grillo 					   mode->name, total_size / 2, n, range_start, range_end);
2180*fc8d29e2SArthur Grillo 				goto out;
2181*fc8d29e2SArthur Grillo 			}
2182*fc8d29e2SArthur Grillo 		}
2183*fc8d29e2SArthur Grillo 
2184*fc8d29e2SArthur Grillo 		for_each_prime_number_from(n, 1, min(range_size, max_prime)) {
2185*fc8d29e2SArthur Grillo 			unsigned int nsize = (range_size - n + 1) / 2;
2186*fc8d29e2SArthur Grillo 
2187*fc8d29e2SArthur Grillo 			DRM_MM_BUG_ON(!nsize);
2188*fc8d29e2SArthur Grillo 
2189*fc8d29e2SArthur Grillo 			drm_random_reorder(order, total_size, &prng);
2190*fc8d29e2SArthur Grillo 			if (evict_color(test, &mm, range_start, range_end, nodes, order,
2191*fc8d29e2SArthur Grillo 					total_size, nsize, n, color++, mode)) {
2192*fc8d29e2SArthur Grillo 				KUNIT_FAIL(test,
2193*fc8d29e2SArthur Grillo 					   "%s evict_color(size=%u, alignment=%u) failed for range [%x, %x]\n",
2194*fc8d29e2SArthur Grillo 					   mode->name, nsize, n, range_start, range_end);
2195*fc8d29e2SArthur Grillo 				goto out;
2196*fc8d29e2SArthur Grillo 			}
2197*fc8d29e2SArthur Grillo 		}
2198*fc8d29e2SArthur Grillo 
2199*fc8d29e2SArthur Grillo 		cond_resched();
2200*fc8d29e2SArthur Grillo 	}
2201*fc8d29e2SArthur Grillo 
2202*fc8d29e2SArthur Grillo out:
2203*fc8d29e2SArthur Grillo 	drm_mm_for_each_node_safe(node, next, &mm)
2204*fc8d29e2SArthur Grillo 		drm_mm_remove_node(node);
2205*fc8d29e2SArthur Grillo 	drm_mm_takedown(&mm);
2206*fc8d29e2SArthur Grillo 	kfree(order);
2207*fc8d29e2SArthur Grillo err_nodes:
2208*fc8d29e2SArthur Grillo 	vfree(nodes);
2209*fc8d29e2SArthur Grillo }
2210*fc8d29e2SArthur Grillo 
2211*fc8d29e2SArthur Grillo static int drm_mm_init_test(struct kunit *test)
2212*fc8d29e2SArthur Grillo {
2213*fc8d29e2SArthur Grillo 	while (!random_seed)
2214*fc8d29e2SArthur Grillo 		random_seed = get_random_int();
2215*fc8d29e2SArthur Grillo 
2216*fc8d29e2SArthur Grillo 	return 0;
2217*fc8d29e2SArthur Grillo }
2218*fc8d29e2SArthur Grillo 
2219*fc8d29e2SArthur Grillo module_param(random_seed, uint, 0400);
2220*fc8d29e2SArthur Grillo module_param(max_iterations, uint, 0400);
2221*fc8d29e2SArthur Grillo module_param(max_prime, uint, 0400);
2222*fc8d29e2SArthur Grillo 
2223*fc8d29e2SArthur Grillo static struct kunit_case drm_mm_tests[] = {
2224*fc8d29e2SArthur Grillo 	KUNIT_CASE(igt_mm_init),
2225*fc8d29e2SArthur Grillo 	KUNIT_CASE(igt_mm_debug),
2226*fc8d29e2SArthur Grillo 	KUNIT_CASE(igt_mm_reserve),
2227*fc8d29e2SArthur Grillo 	KUNIT_CASE(igt_mm_insert),
2228*fc8d29e2SArthur Grillo 	KUNIT_CASE(igt_mm_replace),
2229*fc8d29e2SArthur Grillo 	KUNIT_CASE(igt_mm_insert_range),
2230*fc8d29e2SArthur Grillo 	KUNIT_CASE(igt_mm_frag),
2231*fc8d29e2SArthur Grillo 	KUNIT_CASE(igt_mm_align),
2232*fc8d29e2SArthur Grillo 	KUNIT_CASE(igt_mm_align32),
2233*fc8d29e2SArthur Grillo 	KUNIT_CASE(igt_mm_align64),
2234*fc8d29e2SArthur Grillo 	KUNIT_CASE(igt_mm_evict),
2235*fc8d29e2SArthur Grillo 	KUNIT_CASE(igt_mm_evict_range),
2236*fc8d29e2SArthur Grillo 	KUNIT_CASE(igt_mm_topdown),
2237*fc8d29e2SArthur Grillo 	KUNIT_CASE(igt_mm_bottomup),
2238*fc8d29e2SArthur Grillo 	KUNIT_CASE(igt_mm_lowest),
2239*fc8d29e2SArthur Grillo 	KUNIT_CASE(igt_mm_highest),
2240*fc8d29e2SArthur Grillo 	KUNIT_CASE(igt_mm_color),
2241*fc8d29e2SArthur Grillo 	KUNIT_CASE(igt_mm_color_evict),
2242*fc8d29e2SArthur Grillo 	KUNIT_CASE(igt_mm_color_evict_range),
2243*fc8d29e2SArthur Grillo 	{}
2244*fc8d29e2SArthur Grillo };
2245*fc8d29e2SArthur Grillo 
2246*fc8d29e2SArthur Grillo static struct kunit_suite drm_mm_test_suite = {
2247*fc8d29e2SArthur Grillo 	.name = "drm_mm",
2248*fc8d29e2SArthur Grillo 	.init = drm_mm_init_test,
2249*fc8d29e2SArthur Grillo 	.test_cases = drm_mm_tests,
2250*fc8d29e2SArthur Grillo };
2251*fc8d29e2SArthur Grillo 
2252*fc8d29e2SArthur Grillo kunit_test_suite(drm_mm_test_suite);
2253*fc8d29e2SArthur Grillo 
2254*fc8d29e2SArthur Grillo MODULE_AUTHOR("Intel Corporation");
2255*fc8d29e2SArthur Grillo MODULE_LICENSE("GPL");
2256