// SPDX-License-Identifier: GPL-2.0-or-later
#include "alloc_api.h"

static int alloc_test_flags = TEST_F_NONE;

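/* Name of the allocator variant under test, used to build the test prefix */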
static inline const char * const get_memblock_alloc_name(int flags)
{
	if (flags & TEST_F_RAW)
		return "memblock_alloc_raw";
	return "memblock_alloc";
}

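/* Call the allocator variant selected by alloc_test_flags */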
static inline void *run_memblock_alloc(phys_addr_t size, phys_addr_t align)
{
	if (alloc_test_flags & TEST_F_RAW)
		return memblock_alloc_raw(size, align);
	return memblock_alloc(size, align);
}

/*
 * A simple test that tries to allocate a small memory region.
 * Expect to allocate an aligned region near the end of the available memory.
 */
static int alloc_top_down_simple_check(void)
{
	struct memblock_region *rgn = &memblock.reserved.regions[0];
	void *allocated_ptr = NULL;
	phys_addr_t size = SZ_2;
	phys_addr_t expected_start;

	PREFIX_PUSH();
	setup_memblock();

	expected_start = memblock_end_of_DRAM() - SMP_CACHE_BYTES;

	allocated_ptr = run_memblock_alloc(size, SMP_CACHE_BYTES);

	ASSERT_NE(allocated_ptr, NULL);
	assert_mem_content(allocated_ptr, size, alloc_test_flags);

	ASSERT_EQ(rgn->size, size);
	ASSERT_EQ(rgn->base, expected_start);

	ASSERT_EQ(memblock.reserved.cnt, 1);
	ASSERT_EQ(memblock.reserved.total_size, size);

	test_pass_pop();

	return 0;
}

/*
 * A test that tries to allocate memory next to a reserved region that starts
 * at a misaligned address. Expect to create two separate entries, with the new
 * entry aligned to the provided alignment:
 *
 *              +
 * |            +--------+         +--------|
 * |            |  rgn2  |         |  rgn1  |
 * +------------+--------+---------+--------+
 *              ^
 *              |
 *              Aligned address boundary
 *
 * The allocation direction is top-down and region arrays are sorted from lower
 * to higher addresses, so the new region will be the first entry in the
 * memblock.reserved array. The previously reserved region does not get
 * modified. The region counter and total size get updated.
 */
static int alloc_top_down_disjoint_check(void)
{
	/* After allocation, this will point to the "old" region */
	struct memblock_region *rgn1 = &memblock.reserved.regions[1];
	struct memblock_region *rgn2 = &memblock.reserved.regions[0];
	struct region r1;
	void *allocated_ptr = NULL;
	phys_addr_t r2_size = SZ_16;
	/* Use custom alignment */
	phys_addr_t alignment = SMP_CACHE_BYTES * 2;
	phys_addr_t total_size;
	phys_addr_t expected_start;

	PREFIX_PUSH();
	setup_memblock();

	r1.base = memblock_end_of_DRAM() - SZ_2;
	r1.size = SZ_2;

	total_size = r1.size + r2_size;
	expected_start = memblock_end_of_DRAM() - alignment;

	memblock_reserve(r1.base, r1.size);

	allocated_ptr = run_memblock_alloc(r2_size, alignment);

	ASSERT_NE(allocated_ptr, NULL);
	assert_mem_content(allocated_ptr, r2_size, alloc_test_flags);

	ASSERT_EQ(rgn1->size, r1.size);
	ASSERT_EQ(rgn1->base, r1.base);

	ASSERT_EQ(rgn2->size, r2_size);
	ASSERT_EQ(rgn2->base, expected_start);

	ASSERT_EQ(memblock.reserved.cnt, 2);
	ASSERT_EQ(memblock.reserved.total_size, total_size);

	test_pass_pop();

	return 0;
}

/*
 * A test that tries to allocate memory when there is enough space at the end
 * of the previously reserved block (i.e. first fit):
 *
 *  |              +--------+--------------|
 *  |              |   r1   |      r2      |
 *  +--------------+--------+--------------+
 *
 * Expect a merge of both regions. Only the region size gets updated.
 */
static int alloc_top_down_before_check(void)
{
	struct memblock_region *rgn = &memblock.reserved.regions[0];
	void *allocated_ptr = NULL;
	/*
	 * The first region ends at an aligned address, to test region merging
	 */
	phys_addr_t r1_size = SMP_CACHE_BYTES;
	phys_addr_t r2_size = SZ_512;
	phys_addr_t total_size = r1_size + r2_size;

	PREFIX_PUSH();
	setup_memblock();

	memblock_reserve(memblock_end_of_DRAM() - total_size, r1_size);

	allocated_ptr = run_memblock_alloc(r2_size, SMP_CACHE_BYTES);

	ASSERT_NE(allocated_ptr, NULL);
	assert_mem_content(allocated_ptr, r2_size, alloc_test_flags);

	ASSERT_EQ(rgn->size, total_size);
	ASSERT_EQ(rgn->base, memblock_end_of_DRAM() - total_size);

	ASSERT_EQ(memblock.reserved.cnt, 1);
	ASSERT_EQ(memblock.reserved.total_size, total_size);

	test_pass_pop();

	return 0;
}

/*
 * A test that tries to allocate memory when there is not enough space at the
 * end of the previously reserved block (i.e. second fit):
 *
 *  |            +-----------+------+     |
 *  |            |     r2    |  r1  |     |
 *  +------------+-----------+------+-----+
 *
 * Expect a merge of both regions. Both the base address and size of the region
 * get updated.
 */
static int alloc_top_down_after_check(void)
{
	struct memblock_region *rgn = &memblock.reserved.regions[0];
	struct region r1;
	void *allocated_ptr = NULL;
	phys_addr_t r2_size = SZ_512;
	phys_addr_t total_size;

	PREFIX_PUSH();
	setup_memblock();

	/*
	 * The first region starts at an aligned address, to test region merging
	 */
	r1.base = memblock_end_of_DRAM() - SMP_CACHE_BYTES;
	r1.size = SZ_8;

	total_size = r1.size + r2_size;

	memblock_reserve(r1.base, r1.size);

	allocated_ptr = run_memblock_alloc(r2_size, SMP_CACHE_BYTES);

	ASSERT_NE(allocated_ptr, NULL);
	assert_mem_content(allocated_ptr, r2_size, alloc_test_flags);

	ASSERT_EQ(rgn->size, total_size);
	ASSERT_EQ(rgn->base, r1.base - r2_size);

	ASSERT_EQ(memblock.reserved.cnt, 1);
	ASSERT_EQ(memblock.reserved.total_size, total_size);

	test_pass_pop();

	return 0;
}

/*
 * A test that tries to allocate memory when there are two reserved regions with
 * a gap too small to fit the new region:
 *
 *  |       +--------+----------+   +------|
 *  |       |   r3   |    r2    |   |  r1  |
 *  +-------+--------+----------+---+------+
 *
 * Expect to allocate a region before the one that starts at the lower address,
 * and merge them into one. The region counter and total size fields get
 * updated.
 */
static int alloc_top_down_second_fit_check(void)
{
	struct memblock_region *rgn = &memblock.reserved.regions[0];
	struct region r1, r2;
	void *allocated_ptr = NULL;
	phys_addr_t r3_size = SZ_1K;
	phys_addr_t total_size;

	PREFIX_PUSH();
	setup_memblock();

	r1.base = memblock_end_of_DRAM() - SZ_512;
	r1.size = SZ_512;

	r2.base = r1.base - SZ_512;
	r2.size = SZ_256;

	total_size = r1.size + r2.size + r3_size;

	memblock_reserve(r1.base, r1.size);
	memblock_reserve(r2.base, r2.size);

	allocated_ptr = run_memblock_alloc(r3_size, SMP_CACHE_BYTES);

	ASSERT_NE(allocated_ptr, NULL);
	assert_mem_content(allocated_ptr, r3_size, alloc_test_flags);

	ASSERT_EQ(rgn->size, r2.size + r3_size);
	ASSERT_EQ(rgn->base, r2.base - r3_size);

	ASSERT_EQ(memblock.reserved.cnt, 2);
	ASSERT_EQ(memblock.reserved.total_size, total_size);

	test_pass_pop();

	return 0;
}

/*
 * A test that tries to allocate memory when there are two reserved regions with
 * a gap big enough to accommodate the new region:
 *
 *  |     +--------+--------+--------+     |
 *  |     |   r2   |   r3   |   r1   |     |
 *  +-----+--------+--------+--------+-----+
 *
 * Expect to merge all of them, creating one big entry in the memblock.reserved
 * array. The region counter and total size fields get updated.
 */
static int alloc_in_between_generic_check(void)
{
	struct memblock_region *rgn = &memblock.reserved.regions[0];
	struct region r1, r2;
	void *allocated_ptr = NULL;
	phys_addr_t gap_size = SMP_CACHE_BYTES;
	phys_addr_t r3_size = SZ_64;
	/*
	 * Calculate the size of the reserved regions so that there is just
	 * enough space left for the new entry
	 */
	phys_addr_t rgn_size = (MEM_SIZE - (2 * gap_size + r3_size)) / 2;
	phys_addr_t total_size;

	PREFIX_PUSH();
	setup_memblock();

	r1.size = rgn_size;
	r1.base = memblock_end_of_DRAM() - (gap_size + rgn_size);

	r2.size = rgn_size;
	r2.base = memblock_start_of_DRAM() + gap_size;

	total_size = r1.size + r2.size + r3_size;

	memblock_reserve(r1.base, r1.size);
	memblock_reserve(r2.base, r2.size);

	allocated_ptr = run_memblock_alloc(r3_size, SMP_CACHE_BYTES);

	ASSERT_NE(allocated_ptr, NULL);
	assert_mem_content(allocated_ptr, r3_size, alloc_test_flags);

	ASSERT_EQ(rgn->size, total_size);
	ASSERT_EQ(rgn->base, r1.base - r2.size - r3_size);

	ASSERT_EQ(memblock.reserved.cnt, 1);
	ASSERT_EQ(memblock.reserved.total_size, total_size);

	test_pass_pop();

	return 0;
}

/*
 * A test that tries to allocate memory when the memory is filled with reserved
 * regions with memory gaps too small to fit the new region:
 *
 * +-------+
 * |  new  |
 * +--+----+
 *    |    +-----+    +-----+    +-----+    |
 *    |    | res |    | res |    | res |    |
 *    +----+-----+----+-----+----+-----+----+
 *
 * Expect no allocation to happen.
 */
static int alloc_small_gaps_generic_check(void)
{
	void *allocated_ptr = NULL;
	phys_addr_t region_size = SZ_1K;
	phys_addr_t gap_size = SZ_256;
	phys_addr_t region_end;

	PREFIX_PUSH();
	setup_memblock();

	region_end = memblock_start_of_DRAM();

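	/* Fill all of memory with reserved regions separated by small gaps */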
	while (region_end < memblock_end_of_DRAM()) {
		memblock_reserve(region_end + gap_size, region_size);
		region_end += gap_size + region_size;
	}

	allocated_ptr = run_memblock_alloc(region_size, SMP_CACHE_BYTES);

	ASSERT_EQ(allocated_ptr, NULL);

	test_pass_pop();

	return 0;
}

/*
 * A test that tries to allocate memory when all memory is reserved.
 * Expect no allocation to happen.
 */
static int alloc_all_reserved_generic_check(void)
{
	void *allocated_ptr = NULL;

	PREFIX_PUSH();
	setup_memblock();

	/* Simulate full memory */
	memblock_reserve(memblock_start_of_DRAM(), MEM_SIZE);

	allocated_ptr = run_memblock_alloc(SZ_256, SMP_CACHE_BYTES);

	ASSERT_EQ(allocated_ptr, NULL);

	test_pass_pop();

	return 0;
}

/*
 * A test that tries to allocate memory when the memory is almost full,
 * with not enough space left for the new region:
 *
 *                                +-------+
 *                                |  new  |
 *                                +-------+
 *  |-----------------------------+   |
 *  |          reserved           |   |
 *  +-----------------------------+---+
 *
 * Expect no allocation to happen.
 */
static int alloc_no_space_generic_check(void)
{
	void *allocated_ptr = NULL;
	phys_addr_t available_size = SZ_256;
	phys_addr_t reserved_size = MEM_SIZE - available_size;

	PREFIX_PUSH();
	setup_memblock();

	/* Simulate almost-full memory */
	memblock_reserve(memblock_start_of_DRAM(), reserved_size);

	allocated_ptr = run_memblock_alloc(SZ_1K, SMP_CACHE_BYTES);

	ASSERT_EQ(allocated_ptr, NULL);

	test_pass_pop();

	return 0;
}

/*
 * A test that tries to allocate memory when the memory is almost full,
 * but there is just enough space left:
 *
 *  |---------------------------+---------|
 *  |          reserved         |   new   |
 *  +---------------------------+---------+
 *
 * Expect to allocate memory and merge all the regions. The total size field
 * gets updated.
 */
static int alloc_limited_space_generic_check(void)
{
	struct memblock_region *rgn = &memblock.reserved.regions[0];
	void *allocated_ptr = NULL;
	phys_addr_t available_size = SZ_256;
	phys_addr_t reserved_size = MEM_SIZE - available_size;

	PREFIX_PUSH();
	setup_memblock();

	/* Simulate almost-full memory */
	memblock_reserve(memblock_start_of_DRAM(), reserved_size);

	allocated_ptr = run_memblock_alloc(available_size, SMP_CACHE_BYTES);

	ASSERT_NE(allocated_ptr, NULL);
	assert_mem_content(allocated_ptr, available_size, alloc_test_flags);

	ASSERT_EQ(rgn->size, MEM_SIZE);
	ASSERT_EQ(rgn->base, memblock_start_of_DRAM());

	ASSERT_EQ(memblock.reserved.cnt, 1);
	ASSERT_EQ(memblock.reserved.total_size, MEM_SIZE);

	test_pass_pop();

	return 0;
}

/*
 * A test that tries to allocate memory when there is no available memory
 * registered (i.e. memblock.memory has only a dummy entry).
 * Expect no allocation to happen.
 */
static int alloc_no_memory_generic_check(void)
{
	struct memblock_region *rgn = &memblock.reserved.regions[0];
	void *allocated_ptr = NULL;

	PREFIX_PUSH();

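	/* No setup_memblock() here: memblock.memory keeps only the dummy entry */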
	reset_memblock_regions();

	allocated_ptr = run_memblock_alloc(SZ_1K, SMP_CACHE_BYTES);

	ASSERT_EQ(allocated_ptr, NULL);
	ASSERT_EQ(rgn->size, 0);
	ASSERT_EQ(rgn->base, 0);
	ASSERT_EQ(memblock.reserved.total_size, 0);

	test_pass_pop();

	return 0;
}

/*
 * A test that tries to allocate a region that is larger than the total size of
 * available memory (memblock.memory):
 *
 *  +-----------------------------------+
 *  |                 new               |
 *  +-----------------------------------+
 *  |                                 |
 *  |                                 |
 *  +---------------------------------+
 *
 * Expect no allocation to happen.
 */
static int alloc_too_large_generic_check(void)
{
	struct memblock_region *rgn = &memblock.reserved.regions[0];
	void *allocated_ptr = NULL;

	PREFIX_PUSH();
	setup_memblock();

	allocated_ptr = run_memblock_alloc(MEM_SIZE + SZ_2, SMP_CACHE_BYTES);

	ASSERT_EQ(allocated_ptr, NULL);
	ASSERT_EQ(rgn->size, 0);
	ASSERT_EQ(rgn->base, 0);
	ASSERT_EQ(memblock.reserved.total_size, 0);

	test_pass_pop();

	return 0;
}

/*
 * A simple test that tries to allocate a small memory region.
 * Expect to allocate an aligned region at the beginning of the available
 * memory.
 */
static int alloc_bottom_up_simple_check(void)
{
	struct memblock_region *rgn = &memblock.reserved.regions[0];
	void *allocated_ptr = NULL;

	PREFIX_PUSH();
	setup_memblock();

	allocated_ptr = run_memblock_alloc(SZ_2, SMP_CACHE_BYTES);

	ASSERT_NE(allocated_ptr, NULL);
	assert_mem_content(allocated_ptr, SZ_2, alloc_test_flags);

	ASSERT_EQ(rgn->size, SZ_2);
	ASSERT_EQ(rgn->base, memblock_start_of_DRAM());

	ASSERT_EQ(memblock.reserved.cnt, 1);
	ASSERT_EQ(memblock.reserved.total_size, SZ_2);

	test_pass_pop();

	return 0;
}

/*
 * A test that tries to allocate memory next to a reserved region that starts
 * at a misaligned address. Expect to create two separate entries, with the new
 * entry aligned to the provided alignment:
 *
 *                      +
 *  |    +----------+   +----------+     |
 *  |    |   rgn1   |   |   rgn2   |     |
 *  +----+----------+---+----------+-----+
 *                      ^
 *                      |
 *                      Aligned address boundary
 *
 * The allocation direction is bottom-up, so the new region will be the second
 * entry in the memblock.reserved array. The previously reserved region does
 * not get modified. The region counter and total size get updated.
 */
static int alloc_bottom_up_disjoint_check(void)
{
	struct memblock_region *rgn1 = &memblock.reserved.regions[0];
	struct memblock_region *rgn2 = &memblock.reserved.regions[1];
	struct region r1;
	void *allocated_ptr = NULL;
	phys_addr_t r2_size = SZ_16;
	/* Use custom alignment */
	phys_addr_t alignment = SMP_CACHE_BYTES * 2;
	phys_addr_t total_size;
	phys_addr_t expected_start;

	PREFIX_PUSH();
	setup_memblock();

	r1.base = memblock_start_of_DRAM() + SZ_2;
	r1.size = SZ_2;

	total_size = r1.size + r2_size;
	expected_start = memblock_start_of_DRAM() + alignment;

	memblock_reserve(r1.base, r1.size);

	allocated_ptr = run_memblock_alloc(r2_size, alignment);

	ASSERT_NE(allocated_ptr, NULL);
	assert_mem_content(allocated_ptr, r2_size, alloc_test_flags);

	ASSERT_EQ(rgn1->size, r1.size);
	ASSERT_EQ(rgn1->base, r1.base);

	ASSERT_EQ(rgn2->size, r2_size);
	ASSERT_EQ(rgn2->base, expected_start);

	ASSERT_EQ(memblock.reserved.cnt, 2);
	ASSERT_EQ(memblock.reserved.total_size, total_size);

	test_pass_pop();

	return 0;
}

/*
 * A test that tries to allocate memory when there is enough space at
 * the beginning of the previously reserved block (i.e. first fit):
 *
 *  |------------------+--------+         |
 *  |        r1        |   r2   |         |
 *  +------------------+--------+---------+
 *
 * Expect a merge of both regions. Both the base address and size of the
 * region get updated.
 */
static int alloc_bottom_up_before_check(void)
{
	struct memblock_region *rgn = &memblock.reserved.regions[0];
	void *allocated_ptr = NULL;
	phys_addr_t r1_size = SZ_512;
	phys_addr_t r2_size = SZ_128;
	phys_addr_t total_size = r1_size + r2_size;

	PREFIX_PUSH();
	setup_memblock();

	memblock_reserve(memblock_start_of_DRAM() + r1_size, r2_size);

	allocated_ptr = run_memblock_alloc(r1_size, SMP_CACHE_BYTES);

	ASSERT_NE(allocated_ptr, NULL);
	assert_mem_content(allocated_ptr, r1_size, alloc_test_flags);

	ASSERT_EQ(rgn->size, total_size);
	ASSERT_EQ(rgn->base, memblock_start_of_DRAM());

	ASSERT_EQ(memblock.reserved.cnt, 1);
	ASSERT_EQ(memblock.reserved.total_size, total_size);

	test_pass_pop();

	return 0;
}

/*
 * A test that tries to allocate memory when there is not enough space at
 * the beginning of the previously reserved block (i.e. second fit):
 *
 *  |    +--------+--------------+         |
 *  |    |   r1   |      r2      |         |
 *  +----+--------+--------------+---------+
 *
 * Expect a merge of both regions. Only the region size gets updated.
 */
static int alloc_bottom_up_after_check(void)
{
	struct memblock_region *rgn = &memblock.reserved.regions[0];
	struct region r1;
	void *allocated_ptr = NULL;
	phys_addr_t r2_size = SZ_512;
	phys_addr_t total_size;

	PREFIX_PUSH();
	setup_memblock();

	/*
	 * The first region starts at an aligned address, to test region merging
	 */
	r1.base = memblock_start_of_DRAM() + SMP_CACHE_BYTES;
	r1.size = SZ_64;

	total_size = r1.size + r2_size;

	memblock_reserve(r1.base, r1.size);

	allocated_ptr = run_memblock_alloc(r2_size, SMP_CACHE_BYTES);

	ASSERT_NE(allocated_ptr, NULL);
	assert_mem_content(allocated_ptr, r2_size, alloc_test_flags);

	ASSERT_EQ(rgn->size, total_size);
	ASSERT_EQ(rgn->base, r1.base);

	ASSERT_EQ(memblock.reserved.cnt, 1);
	ASSERT_EQ(memblock.reserved.total_size, total_size);

	test_pass_pop();

	return 0;
}

/*
 * A test that tries to allocate memory when there are two reserved regions, the
 * first one starting at the beginning of the available memory, with a gap too
 * small to fit the new region:
 *
 *  |------------+     +--------+--------+  |
 *  |     r1     |     |   r2   |   r3   |  |
 *  +------------+-----+--------+--------+--+
 *
 * Expect to allocate after the second region, which starts at the higher
 * address, and merge them into one. The region counter and total size fields
 * get updated.
 */
static int alloc_bottom_up_second_fit_check(void)
{
	struct memblock_region *rgn = &memblock.reserved.regions[1];
	struct region r1, r2;
	void *allocated_ptr = NULL;
	phys_addr_t r3_size = SZ_1K;
	phys_addr_t total_size;

	PREFIX_PUSH();
	setup_memblock();

	r1.base = memblock_start_of_DRAM();
	r1.size = SZ_512;

	r2.base = r1.base + r1.size + SZ_512;
	r2.size = SZ_256;

	total_size = r1.size + r2.size + r3_size;

	memblock_reserve(r1.base, r1.size);
	memblock_reserve(r2.base, r2.size);

	allocated_ptr = run_memblock_alloc(r3_size, SMP_CACHE_BYTES);

	ASSERT_NE(allocated_ptr, NULL);
	assert_mem_content(allocated_ptr, r3_size, alloc_test_flags);

	ASSERT_EQ(rgn->size, r2.size + r3_size);
	ASSERT_EQ(rgn->base, r2.base);

	ASSERT_EQ(memblock.reserved.cnt, 2);
	ASSERT_EQ(memblock.reserved.total_size, total_size);

	test_pass_pop();

	return 0;
}

/* Test case wrappers */
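/* Each wrapper runs its test in both the top-down and bottom-up direction */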
static int alloc_simple_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	memblock_set_bottom_up(false);
	alloc_top_down_simple_check();
	memblock_set_bottom_up(true);
	alloc_bottom_up_simple_check();

	return 0;
}

static int alloc_disjoint_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	memblock_set_bottom_up(false);
	alloc_top_down_disjoint_check();
	memblock_set_bottom_up(true);
	alloc_bottom_up_disjoint_check();

	return 0;
}

static int alloc_before_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	memblock_set_bottom_up(false);
	alloc_top_down_before_check();
	memblock_set_bottom_up(true);
	alloc_bottom_up_before_check();

	return 0;
}

static int alloc_after_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	memblock_set_bottom_up(false);
	alloc_top_down_after_check();
	memblock_set_bottom_up(true);
	alloc_bottom_up_after_check();

	return 0;
}

static int alloc_in_between_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	run_top_down(alloc_in_between_generic_check);
	run_bottom_up(alloc_in_between_generic_check);

	return 0;
}

static int alloc_second_fit_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	memblock_set_bottom_up(false);
	alloc_top_down_second_fit_check();
	memblock_set_bottom_up(true);
	alloc_bottom_up_second_fit_check();

	return 0;
}

static int alloc_small_gaps_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	run_top_down(alloc_small_gaps_generic_check);
	run_bottom_up(alloc_small_gaps_generic_check);

	return 0;
}

static int alloc_all_reserved_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	run_top_down(alloc_all_reserved_generic_check);
	run_bottom_up(alloc_all_reserved_generic_check);

	return 0;
}

static int alloc_no_space_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	run_top_down(alloc_no_space_generic_check);
	run_bottom_up(alloc_no_space_generic_check);

	return 0;
}

static int alloc_limited_space_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	run_top_down(alloc_limited_space_generic_check);
	run_bottom_up(alloc_limited_space_generic_check);

	return 0;
}

static int alloc_no_memory_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	run_top_down(alloc_no_memory_generic_check);
	run_bottom_up(alloc_no_memory_generic_check);

	return 0;
}

static int alloc_too_large_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	run_top_down(alloc_too_large_generic_check);
	run_bottom_up(alloc_too_large_generic_check);

	return 0;
}

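/*
 * Run all allocation tests with the given test flags, prefixing the output
 * with the name of the allocator variant under test.
 */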
static int memblock_alloc_checks_internal(int flags)
{
	const char *func = get_memblock_alloc_name(flags);

	alloc_test_flags = flags;
	prefix_reset();
	prefix_push(func);
	test_print("Running %s tests...\n", func);

	reset_memblock_attributes();
	dummy_physical_memory_init();

	alloc_simple_check();
	alloc_disjoint_check();
	alloc_before_check();
	alloc_after_check();
	alloc_second_fit_check();
	alloc_small_gaps_check();
	alloc_in_between_check();
	alloc_all_reserved_check();
	alloc_no_space_check();
	alloc_limited_space_check();
	alloc_no_memory_check();
	alloc_too_large_check();

	dummy_physical_memory_cleanup();

	prefix_pop();

	return 0;
}

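/* Run the suite once for memblock_alloc() and once for memblock_alloc_raw() */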
int memblock_alloc_checks(void)
{
	memblock_alloc_checks_internal(TEST_F_NONE);
	memblock_alloc_checks_internal(TEST_F_RAW);

	return 0;
}