Lines Matching full:region

25 (unsigned long long)lmb->memory.region[i].base); in lmb_dump_all()
27 (unsigned long long)lmb->memory.region[i].size); in lmb_dump_all()
36 (unsigned long long)lmb->reserved.region[i].base); in lmb_dump_all()
38 (unsigned long long)lmb->reserved.region[i].size); in lmb_dump_all()
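
All of the matches above index into the lmb->memory and lmb->reserved tables. A minimal sketch of the underlying structures, with field names taken from the matched lines; the typedefs, the array bound, and the MAX_LMB_REGIONS name are assumptions, roughly following include/lmb.h in older U-Boot trees:

    typedef unsigned long long phys_addr_t;   /* assumed: 64-bit physical address */
    typedef unsigned long long phys_size_t;   /* assumed: 64-bit region size */

    #define MAX_LMB_REGIONS 8                  /* assumed fixed table size */

    struct lmb_property {
        phys_addr_t base;                      /* first byte of the region */
        phys_size_t size;                      /* length of the region in bytes */
    };

    struct lmb_region {
        unsigned long cnt;                     /* number of valid entries in region[] */
        struct lmb_property region[MAX_LMB_REGIONS + 1]; /* one spare slot used during insertion */
    };

    struct lmb {
        struct lmb_region memory;              /* usable RAM */
        struct lmb_region reserved;            /* allocated / reserved ranges */
    };
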
66 phys_addr_t base1 = rgn->region[r1].base; in lmb_regions_adjacent()
67 phys_size_t size1 = rgn->region[r1].size; in lmb_regions_adjacent()
68 phys_addr_t base2 = rgn->region[r2].base; in lmb_regions_adjacent()
69 phys_size_t size2 = rgn->region[r2].size; in lmb_regions_adjacent()
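
The four loads at lines 66-69 feed a simple adjacency test. A hedged sketch of lmb_regions_adjacent() and the address-level helper it presumably relies on (exact return convention assumed), using the types from the structure sketch above:

    /* Non-zero when one range ends exactly where the other begins. */
    static long lmb_addrs_adjacent(phys_addr_t base1, phys_size_t size1,
                                   phys_addr_t base2, phys_size_t size2)
    {
        return (base1 + size1 == base2) || (base2 + size2 == base1);
    }

    static long lmb_regions_adjacent(struct lmb_region *rgn,
                                     unsigned long r1, unsigned long r2)
    {
        phys_addr_t base1 = rgn->region[r1].base;
        phys_size_t size1 = rgn->region[r1].size;
        phys_addr_t base2 = rgn->region[r2].base;
        phys_size_t size2 = rgn->region[r2].size;

        return lmb_addrs_adjacent(base1, size1, base2, size2);
    }
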
79 rgn->region[i].base = rgn->region[i + 1].base; in lmb_remove_region()
80 rgn->region[i].size = rgn->region[i + 1].size; in lmb_remove_region()
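
Lines 79-80 are the body of the removal loop: every entry above the one being dropped slides down one slot and the count shrinks. A sketch:

    /* Remove entry r by shifting all later entries down one slot. */
    static void lmb_remove_region(struct lmb_region *rgn, unsigned long r)
    {
        unsigned long i;

        for (i = r; i < rgn->cnt - 1; i++) {
            rgn->region[i].base = rgn->region[i + 1].base;
            rgn->region[i].size = rgn->region[i + 1].size;
        }
        rgn->cnt--;
    }
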
85 /* Assumption: base addr of region 1 < base addr of region 2 */
89 rgn->region[r1].size += rgn->region[r2].size; in lmb_coalesce_regions()
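
Coalescing builds on the same helpers: under the stated assumption that region r1 starts below region r2 and the two are adjacent, r1 simply absorbs r2's size and r2 is removed. A sketch:

    /* Assumption: base addr of region 1 < base addr of region 2 */
    static void lmb_coalesce_regions(struct lmb_region *rgn,
                                     unsigned long r1, unsigned long r2)
    {
        rgn->region[r1].size += rgn->region[r2].size;
        lmb_remove_region(rgn, r2);
    }
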
148 rgn->region[0].base = base; in lmb_add_region()
149 rgn->region[0].size = size; in lmb_add_region()
156 phys_addr_t rgnbase = rgn->region[i].base; in lmb_add_region()
157 phys_size_t rgnsize = rgn->region[i].size; in lmb_add_region()
160 /* Already have this region, so we're done */ in lmb_add_region()
165 rgn->region[i].base -= size; in lmb_add_region()
166 rgn->region[i].size += size; in lmb_add_region()
170 rgn->region[i].size += size; in lmb_add_region()
191 if (base < rgn->region[i].base) { in lmb_add_region()
192 rgn->region[i + 1].base = rgn->region[i].base; in lmb_add_region()
193 rgn->region[i + 1].size = rgn->region[i].size; in lmb_add_region()
195 rgn->region[i + 1].base = base; in lmb_add_region()
196 rgn->region[i + 1].size = size; in lmb_add_region()
201 if (base < rgn->region[0].base) { in lmb_add_region()
202 rgn->region[0].base = base; in lmb_add_region()
203 rgn->region[0].size = size; in lmb_add_region()
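
The matches at lines 148-203 trace the three paths through lmb_add_region(): seed an empty table, grow an existing entry downwards or upwards when the new range touches it, or insert a fresh entry while keeping the table sorted by base. A compressed, hedged outline (the table-full check and the follow-up coalescing of newly adjacent neighbours are omitted for brevity):

    static long lmb_add_region(struct lmb_region *rgn,
                               phys_addr_t base, phys_size_t size)
    {
        long i;

        /* Empty table: just record the first range. */
        if (rgn->cnt == 1 && rgn->region[0].size == 0) {
            rgn->region[0].base = base;
            rgn->region[0].size = size;
            return 0;
        }

        /* Try to merge the new range into an existing entry. */
        for (i = 0; i < rgn->cnt; i++) {
            phys_addr_t rgnbase = rgn->region[i].base;
            phys_size_t rgnsize = rgn->region[i].size;

            if (rgnbase == base && rgnsize == size)
                return 0;                     /* already have this region */
            if (base + size == rgnbase) {     /* grow entry downwards */
                rgn->region[i].base -= size;
                rgn->region[i].size += size;
                return 0;
            }
            if (rgnbase + rgnsize == base) {  /* grow entry upwards */
                rgn->region[i].size += size;
                return 0;
            }
        }

        /* No merge possible: insert, keeping the table sorted by base. */
        for (i = rgn->cnt - 1; i >= 0; i--) {
            if (base < rgn->region[i].base) {
                rgn->region[i + 1].base = rgn->region[i].base;
                rgn->region[i + 1].size = rgn->region[i].size;
            } else {
                rgn->region[i + 1].base = base;
                rgn->region[i + 1].size = size;
                break;
            }
        }
        if (base < rgn->region[0].base) {
            rgn->region[0].base = base;
            rgn->region[0].size = size;
        }
        rgn->cnt++;
        return 0;
    }
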
228 /* Find the region where (base, size) belongs to */ in lmb_free()
230 rgnbegin = rgn->region[i].base; in lmb_free()
231 rgnend = rgnbegin + rgn->region[i].size - 1; in lmb_free()
237 /* Didn't find the region */ in lmb_free()
241 /* Check to see if we are removing the entire region */ in lmb_free()
247 /* Check to see if region is matching at the front */ in lmb_free()
249 rgn->region[i].base = end + 1; in lmb_free()
250 rgn->region[i].size -= size; in lmb_free()
254 /* Check to see if the region is matching at the end */ in lmb_free()
256 rgn->region[i].size -= size; in lmb_free()
262 * beginning of the hole and add the region after the hole. in lmb_free()
264 rgn->region[i].size = base - rgn->region[i].base; in lmb_free()
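
Lines 228-264 cover the four cases lmb_free() has to handle once it finds the reserved entry containing (base, size): the request removes the whole entry, trims it at the front, trims it at the end, or punches a hole that splits it in two. A hedged outline, reusing the helpers sketched above:

    long lmb_free(struct lmb *lmb, phys_addr_t base, phys_size_t size)
    {
        struct lmb_region *rgn = &lmb->reserved;
        phys_addr_t rgnbegin = 0, rgnend = 0;
        phys_addr_t end = base + size - 1;
        unsigned long i;

        /* Find the region where (base, size) belongs to */
        for (i = 0; i < rgn->cnt; i++) {
            rgnbegin = rgn->region[i].base;
            rgnend = rgnbegin + rgn->region[i].size - 1;
            if (rgnbegin <= base && end <= rgnend)
                break;
        }
        if (i == rgn->cnt)
            return -1;                        /* didn't find the region */

        if (rgnbegin == base && rgnend == end) {
            lmb_remove_region(rgn, i);        /* removing the entire region */
            return 0;
        }
        if (rgnbegin == base) {               /* matches at the front */
            rgn->region[i].base = end + 1;
            rgn->region[i].size -= size;
            return 0;
        }
        if (rgnend == end) {                  /* matches at the end */
            rgn->region[i].size -= size;
            return 0;
        }

        /*
         * Splitting the region in two: shrink the current entry to the
         * beginning of the hole and add a new entry after the hole.
         */
        rgn->region[i].size = base - rgn->region[i].base;
        return lmb_add_region(rgn, end + 1, rgnend - end);
    }
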
281 phys_addr_t rgnbase = rgn->region[i].base; in lmb_overlaps_region()
282 phys_size_t rgnsize = rgn->region[i].size; in lmb_overlaps_region()
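
lmb_overlaps_region() (lines 281-282) is the workhorse behind both allocators and lmb_is_reserved(): it returns the index of the first entry that intersects (base, size), or a negative value when none does. A sketch, with the address-level overlap test assumed:

    /* Two ranges overlap unless one ends before the other starts. */
    static long lmb_addrs_overlap(phys_addr_t base1, phys_size_t size1,
                                  phys_addr_t base2, phys_size_t size2)
    {
        return (base1 < base2 + size2) && (base2 < base1 + size1);
    }

    static long lmb_overlaps_region(struct lmb_region *rgn,
                                    phys_addr_t base, phys_size_t size)
    {
        unsigned long i;

        for (i = 0; i < rgn->cnt; i++) {
            phys_addr_t rgnbase = rgn->region[i].base;
            phys_size_t rgnsize = rgn->region[i].size;

            if (lmb_addrs_overlap(base, size, rgnbase, rgnsize))
                return (long)i;
        }
        return -1;
    }
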
320 phys_addr_t lmbbase = lmb->memory.region[i].base; in __lmb_alloc_base()
321 phys_size_t lmbsize = lmb->memory.region[i].size; in __lmb_alloc_base()
345 res_base = lmb->reserved.region[rgn].base; in __lmb_alloc_base()
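
Lines 320-345 belong to the top-down allocator: __lmb_alloc_base() walks the memory regions from the highest one downwards, proposes the highest aligned candidate inside each, and whenever the candidate collides with a reservation it retries just below that reservation. A hedged outline that drops the max_addr clamping of the real function for brevity:

    /* Assumed helper: round addr down to a power-of-two alignment. */
    static phys_addr_t lmb_align_down(phys_addr_t addr, phys_size_t align)
    {
        return addr & ~(align - 1);
    }

    phys_addr_t __lmb_alloc_base(struct lmb *lmb, phys_size_t size,
                                 unsigned long align)
    {
        long i, rgn;
        phys_addr_t base, res_base;

        for (i = lmb->memory.cnt - 1; i >= 0; i--) {
            phys_addr_t lmbbase = lmb->memory.region[i].base;
            phys_size_t lmbsize = lmb->memory.region[i].size;

            if (lmbsize < size)
                continue;
            /* Highest aligned candidate that still fits in this region. */
            base = lmb_align_down(lmbbase + lmbsize - size, align);

            while (base && lmbbase <= base) {
                rgn = lmb_overlaps_region(&lmb->reserved, base, size);
                if (rgn < 0) {
                    /* Candidate is free: record it as reserved and return it. */
                    if (lmb_add_region(&lmb->reserved, base, size) < 0)
                        return 0;
                    return base;
                }
                /* Collides with a reservation: retry just below it. */
                res_base = lmb->reserved.region[rgn].base;
                if (res_base < size)
                    break;
                base = lmb_align_down(res_base - size, align);
            }
        }
        return 0;                             /* nothing suitable found */
    }
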
367 * region we found. in lmb_alloc_addr()
369 if (lmb_addrs_overlap(lmb->memory.region[rgn].base, in lmb_alloc_addr()
370 lmb->memory.region[rgn].size, in lmb_alloc_addr()
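
The checks at lines 367-370 make sure a caller-chosen range lies entirely inside one usable memory region before it is reserved. A hedged outline of lmb_alloc_addr(), assuming lmb_reserve() adds (base, size) to the reserved table and returns a negative value on failure:

    long lmb_reserve(struct lmb *lmb, phys_addr_t base, phys_size_t size);
    /* assumed: wraps lmb_add_region() on the reserved table */

    phys_addr_t lmb_alloc_addr(struct lmb *lmb, phys_addr_t base, phys_size_t size)
    {
        long rgn;

        /* The start of the request must fall inside a usable memory region. */
        rgn = lmb_overlaps_region(&lmb->memory, base, size);
        if (rgn < 0)
            return 0;

        /*
         * Check if the requested end address is in the same memory
         * region we found.
         */
        if (lmb_addrs_overlap(lmb->memory.region[rgn].base,
                              lmb->memory.region[rgn].size,
                              base + size - 1, 1)) {
            /* ok, reserve the memory */
            if (lmb_reserve(lmb, base, size) >= 0)
                return base;
        }
        return 0;
    }
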
390 if (addr < lmb->reserved.region[i].base) { in lmb_get_free_size()
392 return lmb->reserved.region[i].base - addr; in lmb_get_free_size()
394 if (lmb->reserved.region[i].base + in lmb_get_free_size()
395 lmb->reserved.region[i].size > addr) { in lmb_get_free_size()
401 return lmb->memory.region[lmb->memory.cnt - 1].base + in lmb_get_free_size()
402 lmb->memory.region[lmb->memory.cnt - 1].size - addr; in lmb_get_free_size()
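
lmb_get_free_size() (lines 390-402) answers how many bytes starting at addr are free: the gap up to the first reservation above addr, zero if addr already sits inside a reservation, or everything up to the end of the last memory region when no reservation lies above it. A hedged outline, which assumes the reserved table is kept sorted by base address:

    phys_size_t lmb_get_free_size(struct lmb *lmb, phys_addr_t addr)
    {
        unsigned long i;

        /* addr itself must lie inside a usable memory region. */
        if (lmb_overlaps_region(&lmb->memory, addr, 1) < 0)
            return 0;

        for (i = 0; i < lmb->reserved.cnt; i++) {
            if (addr < lmb->reserved.region[i].base) {
                /* First reserved range above addr bounds the free span. */
                return lmb->reserved.region[i].base - addr;
            }
            if (lmb->reserved.region[i].base +
                lmb->reserved.region[i].size > addr) {
                /* addr is inside this reserved range. */
                return 0;
            }
        }
        /* No reservation above addr: free up to the end of the last region. */
        return lmb->memory.region[lmb->memory.cnt - 1].base +
               lmb->memory.region[lmb->memory.cnt - 1].size - addr;
    }
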
412 phys_addr_t upper = lmb->reserved.region[i].base + in lmb_is_reserved()
413 lmb->reserved.region[i].size - 1; in lmb_is_reserved()
414 if ((addr >= lmb->reserved.region[i].base) && (addr <= upper)) in lmb_is_reserved()
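
Finally, lmb_is_reserved() (lines 412-414) is a straightforward membership test over the reserved table. A sketch:

    /* Return 1 when addr falls inside any reserved region, 0 otherwise. */
    int lmb_is_reserved(struct lmb *lmb, phys_addr_t addr)
    {
        unsigned long i;

        for (i = 0; i < lmb->reserved.cnt; i++) {
            phys_addr_t upper = lmb->reserved.region[i].base +
                                lmb->reserved.region[i].size - 1;
            if (addr >= lmb->reserved.region[i].base && addr <= upper)
                return 1;
        }
        return 0;
    }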