Lines matching refs:va_start (mm/vmalloc.c)

778 	return (va->va_end - va->va_start);  in va_size()
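
va_size() works because every vmap_area describes a half-open interval: va_start is inclusive, va_end exclusive, so the size is a plain subtraction with no off-by-one. A minimal userspace model, assuming only the two fields that matter here (the kernel's struct vmap_area also carries rbtree and list linkage):

struct vmap_area {
        unsigned long va_start;         /* first byte of the range */
        unsigned long va_end;           /* one past the last byte */
};

/* Mirrors va_size(): half-open range, so no +1 is needed. */
static unsigned long va_size(const struct vmap_area *va)
{
        return va->va_end - va->va_start;
}
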
819 if (tmp->va_start <= addr) in find_vmap_area_exceed_addr()
840 if (addr < va->va_start) in __find_vmap_area()
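
Both lookups descend an rbtree keyed by va_start: an address below va_start sends the search left, one at or past va_end sends it right, and anything else lands inside the area. A sketch of the same three-way test, assuming a sorted array instead of the kernel's rbtree (the comparisons are the ones shown above; only the container differs):

#include <stddef.h>

struct vmap_area { unsigned long va_start, va_end; };

static struct vmap_area *find_vmap_area_model(struct vmap_area *sorted,
                                              size_t n, unsigned long addr)
{
        size_t lo = 0, hi = n;

        while (lo < hi) {
                size_t mid = lo + (hi - lo) / 2;
                struct vmap_area *va = &sorted[mid];

                if (addr < va->va_start)
                        hi = mid;               /* descend left */
                else if (addr >= va->va_end)
                        lo = mid + 1;           /* descend right */
                else
                        return va;              /* addr in [va_start, va_end) */
        }
        return NULL;
}
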
890 if (va->va_end <= tmp_va->va_start) in find_va_links()
892 else if (va->va_start >= tmp_va->va_end) in find_va_links()
896 va->va_start, va->va_end, tmp_va->va_start, tmp_va->va_end); in find_va_links()
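
find_va_links() compares whole intervals rather than single addresses: a new area belongs in the left subtree if it ends at or before the current node starts, in the right subtree if it begins at or after the node ends, and any other outcome means the ranges overlap, which feeds the diagnostic with all four boundaries at line 896. The decision, isolated as a sketch:

struct vmap_area { unsigned long va_start, va_end; };

/* -1/+1: take the left/right child link; 0: illegal overlap. */
static int va_order(const struct vmap_area *va, const struct vmap_area *tmp_va)
{
        if (va->va_end <= tmp_va->va_start)
                return -1;
        if (va->va_start >= tmp_va->va_end)
                return 1;
        return 0;       /* overlapping ranges: the kernel warns here */
}
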
1153 if (sibling->va_start == va->va_end) { in __merge_or_add_vmap_area()
1154 sibling->va_start = va->va_start; in __merge_or_add_vmap_area()
1174 if (sibling->va_end == va->va_start) { in __merge_or_add_vmap_area()
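
__merge_or_add_vmap_area() coalesces a freed area with adjacent free neighbors by stretching the survivor: if the right sibling starts exactly where the freed area ends, its va_start moves down (lines 1153-1154); if the left sibling ends exactly where the freed area starts (line 1174), its va_end moves up. A toy version of both steps, assuming the caller frees the absorbed area afterwards:

#include <stdbool.h>

struct vmap_area { unsigned long va_start, va_end; };

/* @sibling sits just above @va: absorb @va by growing leftwards. */
static bool merge_into_right(struct vmap_area *sibling, struct vmap_area *va)
{
        if (sibling->va_start != va->va_end)
                return false;
        sibling->va_start = va->va_start;
        return true;
}

/* @sibling sits just below @va: absorb @va by growing rightwards. */
static bool merge_into_left(struct vmap_area *sibling, struct vmap_area *va)
{
        if (sibling->va_end != va->va_start)
                return false;
        sibling->va_end = va->va_end;
        return true;
}
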
1227 if (va->va_start > vstart) in is_within_this_va()
1228 nva_start_addr = ALIGN(va->va_start, align); in is_within_this_va()
1265 vstart < va->va_start) { in find_vmap_lowest_match()
1293 vstart <= va->va_start) { in find_vmap_lowest_match()
1300 vstart = va->va_start + 1; in find_vmap_lowest_match()
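
is_within_this_va() answers whether an allocation of a given size and alignment fits into one free area: the candidate base is va_start (or vstart, whichever is higher) rounded up to the alignment, and the request fits if base plus size stays at or below va_end. find_vmap_lowest_match() drives this test down the tree and, when a subtree fails, resumes just past the current area (line 1300). The fit test, reconstructed from the lines above (ALIGN is the usual power-of-two round-up):

#define ALIGN(x, a)     (((x) + ((a) - 1)) & ~((a) - 1))

struct vmap_area { unsigned long va_start, va_end; };

static int is_within_this_va(const struct vmap_area *va, unsigned long size,
                             unsigned long align, unsigned long vstart)
{
        unsigned long nva_start_addr;

        if (va->va_start > vstart)
                nva_start_addr = ALIGN(va->va_start, align);
        else
                nva_start_addr = ALIGN(vstart, align);

        /* Reject on overflow, then test the upper bound. */
        if (nva_start_addr + size < nva_start_addr ||
            nva_start_addr + size > va->va_end)
                return 0;

        return 1;
}
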
1365 if (nva_start_addr < va->va_start || in classify_va_fit_type()
1370 if (va->va_start == nva_start_addr) { in classify_va_fit_type()
1410 va->va_start += size; in adjust_va_to_fit_type()
1463 lva->va_start = va->va_start; in adjust_va_to_fit_type()
1469 va->va_start = nva_start_addr + size; in adjust_va_to_fit_type()
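
classify_va_fit_type() names the four ways a request can sit inside a free area, and adjust_va_to_fit_type() edits the area accordingly: a full (FL) fit consumes it, an LE fit advances va_start (line 1410), an RE fit pulls back va_end, and an NE fit splits the area, the new left part lva inheriting the old va_start (line 1463) while the original keeps everything above the request (line 1469). The classification as a sketch; the enum names mirror the kernel's:

enum fit_type { NOTHING_FIT, FL_FIT_TYPE, LE_FIT_TYPE, RE_FIT_TYPE, NE_FIT_TYPE };

struct vmap_area { unsigned long va_start, va_end; };

static enum fit_type classify_va_fit_type(const struct vmap_area *va,
                                          unsigned long nva_start_addr,
                                          unsigned long size)
{
        if (nva_start_addr < va->va_start ||
            nva_start_addr + size > va->va_end)
                return NOTHING_FIT;     /* request escapes the area */

        if (va->va_start == nva_start_addr) {
                if (va->va_end == nva_start_addr + size)
                        return FL_FIT_TYPE;     /* consumes the whole area */
                return LE_FIT_TYPE;             /* flush with the start */
        }
        if (va->va_end == nva_start_addr + size)
                return RE_FIT_TYPE;             /* flush with the end */

        return NE_FIT_TYPE;                     /* interior: needs a split */
}
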
1514 if (va->va_start > vstart) in __alloc_vmap_area()
1515 nva_start_addr = ALIGN(va->va_start, align); in __alloc_vmap_area()
1628 va->va_start = addr; in alloc_vmap_area()
1637 BUG_ON(!IS_ALIGNED(va->va_start, align)); in alloc_vmap_area()
1638 BUG_ON(va->va_start < vstart); in alloc_vmap_area()
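
alloc_vmap_area() ends with sanity checks on the area it hands back; the two BUG_ON lines above assert that the chosen base honors the requested alignment and does not fall below vstart. The same invariants as plain asserts, assuming the usual IS_ALIGNED macro:

#include <assert.h>

#define IS_ALIGNED(x, a)        (((x) & ((a) - 1)) == 0)

struct vmap_area { unsigned long va_start, va_end; };

static void check_new_va(const struct vmap_area *va,
                         unsigned long align, unsigned long vstart)
{
        assert(IS_ALIGNED(va->va_start, align));
        assert(va->va_start >= vstart);
}
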
1743 struct vmap_area, list)->va_start); in __purge_vmap_area_lazy()
1754 unsigned long nr = (va->va_end - va->va_start) >> PAGE_SHIFT; in __purge_vmap_area_lazy()
1755 unsigned long orig_start = va->va_start; in __purge_vmap_area_lazy()
1771 va->va_start, va->va_end); in __purge_vmap_area_lazy()
1820 unsigned long va_start = va->va_start; in free_vmap_area_noflush() local
1826 nr_lazy = atomic_long_add_return((va->va_end - va->va_start) >> in free_vmap_area_noflush()
1837 trace_free_vmap_area_noflush(va_start, nr_lazy, nr_lazy_max); in free_vmap_area_noflush()
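
free_vmap_area_noflush() does not unmap immediately: it converts the area's byte range into a page count and adds it to a global tally of lazily freed pages, which __purge_vmap_area_lazy() later reclaims in bulk. Note the local copy of va->va_start at line 1820, presumably so the tracepoint at line 1837 does not touch va after it has been handed off. The accounting, modelled with C11 atomics (PAGE_SHIFT of 12, i.e. 4 KiB pages, is an assumption):

#include <stdatomic.h>

#define PAGE_SHIFT      12

struct vmap_area { unsigned long va_start, va_end; };

static atomic_long vmap_lazy_nr;        /* pages waiting to be purged */

/* Like the atomic_long_add_return() above: returns the new total. */
static long defer_free(const struct vmap_area *va)
{
        long nr = (long)((va->va_end - va->va_start) >> PAGE_SHIFT);

        return atomic_fetch_add(&vmap_lazy_nr, nr) + nr;
}
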
1849 flush_cache_vunmap(va->va_start, va->va_end); in free_unmap_vmap_area()
1850 vunmap_range_noflush(va->va_start, va->va_end); in free_unmap_vmap_area()
1852 flush_tlb_kernel_range(va->va_start, va->va_end); in free_unmap_vmap_area()
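
free_unmap_vmap_area() tears a mapping down in a fixed order: write back any cached data for the range, clear its page tables, then invalidate stale TLB entries, so no CPU can still translate into pages about to be reused. A stubbed, runnable model of that ordering (the stub bodies are placeholders, not kernel code):

#include <stdio.h>

struct vmap_area { unsigned long va_start, va_end; };

static void flush_cache_vunmap(unsigned long s, unsigned long e)
{ printf("1. write back caches  [%#lx, %#lx)\n", s, e); }

static void vunmap_range_noflush(unsigned long s, unsigned long e)
{ printf("2. clear page tables  [%#lx, %#lx)\n", s, e); }

static void flush_tlb_kernel_range(unsigned long s, unsigned long e)
{ printf("3. invalidate TLB     [%#lx, %#lx)\n", s, e); }

static void free_unmap_model(const struct vmap_area *va)
{
        flush_cache_vunmap(va->va_start, va->va_end);
        vunmap_range_noflush(va->va_start, va->va_end);
        flush_tlb_kernel_range(va->va_start, va->va_end);
}
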
2005 static void *vmap_block_vaddr(unsigned long va_start, unsigned long pages_off) in vmap_block_vaddr() argument
2009 addr = va_start + (pages_off << PAGE_SHIFT); in vmap_block_vaddr()
2010 BUG_ON(addr_to_vb_idx(addr) != addr_to_vb_idx(va_start)); in vmap_block_vaddr()
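
vmap_block_vaddr() turns a page offset inside a vmap block into a virtual address, and the BUG_ON enforces that the result stays within the block that va_start belongs to. A model under an assumed block size (the kernel derives VMAP_BLOCK_SIZE from VMAP_BBMAP_BITS and PAGE_SIZE):

#include <assert.h>

#define PAGE_SHIFT      12                      /* assumed 4 KiB pages */
#define VMAP_BLOCK_SIZE (64UL << PAGE_SHIFT)    /* assumed 64-page blocks */

static unsigned long addr_to_vb_idx(unsigned long addr)
{
        return addr / VMAP_BLOCK_SIZE;
}

static void *vmap_block_vaddr(unsigned long va_start, unsigned long pages_off)
{
        unsigned long addr = va_start + (pages_off << PAGE_SHIFT);

        assert(addr_to_vb_idx(addr) == addr_to_vb_idx(va_start));
        return (void *)addr;
}
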
2048 vaddr = vmap_block_vaddr(va->va_start, 0); in new_vmap_block()
2061 xa = addr_to_vb_xa(va->va_start); in new_vmap_block()
2062 vb_idx = addr_to_vb_idx(va->va_start); in new_vmap_block()
2083 xa = addr_to_vb_xa(vb->va->va_start); in free_vmap_block()
2084 tmp = xa_erase(xa, addr_to_vb_idx(vb->va->va_start)); in free_vmap_block()
2195 vaddr = vmap_block_vaddr(vb->va->va_start, pages_off); in vb_alloc()
2285 unsigned long va_start = vb->va->va_start; in _vm_unmap_aliases() local
2288 s = va_start + (vb->dirty_min << PAGE_SHIFT); in _vm_unmap_aliases()
2289 e = va_start + (vb->dirty_max << PAGE_SHIFT); in _vm_unmap_aliases()
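
A vmap block is registered in an xarray under an index derived from its va_start (lines 2061-2062) and erased the same way when freed. While it lives, dirtied sub-allocations are tracked as a [dirty_min, dirty_max) window of page indexes, which _vm_unmap_aliases() converts back to addresses to bound the flush. That conversion, with the struct trimmed to the fields involved (the kernel reaches the base address through vb->va->va_start):

#define PAGE_SHIFT      12      /* assumed */

struct vmap_block {
        unsigned long va_start;                 /* block base address */
        unsigned long dirty_min, dirty_max;     /* page indexes within the block */
};

static void dirty_range(const struct vmap_block *vb,
                        unsigned long *s, unsigned long *e)
{
        *s = vb->va_start + (vb->dirty_min << PAGE_SHIFT);
        *e = vb->va_start + (vb->dirty_max << PAGE_SHIFT);
}
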
2362 debug_check_no_locks_freed((void *)va->va_start, in vm_unmap_ram()
2363 (va->va_end - va->va_start)); in vm_unmap_ram()
2401 addr = va->va_start; in vm_map_ram()
2513 if (busy->va_start - vmap_start > 0) { in vmap_init_free_space()
2516 free->va_start = vmap_start; in vmap_init_free_space()
2517 free->va_end = busy->va_start; in vmap_init_free_space()
2531 free->va_start = vmap_start; in vmap_init_free_space()
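
vmap_init_free_space() builds the free-area set at boot by walking the already-registered busy areas in address order and recording each gap, including the tail between the last busy area and the end of the vmalloc range (line 2531). The same scan over plain arrays, as a sketch:

#include <stddef.h>

struct vmap_area { unsigned long va_start, va_end; };

/* @busy must be sorted and non-overlapping; returns how many gaps
 * inside [vmap_start, vmap_end) were written to @free. */
static size_t init_free_space(const struct vmap_area *busy, size_t n,
                              unsigned long vmap_start, unsigned long vmap_end,
                              struct vmap_area *free)
{
        size_t out = 0;

        for (size_t i = 0; i < n; i++) {
                if (busy[i].va_start - vmap_start > 0) {
                        free[out].va_start = vmap_start;
                        free[out].va_end = busy[i].va_start;
                        out++;
                }
                vmap_start = busy[i].va_end;    /* next gap starts here */
        }
        if (vmap_end - vmap_start > 0) {        /* trailing gap */
                free[out].va_start = vmap_start;
                free[out].va_end = vmap_end;
                out++;
        }
        return out;
}
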
2545 vm->addr = (void *)va->va_start; in setup_vmalloc_vm_locked()
2546 vm->size = va->va_end - va->va_start; in setup_vmalloc_vm_locked()
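
setup_vmalloc_vm_locked() is the bridge between the two representations: vm_struct, the caller-visible object, takes its addr and size straight from the vmap_area. Trimmed to the fields involved:

struct vmap_area { unsigned long va_start, va_end; };
struct vm_struct { void *addr; unsigned long size; };

static void setup_vm(struct vm_struct *vm, const struct vmap_area *va)
{
        vm->addr = (void *)va->va_start;
        vm->size = va->va_end - va->va_start;   /* half-open, so no +1 */
}
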
3678 start = vmap_block_vaddr(vb->va->va_start, rs); in vmap_ram_vread_iter()
3762 if ((unsigned long)addr + remains <= va->va_start) in vread_iter()
3788 vaddr = (char *) va->va_start; in vread_iter()
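
vread_iter() walks areas in ascending address order, so once the requested window [addr, addr + remains) ends at or before the next area's va_start, nothing later can intersect it and the walk can stop. The termination test in isolation:

#include <stdbool.h>

struct vmap_area { unsigned long va_start, va_end; };

static bool window_exhausted(const struct vmap_area *va,
                             unsigned long addr, unsigned long remains)
{
        return addr + remains <= va->va_start;
}
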
3955 if (tmp->va_start <= addr) { in pvm_find_va_enclose_addr()
3989 if ((*va)->va_start < addr) in pvm_determine_end_from_reverse()
4110 if (base + start < va->va_start) { in pcpu_get_vm_areas()
4151 va->va_start = start; in pcpu_get_vm_areas()
4159 if (kasan_populate_vmalloc(vas[area]->va_start, sizes[area])) in pcpu_get_vm_areas()
4194 orig_start = vas[area]->va_start; in pcpu_get_vm_areas()
4200 va->va_start, va->va_end); in pcpu_get_vm_areas()
4244 orig_start = vas[area]->va_start; in pcpu_get_vm_areas()
4250 va->va_start, va->va_end); in pcpu_get_vm_areas()
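
pcpu_get_vm_areas() places several areas at fixed offsets from one common base, so the search is over base addresses: for each candidate base, every requested [base + start, base + end) slice must land inside an existing free area, and a failed check such as the one at line 4110 moves the search on. Once a base is found, each va->va_start is simply set to its slice's start (line 4151). The per-slice containment test, as a sketch:

#include <stdbool.h>

struct vmap_area { unsigned long va_start, va_end; };

/* Does the slice [base + start, base + end) fit inside free area @va? */
static bool slice_fits(const struct vmap_area *va, unsigned long base,
                       unsigned long start, unsigned long end)
{
        return base + start >= va->va_start && base + end <= va->va_end;
}
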
4365 (void *)va->va_start, (void *)va->va_end, in show_purge_info()
4366 va->va_end - va->va_start); in show_purge_info()
4381 (void *)va->va_start, (void *)va->va_end, in s_show()
4382 va->va_end - va->va_start); in s_show()
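
Both show_purge_info() and s_show() print a range as its start, end, and byte length for /proc/vmallocinfo-style output. A userspace approximation of one such line (the exact seq_printf format string is not visible above, so this layout is an assumption):

#include <stdio.h>

struct vmap_area { unsigned long va_start, va_end; };

static void show_va(const struct vmap_area *va)
{
        printf("0x%016lx-0x%016lx %10lu\n",
               va->va_start, va->va_end, va->va_end - va->va_start);
}
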
4483 va->va_start = (unsigned long)tmp->addr; in vmalloc_init()
4484 va->va_end = va->va_start + tmp->size; in vmalloc_init()
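
vmalloc_init() imports the vm_structs registered before the allocator was up (via vm_area_register_early()) by translating each one into a vmap_area: addr becomes va_start and addr + size becomes the exclusive va_end. The translation, trimmed to the relevant fields:

struct vm_struct { void *addr; unsigned long size; };
struct vmap_area { unsigned long va_start, va_end; };

static void import_early_vm(struct vmap_area *va, const struct vm_struct *tmp)
{
        va->va_start = (unsigned long)tmp->addr;
        va->va_end = va->va_start + tmp->size;
}
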