Lines Matching refs:inuse

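Every hit below references the identifier inuse in what appears to be the Linux kernel's SLUB allocator (mm/slub.c): the leading number is the source line, and the trailing "in ...()" names the enclosing function. Note that inuse denotes two unrelated fields here: kmem_cache::inuse, the number of bytes of each object that carry data or debug metadata (set in calculate_sizes() at line 4418), and slab::inuse, the count of currently allocated objects in one slab (see count_free() at line 2905). A minimal sketch of both follows the listing.
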
748 return s->offset >= s->inuse; in freeptr_outside_object()
758 return s->inuse + sizeof(void *); in get_info_end()
760 return s->inuse; in get_info_end()
859 slab, slab->objects, slab->inuse, slab->freelist, in print_slab_info()
964 s->inuse - s->object_size); in print_trailer()
1051 memset(p + poison_size, val, s->inuse - poison_size); in init_object()
1203 endobject, val, s->inuse - s->object_size)) in check_object()
1217 if ((s->flags & SLAB_POISON) && s->object_size < s->inuse) { in check_object()
1220 s->inuse - s->object_size); in check_object()
1273 if (slab->inuse > slab->objects) { in check_slab()
1275 slab->inuse, slab->objects); in check_slab()
1311 slab->inuse = slab->objects; in on_freelist()
1332 if (slab->inuse != slab->objects - nr) { in on_freelist()
1334 slab->inuse, slab->objects - nr); in on_freelist()
1335 slab->inuse = slab->objects - nr; in on_freelist()
1348 object, slab->inuse, in trace()
1469 slab->inuse = slab->objects; in alloc_debug_processing()
1802 memset((char *)kasan_reset_tag(x) + s->inuse, 0, in slab_free_hook()
1803 s->size - s->inuse - rsize); in slab_free_hook()
2038 slab->inuse = 0; in allocate_slab()
2168 slab->inuse++; in alloc_single_from_partial()
2176 if (slab->inuse == slab->objects) { in alloc_single_from_partial()
2200 slab->inuse = 1; in alloc_single_from_new_slab()
2212 if (slab->inuse == slab->objects) in alloc_single_from_new_slab()
2248 new.inuse = slab->objects; in acquire_slab()
2561 new.inuse -= free_delta; in deactivate_slab()
2569 if (!new.inuse && n->nr_partial >= s->min_partial) { in deactivate_slab()
2645 if (unlikely(!new.inuse && n->nr_partial >= s->min_partial)) { in __unfreeze_partials()
2905 return slab->objects - slab->inuse; in count_free()
2927 if (slab->inuse < *bulk_cnt) { in free_debug_processing()
2929 slab->inuse, *bulk_cnt); in free_debug_processing()
3072 new.inuse = slab->objects; in get_freelist()
3258 slab->inuse = slab->objects; in ___slab_alloc()
3558 slab->inuse -= cnt; in free_to_partial_list()
3567 if (slab->inuse == 0 && n->nr_partial >= s->min_partial) in free_to_partial_list()
3639 new.inuse -= cnt; in __slab_free()
3640 if ((!new.inuse || !prior) && !was_frozen) { in __slab_free()
3693 if (unlikely(!new.inuse && n->nr_partial >= s->min_partial)) in __slab_free()
4285 slab->inuse = 1; in early_kmem_cache_node_alloc()
4418 s->inuse = size; in calculate_sizes()
4611 if (!slab->inuse) { in free_partial()
4832 int free = slab->objects - slab->inuse; in __kmem_cache_do_shrink()
5110 s->inuse = max(s->inuse, ALIGN(size, sizeof(void *))); in __kmem_cache_alias()
5143 return slab->inuse; in count_inuse()
5443 x = slab->inuse; in show_slab_objects()
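
As a reading aid, here is a minimal, compilable sketch of the two fields. The struct definitions are simplified stand-ins for illustration only (the real ones live in the kernel sources and carry many more members); the two helpers mirror freeptr_outside_object() at line 748 and count_free() at line 2905 above.

#include <stdbool.h>

/*
 * Simplified stand-ins for illustration only; the real struct slab and
 * struct kmem_cache carry many more members in the kernel sources.
 */
struct slab {
	unsigned int objects;	/* total object slots in this slab */
	unsigned int inuse;	/* slots currently allocated: 0 in a fresh
				 * slab (line 2038), == objects when full */
};

struct kmem_cache {
	unsigned int object_size;	/* payload size the cache user asked for */
	unsigned int inuse;		/* bytes per object that hold data or
					 * debug metadata; set by calculate_sizes()
					 * (line 4418) */
	unsigned int offset;		/* byte offset of the free pointer */
};

/* Mirrors count_free() (line 2905): free slots are total minus allocated. */
static unsigned int count_free(const struct slab *slab)
{
	return slab->objects - slab->inuse;
}

/*
 * Mirrors freeptr_outside_object() (line 748): the free pointer lies
 * outside the used part of an object once its offset reaches s->inuse.
 */
static bool freeptr_outside_object(const struct kmem_cache *s)
{
	return s->offset >= s->inuse;
}

With these definitions, the invariant check_slab() enforces at line 1273 (slab->inuse must not exceed slab->objects) and the shrink heuristic at line 4832 (free = objects - inuse) read directly off the structures, while padding checks such as the one at line 1203 operate on the byte range between object_size and inuse within each object.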