Lines matching full:slots

83 	 * be enough slots to hold all possible variants
99 * @slots: pointer to the structure holding buddy slots
113 struct z3fold_buddy_slots *slots; member
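
The first three hits are the slots structure itself and the z3fold_header field that points at it. A minimal sketch of that context, reconstructed from the fragments above and mainline mm/z3fold.c (field order and the exact flag bits kept in the pool back pointer vary between kernel versions):

struct z3fold_buddy_slots {
	/*
	 * we are using BUDDY_MASK in handle_to_buddy etc. so there should
	 * be enough slots to hold all possible variants
	 */
	unsigned long slot[BUDDY_MASK + 1];
	unsigned long pool;	/* back link to the pool; low bits carry flags */
	rwlock_t lock;		/* protects slot[] against concurrent updates */
};

struct z3fold_header {
	/* ... list heads, page lock, refcount, chunk counters ... */
	struct z3fold_buddy_slots *slots;	/* buddy slots for this page */
};
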
197 struct z3fold_buddy_slots *slots = kmem_cache_zalloc(pool->c_handle, in alloc_slots() local
200 if (slots) { in alloc_slots()
202 kmemleak_not_leak(slots); in alloc_slots()
203 slots->pool = (unsigned long)pool; in alloc_slots()
204 rwlock_init(&slots->lock); in alloc_slots()
207 return slots; in alloc_slots()
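
The alloc_slots() hits cover its whole body: a slots object comes from the pool's dedicated slab cache, is excluded from kmemleak scanning because it is freed separately in free_handle(), and records a back pointer to its pool. A sketch matching those fragments:

static inline struct z3fold_buddy_slots *alloc_slots(struct z3fold_pool *pool,
						     gfp_t gfp)
{
	struct z3fold_buddy_slots *slots = kmem_cache_zalloc(pool->c_handle,
							     gfp);

	if (slots) {
		/* freed separately in free_handle(), so not a leak */
		kmemleak_not_leak(slots);
		slots->pool = (unsigned long)pool;
		rwlock_init(&slots->lock);
	}

	return slots;
}
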
241 struct z3fold_buddy_slots *slots; in get_z3fold_header() local
246 slots = handle_to_slots(handle); in get_z3fold_header()
250 read_lock(&slots->lock); in get_z3fold_header()
254 read_unlock(&slots->lock); in get_z3fold_header()
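
get_z3fold_header() holds the slots read lock only long enough to dereference the handle and try-lock the page it points at. Roughly, for the non-headless case (retry details elided; z3fold_page_trylock() is the per-page lock helper from the same file):

	slots = handle_to_slots(handle);
	do {
		unsigned long addr;

		read_lock(&slots->lock);
		addr = *(unsigned long *)handle;	/* slot entry -> header address */
		zhdr = (struct z3fold_header *)(addr & PAGE_MASK);
		locked = z3fold_page_trylock(zhdr);
		read_unlock(&slots->lock);
		if (!locked)
			cpu_relax();
	} while (!locked);
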
281 struct z3fold_buddy_slots *slots; in free_handle() local
288 slots = handle_to_slots(handle); in free_handle()
289 write_lock(&slots->lock); in free_handle()
292 if (test_bit(HANDLES_NOFREE, &slots->pool)) { in free_handle()
293 write_unlock(&slots->lock); in free_handle()
297 if (zhdr->slots != slots) in free_handle()
302 if (slots->slot[i]) { in free_handle()
307 write_unlock(&slots->lock); in free_handle()
310 struct z3fold_pool *pool = slots_to_pool(slots); in free_handle()
312 if (zhdr->slots == slots) in free_handle()
313 zhdr->slots = NULL; in free_handle()
314 kmem_cache_free(pool->c_handle, slots); in free_handle()
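
free_handle() clears one slot entry under the write lock and only releases the slots object once every entry is empty, and never while HANDLES_NOFREE is set on the pool back pointer (reclaim keeps the object alive). A condensed sketch of that flow, with the headless early return omitted:

	slots = handle_to_slots(handle);
	write_lock(&slots->lock);
	*(unsigned long *)handle = 0;		/* invalidate this handle */

	if (test_bit(HANDLES_NOFREE, &slots->pool)) {
		write_unlock(&slots->lock);
		return;				/* keep the slots object around */
	}

	if (zhdr->slots != slots)
		zhdr->foreign_handles--;	/* handle pointed at a foreign page */

	is_free = true;
	for (i = 0; i <= BUDDY_MASK; i++)
		if (slots->slot[i]) {
			is_free = false;	/* another buddy is still live */
			break;
		}
	write_unlock(&slots->lock);

	if (is_free) {
		struct z3fold_pool *pool = slots_to_pool(slots);

		if (zhdr->slots == slots)
			zhdr->slots = NULL;
		kmem_cache_free(pool->c_handle, slots);
	}
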
323 struct z3fold_buddy_slots *slots; in init_z3fold_page() local
334 slots = alloc_slots(pool, gfp); in init_z3fold_page()
335 if (!slots) in init_z3fold_page()
342 zhdr->slots = slots; in init_z3fold_page()
371 struct z3fold_buddy_slots *slots, in __encode_handle() argument
390 write_lock(&slots->lock); in __encode_handle()
391 slots->slot[idx] = h; in __encode_handle()
392 write_unlock(&slots->lock); in __encode_handle()
393 return (unsigned long)&slots->slot[idx]; in __encode_handle()
398 return __encode_handle(zhdr, zhdr->slots, bud); in encode_handle()
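
__encode_handle() is where the indirection is established: the encoded value (header address plus buddy index, with the chunk count folded in for the LAST buddy) is stored into slot[idx], and the handle returned to the caller is the address of that slot entry rather than the value itself, so the entry can later be rewritten in place. A sketch of the non-headless path, assuming __idx() maps a buddy to its slot index as in mainline:

	unsigned long h = (unsigned long)zhdr;
	int idx = __idx(zhdr, bud);		/* per-buddy slot index */

	h += idx;
	if (bud == LAST)
		h |= (zhdr->last_chunks << BUDDY_SHIFT);

	write_lock(&slots->lock);
	slots->slot[idx] = h;
	write_unlock(&slots->lock);
	return (unsigned long)&slots->slot[idx];	/* handle = slot address */
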
404 struct z3fold_buddy_slots *slots = handle_to_slots(handle); in handle_to_chunks() local
407 read_lock(&slots->lock); in handle_to_chunks()
409 read_unlock(&slots->lock); in handle_to_chunks()
421 struct z3fold_buddy_slots *slots = handle_to_slots(handle); in handle_to_buddy() local
424 read_lock(&slots->lock); in handle_to_buddy()
427 read_unlock(&slots->lock); in handle_to_buddy()
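
Decoding goes the other way: handle_to_slots() masks a handle down to its (SLOTS_ALIGN-aligned) slots object, and the helpers above read the slot entry under the read lock and extract the bits they need. A sketch of handle_to_chunks(), which recovers the LAST buddy's chunk count:

static inline struct z3fold_buddy_slots *handle_to_slots(unsigned long handle)
{
	return (struct z3fold_buddy_slots *)(handle & ~(SLOTS_ALIGN - 1));
}

/* only meaningful for the LAST buddy, whose size is encoded in the slot */
static unsigned short handle_to_chunks(unsigned long handle)
{
	struct z3fold_buddy_slots *slots = handle_to_slots(handle);
	unsigned long addr;

	read_lock(&slots->lock);
	addr = *(unsigned long *)handle;
	read_unlock(&slots->lock);
	return (addr & ~PAGE_MASK) >> BUDDY_SHIFT;
}
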
607 * No need to protect slots here -- all the slots are "local" and in compact_single_buddy()
610 if (zhdr->first_chunks && zhdr->slots->slot[first_idx]) { in compact_single_buddy()
613 old_handle = (unsigned long)&zhdr->slots->slot[first_idx]; in compact_single_buddy()
615 } else if (zhdr->middle_chunks && zhdr->slots->slot[middle_idx]) { in compact_single_buddy()
618 old_handle = (unsigned long)&zhdr->slots->slot[middle_idx]; in compact_single_buddy()
620 } else if (zhdr->last_chunks && zhdr->slots->slot[last_idx]) { in compact_single_buddy()
623 old_handle = (unsigned long)&zhdr->slots->slot[last_idx]; in compact_single_buddy()
661 write_lock(&zhdr->slots->lock); in compact_single_buddy()
667 write_unlock(&zhdr->slots->lock); in compact_single_buddy()
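
During compaction the slots of the page being reworked are only reachable by the current owner, so the slot[] reads above need no locking; but once the buddy has been copied into another page, the old slot entry is rewritten in place under the write lock so that existing handles keep resolving. The update is roughly as below (new_zhdr and new_bud denote the destination; the encoding mirrors __encode_handle()):

	write_lock(&zhdr->slots->lock);
	*(unsigned long *)old_handle = (unsigned long)new_zhdr +
					__idx(new_zhdr, new_bud);
	if (new_bud == LAST)
		*(unsigned long *)old_handle |=
				(new_zhdr->last_chunks << BUDDY_SHIFT);
	write_unlock(&zhdr->slots->lock);
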
881 if (zhdr && !zhdr->slots) { in __z3fold_alloc()
882 zhdr->slots = alloc_slots(pool, GFP_ATOMIC); in __z3fold_alloc()
883 if (!zhdr->slots) in __z3fold_alloc()
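
Finally, __z3fold_alloc() may reuse a page whose slots object was already freed (free_handle() sets zhdr->slots to NULL in that case); it then re-allocates the slots with GFP_ATOMIC, since it runs with the page locked, and gives up on the page if that fails. A simplified sketch (the mainline failure path is more involved than a plain unlock):

	if (zhdr && !zhdr->slots) {
		zhdr->slots = alloc_slots(pool, GFP_ATOMIC);
		if (!zhdr->slots) {
			/* no memory for slots: skip this page */
			z3fold_page_unlock(zhdr);
			zhdr = NULL;
		}
	}
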