Lines matching refs: cache (mm/swap_slots.c)

115 struct swap_slots_cache *cache; in alloc_swap_slot_cache() local
136 cache = &per_cpu(swp_slots, cpu); in alloc_swap_slot_cache()
137 if (cache->slots || cache->slots_ret) { in alloc_swap_slot_cache()
147 if (!cache->lock_initialized) { in alloc_swap_slot_cache()
148 mutex_init(&cache->alloc_lock); in alloc_swap_slot_cache()
149 spin_lock_init(&cache->free_lock); in alloc_swap_slot_cache()
150 cache->lock_initialized = true; in alloc_swap_slot_cache()
152 cache->nr = 0; in alloc_swap_slot_cache()
153 cache->cur = 0; in alloc_swap_slot_cache()
154 cache->n_ret = 0; in alloc_swap_slot_cache()
162 cache->slots = slots; in alloc_swap_slot_cache()
163 cache->slots_ret = slots_ret; in alloc_swap_slot_cache()
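The fields referenced throughout this listing (slots/cur/nr guarded by alloc_lock, slots_ret/n_ret guarded by free_lock) all belong to the per-CPU swap slot cache that alloc_swap_slot_cache() sets up above. A sketch of that structure, reconstructed from the fields these matches touch (in kernels that carry mm/swap_slots.c the real definition lives in include/linux/swap_slots.h; treat the exact layout and comments here as approximate, not authoritative):

/* Reconstructed sketch of the per-CPU cache these matches operate on. */
struct swap_slots_cache {
	bool		lock_initialized;
	struct mutex	alloc_lock;	/* protects slots, cur, nr */
	swp_entry_t	*slots;		/* batch of preallocated swap entries */
	int		nr;		/* entries still available in slots */
	int		cur;		/* index of the next entry to hand out */
	spinlock_t	free_lock;	/* protects slots_ret, n_ret */
	swp_entry_t	*slots_ret;	/* batch of entries waiting to be freed */
	int		n_ret;		/* entries queued in slots_ret */
};

/* One instance per CPU; this is the swp_slots seen in per_cpu()/raw_cpu_ptr() above. */
static DEFINE_PER_CPU(struct swap_slots_cache, swp_slots);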
171 struct swap_slots_cache *cache; in drain_slots_cache_cpu() local
174 cache = &per_cpu(swp_slots, cpu); in drain_slots_cache_cpu()
175 if ((type & SLOTS_CACHE) && cache->slots) { in drain_slots_cache_cpu()
176 mutex_lock(&cache->alloc_lock); in drain_slots_cache_cpu()
177 swapcache_free_entries(cache->slots + cache->cur, cache->nr); in drain_slots_cache_cpu()
178 cache->cur = 0; in drain_slots_cache_cpu()
179 cache->nr = 0; in drain_slots_cache_cpu()
180 if (free_slots && cache->slots) { in drain_slots_cache_cpu()
181 kvfree(cache->slots); in drain_slots_cache_cpu()
182 cache->slots = NULL; in drain_slots_cache_cpu()
184 mutex_unlock(&cache->alloc_lock); in drain_slots_cache_cpu()
186 if ((type & SLOTS_CACHE_RET) && cache->slots_ret) { in drain_slots_cache_cpu()
187 spin_lock_irq(&cache->free_lock); in drain_slots_cache_cpu()
188 swapcache_free_entries(cache->slots_ret, cache->n_ret); in drain_slots_cache_cpu()
189 cache->n_ret = 0; in drain_slots_cache_cpu()
190 if (free_slots && cache->slots_ret) { in drain_slots_cache_cpu()
191 slots = cache->slots_ret; in drain_slots_cache_cpu()
192 cache->slots_ret = NULL; in drain_slots_cache_cpu()
194 spin_unlock_irq(&cache->free_lock); in drain_slots_cache_cpu()
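Taken together, the matches above outline drain_slots_cache_cpu(): drain the allocation batch under alloc_lock, then the free batch under free_lock, optionally releasing the buffers themselves. A sketch of the whole function, with the few lines that did not match refs:cache filled in from memory and therefore to be treated as approximate:

static void drain_slots_cache_cpu(unsigned int cpu, unsigned int type,
				  bool free_slots)
{
	struct swap_slots_cache *cache;
	swp_entry_t *slots = NULL;

	cache = &per_cpu(swp_slots, cpu);
	if ((type & SLOTS_CACHE) && cache->slots) {
		mutex_lock(&cache->alloc_lock);
		/* Give the still-cached allocation entries back to the swap device. */
		swapcache_free_entries(cache->slots + cache->cur, cache->nr);
		cache->cur = 0;
		cache->nr = 0;
		if (free_slots && cache->slots) {
			kvfree(cache->slots);
			cache->slots = NULL;
		}
		mutex_unlock(&cache->alloc_lock);
	}
	if ((type & SLOTS_CACHE_RET) && cache->slots_ret) {
		spin_lock_irq(&cache->free_lock);
		swapcache_free_entries(cache->slots_ret, cache->n_ret);
		cache->n_ret = 0;
		if (free_slots && cache->slots_ret) {
			/* Detach the buffer; it is kvfree()d only after the lock is dropped. */
			slots = cache->slots_ret;
			cache->slots_ret = NULL;
		}
		spin_unlock_irq(&cache->free_lock);
		kvfree(slots);
	}
}

Stashing slots_ret in a local and freeing it after spin_unlock_irq() presumably avoids calling kvfree() with a spinlock held and interrupts disabled, which is not safe when the buffer may have been vmalloc()ed.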
259 static int refill_swap_slots_cache(struct swap_slots_cache *cache) in refill_swap_slots_cache() argument
264 cache->cur = 0; in refill_swap_slots_cache()
266 cache->nr = get_swap_pages(SWAP_SLOTS_CACHE_SIZE, in refill_swap_slots_cache()
267 cache->slots, 1); in refill_swap_slots_cache()
269 return cache->nr; in refill_swap_slots_cache()
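The refill path is small enough to reconstruct almost entirely from these matches; the guard variables in the sketch below (use_swap_slot_cache, swap_slot_cache_active) do exist elsewhere in the same file, but their exact placement here is an assumption:

/* Called with cache->alloc_lock held. */
static int refill_swap_slots_cache(struct swap_slots_cache *cache)
{
	if (!use_swap_slot_cache)	/* assumed guard, not among the matches above */
		return 0;

	cache->cur = 0;
	if (swap_slot_cache_active)	/* assumed guard, not among the matches above */
		cache->nr = get_swap_pages(SWAP_SLOTS_CACHE_SIZE,
					   cache->slots, 1);

	return cache->nr;
}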
274 struct swap_slots_cache *cache; in free_swap_slot() local
276 cache = raw_cpu_ptr(&swp_slots); in free_swap_slot()
277 if (likely(use_swap_slot_cache && cache->slots_ret)) { in free_swap_slot()
278 spin_lock_irq(&cache->free_lock); in free_swap_slot()
280 if (!use_swap_slot_cache || !cache->slots_ret) { in free_swap_slot()
281 spin_unlock_irq(&cache->free_lock); in free_swap_slot()
284 if (cache->n_ret >= SWAP_SLOTS_CACHE_SIZE) { in free_swap_slot()
291 swapcache_free_entries(cache->slots_ret, cache->n_ret); in free_swap_slot()
292 cache->n_ret = 0; in free_swap_slot()
294 cache->slots_ret[cache->n_ret++] = entry; in free_swap_slot()
295 spin_unlock_irq(&cache->free_lock); in free_swap_slot()
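free_swap_slot() batches freed entries into slots_ret and only hands a full batch back to the core swap code via swapcache_free_entries(). The matches above omit the fallback branch; in the sketch below the direct-free path is filled in as recalled and should be treated as an assumption:

void free_swap_slot(swp_entry_t entry)
{
	struct swap_slots_cache *cache;

	cache = raw_cpu_ptr(&swp_slots);
	if (likely(use_swap_slot_cache && cache->slots_ret)) {
		spin_lock_irq(&cache->free_lock);
		/* The cache may have been deactivated while we waited for the lock. */
		if (!use_swap_slot_cache || !cache->slots_ret) {
			spin_unlock_irq(&cache->free_lock);
			goto direct_free;
		}
		if (cache->n_ret >= SWAP_SLOTS_CACHE_SIZE) {
			/* Batch is full: return all cached entries to the global pool. */
			swapcache_free_entries(cache->slots_ret, cache->n_ret);
			cache->n_ret = 0;
		}
		cache->slots_ret[cache->n_ret++] = entry;
		spin_unlock_irq(&cache->free_lock);
	} else {
direct_free:
		/* Cache unavailable: free the single entry immediately (assumed fallback). */
		swapcache_free_entries(&entry, 1);
	}
}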
305 struct swap_slots_cache *cache; in folio_alloc_swap() local
324 cache = raw_cpu_ptr(&swp_slots); in folio_alloc_swap()
326 if (likely(check_cache_active() && cache->slots)) { in folio_alloc_swap()
327 mutex_lock(&cache->alloc_lock); in folio_alloc_swap()
328 if (cache->slots) { in folio_alloc_swap()
330 if (cache->nr) { in folio_alloc_swap()
331 entry = cache->slots[cache->cur]; in folio_alloc_swap()
332 cache->slots[cache->cur++].val = 0; in folio_alloc_swap()
333 cache->nr--; in folio_alloc_swap()
334 } else if (refill_swap_slots_cache(cache)) { in folio_alloc_swap()
338 mutex_unlock(&cache->alloc_lock); in folio_alloc_swap()
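Finally, the matches in folio_alloc_swap() form the allocation fast path: take a preallocated entry from the per-CPU cache, refilling it with a whole batch when it runs dry. A sketch of just that fast path, with the surrounding large-folio and memcg handling elided and the repeat label filled in as an assumption:

swp_entry_t folio_alloc_swap(struct folio *folio)
{
	swp_entry_t entry;
	struct swap_slots_cache *cache;

	entry.val = 0;

	/* ... large-folio handling elided ... */

	cache = raw_cpu_ptr(&swp_slots);

	if (likely(check_cache_active() && cache->slots)) {
		mutex_lock(&cache->alloc_lock);
		if (cache->slots) {
repeat:
			if (cache->nr) {
				/* Hand out the next cached entry and clear its slot. */
				entry = cache->slots[cache->cur];
				cache->slots[cache->cur++].val = 0;
				cache->nr--;
			} else if (refill_swap_slots_cache(cache)) {
				/* Got a fresh batch from get_swap_pages(); retry. */
				goto repeat;
			}
		}
		mutex_unlock(&cache->alloc_lock);
		if (entry.val)
			goto out;
	}

	/* Slow path: allocate a single entry directly. */
	get_swap_pages(1, &entry, 1);
out:
	/* ... memcg charge handling elided ... */
	return entry;
}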