Lines matching refs:lru in mm/list_lru.c — each entry gives the file line number, the matching source line, the enclosing function, and whether the reference is a function argument or a local variable.

22 static inline bool list_lru_memcg_aware(struct list_lru *lru) in list_lru_memcg_aware() argument
24 return lru->memcg_aware; in list_lru_memcg_aware()
27 static void list_lru_register(struct list_lru *lru) in list_lru_register() argument
29 if (!list_lru_memcg_aware(lru)) in list_lru_register()
33 list_add(&lru->list, &memcg_list_lrus); in list_lru_register()
37 static void list_lru_unregister(struct list_lru *lru) in list_lru_unregister() argument
39 if (!list_lru_memcg_aware(lru)) in list_lru_unregister()
43 list_del(&lru->list); in list_lru_unregister()
47 static int lru_shrinker_id(struct list_lru *lru) in lru_shrinker_id() argument
49 return lru->shrinker_id; in lru_shrinker_id()
53 list_lru_from_memcg_idx(struct list_lru *lru, int nid, int idx) in list_lru_from_memcg_idx() argument
55 if (list_lru_memcg_aware(lru) && idx >= 0) { in list_lru_from_memcg_idx()
56 struct list_lru_memcg *mlru = xa_load(&lru->xa, idx); in list_lru_from_memcg_idx()
60 return &lru->node[nid].lru; in list_lru_from_memcg_idx()
64 list_lru_from_kmem(struct list_lru *lru, int nid, void *ptr, in list_lru_from_kmem() argument
67 struct list_lru_node *nlru = &lru->node[nid]; in list_lru_from_kmem()
68 struct list_lru_one *l = &nlru->lru; in list_lru_from_kmem()
71 if (!list_lru_memcg_aware(lru)) in list_lru_from_kmem()
78 l = list_lru_from_memcg_idx(lru, nid, memcg_kmem_id(memcg)); in list_lru_from_kmem()
85 static void list_lru_register(struct list_lru *lru) in list_lru_register() argument
89 static void list_lru_unregister(struct list_lru *lru) in list_lru_unregister() argument
93 static int lru_shrinker_id(struct list_lru *lru) in lru_shrinker_id() argument
98 static inline bool list_lru_memcg_aware(struct list_lru *lru) in list_lru_memcg_aware() argument
104 list_lru_from_memcg_idx(struct list_lru *lru, int nid, int idx) in list_lru_from_memcg_idx() argument
106 return &lru->node[nid].lru; in list_lru_from_memcg_idx()
110 list_lru_from_kmem(struct list_lru *lru, int nid, void *ptr, in list_lru_from_kmem() argument
115 return &lru->node[nid].lru; in list_lru_from_kmem()
119 bool list_lru_add(struct list_lru *lru, struct list_head *item) in list_lru_add() argument
122 struct list_lru_node *nlru = &lru->node[nid]; in list_lru_add()
128 l = list_lru_from_kmem(lru, nid, item, &memcg); in list_lru_add()
133 lru_shrinker_id(lru)); in list_lru_add()
143 bool list_lru_del(struct list_lru *lru, struct list_head *item) in list_lru_del() argument
146 struct list_lru_node *nlru = &lru->node[nid]; in list_lru_del()
151 l = list_lru_from_kmem(lru, nid, item, NULL); in list_lru_del()
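The two entry points above take only the lru and the object's embedded list_head: the NUMA node is derived from the item's address and the owning memcg from its slab inside list_lru_add()/list_lru_del() themselves. A minimal usage sketch, assuming a hypothetical cached object struct my_obj with an embedded list_head:

	#include <linux/list_lru.h>

	struct my_obj {				/* hypothetical cached object */
		struct list_head lru;		/* linked into the list_lru */
		/* ... payload ... */
	};

	/* Expose the object to reclaim; true if it was not already on a list. */
	static bool my_obj_cache_put(struct list_lru *my_lru, struct my_obj *obj)
	{
		return list_lru_add(my_lru, &obj->lru);
	}

	/* Take the object back from reclaim; true if it was actually on a list. */
	static bool my_obj_cache_get(struct list_lru *my_lru, struct my_obj *obj)
	{
		return list_lru_del(my_lru, &obj->lru);
	}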
178 unsigned long list_lru_count_one(struct list_lru *lru, in list_lru_count_one() argument
185 l = list_lru_from_memcg_idx(lru, nid, memcg_kmem_id(memcg)); in list_lru_count_one()
196 unsigned long list_lru_count_node(struct list_lru *lru, int nid) in list_lru_count_node() argument
200 nlru = &lru->node[nid]; in list_lru_count_node()
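list_lru_count_one() reports a single (node, memcg) sublist and list_lru_count_node() the whole node's total; the former is the usual backing for a memcg-aware shrinker's count. A sketch, assuming a hypothetical my_lru and a my_count_objects() meant to serve as a shrinker's ->count_objects():

	#include <linux/shrinker.h>
	#include <linux/list_lru.h>

	static struct list_lru my_lru;		/* hypothetical, memcg-aware LRU */

	static unsigned long my_count_objects(struct shrinker *sh,
					      struct shrink_control *sc)
	{
		/* Size of the per-node, per-memcg sublist this shrink targets. */
		return list_lru_count_one(&my_lru, sc->nid, sc->memcg);
	}

	static unsigned long my_node_total(int nid)
	{
		/* Everything queued on @nid, across all memcgs. */
		return list_lru_count_node(&my_lru, nid);
	}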
206 __list_lru_walk_one(struct list_lru *lru, int nid, int memcg_idx, in __list_lru_walk_one() argument
210 struct list_lru_node *nlru = &lru->node[nid]; in __list_lru_walk_one()
216 l = list_lru_from_memcg_idx(lru, nid, memcg_idx); in __list_lru_walk_one()
268 list_lru_walk_one(struct list_lru *lru, int nid, struct mem_cgroup *memcg, in list_lru_walk_one() argument
272 struct list_lru_node *nlru = &lru->node[nid]; in list_lru_walk_one()
276 ret = __list_lru_walk_one(lru, nid, memcg_kmem_id(memcg), isolate, in list_lru_walk_one()
284 list_lru_walk_one_irq(struct list_lru *lru, int nid, struct mem_cgroup *memcg, in list_lru_walk_one_irq() argument
288 struct list_lru_node *nlru = &lru->node[nid]; in list_lru_walk_one_irq()
292 ret = __list_lru_walk_one(lru, nid, memcg_kmem_id(memcg), isolate, in list_lru_walk_one_irq()
298 unsigned long list_lru_walk_node(struct list_lru *lru, int nid, in list_lru_walk_node() argument
304 isolated += list_lru_walk_one(lru, nid, NULL, isolate, cb_arg, in list_lru_walk_node()
308 if (*nr_to_walk > 0 && list_lru_memcg_aware(lru)) { in list_lru_walk_node()
312 xa_for_each(&lru->xa, index, mlru) { in list_lru_walk_node()
313 struct list_lru_node *nlru = &lru->node[nid]; in list_lru_walk_node()
316 isolated += __list_lru_walk_one(lru, nid, index, in list_lru_walk_node()
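__list_lru_walk_one() runs a caller-supplied isolate callback under the node lock, and list_lru_walk_node() extends that walk to every per-memcg sublist of the node via the xarray. A sketch of a callback plus a node-wide drain, assuming this kernel generation's list_lru_walk_cb signature (item, per-memcg list, node lock, opaque argument) and a hypothetical dispose list:

	static enum lru_status my_isolate(struct list_head *item,
					  struct list_lru_one *list,
					  spinlock_t *lock, void *cb_arg)
	{
		struct list_head *dispose = cb_arg;

		/* Unlink from the LRU while the node lock is still held. */
		list_lru_isolate_move(list, item, dispose);
		return LRU_REMOVED;
	}

	static void my_drain_node(struct list_lru *lru, int nid)
	{
		unsigned long nr_to_walk = 128;
		LIST_HEAD(dispose);

		/* Visits the node's own list and each per-memcg list on @nid. */
		list_lru_walk_node(lru, nid, my_isolate, &dispose, &nr_to_walk);

		/* ... free everything collected on the dispose list here ... */
	}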
353 static void memcg_list_lru_free(struct list_lru *lru, int src_idx) in memcg_list_lru_free() argument
355 struct list_lru_memcg *mlru = xa_erase_irq(&lru->xa, src_idx); in memcg_list_lru_free()
367 static inline void memcg_init_list_lru(struct list_lru *lru, bool memcg_aware) in memcg_init_list_lru() argument
370 xa_init_flags(&lru->xa, XA_FLAGS_LOCK_IRQ); in memcg_init_list_lru()
371 lru->memcg_aware = memcg_aware; in memcg_init_list_lru()
374 static void memcg_destroy_list_lru(struct list_lru *lru) in memcg_destroy_list_lru() argument
376 XA_STATE(xas, &lru->xa, 0); in memcg_destroy_list_lru()
379 if (!list_lru_memcg_aware(lru)) in memcg_destroy_list_lru()
390 static void memcg_reparent_list_lru_node(struct list_lru *lru, int nid, in memcg_reparent_list_lru_node() argument
393 struct list_lru_node *nlru = &lru->node[nid]; in memcg_reparent_list_lru_node()
403 src = list_lru_from_memcg_idx(lru, nid, src_idx); in memcg_reparent_list_lru_node()
406 dst = list_lru_from_memcg_idx(lru, nid, dst_idx); in memcg_reparent_list_lru_node()
412 set_shrinker_bit(dst_memcg, nid, lru_shrinker_id(lru)); in memcg_reparent_list_lru_node()
419 static void memcg_reparent_list_lru(struct list_lru *lru, in memcg_reparent_list_lru() argument
425 memcg_reparent_list_lru_node(lru, i, src_idx, dst_memcg); in memcg_reparent_list_lru()
427 memcg_list_lru_free(lru, src_idx); in memcg_reparent_list_lru()
433 struct list_lru *lru; in memcg_reparent_list_lrus() local
459 list_for_each_entry(lru, &memcg_list_lrus, list) in memcg_reparent_list_lrus()
460 memcg_reparent_list_lru(lru, src_idx, parent); in memcg_reparent_list_lrus()
465 struct list_lru *lru) in memcg_list_lru_allocated() argument
469 return idx < 0 || xa_load(&lru->xa, idx); in memcg_list_lru_allocated()
472 int memcg_list_lru_alloc(struct mem_cgroup *memcg, struct list_lru *lru, in memcg_list_lru_alloc() argument
481 XA_STATE(xas, &lru->xa, 0); in memcg_list_lru_alloc()
483 if (!list_lru_memcg_aware(lru) || memcg_list_lru_allocated(memcg, lru)) in memcg_list_lru_alloc()
497 if (memcg_list_lru_allocated(memcg, lru)) in memcg_list_lru_alloc()
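memcg_list_lru_alloc() populates the xarray slot for a cgroup before any of its objects can be queued; callers normally reach it through the slab layer by passing the lru to kmem_cache_alloc_lru(). A sketch, reusing the hypothetical struct my_obj from the earlier sketch:

	#include <linux/slab.h>

	static struct my_obj *my_obj_alloc(struct kmem_cache *my_cache,
					   struct list_lru *my_lru)
	{
		/*
		 * Passing the lru lets the slab hook call memcg_list_lru_alloc()
		 * for the current cgroup, so a later list_lru_add() of this
		 * object finds its per-memcg sublist already in place.
		 */
		return kmem_cache_alloc_lru(my_cache, my_lru, GFP_KERNEL);
	}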
550 static inline void memcg_init_list_lru(struct list_lru *lru, bool memcg_aware) in memcg_init_list_lru() argument
554 static void memcg_destroy_list_lru(struct list_lru *lru) in memcg_destroy_list_lru() argument
559 int __list_lru_init(struct list_lru *lru, bool memcg_aware, in __list_lru_init() argument
566 lru->shrinker_id = shrinker->id; in __list_lru_init()
568 lru->shrinker_id = -1; in __list_lru_init()
571 lru->node = kcalloc(nr_node_ids, sizeof(*lru->node), GFP_KERNEL); in __list_lru_init()
572 if (!lru->node) in __list_lru_init()
576 spin_lock_init(&lru->node[i].lock); in __list_lru_init()
578 lockdep_set_class(&lru->node[i].lock, key); in __list_lru_init()
579 init_one_lru(&lru->node[i].lru); in __list_lru_init()
582 memcg_init_list_lru(lru, memcg_aware); in __list_lru_init()
583 list_lru_register(lru); in __list_lru_init()
589 void list_lru_destroy(struct list_lru *lru) in list_lru_destroy() argument
592 if (!lru->node) in list_lru_destroy()
595 list_lru_unregister(lru); in list_lru_destroy()
597 memcg_destroy_list_lru(lru); in list_lru_destroy()
598 kfree(lru->node); in list_lru_destroy()
599 lru->node = NULL; in list_lru_destroy()
602 lru->shrinker_id = -1; in list_lru_destroy()
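__list_lru_init() is normally reached through the list_lru_init()/list_lru_init_memcg() wrappers, and list_lru_destroy() tears everything down again. A lifetime sketch for a hypothetical my_lru; the plain variant is shown, while a memcg-aware cache would instead call list_lru_init_memcg() with an already-preallocated SHRINKER_MEMCG_AWARE shrinker so that shrinker->id is valid when __list_lru_init() records it:

	#include <linux/list_lru.h>

	static struct list_lru my_lru;		/* hypothetical */

	static int my_cache_init(void)
	{
		/* Expands to __list_lru_init(&my_lru, false, NULL, NULL). */
		return list_lru_init(&my_lru);
	}

	static void my_cache_exit(void)
	{
		/* Unregisters (if memcg aware), frees per-memcg state and the node array. */
		list_lru_destroy(&my_lru);
	}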