Lines matching refs: smap. Each entry is a reference to the smap pointer (struct bpf_local_storage_map *) in the Linux kernel's BPF local storage code (bpf_local_storage.c): the source line number, the matched line, and the enclosing function, with declaration sites tagged 'argument' or 'local'.
21 select_bucket(struct bpf_local_storage_map *smap, in select_bucket() argument
24 return &smap->buckets[hash_ptr(selem, smap->bucket_log)]; in select_bucket()
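
These two matches are the whole bucket-selection step: select_bucket() hashes the element's own address with hash_ptr() and keeps bucket_log bits of it to index smap->buckets[]. A minimal userspace sketch of the same idea; the struct layout and the pointer hash below are illustrative stand-ins, not the kernel's:

    #include <stdint.h>
    #include <stdio.h>

    #define BUCKET_LOG 4u                      /* illustrative: 2^4 = 16 buckets */
    #define NBUCKETS   (1u << BUCKET_LOG)

    struct bucket { int id; };
    static struct bucket buckets[NBUCKETS];

    /* Stand-in for the kernel's hash_ptr(): multiply-mix the pointer bits and
     * keep only the top BUCKET_LOG bits of the product. */
    static unsigned int hash_ptr_sketch(const void *ptr, unsigned int bits)
    {
        uintptr_t v = (uintptr_t)ptr * (uintptr_t)0x9e3779b97f4a7c15ull;

        return (unsigned int)(v >> (sizeof(v) * 8 - bits));
    }

    /* Same shape as select_bucket(): the element's address picks the bucket. */
    static struct bucket *select_bucket_sketch(const void *selem)
    {
        return &buckets[hash_ptr_sketch(selem, BUCKET_LOG)];
    }

    int main(void)
    {
        int elem_a, elem_b;

        printf("elem_a -> bucket %td\n", select_bucket_sketch(&elem_a) - buckets);
        printf("elem_b -> bucket %td\n", select_bucket_sketch(&elem_b) - buckets);
        return 0;
    }
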
27 static int mem_charge(struct bpf_local_storage_map *smap, void *owner, u32 size) in mem_charge() argument
29 struct bpf_map *map = &smap->map; in mem_charge()
34 return map->ops->map_local_storage_charge(smap, owner, size); in mem_charge()
37 static void mem_uncharge(struct bpf_local_storage_map *smap, void *owner, in mem_uncharge() argument
40 struct bpf_map *map = &smap->map; in mem_uncharge()
43 map->ops->map_local_storage_uncharge(smap, owner, size); in mem_uncharge()
47 owner_storage(struct bpf_local_storage_map *smap, void *owner) in owner_storage() argument
49 struct bpf_map *map = &smap->map; in owner_storage()
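
mem_charge() and mem_uncharge() do no accounting of their own; like owner_storage(), they only forward to the owning map's ops (map_local_storage_charge/uncharge and the owner's storage slot). A hedged sketch of that delegation, with a toy 'task' owner and op names invented for illustration:

    #include <stdio.h>

    struct smap;                               /* illustrative types only */

    struct local_storage_ops {
        int   (*charge)(struct smap *smap, void *owner, unsigned int size);
        void  (*uncharge)(struct smap *smap, void *owner, unsigned int size);
        void **(*owner_storage)(struct smap *smap, void *owner);
    };

    struct smap { const struct local_storage_ops *ops; };

    /* Same shape as mem_charge()/mem_uncharge(): pure delegation to the ops. */
    static int mem_charge(struct smap *smap, void *owner, unsigned int size)
    {
        return smap->ops->charge(smap, owner, size);
    }

    static void mem_uncharge(struct smap *smap, void *owner, unsigned int size)
    {
        smap->ops->uncharge(smap, owner, size);
    }

    /* Toy owner: tracks its charged bytes and holds the storage pointer slot. */
    struct task { long charged; void *storage; };

    static int task_charge(struct smap *smap, void *owner, unsigned int size)
    {
        (void)smap; ((struct task *)owner)->charged += size; return 0;
    }

    static void task_uncharge(struct smap *smap, void *owner, unsigned int size)
    {
        (void)smap; ((struct task *)owner)->charged -= size;
    }

    static void **task_owner_storage(struct smap *smap, void *owner)
    {
        (void)smap; return &((struct task *)owner)->storage;
    }

    static const struct local_storage_ops task_ops = {
        task_charge, task_uncharge, task_owner_storage
    };

    int main(void)
    {
        struct smap map = { &task_ops };
        struct task t = { 0, NULL };

        mem_charge(&map, &t, 64);
        printf("charged=%ld storage_slot=%p\n", t.charged,
               (void *)map.ops->owner_storage(&map, &t));
        mem_uncharge(&map, &t, 64);
        printf("charged=%ld\n", t.charged);
        return 0;
    }
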
75 bpf_selem_alloc(struct bpf_local_storage_map *smap, void *owner, in bpf_selem_alloc() argument
80 if (charge_mem && mem_charge(smap, owner, smap->elem_size)) in bpf_selem_alloc()
83 if (smap->bpf_ma) { in bpf_selem_alloc()
85 selem = bpf_mem_cache_alloc_flags(&smap->selem_ma, gfp_flags); in bpf_selem_alloc()
95 memset(SDATA(selem)->data, 0, smap->map.value_size); in bpf_selem_alloc()
97 selem = bpf_map_kzalloc(&smap->map, smap->elem_size, in bpf_selem_alloc()
103 copy_map_value(&smap->map, SDATA(selem)->data, value); in bpf_selem_alloc()
109 mem_uncharge(smap, owner, smap->elem_size); in bpf_selem_alloc()
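
bpf_selem_alloc() charges the owner for elem_size first, allocates the element either from the per-map bpf_mem_alloc cache (when smap->bpf_ma is set) or with bpf_map_kzalloc(), copies the caller's value into SDATA(selem)->data, and returns the charge if anything fails. A simplified sketch of that charge/allocate/copy/rollback order; the plain counter below stands in for the kernel's accounting:

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    /* Illustrative element: small header followed by the map value bytes. */
    struct selem {
        unsigned int value_size;
        unsigned char data[];                  /* analogue of SDATA(selem)->data */
    };

    static long charged;                       /* stand-in for mem_charge() state */
    static int  charge(size_t sz)   { charged += (long)sz; return 0; }
    static void uncharge(size_t sz) { charged -= (long)sz; }

    /* Same ordering as bpf_selem_alloc(): charge first, allocate, copy the
     * value in, and undo the charge if the allocation fails. */
    static struct selem *selem_alloc(unsigned int value_size, const void *value)
    {
        size_t elem_size = sizeof(struct selem) + value_size;
        struct selem *selem;

        if (charge(elem_size))
            return NULL;

        selem = calloc(1, elem_size);          /* kernel: bpf_mem_cache or kzalloc */
        if (!selem) {
            uncharge(elem_size);
            return NULL;
        }

        selem->value_size = value_size;
        if (value)
            memcpy(selem->data, value, value_size);  /* copy_map_value() analogue */
        return selem;
    }

    int main(void)
    {
        int v = 42;
        struct selem *e = selem_alloc(sizeof(v), &v);

        if (e)
            printf("charged=%ld value=%d\n", charged, *(int *)e->data);
        free(e);
        return 0;
    }
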
157 struct bpf_local_storage_map *smap, in bpf_local_storage_free() argument
174 if (smap) { in bpf_local_storage_free()
176 bpf_mem_cache_free(&smap->storage_ma, local_storage); in bpf_local_storage_free()
226 struct bpf_local_storage_map *smap, in bpf_selem_free() argument
229 bpf_obj_free_fields(smap->map.record, SDATA(selem)->data); in bpf_selem_free()
231 if (!smap->bpf_ma) { in bpf_selem_free()
244 bpf_mem_cache_free(&smap->selem_ma, selem); in bpf_selem_free()
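
bpf_selem_free() first releases whatever the stored value owns via bpf_obj_free_fields() and only then frees the element itself, through whichever allocator produced it (bpf_mem_cache_free() when smap->bpf_ma is set, as line 244 shows). A toy sketch of that destroy-fields-then-free order; owned_field is an invented stand-in for those special value fields:

    #include <stdio.h>
    #include <stdlib.h>

    /* Illustrative element with an owned sub-allocation standing in for the
     * special fields that bpf_obj_free_fields() releases. */
    struct selem {
        void *owned_field;
        int   value;
    };

    /* Same order as bpf_selem_free(): release what the value owns first,
     * then free the element itself. */
    static void selem_free(struct selem *selem)
    {
        free(selem->owned_field);              /* bpf_obj_free_fields() analogue */
        free(selem);                           /* kernel: RCU / bpf_mem_cache_free */
    }

    int main(void)
    {
        struct selem *e = calloc(1, sizeof(*e));

        if (!e)
            return 1;
        e->owned_field = malloc(16);
        e->value = 5;
        selem_free(e);
        printf("freed\n");
        return 0;
    }
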
257 struct bpf_local_storage_map *smap; in bpf_selem_unlink_storage_nolock() local
261 smap = rcu_dereference_check(SDATA(selem)->smap, bpf_rcu_lock_held()); in bpf_selem_unlink_storage_nolock()
269 mem_uncharge(smap, owner, smap->elem_size); in bpf_selem_unlink_storage_nolock()
274 mem_uncharge(smap, owner, sizeof(struct bpf_local_storage)); in bpf_selem_unlink_storage_nolock()
278 RCU_INIT_POINTER(*owner_storage(smap, owner), NULL); in bpf_selem_unlink_storage_nolock()
295 if (rcu_access_pointer(local_storage->cache[smap->cache_idx]) == in bpf_selem_unlink_storage_nolock()
297 RCU_INIT_POINTER(local_storage->cache[smap->cache_idx], NULL); in bpf_selem_unlink_storage_nolock()
299 bpf_selem_free(selem, smap, reuse_now); in bpf_selem_unlink_storage_nolock()
301 if (rcu_access_pointer(local_storage->smap) == smap) in bpf_selem_unlink_storage_nolock()
302 RCU_INIT_POINTER(local_storage->smap, NULL); in bpf_selem_unlink_storage_nolock()
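
bpf_selem_unlink_storage_nolock() is the bookkeeping mirror of linking: it returns the element's charge, and when the last element goes it also uncharges the struct bpf_local_storage and clears the owner's pointer (line 278); it clears the per-map cache slot if that slot still names the element being removed (lines 295-297), then frees the element. A toy sketch of that cache-invalidate-and-tear-down logic on a singly linked list, with invented types:

    #include <stdio.h>
    #include <stdlib.h>

    struct selem { struct selem *next; int value; };

    struct local_storage {
        struct selem *list;                    /* every element owned by this owner */
        struct selem *cache;                   /* one cached element, like cache[cache_idx] */
    };

    struct owner { struct local_storage *storage; };

    /* Mirrors the bookkeeping above: drop the element, invalidate the cache
     * slot if it pointed at it, and once the list is empty tear down the
     * storage and clear the owner's pointer. */
    static void unlink_selem(struct owner *owner, struct selem *victim)
    {
        struct local_storage *ls = owner->storage;
        struct selem **pp;

        for (pp = &ls->list; *pp; pp = &(*pp)->next) {
            if (*pp == victim) {
                *pp = victim->next;
                break;
            }
        }

        if (ls->cache == victim)               /* the cache must not outlive the element */
            ls->cache = NULL;
        free(victim);

        if (!ls->list) {                       /* last element: release the storage too */
            owner->storage = NULL;
            free(ls);
        }
    }

    int main(void)
    {
        struct local_storage *ls = calloc(1, sizeof(*ls));
        struct selem *e = calloc(1, sizeof(*e));
        struct owner o = { ls };

        if (!ls || !e)
            return 1;
        e->value = 7;
        ls->list = e;
        ls->cache = e;

        unlink_selem(&o, e);
        printf("owner storage after unlink: %p\n", (void *)o.storage);
        return 0;
    }
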
337 selem_smap = rcu_dereference_check(SDATA(selem)->smap, bpf_rcu_lock_held()); in check_storage_bpf_ma()
356 storage_smap = rcu_dereference_check(local_storage->smap, in bpf_selem_unlink_storage()
379 struct bpf_local_storage_map *smap; in bpf_selem_unlink_map() local
387 smap = rcu_dereference_check(SDATA(selem)->smap, bpf_rcu_lock_held()); in bpf_selem_unlink_map()
388 b = select_bucket(smap, selem); in bpf_selem_unlink_map()
395 void bpf_selem_link_map(struct bpf_local_storage_map *smap, in bpf_selem_link_map() argument
398 struct bpf_local_storage_map_bucket *b = select_bucket(smap, selem); in bpf_selem_link_map()
402 RCU_INIT_POINTER(SDATA(selem)->smap, smap); in bpf_selem_link_map()
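
bpf_selem_link_map() records the owning map in the element (SDATA(selem)->smap, line 402) and hooks it into the bucket chosen by select_bucket(); bpf_selem_unlink_map() later re-derives that same bucket from the stored smap pointer before removing the element. A small sketch of that pairing; the open-coded list and pointer hash here are illustrative, not the kernel's hlist:

    #include <stdint.h>
    #include <stdio.h>

    #define BUCKET_LOG 2u
    #define NBUCKETS   (1u << BUCKET_LOG)

    struct selem;

    struct smap_sketch {
        struct selem *buckets[NBUCKETS];       /* hlist heads, simplified */
    };

    struct selem {
        struct selem *next;
        struct smap_sketch *smap;              /* analogue of SDATA(selem)->smap */
        int value;
    };

    static unsigned int bucket_of(const void *selem)
    {
        return (unsigned int)(((uintptr_t)selem >> 4) & (NBUCKETS - 1));
    }

    /* Like bpf_selem_link_map(): remember the owning map, then hook the
     * element into the bucket chosen by its own address. */
    static void link_map(struct smap_sketch *smap, struct selem *selem)
    {
        unsigned int i = bucket_of(selem);

        selem->smap = smap;
        selem->next = smap->buckets[i];
        smap->buckets[i] = selem;
    }

    /* Like bpf_selem_unlink_map(): re-derive the bucket from selem->smap and
     * remove the element from it. */
    static void unlink_map(struct selem *selem)
    {
        struct selem **pp = &selem->smap->buckets[bucket_of(selem)];

        for (; *pp; pp = &(*pp)->next) {
            if (*pp == selem) {
                *pp = selem->next;
                break;
            }
        }
    }

    int main(void)
    {
        struct smap_sketch map = { { NULL } };
        struct selem e = { NULL, NULL, 3 };

        link_map(&map, &e);
        printf("linked into bucket %u\n", bucket_of(&e));
        unlink_map(&e);
        printf("bucket now empty: %d\n", map.buckets[bucket_of(&e)] == NULL);
        return 0;
    }
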
420 struct bpf_local_storage_map *smap, in bpf_local_storage_lookup() argument
427 sdata = rcu_dereference_check(local_storage->cache[smap->cache_idx], in bpf_local_storage_lookup()
429 if (sdata && rcu_access_pointer(sdata->smap) == smap) in bpf_local_storage_lookup()
435 if (rcu_access_pointer(SDATA(selem)->smap) == smap) in bpf_local_storage_lookup()
452 rcu_assign_pointer(local_storage->cache[smap->cache_idx], in bpf_local_storage_lookup()
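
bpf_local_storage_lookup() tries the per-owner cache slot reserved for this map first (cache[smap->cache_idx], a hit only if the cached element's smap matches), and only on a miss walks the owner's element list; a flag then decides whether the hit is written back into the cache slot (line 452), which is what the 'false' at the call sites further down switches off. A sketch of that fast-path/slow-path split without the RCU and locking details:

    #include <stdio.h>

    struct smap { int cache_idx; };            /* illustrative map: just a cache slot */

    struct selem {
        struct selem *next;
        const struct smap *smap;               /* which map this element belongs to */
        int value;
    };

    #define CACHE_SIZE 16

    struct local_storage {
        struct selem *list;
        struct selem *cache[CACHE_SIZE];       /* analogue of local_storage->cache[] */
    };

    /* Fast path: the cache slot reserved for this map. Slow path: walk the
     * list and, when asked, refill the cache slot. */
    static struct selem *lookup(struct local_storage *ls, const struct smap *smap,
                                int cacheit)
    {
        struct selem *selem = ls->cache[smap->cache_idx];

        if (selem && selem->smap == smap)
            return selem;                      /* cache hit */

        for (selem = ls->list; selem; selem = selem->next) {
            if (selem->smap == smap) {
                if (cacheit)
                    ls->cache[smap->cache_idx] = selem;
                return selem;
            }
        }
        return NULL;
    }

    int main(void)
    {
        struct smap map = { .cache_idx = 3 };
        struct local_storage ls = { 0 };
        struct selem e = { .next = NULL, .smap = &map, .value = 99 };

        ls.list = &e;
        printf("first lookup:  %d\n", lookup(&ls, &map, 1)->value);
        printf("second lookup: %d\n", lookup(&ls, &map, 1)->value);
        return 0;
    }
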
475 struct bpf_local_storage_map *smap, in bpf_local_storage_alloc() argument
483 err = mem_charge(smap, owner, sizeof(*storage)); in bpf_local_storage_alloc()
487 if (smap->bpf_ma) { in bpf_local_storage_alloc()
489 storage = bpf_mem_cache_alloc_flags(&smap->storage_ma, gfp_flags); in bpf_local_storage_alloc()
492 storage = bpf_map_kzalloc(&smap->map, sizeof(*storage), in bpf_local_storage_alloc()
501 RCU_INIT_POINTER(storage->smap, smap); in bpf_local_storage_alloc()
507 bpf_selem_link_map(smap, first_selem); in bpf_local_storage_alloc()
510 (struct bpf_local_storage **)owner_storage(smap, owner); in bpf_local_storage_alloc()
541 bpf_local_storage_free(storage, smap, smap->bpf_ma, true); in bpf_local_storage_alloc()
542 mem_uncharge(smap, owner, sizeof(*storage)); in bpf_local_storage_alloc()
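
bpf_local_storage_alloc() charges and allocates the per-owner struct bpf_local_storage, remembers the creating map (storage->smap, line 501), links the first element, and then publishes the storage into the owner's slot; in the kernel that publish is an atomic cmpxchg on the pointer returned by owner_storage(), and on failure lines 541-542 free the fresh storage and return the charge. A sketch of that publish-or-roll-back step using a C11 compare-exchange and toy accounting:

    #include <stdatomic.h>
    #include <stdio.h>
    #include <stdlib.h>

    struct local_storage { int owner_id; };

    struct owner {
        _Atomic(struct local_storage *) storage;   /* the owner's pointer slot */
        long charged;
    };

    /* Same shape as the tail of bpf_local_storage_alloc(): charge, allocate,
     * try to publish; on losing the race, free the new storage and uncharge. */
    static int storage_alloc(struct owner *owner, int id)
    {
        struct local_storage *storage, *expected = NULL;

        owner->charged += (long)sizeof(*storage);  /* mem_charge() stand-in */
        storage = calloc(1, sizeof(*storage));
        if (!storage) {
            owner->charged -= (long)sizeof(*storage);
            return -1;
        }
        storage->owner_id = id;

        /* analogue of cmpxchg(owner_storage_ptr, NULL, storage) */
        if (!atomic_compare_exchange_strong(&owner->storage, &expected, storage)) {
            free(storage);                         /* another allocator already won */
            owner->charged -= (long)sizeof(*storage);
            return 1;                              /* caller falls back to the winner */
        }
        return 0;
    }

    int main(void)
    {
        struct owner o = { .storage = NULL, .charged = 0 };

        printf("first alloc:  %d\n", storage_alloc(&o, 1));   /* publishes */
        printf("second alloc: %d\n", storage_alloc(&o, 2));   /* loses, rolls back */
        printf("charged=%ld\n", o.charged);
        return 0;
    }
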
552 bpf_local_storage_update(void *owner, struct bpf_local_storage_map *smap, in bpf_local_storage_update() argument
565 !btf_record_has_field(smap->map.record, BPF_SPIN_LOCK))) in bpf_local_storage_update()
571 local_storage = rcu_dereference_check(*owner_storage(smap, owner), in bpf_local_storage_update()
579 selem = bpf_selem_alloc(smap, owner, value, true, gfp_flags); in bpf_local_storage_update()
583 err = bpf_local_storage_alloc(owner, smap, selem, gfp_flags); in bpf_local_storage_update()
585 bpf_selem_free(selem, smap, true); in bpf_local_storage_update()
586 mem_uncharge(smap, owner, smap->elem_size); in bpf_local_storage_update()
599 bpf_local_storage_lookup(local_storage, smap, false); in bpf_local_storage_update()
604 copy_map_value_locked(&smap->map, old_sdata->data, in bpf_local_storage_update()
613 alloc_selem = selem = bpf_selem_alloc(smap, owner, value, true, gfp_flags); in bpf_local_storage_update()
630 old_sdata = bpf_local_storage_lookup(local_storage, smap, false); in bpf_local_storage_update()
636 copy_map_value_locked(&smap->map, old_sdata->data, value, in bpf_local_storage_update()
644 bpf_selem_link_map(smap, selem); in bpf_local_storage_update()
659 mem_uncharge(smap, owner, smap->elem_size); in bpf_local_storage_update()
660 bpf_selem_free(alloc_selem, smap, true); in bpf_local_storage_update()
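
bpf_local_storage_update() has three visible outcomes above: no storage yet, so allocate one seeded with the new element and roll back on failure (lines 583-586); BPF_F_LOCK with an existing value, so copy in place under the map's spin lock (copy_map_value_locked()); otherwise allocate a new element, link it (line 644), and retire the old one afterwards, so a reader always finds some value for the map. A toy sketch of that last insert-new-then-drop-old order on a singly linked list:

    #include <stdio.h>
    #include <stdlib.h>

    struct selem { struct selem *next; int map_id; int value; };

    struct local_storage { struct selem *list; };

    /* Same ordering as the non-BPF_F_LOCK update path: insert the new element
     * first, then drop the old one, so a walker always finds some value for
     * map_id. */
    static int update(struct local_storage *ls, int map_id, int value)
    {
        struct selem *new_selem, *old = NULL, **pp;

        new_selem = calloc(1, sizeof(*new_selem));
        if (!new_selem)
            return -1;
        new_selem->map_id = map_id;
        new_selem->value = value;

        /* find the current element for this map, if any */
        for (pp = &ls->list; *pp; pp = &(*pp)->next) {
            if ((*pp)->map_id == map_id) {
                old = *pp;
                break;
            }
        }

        new_selem->next = ls->list;            /* link the new element... */
        ls->list = new_selem;

        if (old) {                             /* ...then unlink and free the old one */
            for (pp = &ls->list; *pp; pp = &(*pp)->next) {
                if (*pp == old) {
                    *pp = old->next;
                    free(old);
                    break;
                }
            }
        }
        return 0;
    }

    int main(void)
    {
        struct local_storage ls = { NULL };

        update(&ls, 1, 10);
        update(&ls, 1, 20);                    /* replaces the old value for map 1 */
        printf("map 1 -> %d\n", ls.list->value);
        free(ls.list);
        return 0;
    }
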
738 storage_smap = rcu_dereference_check(local_storage->smap, bpf_rcu_lock_held()); in bpf_local_storage_destroy()
773 struct bpf_local_storage_map *smap = (struct bpf_local_storage_map *)map; in bpf_local_storage_map_mem_usage() local
774 u64 usage = sizeof(*smap); in bpf_local_storage_map_mem_usage()
777 usage += sizeof(*smap->buckets) * (1ULL << smap->bucket_log); in bpf_local_storage_map_mem_usage()
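
The usage figure is just the map struct plus one bucket per hash slot: sizeof(*smap) + sizeof(*smap->buckets) * (1 << bucket_log). The same arithmetic with made-up struct sizes:

    #include <stdio.h>

    /* Illustrative sizes only; the real structs are defined in the kernel. */
    struct bucket { void *list_head; unsigned long lock; };
    struct smap_sketch { struct bucket *buckets; unsigned int bucket_log; };

    int main(void)
    {
        struct smap_sketch smap = { .buckets = NULL, .bucket_log = 4 };
        /* map struct + one bucket per hash slot, as in
         * bpf_local_storage_map_mem_usage() */
        unsigned long long usage = sizeof(smap) +
            sizeof(struct bucket) * (1ULL << smap.bucket_log);

        printf("approx usage: %llu bytes\n", usage);
        return 0;
    }
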
796 struct bpf_local_storage_map *smap; in bpf_local_storage_map_alloc() local
801 smap = bpf_map_area_alloc(sizeof(*smap), NUMA_NO_NODE); in bpf_local_storage_map_alloc()
802 if (!smap) in bpf_local_storage_map_alloc()
804 bpf_map_init_from_attr(&smap->map, attr); in bpf_local_storage_map_alloc()
809 smap->bucket_log = ilog2(nbuckets); in bpf_local_storage_map_alloc()
811 smap->buckets = bpf_map_kvcalloc(&smap->map, nbuckets, in bpf_local_storage_map_alloc()
812 sizeof(*smap->buckets), GFP_USER | __GFP_NOWARN); in bpf_local_storage_map_alloc()
813 if (!smap->buckets) { in bpf_local_storage_map_alloc()
819 INIT_HLIST_HEAD(&smap->buckets[i].list); in bpf_local_storage_map_alloc()
820 raw_spin_lock_init(&smap->buckets[i].lock); in bpf_local_storage_map_alloc()
823 smap->elem_size = offsetof(struct bpf_local_storage_elem, in bpf_local_storage_map_alloc()
830 smap->bpf_ma = IS_ENABLED(CONFIG_PREEMPT_RT) ? true : bpf_ma; in bpf_local_storage_map_alloc()
831 if (smap->bpf_ma) { in bpf_local_storage_map_alloc()
832 err = bpf_mem_alloc_init(&smap->selem_ma, smap->elem_size, false); in bpf_local_storage_map_alloc()
836 err = bpf_mem_alloc_init(&smap->storage_ma, sizeof(struct bpf_local_storage), false); in bpf_local_storage_map_alloc()
838 bpf_mem_alloc_destroy(&smap->selem_ma); in bpf_local_storage_map_alloc()
843 smap->cache_idx = bpf_local_storage_cache_idx_get(cache); in bpf_local_storage_map_alloc()
844 return &smap->map; in bpf_local_storage_map_alloc()
847 kvfree(smap->buckets); in bpf_local_storage_map_alloc()
848 bpf_map_area_free(smap); in bpf_local_storage_map_alloc()
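
bpf_local_storage_map_alloc() sizes the bucket array as a power of two (bucket_log = ilog2(nbuckets)), allocates it, initializes each bucket's list head and raw spinlock, computes elem_size as the element header plus the value, and forces the bpf_mem_alloc path on PREEMPT_RT (line 830); on error it frees the buckets and the map again (lines 847-848). A userspace sketch of that bucket setup and unwinding, with pthread mutexes standing in for the raw spinlocks:

    #include <pthread.h>
    #include <stdio.h>
    #include <stdlib.h>

    struct selem { struct selem *next; unsigned char data[]; };

    struct bucket {
        struct selem *list;                    /* hlist head analogue */
        pthread_mutex_t lock;                  /* raw_spin_lock analogue */
    };

    struct smap_sketch {
        struct bucket *buckets;
        unsigned int bucket_log;
        unsigned int elem_size;
    };

    /* ilog2() stand-in: floor(log2(n)) for n >= 1 */
    static unsigned int ilog2_sketch(unsigned int n)
    {
        unsigned int log = 0;

        while (n >>= 1)
            log++;
        return log;
    }

    static struct smap_sketch *map_alloc(unsigned int nbuckets,
                                         unsigned int value_size)
    {
        struct smap_sketch *smap = calloc(1, sizeof(*smap));
        unsigned int i;

        if (!smap)
            return NULL;

        smap->bucket_log = ilog2_sketch(nbuckets);
        nbuckets = 1u << smap->bucket_log;

        smap->buckets = calloc(nbuckets, sizeof(*smap->buckets));
        if (!smap->buckets) {
            free(smap);                        /* unwind, like the error path above */
            return NULL;
        }

        for (i = 0; i < nbuckets; i++) {
            smap->buckets[i].list = NULL;
            pthread_mutex_init(&smap->buckets[i].lock, NULL);
        }

        smap->elem_size = (unsigned int)sizeof(struct selem) + value_size;
        return smap;
    }

    int main(void)
    {
        struct smap_sketch *smap = map_alloc(16, 8);

        if (smap)
            printf("bucket_log=%u elem_size=%u\n",
                   smap->bucket_log, smap->elem_size);
        /* teardown omitted in this sketch */
        return 0;
    }
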
858 struct bpf_local_storage_map *smap; in bpf_local_storage_map_free() local
861 smap = (struct bpf_local_storage_map *)map; in bpf_local_storage_map_free()
862 bpf_local_storage_cache_idx_free(cache, smap->cache_idx); in bpf_local_storage_map_free()
879 for (i = 0; i < (1U << smap->bucket_log); i++) { in bpf_local_storage_map_free()
880 b = &smap->buckets[i]; in bpf_local_storage_map_free()
915 if (smap->bpf_ma) { in bpf_local_storage_map_free()
916 bpf_mem_alloc_destroy(&smap->selem_ma); in bpf_local_storage_map_free()
917 bpf_mem_alloc_destroy(&smap->storage_ma); in bpf_local_storage_map_free()
919 kvfree(smap->buckets); in bpf_local_storage_map_free()
920 bpf_map_area_free(smap); in bpf_local_storage_map_free()
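
bpf_local_storage_map_free() is the mirror image: it returns the cache index, drains every bucket of the elements still linked there, destroys the two bpf_mem_alloc caches if they were used, and finally frees the bucket array and the map. A matching sketch of that bucket walk and teardown (locking and RCU omitted, types invented to match the allocation sketch above):

    #include <stdio.h>
    #include <stdlib.h>

    struct selem { struct selem *next; };

    struct bucket { struct selem *list; };     /* lock omitted in this sketch */

    struct smap_sketch {
        struct bucket *buckets;
        unsigned int bucket_log;
    };

    /* Same walk as bpf_local_storage_map_free(): drain every bucket of the
     * elements still linked to it, then release the bucket array and the map. */
    static void map_free(struct smap_sketch *smap)
    {
        unsigned int i;

        for (i = 0; i < (1u << smap->bucket_log); i++) {
            struct bucket *b = &smap->buckets[i];

            while (b->list) {
                struct selem *selem = b->list;

                b->list = selem->next;
                free(selem);                   /* kernel: unlink + bpf_selem_free() */
            }
        }
        free(smap->buckets);                   /* kvfree(smap->buckets) analogue */
        free(smap);                            /* bpf_map_area_free(smap) analogue */
    }

    int main(void)
    {
        struct smap_sketch *smap = calloc(1, sizeof(*smap));

        if (!smap)
            return 1;
        smap->bucket_log = 2;
        smap->buckets = calloc(1u << smap->bucket_log, sizeof(*smap->buckets));
        if (!smap->buckets) {
            free(smap);
            return 1;
        }
        map_free(smap);
        printf("freed\n");
        return 0;
    }
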