Lines matching +full:128 +full:ma (search over kernel/bpf/memalloc.c, the any-context BPF memory allocator)

20  * 16 32 64 96 128 192 256 512 1024 2048 4096
23 * 16 32 64 96 128 192 256 512 1024 2048 4096
55 6, /* 128 */
455 * 64*16 + 64*32 + 64*64 + 64*96 + 64*128 + 64*192 + 64*256 + 32*512 + 16*1024 + 8*2048 + 4*4096
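That worst-case free-list comment works out to 64*(16+32+64+96+128+192+256) + 32*512 + 16*1024 + 8*2048 + 4*4096 = 50176 + 65536 = 115712 bytes, i.e. exactly 113 KiB of cached objects per CPU.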
496 int bpf_mem_alloc_init(struct bpf_mem_alloc *ma, int size, bool percpu) in bpf_mem_alloc_init() argument
498 static u16 sizes[NUM_CACHES] = {96, 192, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096}; in bpf_mem_alloc_init()
504 ma->percpu = percpu; in bpf_mem_alloc_init()
531 ma->cache = pc; in bpf_mem_alloc_init()
558 ma->caches = pcc; in bpf_mem_alloc_init()
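The two init modes visible above are selected by the size argument: size != 0 sets up the single fixed-size per-cpu cache (ma->cache = pc), while size == 0 sets up the NUM_CACHES array of per-cpu buckets (ma->caches = pcc). A minimal sketch of a kernel-internal caller follows; the example_init() helper, the variable names, and the 64-byte unit size are hypothetical, not taken from this file:

#include <linux/types.h>
#include <linux/bpf_mem_alloc.h>

static struct bpf_mem_alloc fixed_ma;	/* size != 0: uses ma->cache */
static struct bpf_mem_alloc any_ma;	/* size == 0: uses ma->caches */

static int example_init(void)		/* hypothetical helper */
{
	int err;

	/* Fixed-size mode: 64-byte units, to be paired with
	 * bpf_mem_cache_alloc()/bpf_mem_cache_free().
	 */
	err = bpf_mem_alloc_init(&fixed_ma, 64, false);
	if (err)
		return err;

	/* Any-size mode: buckets from 16 to 4096 bytes, to be paired
	 * with bpf_mem_alloc(size)/bpf_mem_free().
	 */
	err = bpf_mem_alloc_init(&any_ma, 0, false);
	if (err)
		bpf_mem_alloc_destroy(&fixed_ma);
	return err;
}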
593 static void check_leaked_objs(struct bpf_mem_alloc *ma) in check_leaked_objs() argument
599 if (ma->cache) { in check_leaked_objs()
601 c = per_cpu_ptr(ma->cache, cpu); in check_leaked_objs()
605 if (ma->caches) { in check_leaked_objs()
607 cc = per_cpu_ptr(ma->caches, cpu); in check_leaked_objs()
616 static void free_mem_alloc_no_barrier(struct bpf_mem_alloc *ma) in free_mem_alloc_no_barrier() argument
618 check_leaked_objs(ma); in free_mem_alloc_no_barrier()
619 free_percpu(ma->cache); in free_mem_alloc_no_barrier()
620 free_percpu(ma->caches); in free_mem_alloc_no_barrier()
621 ma->cache = NULL; in free_mem_alloc_no_barrier()
622 ma->caches = NULL; in free_mem_alloc_no_barrier()
625 static void free_mem_alloc(struct bpf_mem_alloc *ma) in free_mem_alloc() argument
641 free_mem_alloc_no_barrier(ma); in free_mem_alloc()
646 struct bpf_mem_alloc *ma = container_of(work, struct bpf_mem_alloc, work); in free_mem_alloc_deferred() local
648 free_mem_alloc(ma); in free_mem_alloc_deferred()
649 kfree(ma); in free_mem_alloc_deferred()
652 static void destroy_mem_alloc(struct bpf_mem_alloc *ma, int rcu_in_progress) in destroy_mem_alloc() argument
660 free_mem_alloc_no_barrier(ma); in destroy_mem_alloc()
664 copy = kmemdup(ma, sizeof(*ma), GFP_KERNEL); in destroy_mem_alloc()
667 free_mem_alloc(ma); in destroy_mem_alloc()
672 memset(ma, 0, sizeof(*ma)); in destroy_mem_alloc()
677 void bpf_mem_alloc_destroy(struct bpf_mem_alloc *ma) in bpf_mem_alloc_destroy() argument
683 if (ma->cache) { in bpf_mem_alloc_destroy()
686 c = per_cpu_ptr(ma->cache, cpu); in bpf_mem_alloc_destroy()
696 destroy_mem_alloc(ma, rcu_in_progress); in bpf_mem_alloc_destroy()
698 if (ma->caches) { in bpf_mem_alloc_destroy()
701 cc = per_cpu_ptr(ma->caches, cpu); in bpf_mem_alloc_destroy()
713 destroy_mem_alloc(ma, rcu_in_progress); in bpf_mem_alloc_destroy()
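Teardown mirrors init: bpf_mem_alloc_destroy() drains each per-cpu cache and, as the destroy_mem_alloc() lines above show, kmemdup()s the allocator state and defers the actual free to a workqueue when RCU callbacks from the *_free_rcu() paths are still in flight, so a caller may destroy immediately after its last free. Continuing the hypothetical sketch:

static void example_exit(void)		/* hypothetical helper */
{
	/* Safe even while bpf_mem_free_rcu() grace periods are pending:
	 * the deferred path frees the per-cpu caches once they elapse.
	 */
	bpf_mem_alloc_destroy(&any_ma);
	bpf_mem_alloc_destroy(&fixed_ma);
}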
817 void notrace *bpf_mem_alloc(struct bpf_mem_alloc *ma, size_t size) in bpf_mem_alloc() argument
829 ret = unit_alloc(this_cpu_ptr(ma->caches)->cache + idx); in bpf_mem_alloc()
833 void notrace bpf_mem_free(struct bpf_mem_alloc *ma, void *ptr) in bpf_mem_free() argument
846 unit_free(this_cpu_ptr(ma->caches)->cache + idx, ptr); in bpf_mem_free()
849 void notrace bpf_mem_free_rcu(struct bpf_mem_alloc *ma, void *ptr) in bpf_mem_free_rcu() argument
862 unit_free_rcu(this_cpu_ptr(ma->caches)->cache + idx, ptr); in bpf_mem_free_rcu()
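bpf_mem_alloc()/bpf_mem_free()/bpf_mem_free_rcu() are the kmalloc()/kfree() counterparts for a size == 0 allocator: the request is rounded up to a bucket and served from the current CPU's free list, which is why these entry points are notrace and usable from any context, including NMI. A sketch continuing the example; struct foo and the helpers are hypothetical:

struct foo { u64 a, b; };		/* hypothetical payload */

static struct foo *foo_new(void)
{
	/* Rounded up internally to a bucket large enough for the
	 * object plus its hidden llist_node header.
	 */
	return bpf_mem_alloc(&any_ma, sizeof(struct foo));
}

static void foo_del(struct foo *f)
{
	bpf_mem_free(&any_ma, f);	/* unit is immediately reusable */
	/* Use bpf_mem_free_rcu(&any_ma, f) instead when RCU readers
	 * may still be dereferencing f.
	 */
}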
865 void notrace *bpf_mem_cache_alloc(struct bpf_mem_alloc *ma) in bpf_mem_cache_alloc() argument
869 ret = unit_alloc(this_cpu_ptr(ma->cache)); in bpf_mem_cache_alloc()
873 void notrace bpf_mem_cache_free(struct bpf_mem_alloc *ma, void *ptr) in bpf_mem_cache_free() argument
878 unit_free(this_cpu_ptr(ma->cache), ptr); in bpf_mem_cache_free()
881 void notrace bpf_mem_cache_free_rcu(struct bpf_mem_alloc *ma, void *ptr) in bpf_mem_cache_free_rcu() argument
886 unit_free_rcu(this_cpu_ptr(ma->cache), ptr); in bpf_mem_cache_free_rcu()
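The bpf_mem_cache_* variants are the kmem_cache counterparts for a size != 0 allocator: no size argument, because the unit size was fixed at bpf_mem_alloc_init() time. Continuing the hypothetical sketch:

static void *fixed_new(void)
{
	/* Pops one 64-byte unit (per the init sketch) from this
	 * CPU's cache.
	 */
	return bpf_mem_cache_alloc(&fixed_ma);
}

static void fixed_del(void *p)
{
	bpf_mem_cache_free(&fixed_ma, p);	/* or bpf_mem_cache_free_rcu() */
}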
911 void notrace *bpf_mem_cache_alloc_flags(struct bpf_mem_alloc *ma, gfp_t flags) in bpf_mem_cache_alloc_flags() argument
916 c = this_cpu_ptr(ma->cache); in bpf_mem_cache_alloc_flags()
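bpf_mem_cache_alloc_flags() is the one entry point that takes gfp_t: with GFP_KERNEL it appears a sleepable caller can fall back to a direct allocation when the per-cpu free list is empty, where plain bpf_mem_cache_alloc() would return NULL. Hypothetical usage, sleepable context assumed:

	void *p = bpf_mem_cache_alloc_flags(&fixed_ma, GFP_KERNEL);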