Searched refs: radix_tree_preload (Results 1 – 14 of 14) sorted by relevance
28 struct radix_tree_preload {  struct
34 DECLARE_PER_CPU(struct radix_tree_preload, radix_tree_preloads);  argument
239 int radix_tree_preload(gfp_t gfp_mask);
62 DEFINE_PER_CPU(struct radix_tree_preload, radix_tree_preloads) = {
246 struct radix_tree_preload *rtp; in radix_tree_node_alloc()
324 struct radix_tree_preload *rtp; in __radix_tree_preload()
365 int radix_tree_preload(gfp_t gfp_mask) in radix_tree_preload()  function
371 EXPORT_SYMBOL(radix_tree_preload);
1580 struct radix_tree_preload *rtp; in radix_tree_cpu_dead()
59 extern struct radix_tree_preload radix_tree_preloads;
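The hits above are the core pieces: the per-CPU struct radix_tree_preload pool, its DECLARE_PER_CPU/DEFINE_PER_CPU instances, and the exported radix_tree_preload() entry point. The caller hits below all follow the same preload-then-insert convention. A minimal sketch of that convention, assuming a tree protected by a spinlock; my_tree, my_lock, my_insert(), my_index and my_item are placeholder names, not identifiers from these results:

#include <linux/radix-tree.h>
#include <linux/spinlock.h>
#include <linux/gfp.h>

/* Placeholder tree and lock for illustration only. */
static RADIX_TREE(my_tree, GFP_NOFS);   /* gfp mask for on-demand node allocation */
static DEFINE_SPINLOCK(my_lock);

static int my_insert(unsigned long my_index, void *my_item)
{
        int ret;

        /* Preallocate tree nodes while sleeping is still allowed. */
        ret = radix_tree_preload(GFP_NOFS);
        if (ret)
                return ret;     /* -ENOMEM; preemption was not disabled */

        spin_lock(&my_lock);
        /* Cannot fail with -ENOMEM here thanks to the preload. */
        ret = radix_tree_insert(&my_tree, my_index, my_item);
        spin_unlock(&my_lock);

        /* Drop the preload (re-enables preemption). */
        radix_tree_preload_end();

        return ret;     /* 0 or -EEXIST */
}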
430 if (radix_tree_preload(GFP_NOFS)) in xfs_mru_cache_insert()
665 if (radix_tree_preload(GFP_NOFS)) { in xfs_iget_cache_miss()
607 rc = radix_tree_preload(GFP_KERNEL_ACCOUNT); in __gmap_link()
1249 rc = radix_tree_preload(GFP_KERNEL_ACCOUNT); in gmap_protect_rmap()
2161 rc = radix_tree_preload(GFP_KERNEL_ACCOUNT); in gmap_shadow_page()
894 if (radix_tree_preload(GFP_KERNEL)) { in blkg_conf_prep()
1472 preloaded = !radix_tree_preload(GFP_KERNEL); in blkcg_init_disk()
392 error = radix_tree_preload(GFP_NOFS); in xfs_initialize_perag()
137 ret = radix_tree_preload(GFP_NOFS); in btrfs_get_or_create_delayed_node()
3583 ret = radix_tree_preload(GFP_NOFS); in alloc_test_extent_buffer()
3779 ret = radix_tree_preload(GFP_NOFS); in alloc_extent_buffer()
1192 ret = radix_tree_preload(GFP_NOFS); in btrfs_insert_fs_root()
518 radix_tree_preload(GFP_NOFS | __GFP_NOFAIL); in __add_ino_entry()
2324 radix_tree_preload(GFP_NOFS | __GFP_NOFAIL); in add_free_nid()
1035 if (radix_tree_preload(GFP_NOIO)) in null_insert_page()
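The gfp mask varies with the caller's context (GFP_NOFS, GFP_KERNEL, GFP_KERNEL_ACCOUNT, GFP_NOIO above); the __add_ino_entry() and add_free_nid() hits additionally pass __GFP_NOFAIL and ignore the return value, since the preload then cannot fail. A minimal sketch of that no-fail variant, reusing the placeholder my_tree and my_lock from the sketch above:

static void my_insert_nofail(unsigned long my_index, void *my_item)
{
        /*
         * With __GFP_NOFAIL the preload cannot return -ENOMEM, so the
         * return value is ignored; preemption is disabled on return.
         */
        radix_tree_preload(GFP_NOFS | __GFP_NOFAIL);

        spin_lock(&my_lock);
        /* May still return -EEXIST for a duplicate index; ignored here. */
        radix_tree_insert(&my_tree, my_index, my_item);
        spin_unlock(&my_lock);

        radix_tree_preload_end();
}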