/openbmc/linux/mm/
    page_alloc.c
        1560  unsigned int alloc_flags) in prep_new_page() argument
        1573  if (alloc_flags & ALLOC_NO_WATERMARKS) in prep_new_page()
        1787  unsigned int alloc_flags, int start_type, bool whole_block) in steal_suitable_fallback() argument
        1813  if (boost_watermark(zone) && (alloc_flags & ALLOC_KSWAPD)) in steal_suitable_fallback()
        2024  unsigned int alloc_flags) in __rmqueue_fallback() argument
        2038  if (order < pageblock_order && alloc_flags & ALLOC_NOFRAGMENT) in __rmqueue_fallback()
        2089  steal_suitable_fallback(zone, page, alloc_flags, start_migratetype, in __rmqueue_fallback()
        2105  unsigned int alloc_flags) in __rmqueue() argument
        2115  if (alloc_flags & ALLOC_CMA && in __rmqueue()
        2126  if (alloc_flags & ALLOC_CMA) in __rmqueue()
        [all …]
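These page_alloc.c hits show the allocator's internal alloc_flags bitmask (the ALLOC_* bits from mm/internal.h, distinct from gfp_t) being tested at individual call sites to gate behaviour such as watermark bypass, CMA fallback and fragmentation avoidance. Below is a minimal illustrative sketch of that gating pattern; the flag values are hypothetical placeholders, not the kernel's definitions.

/* Illustrative sketch only: the flag values below are placeholders, not the
 * ALLOC_* definitions from mm/internal.h. It shows how a per-request
 * alloc_flags bitmask gates allocator decisions, as in __rmqueue() and
 * __rmqueue_fallback() above. */
#include <stdbool.h>

#define ALLOC_NOFRAGMENT 0x1    /* hypothetical value */
#define ALLOC_CMA        0x2    /* hypothetical value */

static bool may_use_cma(unsigned int alloc_flags)
{
        /* Only fall back to CMA pageblocks when the caller allowed it. */
        return alloc_flags & ALLOC_CMA;
}

static bool must_avoid_fragmenting(unsigned int alloc_flags,
                                   unsigned int order,
                                   unsigned int pageblock_order)
{
        /* Refuse a small fallback that would fragment a pageblock when the
         * caller passed ALLOC_NOFRAGMENT. */
        return order < pageblock_order && (alloc_flags & ALLOC_NOFRAGMENT);
}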
    compaction.c
        2351  int alloc_flags) in compaction_zonelist_suitable() argument
        2409  cc->alloc_flags & ALLOC_WMARK_MASK); in compact_zone()
        2411  cc->highest_zoneidx, cc->alloc_flags)) in compact_zone()
        2620  unsigned int alloc_flags, int highest_zoneidx, in compact_zone_order() argument
        2631  .alloc_flags = alloc_flags, in compact_zone_order()
        2684  unsigned int alloc_flags, const struct alloc_context *ac, in try_to_compact_pages() argument
        2708  alloc_flags, ac->highest_zoneidx, capture); in try_to_compact_pages()
    internal.h
        566  const unsigned int alloc_flags; /* alloc flags of a direct compactor */ member
/openbmc/linux/drivers/base/regmap/
    regcache-maple.c
        77   map->alloc_flags); in regcache_maple_write()
        95   ret = mas_store_gfp(&mas, entry, map->alloc_flags); in regcache_maple_write()
        138  map->alloc_flags); in regcache_maple_drop()
        152  map->alloc_flags); in regcache_maple_drop()
        166  ret = mas_store_gfp(&mas, lower, map->alloc_flags); in regcache_maple_drop()
        174  ret = mas_store_gfp(&mas, upper, map->alloc_flags); in regcache_maple_drop()
        208  buf = kmalloc(val_bytes * (max - min), map->alloc_flags); in regcache_maple_sync_block()
        324  entry = kcalloc(last - first + 1, sizeof(unsigned long), map->alloc_flags); in regcache_maple_insert_block()
        335  ret = mas_store_gfp(&mas, entry, map->alloc_flags); in regcache_maple_insert_block()
    regcache-rbtree.c
        280  map->alloc_flags); in regcache_rbtree_insert_to_block()
        289  map->alloc_flags); in regcache_rbtree_insert_to_block()
        323  rbnode = kzalloc(sizeof(*rbnode), map->alloc_flags); in regcache_rbtree_node_alloc()
        349  map->alloc_flags); in regcache_rbtree_node_alloc()
        355  map->alloc_flags); in regcache_rbtree_node_alloc()
    internal.h
        66  gfp_t alloc_flags; member
    regmap.c
        767   map->alloc_flags = GFP_ATOMIC; in __regmap_init()
        769   map->alloc_flags = GFP_KERNEL; in __regmap_init()
        2361  wval = kmemdup(val, val_count * val_bytes, map->alloc_flags); in regmap_bulk_write()
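The regmap hits above show the pattern the regcache backends rely on: __regmap_init() decides the allocation context once (GFP_ATOMIC when the map uses a spinlock and must not sleep, GFP_KERNEL otherwise), stores it in map->alloc_flags, and regcache-maple.c, regcache-rbtree.c and regmap_bulk_write() reuse it for every later allocation. A rough sketch of that decide-once, reuse-everywhere idiom follows; the struct and function names are illustrative, not regmap internals.

/* Sketch only: the mycache names are hypothetical, not regmap API. */
#include <linux/slab.h>

struct mycache {
        gfp_t alloc_flags;              /* decided once at init time */
};

static void mycache_init(struct mycache *c, bool atomic_context)
{
        /* A caller that holds a spinlock (regmap's fast_io case) must not
         * sleep, so every later cache allocation has to be GFP_ATOMIC. */
        c->alloc_flags = atomic_context ? GFP_ATOMIC : GFP_KERNEL;
}

static void *mycache_dup(struct mycache *c, const void *val, size_t len)
{
        /* Reuse the stored flags instead of hard-coding GFP_KERNEL. */
        return kmemdup(val, len, c->alloc_flags);
}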
/openbmc/linux/lib/
    stackdepot.c
        360  gfp_t alloc_flags, bool can_alloc) in __stack_depot_save() argument
        407  alloc_flags &= ~GFP_ZONEMASK; in __stack_depot_save()
        408  alloc_flags &= (GFP_ATOMIC | GFP_KERNEL | __GFP_NOLOCKDEP); in __stack_depot_save()
        409  alloc_flags |= __GFP_NOWARN; in __stack_depot_save()
        410  page = alloc_pages(alloc_flags, DEPOT_POOL_ORDER); in __stack_depot_save()
        454  gfp_t alloc_flags) in stack_depot_save() argument
        456  return __stack_depot_save(entries, nr_entries, alloc_flags, true); in stack_depot_save()
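The __stack_depot_save() hits show a defensive treatment of a caller-supplied gfp mask before a pool allocation: zone modifiers are stripped, the mask is reduced to the reclaim-context bits (GFP_ATOMIC | GFP_KERNEL, plus __GFP_NOLOCKDEP), and __GFP_NOWARN is added so a failed allocation stays quiet. Below is a minimal sketch of that sanitization, using order 0 in place of the real DEPOT_POOL_ORDER.

#include <linux/gfp.h>

/* Sketch: clamp an untrusted gfp mask for an internal bookkeeping
 * allocation, mirroring __stack_depot_save(). */
static struct page *alloc_pool_page(gfp_t caller_flags)
{
        gfp_t alloc_flags = caller_flags;

        alloc_flags &= ~GFP_ZONEMASK;                   /* drop zone modifiers */
        alloc_flags &= (GFP_ATOMIC | GFP_KERNEL);       /* keep only context bits */
        alloc_flags |= __GFP_NOWARN;                    /* fail silently */

        return alloc_pages(alloc_flags, 0);             /* DEPOT_POOL_ORDER upstream */
}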
/openbmc/linux/fs/xfs/libxfs/
    xfs_alloc.c
        1540  uint32_t alloc_flags) in xfs_alloc_ag_vextent_near() argument
        1560  alloc_flags |= XFS_ALLOC_FLAG_TRYFLUSH; in xfs_alloc_ag_vextent_near()
        1625  acur.busy_gen, alloc_flags); in xfs_alloc_ag_vextent_near()
        1629  alloc_flags &= ~XFS_ALLOC_FLAG_TRYFLUSH; in xfs_alloc_ag_vextent_near()
        1655  uint32_t alloc_flags) in xfs_alloc_ag_vextent_size() argument
        1670  alloc_flags |= XFS_ALLOC_FLAG_TRYFLUSH; in xfs_alloc_ag_vextent_size()
        1740  busy_gen, alloc_flags); in xfs_alloc_ag_vextent_size()
        1744  alloc_flags &= ~XFS_ALLOC_FLAG_TRYFLUSH; in xfs_alloc_ag_vextent_size()
        1835  busy_gen, alloc_flags); in xfs_alloc_ag_vextent_size()
        1839  alloc_flags &= ~XFS_ALLOC_FLAG_TRYFLUSH; in xfs_alloc_ag_vextent_size()
        [all …]
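These xfs_alloc.c hits (together with the xfs_extent_busy.c consumer further down) show a two-phase retry protocol: XFS_ALLOC_FLAG_TRYFLUSH is set before the first attempt so the busy-extent flush stays non-blocking, and the bit is cleared before retrying so a subsequent flush may block and wait. A hedged sketch of that set-then-clear loop follows; the helper functions and the flag value are hypothetical stand-ins for the XFS internals.

/* Sketch only: try_allocate() and flush_busy_extents() are hypothetical
 * stand-ins; the flag handling mirrors XFS_ALLOC_FLAG_TRYFLUSH above. */
#include <stdbool.h>
#include <stdint.h>

#define ALLOC_FLAG_TRYFLUSH (1u << 0)   /* hypothetical bit value */

extern bool try_allocate(uint32_t alloc_flags);
extern void flush_busy_extents(bool non_blocking);

static bool allocate_with_busy_retry(uint32_t alloc_flags, int max_tries)
{
        alloc_flags |= ALLOC_FLAG_TRYFLUSH;     /* first flush is best effort */

        while (max_tries--) {
                if (try_allocate(alloc_flags))
                        return true;
                flush_busy_extents(alloc_flags & ALLOC_FLAG_TRYFLUSH);
                /* After one opportunistic pass, allow a blocking flush. */
                alloc_flags &= ~ALLOC_FLAG_TRYFLUSH;
        }
        return false;
}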
    xfs_alloc.h
        199  int xfs_alloc_fix_freelist(struct xfs_alloc_arg *args, uint32_t alloc_flags);
/openbmc/linux/drivers/gpu/drm/amd/amdgpu/
    amdgpu_amdkfd_gpuvm.c
        287  u32 alloc_flags = bo->kfd_bo->alloc_flags; in amdgpu_amdkfd_release_notify() local
        290  amdgpu_amdkfd_unreserve_mem_limit(adev, size, alloc_flags, in amdgpu_amdkfd_release_notify()
        317  if (mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_USERPTR) in create_dmamap_sg_bo()
        473  if (mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_WRITABLE) in get_pte_flags()
        475  if (mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_EXECUTABLE) in get_pte_flags()
        518  mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_WRITABLE ? in kfd_mem_dmamap_userptr()
        620  mmio = (mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_MMIO_REMAP); in kfd_mem_dmamap_sg_bo()
        626  dir = mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_WRITABLE ? in kfd_mem_dmamap_sg_bo()
        685  mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_WRITABLE ? in kfd_mem_dmaunmap_userptr()
        747  dir = mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_WRITABLE ? in kfd_mem_dmaunmap_sg_bo()
        [all …]
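In amdgpu_amdkfd_gpuvm.c the uapi allocation flags recorded in mem->alloc_flags keep steering later decisions, for example the PTE permission bits in get_pte_flags() and the DMA direction used when mapping or unmapping userptr and SG BOs. A small sketch of the direction choice follows, reusing the KFD_IOC_ALLOC_MEM_FLAGS_* bits from include/uapi/linux/kfd_ioctl.h; the helper name itself is made up.

#include <linux/types.h>
#include <linux/dma-direction.h>
#include <uapi/linux/kfd_ioctl.h>

/* Sketch: derive a DMA direction from the KFD allocation flags, as the
 * kfd_mem_dmamap_userptr() and kfd_mem_dmaunmap_userptr() hits above do.
 * The helper name is illustrative, not a real driver function. */
static enum dma_data_direction kfd_alloc_flags_to_dir(u32 alloc_flags)
{
        /* Writable BOs need bidirectional DMA; read-only ones only need
         * transfers toward the device. */
        return (alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_WRITABLE) ?
                DMA_BIDIRECTIONAL : DMA_TO_DEVICE;
}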
    amdgpu_amdkfd.h
        79  uint32_t alloc_flags; member
/openbmc/linux/include/linux/
    compaction.h
        88  unsigned int order, unsigned int alloc_flags,
        99  int alloc_flags);
    mmzone.h
        1425  int highest_zoneidx, unsigned int alloc_flags,
        1429  unsigned int alloc_flags);
/openbmc/linux/drivers/gpu/drm/amd/amdkfd/
    kfd_chardev.c
        1938  bo_bucket->alloc_flags = (uint32_t)kgd_mem->alloc_flags; in criu_checkpoint_bos()
        1941  if (bo_bucket->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_USERPTR) { in criu_checkpoint_bos()
        1949  if (bo_bucket->alloc_flags in criu_checkpoint_bos()
        1952  bo_bucket->alloc_flags & in criu_checkpoint_bos()
        1961  if (bo_bucket->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_DOORBELL) in criu_checkpoint_bos()
        1964  else if (bo_bucket->alloc_flags & in criu_checkpoint_bos()
        1982  bo_bucket->alloc_flags, in criu_checkpoint_bos()
        2006  if (bo_buckets[bo_index].alloc_flags in criu_checkpoint_bos()
        2301  if (bo_bucket->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_DOORBELL) { in criu_restore_memory_of_gpu()
        2309  } else if (bo_bucket->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_MMIO_REMAP) { in criu_restore_memory_of_gpu()
        [all …]
/openbmc/linux/drivers/md/
    dm-zoned-reclaim.c
        286  int alloc_flags = DMZ_ALLOC_SEQ; in dmz_reclaim_rnd_data() local
        292  alloc_flags | DMZ_ALLOC_RECLAIM); in dmz_reclaim_rnd_data()
        293  if (!szone && alloc_flags == DMZ_ALLOC_SEQ && dmz_nr_cache_zones(zmd)) { in dmz_reclaim_rnd_data()
        294  alloc_flags = DMZ_ALLOC_RND; in dmz_reclaim_rnd_data()
    dm-zoned-metadata.c
        2053  int alloc_flags = zmd->nr_cache ? DMZ_ALLOC_CACHE : DMZ_ALLOC_RND; in dmz_get_chunk_mapping() local
        2068  dzone = dmz_alloc_zone(zmd, 0, alloc_flags); in dmz_get_chunk_mapping()
        2165  int alloc_flags = zmd->nr_cache ? DMZ_ALLOC_CACHE : DMZ_ALLOC_RND; in dmz_get_chunk_buffer() local
        2174  bzone = dmz_alloc_zone(zmd, 0, alloc_flags); in dmz_get_chunk_buffer()
/openbmc/linux/fs/xfs/
    xfs_extent_busy.c
        609  uint32_t alloc_flags) in xfs_extent_busy_flush() argument
        620  if (alloc_flags & XFS_ALLOC_FLAG_TRYFLUSH) in xfs_extent_busy_flush()
        626  if (alloc_flags & XFS_ALLOC_FLAG_FREEING) in xfs_extent_busy_flush()
    xfs_extent_busy.h
        74  unsigned busy_gen, uint32_t alloc_flags);
/openbmc/linux/fs/btrfs/
    block-group.c
        2907  u64 alloc_flags; in btrfs_inc_block_group_ro() local
        2955  alloc_flags = btrfs_get_alloc_profile(fs_info, cache->flags); in btrfs_inc_block_group_ro()
        2956  if (alloc_flags != cache->flags) { in btrfs_inc_block_group_ro()
        2957  ret = btrfs_chunk_alloc(trans, alloc_flags, in btrfs_inc_block_group_ro()
        2985  alloc_flags = btrfs_get_alloc_profile(fs_info, cache->space_info->flags); in btrfs_inc_block_group_ro()
        2986  ret = btrfs_chunk_alloc(trans, alloc_flags, CHUNK_ALLOC_FORCE); in btrfs_inc_block_group_ro()
        3002  alloc_flags = btrfs_get_alloc_profile(fs_info, cache->flags); in btrfs_inc_block_group_ro()
        3004  check_system_chunk(trans, alloc_flags); in btrfs_inc_block_group_ro()
        3850  u64 alloc_flags = btrfs_get_alloc_profile(trans->fs_info, type); in btrfs_force_chunk_alloc() local
        3852  return btrfs_chunk_alloc(trans, alloc_flags, CHUNK_ALLOC_FORCE); in btrfs_force_chunk_alloc()
/openbmc/linux/drivers/iommu/
    dma-iommu.c
        807  gfp_t alloc_flags = gfp; in __iommu_dma_alloc_pages() local
        811  alloc_flags |= __GFP_NORETRY; in __iommu_dma_alloc_pages()
        812  page = alloc_pages_node(nid, alloc_flags, order); in __iommu_dma_alloc_pages()
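The __iommu_dma_alloc_pages() hits show the usual opportunistic high-order strategy: __GFP_NORETRY is ORed in so an expensive high-order attempt fails fast and the code can drop to a smaller order instead of triggering heavy reclaim. A minimal sketch of that fallback loop follows; the function name is illustrative.

#include <linux/gfp.h>

/* Sketch: try the largest order first with __GFP_NORETRY so failure is
 * cheap, then fall back to smaller orders, as __iommu_dma_alloc_pages()
 * does. The function name is illustrative. */
static struct page *alloc_best_effort(int nid, gfp_t gfp, unsigned int max_order)
{
        struct page *page;
        unsigned int order;

        for (order = max_order; ; order--) {
                gfp_t alloc_flags = gfp;

                if (order)
                        alloc_flags |= __GFP_NORETRY;   /* high order: fail fast */

                page = alloc_pages_node(nid, alloc_flags, order);
                if (page || !order)
                        return page;
        }
}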
/openbmc/linux/drivers/gpu/drm/i915/gem/
    i915_gem_object.h
        50  unsigned alloc_flags);
/openbmc/linux/include/uapi/linux/
    kfd_ioctl.h
        633  __u32 alloc_flags; member
/openbmc/linux/drivers/net/ethernet/mellanox/mlx5/core/
    cmd.c
        118  gfp_t alloc_flags = cbk ? GFP_ATOMIC : GFP_KERNEL; in cmd_alloc_ent() local
        121  ent = kzalloc(sizeof(*ent), alloc_flags); in cmd_alloc_ent()
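cmd_alloc_ent() picks the allocation context from how the command is issued: commands with a completion callback may be submitted from a context that cannot sleep, so they get GFP_ATOMIC, while synchronous callers use GFP_KERNEL. The same conditional-gfp idiom in isolation, with hypothetical names:

#include <linux/slab.h>

/* Sketch only: my_cmd_ent and my_alloc_ent are hypothetical names. */
struct my_cmd_ent {
        void (*callback)(void *ctx);
        void *ctx;
};

static struct my_cmd_ent *my_alloc_ent(void (*cbk)(void *ctx), void *ctx)
{
        /* Asynchronous (callback-driven) submissions may run in atomic
         * context, so they must not use a sleeping allocation. */
        gfp_t alloc_flags = cbk ? GFP_ATOMIC : GFP_KERNEL;
        struct my_cmd_ent *ent = kzalloc(sizeof(*ent), alloc_flags);

        if (!ent)
                return NULL;

        ent->callback = cbk;
        ent->ctx = ctx;
        return ent;
}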
/openbmc/linux/arch/s390/kvm/
    kvm-s390.c
        3298  gfp_t alloc_flags = GFP_KERNEL_ACCOUNT; in kvm_arch_init_vm() local
        3321  alloc_flags |= GFP_DMA; in kvm_arch_init_vm()
        3324  kvm->arch.sca = (struct bsca_block *) get_zeroed_page(alloc_flags); in kvm_arch_init_vm()
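kvm_arch_init_vm() starts from GFP_KERNEL_ACCOUNT and widens the mask with GFP_DMA only when the basic SCA has to live in low, DMA-addressable memory, then hands the combined flags to get_zeroed_page(). A minimal sketch of that conditional widening follows; the condition name is a hypothetical stand-in for the real capability check.

#include <linux/types.h>
#include <linux/gfp.h>

/* Sketch: widen the gfp mask conditionally before a single allocation,
 * as kvm_arch_init_vm() does for the SCA block. need_low_memory is a
 * hypothetical stand-in for the real hardware-capability check. */
static unsigned long alloc_control_block(bool need_low_memory)
{
        gfp_t alloc_flags = GFP_KERNEL_ACCOUNT;

        if (need_low_memory)
                alloc_flags |= GFP_DMA; /* must sit below the DMA limit */

        return get_zeroed_page(alloc_flags);
}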