Lines Matching refs:pa (all hits below are in fs/ext4/mballoc.c)

763 struct ext4_prealloc_space *pa; in __mb_check_buddy() local
764 pa = list_entry(cur, struct ext4_prealloc_space, pa_group_list); in __mb_check_buddy()
765 ext4_get_group_no_and_offset(sb, pa->pa_pstart, &groupnr, &k); in __mb_check_buddy()
767 for (i = 0; i < pa->pa_len; i++) in __mb_check_buddy()
3747 struct ext4_prealloc_space *pa; in ext4_mb_cleanup_pa() local
3752 pa = list_entry(cur, struct ext4_prealloc_space, pa_group_list); in ext4_mb_cleanup_pa()
3753 list_del(&pa->pa_group_list); in ext4_mb_cleanup_pa()
3755 kmem_cache_free(ext4_pspace_cachep, pa); in ext4_mb_cleanup_pa()
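
The walks above in __mb_check_buddy() and ext4_mb_cleanup_pa() recover each preallocation space from its embedded pa_group_list node via list_entry(), which is the kernel's container_of(). Below is a stand-alone, user-space illustration of that recovery step; the struct and field names (pa_model, group_list) are hypothetical stand-ins, not ext4's types.

/* Minimal container_of()/list_entry() illustration (user space, hypothetical names). */
#include <stddef.h>
#include <stdio.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct list_node { struct list_node *next; };

struct pa_model {
	int len;
	struct list_node group_list;	/* embedded linkage, like pa_group_list */
};

int main(void)
{
	struct pa_model pa = { .len = 8 };
	struct list_node *cur = &pa.group_list;
	/* same recovery as list_entry(cur, struct ext4_prealloc_space, pa_group_list) */
	struct pa_model *found = container_of(cur, struct pa_model, group_list);

	printf("pa_len = %d\n", found->len);
	return 0;
}
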
4644 struct ext4_prealloc_space *pa = ac->ac_pa; in ext4_discard_allocated_blocks() local
4648 if (pa == NULL) { in ext4_discard_allocated_blocks()
4667 if (pa->pa_type == MB_INODE_PA) { in ext4_discard_allocated_blocks()
4668 spin_lock(&pa->pa_lock); in ext4_discard_allocated_blocks()
4669 pa->pa_free += ac->ac_b_ex.fe_len; in ext4_discard_allocated_blocks()
4670 spin_unlock(&pa->pa_lock); in ext4_discard_allocated_blocks()
4678 struct ext4_prealloc_space *pa) in ext4_mb_use_inode_pa() argument
4686 start = pa->pa_pstart + (ac->ac_o_ex.fe_logical - pa->pa_lstart); in ext4_mb_use_inode_pa()
4687 end = min(pa->pa_pstart + EXT4_C2B(sbi, pa->pa_len), in ext4_mb_use_inode_pa()
4694 ac->ac_pa = pa; in ext4_mb_use_inode_pa()
4696 BUG_ON(start < pa->pa_pstart); in ext4_mb_use_inode_pa()
4697 BUG_ON(end > pa->pa_pstart + EXT4_C2B(sbi, pa->pa_len)); in ext4_mb_use_inode_pa()
4698 BUG_ON(pa->pa_free < len); in ext4_mb_use_inode_pa()
4700 pa->pa_free -= len; in ext4_mb_use_inode_pa()
4702 mb_debug(ac->ac_sb, "use %llu/%d from inode pa %p\n", start, len, pa); in ext4_mb_use_inode_pa()
4709 struct ext4_prealloc_space *pa) in ext4_mb_use_group_pa() argument
4713 ext4_get_group_no_and_offset(ac->ac_sb, pa->pa_pstart, in ext4_mb_use_group_pa()
4718 ac->ac_pa = pa; in ext4_mb_use_group_pa()
4727 pa->pa_lstart, len, pa); in ext4_mb_use_group_pa()
4738 struct ext4_prealloc_space *pa, in ext4_mb_check_group_pa() argument
4744 atomic_inc(&pa->pa_count); in ext4_mb_check_group_pa()
4745 return pa; in ext4_mb_check_group_pa()
4748 new_distance = abs(goal_block - pa->pa_pstart); in ext4_mb_check_group_pa()
4755 atomic_inc(&pa->pa_count); in ext4_mb_check_group_pa()
4756 return pa; in ext4_mb_check_group_pa()
4764 struct ext4_prealloc_space *pa) in ext4_mb_pa_goal_check() argument
4778 start = pa->pa_pstart + in ext4_mb_pa_goal_check()
4779 (ac->ac_g_ex.fe_logical - pa->pa_lstart); in ext4_mb_pa_goal_check()
4783 if (ac->ac_g_ex.fe_len > pa->pa_len - in ext4_mb_pa_goal_check()
4784 EXT4_B2C(sbi, ac->ac_g_ex.fe_logical - pa->pa_lstart)) in ext4_mb_pa_goal_check()
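
ext4_mb_use_inode_pa() above translates the request's logical offset within the preallocation into a physical start (pa_pstart plus the logical delta from pa_lstart) and clamps the end to both the PA's extent and the requested length; ext4_mb_pa_goal_check() applies the same arithmetic to the goal extent. A minimal user-space sketch of that clamping follows, with hypothetical names and the cluster-to-block conversion (EXT4_C2B/EXT4_B2C) folded away so everything is a plain block count.

/* Simplified model of the window arithmetic in ext4_mb_use_inode_pa(). */
#include <stdint.h>
#include <stdio.h>

struct pa_model {
	uint64_t pstart;	/* physical start of the preallocated range */
	uint64_t lstart;	/* logical start it maps to */
	uint64_t len;		/* length of the range, in blocks */
};

/* Return how many blocks of the request [logical, logical+want) the PA covers,
 * and where that coverage starts physically. */
static uint64_t pa_window(const struct pa_model *pa, uint64_t logical,
			  uint64_t want, uint64_t *phys_start)
{
	uint64_t start = pa->pstart + (logical - pa->lstart);
	uint64_t end = pa->pstart + pa->len;

	if (start + want < end)
		end = start + want;
	*phys_start = start;
	return end - start;
}

int main(void)
{
	struct pa_model pa = { .pstart = 1000, .lstart = 64, .len = 32 };
	uint64_t phys, got;

	/* request 16 blocks at logical block 80, i.e. 16 blocks into the PA */
	got = pa_window(&pa, 80, 16, &phys);
	printf("use %llu blocks starting at physical %llu\n",
	       (unsigned long long)got, (unsigned long long)phys);
	return 0;
}

For the sample PA covering logical blocks 64..95 at physical 1000..1031, a 16-block request at logical 80 resolves to 16 blocks starting at physical 1016.
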
5003 struct ext4_prealloc_space *pa; in ext4_mb_generate_from_pa() local
5022 pa = list_entry(cur, struct ext4_prealloc_space, pa_group_list); in ext4_mb_generate_from_pa()
5023 spin_lock(&pa->pa_lock); in ext4_mb_generate_from_pa()
5024 ext4_get_group_no_and_offset(sb, pa->pa_pstart, in ext4_mb_generate_from_pa()
5026 len = pa->pa_len; in ext4_mb_generate_from_pa()
5027 spin_unlock(&pa->pa_lock); in ext4_mb_generate_from_pa()
5038 struct ext4_prealloc_space *pa) in ext4_mb_mark_pa_deleted() argument
5042 if (pa->pa_deleted) { in ext4_mb_mark_pa_deleted()
5044 pa->pa_type, pa->pa_pstart, pa->pa_lstart, in ext4_mb_mark_pa_deleted()
5045 pa->pa_len); in ext4_mb_mark_pa_deleted()
5049 pa->pa_deleted = 1; in ext4_mb_mark_pa_deleted()
5051 if (pa->pa_type == MB_INODE_PA) { in ext4_mb_mark_pa_deleted()
5052 ei = EXT4_I(pa->pa_inode); in ext4_mb_mark_pa_deleted()
5057 static inline void ext4_mb_pa_free(struct ext4_prealloc_space *pa) in ext4_mb_pa_free() argument
5059 BUG_ON(!pa); in ext4_mb_pa_free()
5060 BUG_ON(atomic_read(&pa->pa_count)); in ext4_mb_pa_free()
5061 BUG_ON(pa->pa_deleted == 0); in ext4_mb_pa_free()
5062 kmem_cache_free(ext4_pspace_cachep, pa); in ext4_mb_pa_free()
5067 struct ext4_prealloc_space *pa; in ext4_mb_pa_callback() local
5069 pa = container_of(head, struct ext4_prealloc_space, u.pa_rcu); in ext4_mb_pa_callback()
5070 ext4_mb_pa_free(pa); in ext4_mb_pa_callback()
5078 struct super_block *sb, struct ext4_prealloc_space *pa) in ext4_mb_put_pa() argument
5085 spin_lock(&pa->pa_lock); in ext4_mb_put_pa()
5086 if (!atomic_dec_and_test(&pa->pa_count) || pa->pa_free != 0) { in ext4_mb_put_pa()
5087 spin_unlock(&pa->pa_lock); in ext4_mb_put_pa()
5091 if (pa->pa_deleted == 1) { in ext4_mb_put_pa()
5092 spin_unlock(&pa->pa_lock); in ext4_mb_put_pa()
5096 ext4_mb_mark_pa_deleted(sb, pa); in ext4_mb_put_pa()
5097 spin_unlock(&pa->pa_lock); in ext4_mb_put_pa()
5099 grp_blk = pa->pa_pstart; in ext4_mb_put_pa()
5104 if (pa->pa_type == MB_GROUP_PA) in ext4_mb_put_pa()
5124 list_del(&pa->pa_group_list); in ext4_mb_put_pa()
5127 if (pa->pa_type == MB_INODE_PA) { in ext4_mb_put_pa()
5128 write_lock(pa->pa_node_lock.inode_lock); in ext4_mb_put_pa()
5129 rb_erase(&pa->pa_node.inode_node, &ei->i_prealloc_node); in ext4_mb_put_pa()
5130 write_unlock(pa->pa_node_lock.inode_lock); in ext4_mb_put_pa()
5131 ext4_mb_pa_free(pa); in ext4_mb_put_pa()
5133 spin_lock(pa->pa_node_lock.lg_lock); in ext4_mb_put_pa()
5134 list_del_rcu(&pa->pa_node.lg_list); in ext4_mb_put_pa()
5135 spin_unlock(pa->pa_node_lock.lg_lock); in ext4_mb_put_pa()
5136 call_rcu(&(pa)->u.pa_rcu, ext4_mb_pa_callback); in ext4_mb_put_pa()
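
ext4_mb_put_pa() above drops a reference and only tears the PA down when the count reaches zero, the PA is fully consumed (pa_free == 0), and nobody has already marked it deleted; an inode PA is then unlinked from the inode rbtree and freed directly, while a group PA is unlinked from the locality-group list and freed through call_rcu(). Below is a heavily simplified sketch of that refcount-gated teardown, using C11 atomics and a pthread mutex in place of the kernel's atomic_t, spinlock, group-lock ordering and RCU deferral; all names are hypothetical.

/* Simplified "last put tears it down" pattern modeled on ext4_mb_put_pa(). */
#include <stdatomic.h>
#include <stdbool.h>
#include <stdlib.h>
#include <pthread.h>

struct pa_model {
	atomic_int count;	/* users of this preallocation */
	int free_blocks;	/* blocks still unclaimed in it */
	bool deleted;
	pthread_mutex_t lock;
};

static void pa_put(struct pa_model *pa)
{
	pthread_mutex_lock(&pa->lock);
	/* Keep the PA if someone still holds it or it still has space. */
	if (atomic_fetch_sub(&pa->count, 1) != 1 || pa->free_blocks != 0) {
		pthread_mutex_unlock(&pa->lock);
		return;
	}
	/* Someone else may have already torn it down. */
	if (pa->deleted) {
		pthread_mutex_unlock(&pa->lock);
		return;
	}
	pa->deleted = true;
	pthread_mutex_unlock(&pa->lock);

	/* In the kernel this point unlinks the PA from the group list and the
	 * inode rbtree / lg list, then frees it (directly or via call_rcu). */
	free(pa);
}

int main(void)
{
	struct pa_model *pa = malloc(sizeof(*pa));

	atomic_init(&pa->count, 1);
	pa->free_blocks = 0;	/* fully consumed */
	pa->deleted = false;
	pthread_mutex_init(&pa->lock, NULL);

	pa_put(pa);		/* last reference and no space left: freed */
	return 0;
}
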
5173 struct ext4_prealloc_space *pa; in ext4_mb_new_inode_pa() local
5183 pa = ac->ac_pa; in ext4_mb_new_inode_pa()
5233 pa->pa_lstart = ac->ac_b_ex.fe_logical; in ext4_mb_new_inode_pa()
5234 pa->pa_pstart = ext4_grp_offs_to_block(sb, &ac->ac_b_ex); in ext4_mb_new_inode_pa()
5235 pa->pa_len = ac->ac_b_ex.fe_len; in ext4_mb_new_inode_pa()
5236 pa->pa_free = pa->pa_len; in ext4_mb_new_inode_pa()
5237 spin_lock_init(&pa->pa_lock); in ext4_mb_new_inode_pa()
5238 INIT_LIST_HEAD(&pa->pa_group_list); in ext4_mb_new_inode_pa()
5239 pa->pa_deleted = 0; in ext4_mb_new_inode_pa()
5240 pa->pa_type = MB_INODE_PA; in ext4_mb_new_inode_pa()
5242 mb_debug(sb, "new inode pa %p: %llu/%d for %u\n", pa, pa->pa_pstart, in ext4_mb_new_inode_pa()
5243 pa->pa_len, pa->pa_lstart); in ext4_mb_new_inode_pa()
5244 trace_ext4_mb_new_inode_pa(ac, pa); in ext4_mb_new_inode_pa()
5246 atomic_add(pa->pa_free, &sbi->s_mb_preallocated); in ext4_mb_new_inode_pa()
5247 ext4_mb_use_inode_pa(ac, pa); in ext4_mb_new_inode_pa()
5254 pa->pa_node_lock.inode_lock = &ei->i_prealloc_lock; in ext4_mb_new_inode_pa()
5255 pa->pa_inode = ac->ac_inode; in ext4_mb_new_inode_pa()
5257 list_add(&pa->pa_group_list, &grp->bb_prealloc_list); in ext4_mb_new_inode_pa()
5259 write_lock(pa->pa_node_lock.inode_lock); in ext4_mb_new_inode_pa()
5260 ext4_mb_pa_rb_insert(&ei->i_prealloc_node, &pa->pa_node.inode_node); in ext4_mb_new_inode_pa()
5261 write_unlock(pa->pa_node_lock.inode_lock); in ext4_mb_new_inode_pa()
5273 struct ext4_prealloc_space *pa; in ext4_mb_new_group_pa() local
5282 pa = ac->ac_pa; in ext4_mb_new_group_pa()
5284 pa->pa_pstart = ext4_grp_offs_to_block(sb, &ac->ac_b_ex); in ext4_mb_new_group_pa()
5285 pa->pa_lstart = pa->pa_pstart; in ext4_mb_new_group_pa()
5286 pa->pa_len = ac->ac_b_ex.fe_len; in ext4_mb_new_group_pa()
5287 pa->pa_free = pa->pa_len; in ext4_mb_new_group_pa()
5288 spin_lock_init(&pa->pa_lock); in ext4_mb_new_group_pa()
5289 INIT_LIST_HEAD(&pa->pa_node.lg_list); in ext4_mb_new_group_pa()
5290 INIT_LIST_HEAD(&pa->pa_group_list); in ext4_mb_new_group_pa()
5291 pa->pa_deleted = 0; in ext4_mb_new_group_pa()
5292 pa->pa_type = MB_GROUP_PA; in ext4_mb_new_group_pa()
5294 mb_debug(sb, "new group pa %p: %llu/%d for %u\n", pa, pa->pa_pstart, in ext4_mb_new_group_pa()
5295 pa->pa_len, pa->pa_lstart); in ext4_mb_new_group_pa()
5296 trace_ext4_mb_new_group_pa(ac, pa); in ext4_mb_new_group_pa()
5298 ext4_mb_use_group_pa(ac, pa); in ext4_mb_new_group_pa()
5299 atomic_add(pa->pa_free, &EXT4_SB(sb)->s_mb_preallocated); in ext4_mb_new_group_pa()
5307 pa->pa_node_lock.lg_lock = &lg->lg_prealloc_lock; in ext4_mb_new_group_pa()
5308 pa->pa_inode = NULL; in ext4_mb_new_group_pa()
5310 list_add(&pa->pa_group_list, &grp->bb_prealloc_list); in ext4_mb_new_group_pa()
5336 struct ext4_prealloc_space *pa) in ext4_mb_release_inode_pa() argument
5347 BUG_ON(pa->pa_deleted == 0); in ext4_mb_release_inode_pa()
5348 ext4_get_group_no_and_offset(sb, pa->pa_pstart, &group, &bit); in ext4_mb_release_inode_pa()
5349 grp_blk_start = pa->pa_pstart - EXT4_C2B(sbi, bit); in ext4_mb_release_inode_pa()
5350 BUG_ON(group != e4b->bd_group && pa->pa_len != 0); in ext4_mb_release_inode_pa()
5351 end = bit + pa->pa_len; in ext4_mb_release_inode_pa()
5364 trace_ext4_mb_release_inode_pa(pa, (grp_blk_start + in ext4_mb_release_inode_pa()
5367 mb_free_blocks(pa->pa_inode, e4b, bit, next - bit); in ext4_mb_release_inode_pa()
5370 if (free != pa->pa_free) { in ext4_mb_release_inode_pa()
5373 pa, (unsigned long) pa->pa_lstart, in ext4_mb_release_inode_pa()
5374 (unsigned long) pa->pa_pstart, in ext4_mb_release_inode_pa()
5375 pa->pa_len); in ext4_mb_release_inode_pa()
5377 free, pa->pa_free); in ext4_mb_release_inode_pa()
5390 struct ext4_prealloc_space *pa) in ext4_mb_release_group_pa() argument
5396 trace_ext4_mb_release_group_pa(sb, pa); in ext4_mb_release_group_pa()
5397 BUG_ON(pa->pa_deleted == 0); in ext4_mb_release_group_pa()
5398 ext4_get_group_no_and_offset(sb, pa->pa_pstart, &group, &bit); in ext4_mb_release_group_pa()
5399 if (unlikely(group != e4b->bd_group && pa->pa_len != 0)) { in ext4_mb_release_group_pa()
5401 e4b->bd_group, group, pa->pa_pstart); in ext4_mb_release_group_pa()
5404 mb_free_blocks(pa->pa_inode, e4b, bit, pa->pa_len); in ext4_mb_release_group_pa()
5405 atomic_add(pa->pa_len, &EXT4_SB(sb)->s_mb_discarded); in ext4_mb_release_group_pa()
5406 trace_ext4_mballoc_discard(sb, NULL, group, bit, pa->pa_len); in ext4_mb_release_group_pa()
5426 struct ext4_prealloc_space *pa, *tmp; in ext4_mb_discard_group_preallocations() local
5457 list_for_each_entry_safe(pa, tmp, in ext4_mb_discard_group_preallocations()
5459 spin_lock(&pa->pa_lock); in ext4_mb_discard_group_preallocations()
5460 if (atomic_read(&pa->pa_count)) { in ext4_mb_discard_group_preallocations()
5461 spin_unlock(&pa->pa_lock); in ext4_mb_discard_group_preallocations()
5465 if (pa->pa_deleted) { in ext4_mb_discard_group_preallocations()
5466 spin_unlock(&pa->pa_lock); in ext4_mb_discard_group_preallocations()
5471 ext4_mb_mark_pa_deleted(sb, pa); in ext4_mb_discard_group_preallocations()
5477 free += pa->pa_free; in ext4_mb_discard_group_preallocations()
5479 spin_unlock(&pa->pa_lock); in ext4_mb_discard_group_preallocations()
5481 list_del(&pa->pa_group_list); in ext4_mb_discard_group_preallocations()
5482 list_add(&pa->u.pa_tmp_list, &list); in ext4_mb_discard_group_preallocations()
5486 list_for_each_entry_safe(pa, tmp, &list, u.pa_tmp_list) { in ext4_mb_discard_group_preallocations()
5489 if (pa->pa_type == MB_GROUP_PA) { in ext4_mb_discard_group_preallocations()
5490 spin_lock(pa->pa_node_lock.lg_lock); in ext4_mb_discard_group_preallocations()
5491 list_del_rcu(&pa->pa_node.lg_list); in ext4_mb_discard_group_preallocations()
5492 spin_unlock(pa->pa_node_lock.lg_lock); in ext4_mb_discard_group_preallocations()
5494 write_lock(pa->pa_node_lock.inode_lock); in ext4_mb_discard_group_preallocations()
5495 ei = EXT4_I(pa->pa_inode); in ext4_mb_discard_group_preallocations()
5496 rb_erase(&pa->pa_node.inode_node, &ei->i_prealloc_node); in ext4_mb_discard_group_preallocations()
5497 write_unlock(pa->pa_node_lock.inode_lock); in ext4_mb_discard_group_preallocations()
5500 list_del(&pa->u.pa_tmp_list); in ext4_mb_discard_group_preallocations()
5502 if (pa->pa_type == MB_GROUP_PA) { in ext4_mb_discard_group_preallocations()
5503 ext4_mb_release_group_pa(&e4b, pa); in ext4_mb_discard_group_preallocations()
5504 call_rcu(&(pa)->u.pa_rcu, ext4_mb_pa_callback); in ext4_mb_discard_group_preallocations()
5506 ext4_mb_release_inode_pa(&e4b, bitmap_bh, pa); in ext4_mb_discard_group_preallocations()
5507 ext4_mb_pa_free(pa); in ext4_mb_discard_group_preallocations()
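
ext4_mb_discard_group_preallocations() above scans the group's bb_prealloc_list under the group lock, skips PAs that are still referenced or already deleted, marks the rest deleted and moves them to a private temporary list; the expensive release work (bitmap updates, freeing, RCU callbacks) happens afterwards from that temporary list. Here is a minimal user-space sketch of the same collect-under-the-lock, release-outside-it pattern, with a mutex and hand-rolled singly linked lists standing in for the kernel primitives; names are hypothetical.

/* Collect idle PAs under the lock, tear them down after dropping it. */
#include <pthread.h>
#include <stdbool.h>
#include <stdlib.h>

struct pa_model {
	int count;		/* still in use by an allocation context? */
	bool deleted;
	int free_blocks;
	struct pa_model *next;	/* linkage on the group list or temp list */
};

static pthread_mutex_t group_lock = PTHREAD_MUTEX_INITIALIZER;
static struct pa_model *group_list;

static int discard_group_preallocations(void)
{
	struct pa_model *pa, **link, *victims = NULL;
	int freed = 0;

	pthread_mutex_lock(&group_lock);
	link = &group_list;
	while ((pa = *link) != NULL) {
		if (pa->count || pa->deleted) {	/* busy: leave it alone */
			link = &pa->next;
			continue;
		}
		pa->deleted = true;
		freed += pa->free_blocks;
		*link = pa->next;		/* unlink from the group list */
		pa->next = victims;		/* park on the temp list */
		victims = pa;
	}
	pthread_mutex_unlock(&group_lock);

	while ((pa = victims) != NULL) {	/* expensive work, lock dropped */
		victims = pa->next;
		free(pa);			/* kernel also frees the blocks back to the buddy bitmap */
	}
	return freed;
}

int main(void)
{
	struct pa_model *pa = calloc(1, sizeof(*pa));

	pa->free_blocks = 16;
	pa->next = group_list;
	group_list = pa;
	return discard_group_preallocations() == 16 ? 0 : 1;
}
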
5534 struct ext4_prealloc_space *pa, *tmp; in ext4_discard_preallocations() local
5561 pa = rb_entry(iter, struct ext4_prealloc_space, in ext4_discard_preallocations()
5563 BUG_ON(pa->pa_node_lock.inode_lock != &ei->i_prealloc_lock); in ext4_discard_preallocations()
5565 spin_lock(&pa->pa_lock); in ext4_discard_preallocations()
5566 if (atomic_read(&pa->pa_count)) { in ext4_discard_preallocations()
5569 spin_unlock(&pa->pa_lock); in ext4_discard_preallocations()
5578 if (pa->pa_deleted == 0) { in ext4_discard_preallocations()
5579 ext4_mb_mark_pa_deleted(sb, pa); in ext4_discard_preallocations()
5580 spin_unlock(&pa->pa_lock); in ext4_discard_preallocations()
5581 rb_erase(&pa->pa_node.inode_node, &ei->i_prealloc_node); in ext4_discard_preallocations()
5582 list_add(&pa->u.pa_tmp_list, &list); in ext4_discard_preallocations()
5588 spin_unlock(&pa->pa_lock); in ext4_discard_preallocations()
5608 list_for_each_entry_safe(pa, tmp, &list, u.pa_tmp_list) { in ext4_discard_preallocations()
5609 BUG_ON(pa->pa_type != MB_INODE_PA); in ext4_discard_preallocations()
5610 group = ext4_get_group_number(sb, pa->pa_pstart); in ext4_discard_preallocations()
5630 list_del(&pa->pa_group_list); in ext4_discard_preallocations()
5631 ext4_mb_release_inode_pa(&e4b, bitmap_bh, pa); in ext4_discard_preallocations()
5637 list_del(&pa->u.pa_tmp_list); in ext4_discard_preallocations()
5638 ext4_mb_pa_free(pa); in ext4_discard_preallocations()
5644 struct ext4_prealloc_space *pa; in ext4_mb_pa_alloc() local
5647 pa = kmem_cache_zalloc(ext4_pspace_cachep, GFP_NOFS); in ext4_mb_pa_alloc()
5648 if (!pa) in ext4_mb_pa_alloc()
5650 atomic_set(&pa->pa_count, 1); in ext4_mb_pa_alloc()
5651 ac->ac_pa = pa; in ext4_mb_pa_alloc()
5657 struct ext4_prealloc_space *pa = ac->ac_pa; in ext4_mb_pa_put_free() local
5659 BUG_ON(!pa); in ext4_mb_pa_put_free()
5661 WARN_ON(!atomic_dec_and_test(&pa->pa_count)); in ext4_mb_pa_put_free()
5667 pa->pa_deleted = 1; in ext4_mb_pa_put_free()
5668 ext4_mb_pa_free(pa); in ext4_mb_pa_put_free()
5683 struct ext4_prealloc_space *pa; in ext4_mb_show_pa() local
5691 pa = list_entry(cur, struct ext4_prealloc_space, in ext4_mb_show_pa()
5693 spin_lock(&pa->pa_lock); in ext4_mb_show_pa()
5694 ext4_get_group_no_and_offset(sb, pa->pa_pstart, in ext4_mb_show_pa()
5696 spin_unlock(&pa->pa_lock); in ext4_mb_show_pa()
5698 pa->pa_len); in ext4_mb_show_pa()
5869 struct ext4_prealloc_space *pa, *tmp; in ext4_mb_discard_lg_preallocations() local
5874 list_for_each_entry_rcu(pa, &lg->lg_prealloc_list[order], in ext4_mb_discard_lg_preallocations()
5877 spin_lock(&pa->pa_lock); in ext4_mb_discard_lg_preallocations()
5878 if (atomic_read(&pa->pa_count)) { in ext4_mb_discard_lg_preallocations()
5884 spin_unlock(&pa->pa_lock); in ext4_mb_discard_lg_preallocations()
5887 if (pa->pa_deleted) { in ext4_mb_discard_lg_preallocations()
5888 spin_unlock(&pa->pa_lock); in ext4_mb_discard_lg_preallocations()
5892 BUG_ON(pa->pa_type != MB_GROUP_PA); in ext4_mb_discard_lg_preallocations()
5895 ext4_mb_mark_pa_deleted(sb, pa); in ext4_mb_discard_lg_preallocations()
5896 spin_unlock(&pa->pa_lock); in ext4_mb_discard_lg_preallocations()
5898 list_del_rcu(&pa->pa_node.lg_list); in ext4_mb_discard_lg_preallocations()
5899 list_add(&pa->u.pa_tmp_list, &discard_list); in ext4_mb_discard_lg_preallocations()
5914 list_for_each_entry_safe(pa, tmp, &discard_list, u.pa_tmp_list) { in ext4_mb_discard_lg_preallocations()
5917 group = ext4_get_group_number(sb, pa->pa_pstart); in ext4_mb_discard_lg_preallocations()
5926 list_del(&pa->pa_group_list); in ext4_mb_discard_lg_preallocations()
5927 ext4_mb_release_group_pa(&e4b, pa); in ext4_mb_discard_lg_preallocations()
5931 list_del(&pa->u.pa_tmp_list); in ext4_mb_discard_lg_preallocations()
5932 call_rcu(&(pa)->u.pa_rcu, ext4_mb_pa_callback); in ext4_mb_discard_lg_preallocations()
5950 struct ext4_prealloc_space *tmp_pa, *pa = ac->ac_pa; in ext4_mb_add_n_trim() local
5952 order = fls(pa->pa_free) - 1; in ext4_mb_add_n_trim()
5966 if (!added && pa->pa_free < tmp_pa->pa_free) { in ext4_mb_add_n_trim()
5968 list_add_tail_rcu(&pa->pa_node.lg_list, in ext4_mb_add_n_trim()
5980 list_add_tail_rcu(&pa->pa_node.lg_list, in ext4_mb_add_n_trim()
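
ext4_mb_add_n_trim() above files a partially used group PA back onto one of the locality group's per-order lists, where the bucket index is fls(pa->pa_free) - 1, i.e. the position of the highest set bit of the remaining free count; within a bucket the PA is inserted before the first entry with a larger pa_free. A small sketch of the bucket computation follows, emulating the kernel's fls() with a loop.

/* Bucket selection modeled on ext4_mb_add_n_trim(): order = fls(pa_free) - 1. */
#include <assert.h>

static int fls_emul(unsigned int x)
{
	int bit = 0;

	while (x) {
		bit++;
		x >>= 1;
	}
	return bit;	/* fls(0) == 0, fls(1) == 1, fls(8) == 4 */
}

int main(void)
{
	/* a PA with 24 free clusters lands in order bucket 4 (covers 16..31) */
	assert(fls_emul(24) - 1 == 4);
	/* 1 free cluster lands in bucket 0, 512 in bucket 9 */
	assert(fls_emul(1) - 1 == 0);
	assert(fls_emul(512) - 1 == 9);
	return 0;
}
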
5996 struct ext4_prealloc_space *pa = ac->ac_pa; in ext4_mb_release_context() local
5997 if (pa) { in ext4_mb_release_context()
5998 if (pa->pa_type == MB_GROUP_PA) { in ext4_mb_release_context()
6000 spin_lock(&pa->pa_lock); in ext4_mb_release_context()
6001 pa->pa_pstart += EXT4_C2B(sbi, ac->ac_b_ex.fe_len); in ext4_mb_release_context()
6002 pa->pa_lstart += EXT4_C2B(sbi, ac->ac_b_ex.fe_len); in ext4_mb_release_context()
6003 pa->pa_free -= ac->ac_b_ex.fe_len; in ext4_mb_release_context()
6004 pa->pa_len -= ac->ac_b_ex.fe_len; in ext4_mb_release_context()
6005 spin_unlock(&pa->pa_lock); in ext4_mb_release_context()
6013 if (likely(pa->pa_free)) { in ext4_mb_release_context()
6014 spin_lock(pa->pa_node_lock.lg_lock); in ext4_mb_release_context()
6015 list_del_rcu(&pa->pa_node.lg_list); in ext4_mb_release_context()
6016 spin_unlock(pa->pa_node_lock.lg_lock); in ext4_mb_release_context()
6021 ext4_mb_put_pa(ac, ac->ac_sb, pa); in ext4_mb_release_context()
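
Finally, when a group PA was used, ext4_mb_release_context() above advances the PA past the clusters just consumed: pa_pstart and pa_lstart move forward by the allocated length converted to blocks with EXT4_C2B(), while pa_len and pa_free shrink by the same cluster count, and any remainder is re-filed by ext4_mb_add_n_trim() before the final ext4_mb_put_pa(). A small worked model of that bookkeeping follows, with the cluster-to-block ratio reduced to an illustrative constant and hypothetical names throughout.

/* Simplified group-PA adjustment from ext4_mb_release_context(). */
#include <assert.h>
#include <stdint.h>

#define BLOCKS_PER_CLUSTER 4	/* illustrative; ext4 derives this via EXT4_C2B() */

struct pa_model {
	uint64_t pstart, lstart;	/* block numbers */
	int len, free_clusters;		/* clusters */
};

/* Consume used_clusters from the front of the PA. */
static void consume_front(struct pa_model *pa, int used_clusters)
{
	pa->pstart += (uint64_t)used_clusters * BLOCKS_PER_CLUSTER;
	pa->lstart += (uint64_t)used_clusters * BLOCKS_PER_CLUSTER;
	pa->free_clusters -= used_clusters;
	pa->len -= used_clusters;
}

int main(void)
{
	struct pa_model pa = { .pstart = 4096, .lstart = 4096,
			       .len = 32, .free_clusters = 32 };

	consume_front(&pa, 8);		/* the allocation took 8 clusters */
	assert(pa.pstart == 4096 + 8 * BLOCKS_PER_CLUSTER);
	assert(pa.len == 24 && pa.free_clusters == 24);
	return 0;
}
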