Lines Matching full:be

30  * be used to verify the extent tree is consistent as these should all match
48 * were called with since it could be different from ref_root. We also store
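
The two comment fragments above come from the file's opening description: every extent reference is mirrored in memory so it can later be checked against the on-disk extent tree. The rest of the hits operate on a small set of structures whose fields are visible throughout the listing (bytenr, len, num_refs, metadata, from_disk, roots, refs, actions, node). Below is a minimal sketch of those structures inferred from that usage; the exact layouts, and the root_entry/ref_entry/ref_action shapes in particular, are assumptions rather than copies of the real definitions. Later sketches also assume the usual <linux/slab.h> and <linux/err.h> includes for kzalloc()/kfree() and ERR_PTR().

/*
 * Sketch of the in-memory ref-verify structures implied by the fields used
 * in this listing.  Field layout is inferred, not copied from the file.
 */
#include <linux/rbtree.h>
#include <linux/list.h>
#include <linux/types.h>

struct root_entry {
	u64 root_objectid;		/* assumed key: the owning root */
	u64 num_refs;
	struct rb_node node;		/* linked into block_entry::roots */
};

struct ref_entry {
	u64 root_objectid;		/* assumed key fields for one ref */
	u64 parent;
	u64 owner;
	u64 offset;
	u64 num_refs;
	struct rb_node node;		/* linked into block_entry::refs */
};

struct ref_action {
	int action;			/* assumed: which modification and by whom */
	u64 root;
	struct ref_entry ref;
	struct list_head list;		/* linked into block_entry::actions */
};

struct block_entry {
	u64 bytenr;			/* key into fs_info->block_tree */
	u64 len;
	u64 num_refs;
	int metadata;
	int from_disk;
	struct rb_root roots;		/* per-root refcounts (root_entry) */
	struct rb_root refs;		/* individual refs (ref_entry) */
	struct rb_node node;
	struct list_head actions;	/* history kept for dumping */
};
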
79 struct block_entry *be) in insert_block_entry() argument
88 if (entry->bytenr > be->bytenr) in insert_block_entry()
90 else if (entry->bytenr < be->bytenr) in insert_block_entry()
96 rb_link_node(&be->node, parent_node, p); in insert_block_entry()
97 rb_insert_color(&be->node, root); in insert_block_entry()
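
The hits at source lines 79-97 are the standard rb-tree insertion pattern keyed on bytenr. A reconstruction consistent with those fragments; the duplicate-handling return value is assumed from the way add_block_entry checks an "exist" pointer further down:

static struct block_entry *insert_block_entry(struct rb_root *root,
					      struct block_entry *be)
{
	struct rb_node **p = &root->rb_node;
	struct rb_node *parent_node = NULL;
	struct block_entry *entry;

	while (*p) {
		parent_node = *p;
		entry = rb_entry(parent_node, struct block_entry, node);
		if (entry->bytenr > be->bytenr)
			p = &(*p)->rb_left;
		else if (entry->bytenr < be->bytenr)
			p = &(*p)->rb_right;
		else
			return entry;	/* already tracked, let the caller reuse it */
	}

	rb_link_node(&be->node, parent_node, p);
	rb_insert_color(&be->node, root);
	return NULL;
}
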
235 static void free_block_entry(struct block_entry *be) in free_block_entry() argument
242 while ((n = rb_first(&be->roots))) { in free_block_entry()
244 rb_erase(&re->node, &be->roots); in free_block_entry()
248 while ((n = rb_first(&be->refs))) { in free_block_entry()
250 rb_erase(&ref->node, &be->refs); in free_block_entry()
254 while (!list_empty(&be->actions)) { in free_block_entry()
255 ra = list_first_entry(&be->actions, struct ref_action, in free_block_entry()
260 kfree(be); in free_block_entry()
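
free_block_entry (source lines 235-260) drains the three per-block containers before freeing the entry itself. A sketch assuming the structure names from the first block:

static void free_block_entry(struct block_entry *be)
{
	struct root_entry *re;
	struct ref_entry *ref;
	struct ref_action *ra;
	struct rb_node *n;

	/* Drain the per-root refcount tree. */
	while ((n = rb_first(&be->roots))) {
		re = rb_entry(n, struct root_entry, node);
		rb_erase(&re->node, &be->roots);
		kfree(re);
	}

	/* Drain the individual ref tree. */
	while ((n = rb_first(&be->refs))) {
		ref = rb_entry(n, struct ref_entry, node);
		rb_erase(&ref->node, &be->refs);
		kfree(ref);
	}

	/* Drain the recorded action history. */
	while (!list_empty(&be->actions)) {
		ra = list_first_entry(&be->actions, struct ref_action, list);
		list_del(&ra->list);
		kfree(ra);
	}
	kfree(be);
}
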
267 struct block_entry *be = NULL, *exist; in add_block_entry() local
271 be = kzalloc(sizeof(struct block_entry), GFP_NOFS); in add_block_entry()
272 if (!be || !re) { in add_block_entry()
274 kfree(be); in add_block_entry()
277 be->bytenr = bytenr; in add_block_entry()
278 be->len = len; in add_block_entry()
284 exist = insert_block_entry(&fs_info->block_tree, be); in add_block_entry()
295 kfree(be); in add_block_entry()
299 be->num_refs = 0; in add_block_entry()
300 be->metadata = 0; in add_block_entry()
301 be->from_disk = 0; in add_block_entry()
302 be->roots = RB_ROOT; in add_block_entry()
303 be->refs = RB_ROOT; in add_block_entry()
304 INIT_LIST_HEAD(&be->actions); in add_block_entry()
306 insert_root_entry(&be->roots, re); in add_block_entry()
309 return be; in add_block_entry()
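
add_block_entry (source lines 267-309) allocates a block_entry plus a root_entry, tries to insert the block into fs_info->block_tree, and on collision frees the fresh allocation and returns the existing entry instead. A condensed sketch: the ERR_PTR(-ENOMEM) return is implied by the IS_ERR()/PTR_ERR() checks in the callers, insert_root_entry() is assumed to mirror insert_block_entry() keyed on root_objectid, and any locking around block_tree is omitted.

static struct block_entry *add_block_entry(struct btrfs_fs_info *fs_info,
					   u64 bytenr, u64 len,
					   u64 root_objectid)
{
	struct block_entry *be, *exist;
	struct root_entry *re;

	re = kzalloc(sizeof(struct root_entry), GFP_NOFS);
	be = kzalloc(sizeof(struct block_entry), GFP_NOFS);
	if (!be || !re) {
		kfree(re);
		kfree(be);
		return ERR_PTR(-ENOMEM);
	}
	be->bytenr = bytenr;
	be->len = len;
	re->root_objectid = root_objectid;

	exist = insert_block_entry(&fs_info->block_tree, be);
	if (exist) {
		/* Extent already tracked: keep the existing entry. */
		if (!root_objectid || insert_root_entry(&exist->roots, re))
			kfree(re);
		kfree(be);
		return exist;
	}

	be->num_refs = 0;
	be->metadata = 0;
	be->from_disk = 0;
	be->roots = RB_ROOT;
	be->refs = RB_ROOT;
	INIT_LIST_HEAD(&be->actions);
	if (root_objectid)
		insert_root_entry(&be->roots, re);
	else
		kfree(re);
	return be;
}
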
315 struct block_entry *be; in add_tree_block() local
332 be = add_block_entry(fs_info, bytenr, fs_info->nodesize, ref_root); in add_tree_block()
333 if (IS_ERR(be)) { in add_tree_block()
335 return PTR_ERR(be); in add_tree_block()
337 be->num_refs++; in add_tree_block()
338 be->from_disk = 1; in add_tree_block()
339 be->metadata = 1; in add_tree_block()
343 re = lookup_root_entry(&be->roots, ref_root); in add_tree_block()
347 exist = insert_ref_entry(&be->refs, ref); in add_tree_block()
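
add_tree_block (source lines 315-347) records one metadata tree block found on disk: it is nodesize-sized, marked metadata and from_disk, gets a ref_entry inserted, and for a non-shared ref the matching root_entry count is bumped. A condensed sketch; the parent/level parameters and the ref_entry field assignments are assumptions, and insert_ref_entry()/lookup_root_entry() are assumed helpers analogous to the block-entry ones.

static int add_tree_block(struct btrfs_fs_info *fs_info, u64 ref_root,
			  u64 parent, u64 bytenr, int level)
{
	struct block_entry *be;
	struct root_entry *re;
	struct ref_entry *ref, *exist;

	ref = kzalloc(sizeof(struct ref_entry), GFP_NOFS);
	if (!ref)
		return -ENOMEM;
	ref->root_objectid = parent ? 0 : ref_root;	/* shared vs keyed ref */
	ref->parent = parent;
	ref->owner = level;
	ref->num_refs = 1;

	be = add_block_entry(fs_info, bytenr, fs_info->nodesize, ref_root);
	if (IS_ERR(be)) {
		kfree(ref);
		return PTR_ERR(be);
	}
	be->num_refs++;
	be->from_disk = 1;
	be->metadata = 1;

	if (!parent) {
		re = lookup_root_entry(&be->roots, ref_root);
		if (re)
			re->num_refs++;
	}
	exist = insert_ref_entry(&be->refs, ref);
	if (exist) {
		exist->num_refs++;
		kfree(ref);
	}
	return 0;
}
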
361 struct block_entry *be; in add_shared_data_ref() local
367 be = add_block_entry(fs_info, bytenr, num_bytes, 0); in add_shared_data_ref()
368 if (IS_ERR(be)) { in add_shared_data_ref()
370 return PTR_ERR(be); in add_shared_data_ref()
372 be->num_refs += num_refs; in add_shared_data_ref()
376 if (insert_ref_entry(&be->refs, ref)) { in add_shared_data_ref()
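
add_shared_data_ref (source lines 361-376) handles a shared data ref: the block entry is created with root 0 (no owning root is recorded), its count is raised by the on-disk ref count, and a parent-keyed ref_entry is inserted. Sketch; the parent parameter and the error value on a duplicate are assumptions.

static int add_shared_data_ref(struct btrfs_fs_info *fs_info,
			       u64 parent, u32 num_refs, u64 bytenr,
			       u64 num_bytes)
{
	struct block_entry *be;
	struct ref_entry *ref;

	ref = kzalloc(sizeof(struct ref_entry), GFP_NOFS);
	if (!ref)
		return -ENOMEM;
	ref->parent = parent;		/* shared refs are keyed by parent only */
	ref->num_refs = num_refs;

	be = add_block_entry(fs_info, bytenr, num_bytes, 0);
	if (IS_ERR(be)) {
		kfree(ref);
		return PTR_ERR(be);
	}
	be->num_refs += num_refs;

	if (insert_ref_entry(&be->refs, ref)) {
		/* Duplicate shared ref while scanning the extent tree. */
		kfree(ref);
		return -EINVAL;
	}
	return 0;
}
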
391 struct block_entry *be; in add_extent_data_ref() local
402 be = add_block_entry(fs_info, bytenr, num_bytes, ref_root); in add_extent_data_ref()
403 if (IS_ERR(be)) { in add_extent_data_ref()
405 return PTR_ERR(be); in add_extent_data_ref()
407 be->num_refs += num_refs; in add_extent_data_ref()
414 if (insert_ref_entry(&be->refs, ref)) { in add_extent_data_ref()
421 re = lookup_root_entry(&be->roots, ref_root); in add_extent_data_ref()
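
add_extent_data_ref (source lines 391-421) handles a keyed data ref: it inserts a root/owner/offset-keyed ref_entry and then bumps the per-root counter that add_block_entry created. Sketch; in the file these values are read out of the extent data ref item in the leaf, here they are passed in directly, and the owner/offset parameters and error value are assumptions.

static int add_extent_data_ref(struct btrfs_fs_info *fs_info,
			       u64 ref_root, u64 owner, u64 offset,
			       u32 num_refs, u64 bytenr, u64 num_bytes)
{
	struct block_entry *be;
	struct root_entry *re;
	struct ref_entry *ref;

	ref = kzalloc(sizeof(struct ref_entry), GFP_NOFS);
	if (!ref)
		return -ENOMEM;
	ref->root_objectid = ref_root;
	ref->owner = owner;
	ref->offset = offset;
	ref->num_refs = num_refs;

	be = add_block_entry(fs_info, bytenr, num_bytes, ref_root);
	if (IS_ERR(be)) {
		kfree(ref);
		return PTR_ERR(be);
	}
	be->num_refs += num_refs;

	if (insert_ref_entry(&be->refs, ref)) {
		kfree(ref);
		return -EINVAL;	/* duplicate keyed ref in the extent tree */
	}

	/* add_block_entry() added the root_entry; account these refs to it. */
	re = lookup_root_entry(&be->roots, ref_root);
	if (re)
		re->num_refs += num_refs;
	return 0;
}
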
583 /* Walk up to the next node that needs to be processed */
620  * Dumps all the information from the block entry to printk; it's going to be
624 struct block_entry *be) in dump_block_entry() argument
633 be->bytenr, be->len, be->num_refs, be->metadata, in dump_block_entry()
634 be->from_disk); in dump_block_entry()
636 for (n = rb_first(&be->refs); n; n = rb_next(n)) { in dump_block_entry()
644 for (n = rb_first(&be->roots); n; n = rb_next(n)) { in dump_block_entry()
650 list_for_each_entry(ra, &be->actions, list) in dump_block_entry()
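
dump_block_entry (source lines 620-650) prints the block-entry header and then walks both rb-trees and the action list. Sketch; the message text is illustrative, not the real format strings.

static void dump_block_entry(struct btrfs_fs_info *fs_info,
			     struct block_entry *be)
{
	struct ref_entry *ref;
	struct root_entry *re;
	struct ref_action *ra;
	struct rb_node *n;

	btrfs_err(fs_info,
		  "block entry [%llu %llu], num_refs %llu, metadata %d, from disk %d",
		  be->bytenr, be->len, be->num_refs, be->metadata,
		  be->from_disk);

	for (n = rb_first(&be->refs); n; n = rb_next(n)) {
		ref = rb_entry(n, struct ref_entry, node);
		btrfs_err(fs_info,
			  "  ref root %llu parent %llu owner %llu offset %llu num_refs %llu",
			  ref->root_objectid, ref->parent, ref->owner,
			  ref->offset, ref->num_refs);
	}

	for (n = rb_first(&be->roots); n; n = rb_next(n)) {
		re = rb_entry(n, struct root_entry, node);
		btrfs_err(fs_info, "  root entry %llu num_refs %llu",
			  re->root_objectid, re->num_refs);
	}

	list_for_each_entry(ra, &be->actions, list)
		btrfs_err(fs_info, "  action %d root %llu", ra->action, ra->root);
}
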
667 struct block_entry *be = NULL; in btrfs_ref_tree_mod() local
735 be = add_block_entry(fs_info, bytenr, num_bytes, ref_root); in btrfs_ref_tree_mod()
736 if (IS_ERR(be)) { in btrfs_ref_tree_mod()
739 ret = PTR_ERR(be); in btrfs_ref_tree_mod()
742 be->num_refs++; in btrfs_ref_tree_mod()
744 be->metadata = 1; in btrfs_ref_tree_mod()
746 if (be->num_refs != 1) { in btrfs_ref_tree_mod()
749 dump_block_entry(fs_info, be); in btrfs_ref_tree_mod()
756 while (!list_empty(&be->actions)) { in btrfs_ref_tree_mod()
759 tmp = list_first_entry(&be->actions, struct ref_action, in btrfs_ref_tree_mod()
786 be = lookup_block_entry(&fs_info->block_tree, bytenr); in btrfs_ref_tree_mod()
787 if (!be) { in btrfs_ref_tree_mod()
796 } else if (be->num_refs == 0) { in btrfs_ref_tree_mod()
800 dump_block_entry(fs_info, be); in btrfs_ref_tree_mod()
809 tmp = insert_root_entry(&be->roots, re); in btrfs_ref_tree_mod()
817 exist = insert_ref_entry(&be->refs, ref); in btrfs_ref_tree_mod()
823 dump_block_entry(fs_info, be); in btrfs_ref_tree_mod()
831 rb_erase(&exist->node, &be->refs); in btrfs_ref_tree_mod()
834 } else if (!be->metadata) { in btrfs_ref_tree_mod()
839 dump_block_entry(fs_info, be); in btrfs_ref_tree_mod()
850 dump_block_entry(fs_info, be); in btrfs_ref_tree_mod()
852 rb_erase(&ref->node, &be->refs); in btrfs_ref_tree_mod()
860 re = lookup_root_entry(&be->roots, ref_root); in btrfs_ref_tree_mod()
864 * above when we lookup the be with !parent, but just in in btrfs_ref_tree_mod()
869 generic_ref->real_root, be->bytenr); in btrfs_ref_tree_mod()
870 dump_block_entry(fs_info, be); in btrfs_ref_tree_mod()
879 be->num_refs--; in btrfs_ref_tree_mod()
881 be->num_refs++; in btrfs_ref_tree_mod()
885 list_add_tail(&ra->list, &be->actions); in btrfs_ref_tree_mod()
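
btrfs_ref_tree_mod (source lines 667-885 in the hits above) is the runtime check: every reference add or drop is replayed against the in-memory tree, and any mismatch is dumped. The full function also handles freshly allocated extents (lines 735-759), takes a lock, and dumps the failing ref_action alongside the block entry. The sketch below only shows the core bookkeeping once the block entry has been found; the helper name ref_tree_mod_core and the simplified bool parameter are hypothetical (the real code switches on the delayed-ref action).

static int ref_tree_mod_core(struct btrfs_fs_info *fs_info,
			     struct block_entry *be, struct ref_entry *ref,
			     struct ref_action *ra, struct root_entry *re,
			     bool drop)
{
	struct ref_entry *exist;

	exist = insert_ref_entry(&be->refs, ref);
	if (exist) {
		if (drop) {
			if (exist->num_refs == 0)
				goto err;		/* nothing left to drop */
			if (--exist->num_refs == 0) {
				rb_erase(&exist->node, &be->refs);
				kfree(exist);
			}
		} else if (!be->metadata) {
			exist->num_refs++;		/* data refs may repeat */
		} else {
			goto err;		/* double ref on a tree block */
		}
		kfree(ref);
	} else if (drop) {
		/* Dropping a ref nobody holds: undo the insert and complain. */
		rb_erase(&ref->node, &be->refs);
		kfree(ref);
		goto err_noref;
	}

	/* Per-root and per-block reference counts move together. */
	if (drop) {
		if (re)
			re->num_refs--;
		be->num_refs--;
	} else {
		be->num_refs++;
		if (re)
			re->num_refs++;
	}
	/* Remember what happened so later dumps can show the history. */
	list_add_tail(&ra->list, &be->actions);
	return 0;

err:
	kfree(ref);
err_noref:
	dump_block_entry(fs_info, be);
	kfree(ra);
	return -EINVAL;
}
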
900 struct block_entry *be; in btrfs_free_ref_cache() local
908 be = rb_entry(n, struct block_entry, node); in btrfs_free_ref_cache()
909 rb_erase(&be->node, &fs_info->block_tree); in btrfs_free_ref_cache()
910 free_block_entry(be); in btrfs_free_ref_cache()
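
btrfs_free_ref_cache (source lines 900-910) tears the whole block_tree down at unmount by repeatedly erasing the first entry. Sketch; the cond_resched() call and any enablement check or locking are assumptions not visible in the fragments.

void btrfs_free_ref_cache(struct btrfs_fs_info *fs_info)
{
	struct block_entry *be;
	struct rb_node *n;

	while ((n = rb_first(&fs_info->block_tree))) {
		be = rb_entry(n, struct block_entry, node);
		rb_erase(&be->node, &fs_info->block_tree);
		free_block_entry(be);
		cond_resched();		/* assumed: the tree can be large */
	}
}
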
919 struct block_entry *be = NULL, *entry; in btrfs_free_ref_tree_range() local
934 be = entry; in btrfs_free_ref_tree_range()
938 if (be == NULL || in btrfs_free_ref_tree_range()
939 (entry->bytenr < start && be->bytenr > start) || in btrfs_free_ref_tree_range()
940 (entry->bytenr < start && entry->bytenr > be->bytenr)) in btrfs_free_ref_tree_range()
941 be = entry; in btrfs_free_ref_tree_range()
948 if (!be) { in btrfs_free_ref_tree_range()
953 n = &be->node; in btrfs_free_ref_tree_range()
955 be = rb_entry(n, struct block_entry, node); in btrfs_free_ref_tree_range()
957 if (be->bytenr < start && be->bytenr + be->len > start) { in btrfs_free_ref_tree_range()
961 dump_block_entry(fs_info, be); in btrfs_free_ref_tree_range()
964 if (be->bytenr < start) in btrfs_free_ref_tree_range()
966 if (be->bytenr >= start + len) in btrfs_free_ref_tree_range()
968 if (be->bytenr + be->len > start + len) { in btrfs_free_ref_tree_range()
972 dump_block_entry(fs_info, be); in btrfs_free_ref_tree_range()
974 rb_erase(&be->node, &fs_info->block_tree); in btrfs_free_ref_tree_range()
975 free_block_entry(be); in btrfs_free_ref_tree_range()
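
btrfs_free_ref_tree_range (source lines 919-975) drops tracking for every block entry inside [start, start + len): it first descends the tree to find the entry closest to start (fragments at 934-941), then walks forward erasing entries, warning when an entry straddles either end of the range. A condensed sketch; the btrfs_err messages printed before each dump are omitted.

void btrfs_free_ref_tree_range(struct btrfs_fs_info *fs_info, u64 start,
			       u64 len)
{
	struct block_entry *be = NULL, *entry;
	struct rb_node *n;

	/* Find the entry for 'start', or the closest candidate to it. */
	n = fs_info->block_tree.rb_node;
	while (n) {
		entry = rb_entry(n, struct block_entry, node);
		if (entry->bytenr < start) {
			n = n->rb_right;
		} else if (entry->bytenr > start) {
			n = n->rb_left;
		} else {
			be = entry;
			break;
		}
		/* Remember the closest entry seen so far. */
		if (be == NULL ||
		    (entry->bytenr < start && be->bytenr > start) ||
		    (entry->bytenr < start && entry->bytenr > be->bytenr))
			be = entry;
	}
	if (!be)
		return;

	for (n = &be->node; n; ) {
		be = rb_entry(n, struct block_entry, node);
		n = rb_next(n);
		if (be->bytenr < start) {
			if (be->bytenr + be->len > start) {
				/* Entry starts before the range but overlaps it. */
				dump_block_entry(fs_info, be);
			}
			continue;
		}
		if (be->bytenr >= start + len)
			break;
		if (be->bytenr + be->len > start + len) {
			/* Entry hangs over the end of the freed range. */
			dump_block_entry(fs_info, be);
		}
		rb_erase(&be->node, &fs_info->block_tree);
		free_block_entry(be);
	}
}
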
980 /* Walk down all roots and build the ref tree, meant to be called at mount */