Lines matching references to bs
35 int coroutine_fn qcow2_shrink_l1_table(BlockDriverState *bs, in qcow2_shrink_l1_table() argument
38 BDRVQcow2State *s = bs->opaque; in qcow2_shrink_l1_table()
51 BLKDBG_CO_EVENT(bs->file, BLKDBG_L1_SHRINK_WRITE_TABLE); in qcow2_shrink_l1_table()
52 ret = bdrv_co_pwrite_zeroes(bs->file, in qcow2_shrink_l1_table()
59 ret = bdrv_co_flush(bs->file->bs); in qcow2_shrink_l1_table()
64 BLKDBG_CO_EVENT(bs->file, BLKDBG_L1_SHRINK_FREE_L2_CLUSTERS); in qcow2_shrink_l1_table()
69 qcow2_free_clusters(bs, s->l1_table[i] & L1E_OFFSET_MASK, in qcow2_shrink_l1_table()
86 int qcow2_grow_l1_table(BlockDriverState *bs, uint64_t min_size, in qcow2_grow_l1_table() argument
89 BDRVQcow2State *s = bs->opaque; in qcow2_grow_l1_table()
130 new_l1_table = qemu_try_blockalign(bs->file->bs, new_l1_size2); in qcow2_grow_l1_table()
141 BLKDBG_EVENT(bs->file, BLKDBG_L1_GROW_ALLOC_TABLE); in qcow2_grow_l1_table()
142 new_l1_table_offset = qcow2_alloc_clusters(bs, new_l1_size2); in qcow2_grow_l1_table()
148 ret = qcow2_cache_flush(bs, s->refcount_block_cache); in qcow2_grow_l1_table()
155 ret = qcow2_pre_write_overlap_check(bs, 0, new_l1_table_offset, in qcow2_grow_l1_table()
161 BLKDBG_EVENT(bs->file, BLKDBG_L1_GROW_WRITE_TABLE); in qcow2_grow_l1_table()
164 ret = bdrv_pwrite_sync(bs->file, new_l1_table_offset, new_l1_size2, in qcow2_grow_l1_table()
172 BLKDBG_EVENT(bs->file, BLKDBG_L1_GROW_ACTIVATE_TABLE); in qcow2_grow_l1_table()
175 ret = bdrv_pwrite_sync(bs->file, offsetof(QCowHeader, l1_size), in qcow2_grow_l1_table()
186 qcow2_free_clusters(bs, old_l1_table_offset, old_l1_size * L1E_SIZE, in qcow2_grow_l1_table()
191 qcow2_free_clusters(bs, new_l1_table_offset, new_l1_size2, in qcow2_grow_l1_table()
211 l2_load(BlockDriverState *bs, uint64_t offset, in l2_load() argument
214 BDRVQcow2State *s = bs->opaque; in l2_load()
218 return qcow2_cache_get(bs, s->l2_table_cache, l2_offset + start_of_slice, in l2_load()
227 int qcow2_write_l1_entry(BlockDriverState *bs, int l1_index) in qcow2_write_l1_entry() argument
229 BDRVQcow2State *s = bs->opaque; in qcow2_write_l1_entry()
233 MIN(bs->file->bs->bl.request_alignment, s->cluster_size)); in qcow2_write_l1_entry()
246 ret = qcow2_pre_write_overlap_check(bs, QCOW2_OL_ACTIVE_L1, in qcow2_write_l1_entry()
252 BLKDBG_EVENT(bs->file, BLKDBG_L1_UPDATE); in qcow2_write_l1_entry()
253 ret = bdrv_pwrite_sync(bs->file, in qcow2_write_l1_entry()
273 static int GRAPH_RDLOCK l2_allocate(BlockDriverState *bs, int l1_index) in l2_allocate() argument
275 BDRVQcow2State *s = bs->opaque; in l2_allocate()
284 trace_qcow2_l2_allocate(bs, l1_index); in l2_allocate()
288 l2_offset = qcow2_alloc_clusters(bs, s->l2_size * l2_entry_size(s)); in l2_allocate()
299 qcow2_signal_corruption(bs, true, -1, -1, "Preventing invalid " in l2_allocate()
305 ret = qcow2_cache_flush(bs, s->refcount_block_cache); in l2_allocate()
315 trace_qcow2_l2_allocate_get_empty(bs, l1_index); in l2_allocate()
317 ret = qcow2_cache_get_empty(bs, s->l2_table_cache, in l2_allocate()
333 BLKDBG_EVENT(bs->file, BLKDBG_L2_ALLOC_COW_READ); in l2_allocate()
334 ret = qcow2_cache_get(bs, s->l2_table_cache, old_l2_slice_offset, in l2_allocate()
346 BLKDBG_EVENT(bs->file, BLKDBG_L2_ALLOC_WRITE); in l2_allocate()
348 trace_qcow2_l2_allocate_write_l2(bs, l1_index); in l2_allocate()
353 ret = qcow2_cache_flush(bs, s->l2_table_cache); in l2_allocate()
359 trace_qcow2_l2_allocate_write_l1(bs, l1_index); in l2_allocate()
361 ret = qcow2_write_l1_entry(bs, l1_index); in l2_allocate()
366 trace_qcow2_l2_allocate_done(bs, l1_index, 0); in l2_allocate()
370 trace_qcow2_l2_allocate_done(bs, l1_index, ret); in l2_allocate()
376 qcow2_free_clusters(bs, l2_offset, s->l2_size * l2_entry_size(s), in l2_allocate()
395 qcow2_get_subcluster_range_type(BlockDriverState *bs, uint64_t l2_entry, in qcow2_get_subcluster_range_type() argument
399 BDRVQcow2State *s = bs->opaque; in qcow2_get_subcluster_range_type()
402 *type = qcow2_get_subcluster_type(bs, l2_entry, l2_bitmap, sc_from); in qcow2_get_subcluster_range_type()
445 count_contiguous_subclusters(BlockDriverState *bs, int nb_clusters, in count_contiguous_subclusters() argument
449 BDRVQcow2State *s = bs->opaque; in count_contiguous_subclusters()
461 int ret = qcow2_get_subcluster_range_type(bs, l2_entry, l2_bitmap, in count_contiguous_subclusters()
496 do_perform_cow_read(BlockDriverState *bs, uint64_t src_cluster_offset, in do_perform_cow_read() argument
505 BLKDBG_CO_EVENT(bs->file, BLKDBG_COW_READ); in do_perform_cow_read()
507 if (!bs->drv) { in do_perform_cow_read()
528 ret = bs->drv->bdrv_co_preadv_part(bs, in do_perform_cow_read()
539 do_perform_cow_write(BlockDriverState *bs, uint64_t cluster_offset, in do_perform_cow_write() argument
542 BDRVQcow2State *s = bs->opaque; in do_perform_cow_write()
549 ret = qcow2_pre_write_overlap_check(bs, 0, in do_perform_cow_write()
555 BLKDBG_CO_EVENT(bs->file, BLKDBG_COW_WRITE); in do_perform_cow_write()
586 int qcow2_get_host_offset(BlockDriverState *bs, uint64_t offset, in qcow2_get_host_offset() argument
590 BDRVQcow2State *s = bs->opaque; in qcow2_get_host_offset()
630 qcow2_signal_corruption(bs, true, -1, -1, "L2 table offset %#" PRIx64 in qcow2_get_host_offset()
638 ret = l2_load(bs, offset, l2_offset, &l2_slice); in qcow2_get_host_offset()
656 type = qcow2_get_subcluster_type(bs, l2_entry, l2_bitmap, sc_index); in qcow2_get_host_offset()
659 qcow2_signal_corruption(bs, true, -1, -1, "Zero cluster entry found" in qcow2_get_host_offset()
669 if (has_data_file(bs)) { in qcow2_get_host_offset()
670 qcow2_signal_corruption(bs, true, -1, -1, "Compressed cluster " in qcow2_get_host_offset()
688 qcow2_signal_corruption(bs, true, -1, -1, in qcow2_get_host_offset()
696 if (has_data_file(bs) && *host_offset != offset) { in qcow2_get_host_offset()
697 qcow2_signal_corruption(bs, true, -1, -1, in qcow2_get_host_offset()
712 sc = count_contiguous_subclusters(bs, nb_clusters, sc_index, in qcow2_get_host_offset()
715 qcow2_signal_corruption(bs, true, -1, -1, "Invalid cluster entry found " in qcow2_get_host_offset()
756 get_cluster_table(BlockDriverState *bs, uint64_t offset, in get_cluster_table() argument
759 BDRVQcow2State *s = bs->opaque; in get_cluster_table()
769 ret = qcow2_grow_l1_table(bs, l1_index + 1, false); in get_cluster_table()
778 qcow2_signal_corruption(bs, true, -1, -1, "L2 table offset %#" PRIx64 in get_cluster_table()
786 ret = l2_allocate(bs, l1_index); in get_cluster_table()
793 qcow2_free_clusters(bs, l2_offset, s->l2_size * l2_entry_size(s), in get_cluster_table()
803 ret = l2_load(bs, offset, l2_offset, &l2_slice); in get_cluster_table()
828 qcow2_alloc_compressed_cluster_offset(BlockDriverState *bs, uint64_t offset, in qcow2_alloc_compressed_cluster_offset() argument
831 BDRVQcow2State *s = bs->opaque; in qcow2_alloc_compressed_cluster_offset()
837 if (has_data_file(bs)) { in qcow2_alloc_compressed_cluster_offset()
841 ret = get_cluster_table(bs, offset, &l2_slice, &l2_index); in qcow2_alloc_compressed_cluster_offset()
854 cluster_offset = qcow2_alloc_bytes(bs, compressed_size); in qcow2_alloc_compressed_cluster_offset()
875 BLKDBG_CO_EVENT(bs->file, BLKDBG_L2_UPDATE_COMPRESSED); in qcow2_alloc_compressed_cluster_offset()
888 perform_cow(BlockDriverState *bs, QCowL2Meta *m) in perform_cow() argument
890 BDRVQcow2State *s = bs->opaque; in perform_cow()
918 size_t align = bdrv_opt_mem_align(bs); in perform_cow()
927 start_buffer = qemu_try_blockalign(bs, buffer_size); in perform_cow()
946 ret = do_perform_cow_read(bs, m->offset, start->offset, &qiov); in perform_cow()
949 ret = do_perform_cow_read(bs, m->offset, start->offset, &qiov); in perform_cow()
956 ret = do_perform_cow_read(bs, m->offset, end->offset, &qiov); in perform_cow()
963 if (bs->encrypted) { in perform_cow()
964 ret = qcow2_co_encrypt(bs, in perform_cow()
972 ret = qcow2_co_encrypt(bs, in perform_cow()
995 BLKDBG_CO_EVENT(bs->file, BLKDBG_WRITE_AIO); in perform_cow()
996 ret = do_perform_cow_write(bs, m->alloc_offset, start->offset, &qiov); in perform_cow()
1001 ret = do_perform_cow_write(bs, m->alloc_offset, start->offset, &qiov); in perform_cow()
1008 ret = do_perform_cow_write(bs, m->alloc_offset, end->offset, &qiov); in perform_cow()
1028 int coroutine_fn qcow2_alloc_cluster_link_l2(BlockDriverState *bs, in qcow2_alloc_cluster_link_l2() argument
1031 BDRVQcow2State *s = bs->opaque; in qcow2_alloc_cluster_link_l2()
1046 ret = perform_cow(bs, m); in qcow2_alloc_cluster_link_l2()
1053 qcow2_mark_dirty(bs); in qcow2_alloc_cluster_link_l2()
1056 qcow2_cache_set_dependency(bs, s->l2_table_cache, in qcow2_alloc_cluster_link_l2()
1060 ret = get_cluster_table(bs, m->offset, &l2_slice, &l2_index); in qcow2_alloc_cluster_link_l2()
1115 qcow2_free_any_cluster(bs, old_cluster[i], QCOW2_DISCARD_NEVER); in qcow2_alloc_cluster_link_l2()
1129 void coroutine_fn qcow2_alloc_cluster_abort(BlockDriverState *bs, QCowL2Meta *m) in qcow2_alloc_cluster_abort() argument
1131 BDRVQcow2State *s = bs->opaque; in qcow2_alloc_cluster_abort()
1132 if (!has_data_file(bs) && !m->keep_old_clusters) { in qcow2_alloc_cluster_abort()
1133 qcow2_free_clusters(bs, m->alloc_offset, in qcow2_alloc_cluster_abort()
1160 calculate_l2_meta(BlockDriverState *bs, uint64_t host_cluster_offset, in calculate_l2_meta() argument
1164 BDRVQcow2State *s = bs->opaque; in calculate_l2_meta()
1187 int cnt = qcow2_get_subcluster_range_type(bs, l2_entry, l2_bitmap, in calculate_l2_meta()
1195 type = qcow2_get_subcluster_type(bs, l2_entry, l2_bitmap, 0); in calculate_l2_meta()
1200 qcow2_signal_corruption(bs, true, -1, -1, "Invalid cluster " in calculate_l2_meta()
1216 type = qcow2_get_subcluster_type(bs, l2_entry, l2_bitmap, sc_index); in calculate_l2_meta()
1261 type = qcow2_get_subcluster_type(bs, l2_entry, l2_bitmap, sc_index); in calculate_l2_meta()
1333 cluster_needs_new_alloc(BlockDriverState *bs, uint64_t l2_entry) in cluster_needs_new_alloc() argument
1335 switch (qcow2_get_cluster_type(bs, l2_entry)) { in cluster_needs_new_alloc()
1365 count_single_write_clusters(BlockDriverState *bs, int nb_clusters, in count_single_write_clusters() argument
1368 BDRVQcow2State *s = bs->opaque; in count_single_write_clusters()
1375 if (cluster_needs_new_alloc(bs, l2_entry) != new_alloc) { in count_single_write_clusters()
1404 static int coroutine_fn handle_dependencies(BlockDriverState *bs, in handle_dependencies() argument
1408 BDRVQcow2State *s = bs->opaque; in handle_dependencies()
1495 handle_copied(BlockDriverState *bs, uint64_t guest_offset, in handle_copied() argument
1498 BDRVQcow2State *s = bs->opaque; in handle_copied()
1525 ret = get_cluster_table(bs, guest_offset, &l2_slice, &l2_index); in handle_copied()
1533 if (!cluster_needs_new_alloc(bs, l2_entry)) { in handle_copied()
1535 qcow2_signal_corruption(bs, true, -1, -1, "%s cluster offset " in handle_copied()
1552 keep_clusters = count_single_write_clusters(bs, nb_clusters, l2_slice, in handle_copied()
1561 ret = calculate_l2_meta(bs, cluster_offset, guest_offset, in handle_copied()
1605 do_alloc_cluster_offset(BlockDriverState *bs, uint64_t guest_offset, in do_alloc_cluster_offset() argument
1608 BDRVQcow2State *s = bs->opaque; in do_alloc_cluster_offset()
1613 if (has_data_file(bs)) { in do_alloc_cluster_offset()
1624 qcow2_alloc_clusters(bs, *nb_clusters * s->cluster_size); in do_alloc_cluster_offset()
1631 int64_t ret = qcow2_alloc_clusters_at(bs, *host_offset, *nb_clusters); in do_alloc_cluster_offset()
1662 handle_alloc(BlockDriverState *bs, uint64_t guest_offset, in handle_alloc() argument
1665 BDRVQcow2State *s = bs->opaque; in handle_alloc()
1690 ret = get_cluster_table(bs, guest_offset, &l2_slice, &l2_index); in handle_alloc()
1695 nb_clusters = count_single_write_clusters(bs, nb_clusters, in handle_alloc()
1706 ret = do_alloc_cluster_offset(bs, guest_offset, &alloc_cluster_offset, in handle_alloc()
1743 ret = calculate_l2_meta(bs, alloc_cluster_offset, guest_offset, *bytes, in handle_alloc()
1783 int coroutine_fn qcow2_alloc_host_offset(BlockDriverState *bs, uint64_t offset, in qcow2_alloc_host_offset() argument
1788 BDRVQcow2State *s = bs->opaque; in qcow2_alloc_host_offset()
1843 ret = handle_dependencies(bs, start, &cur_bytes, m); in qcow2_alloc_host_offset()
1863 ret = handle_copied(bs, start, &cluster_offset, &cur_bytes, m); in qcow2_alloc_host_offset()
1876 ret = handle_alloc(bs, start, &cluster_offset, &cur_bytes, m); in qcow2_alloc_host_offset()
1902 discard_in_l2_slice(BlockDriverState *bs, uint64_t offset, uint64_t nb_clusters, in discard_in_l2_slice() argument
1905 BDRVQcow2State *s = bs->opaque; in discard_in_l2_slice()
1911 ret = get_cluster_table(bs, offset, &l2_slice, &l2_index); in discard_in_l2_slice()
1926 qcow2_get_cluster_type(bs, old_l2_entry); in discard_in_l2_slice()
1947 } else if (bs->backing || qcow2_cluster_is_allocated(cluster_type)) { in discard_in_l2_slice()
1980 qcow2_free_any_cluster(bs, old_l2_entry, type); in discard_in_l2_slice()
1995 int qcow2_cluster_discard(BlockDriverState *bs, uint64_t offset, in qcow2_cluster_discard() argument
1999 BDRVQcow2State *s = bs->opaque; in qcow2_cluster_discard()
2008 end_offset == bs->total_sectors << BDRV_SECTOR_BITS); in qcow2_cluster_discard()
2016 cleared = discard_in_l2_slice(bs, offset, nb_clusters, type, in qcow2_cluster_discard()
2030 qcow2_process_discards(bs, ret); in qcow2_cluster_discard()
2041 zero_in_l2_slice(BlockDriverState *bs, uint64_t offset, in zero_in_l2_slice() argument
2044 BDRVQcow2State *s = bs->opaque; in zero_in_l2_slice()
2050 ret = get_cluster_table(bs, offset, &l2_slice, &l2_index); in zero_in_l2_slice()
2062 QCow2ClusterType type = qcow2_get_cluster_type(bs, old_l2_entry); in zero_in_l2_slice()
2094 qcow2_free_any_cluster(bs, old_l2_entry, QCOW2_DISCARD_REQUEST); in zero_in_l2_slice()
2111 zero_l2_subclusters(BlockDriverState *bs, uint64_t offset, in zero_l2_subclusters() argument
2114 BDRVQcow2State *s = bs->opaque; in zero_l2_subclusters()
2124 ret = get_cluster_table(bs, offset, &l2_slice, &l2_index); in zero_l2_subclusters()
2129 switch (qcow2_get_cluster_type(bs, get_l2_entry(s, l2_slice, l2_index))) { in zero_l2_subclusters()
2157 int coroutine_fn qcow2_subcluster_zeroize(BlockDriverState *bs, uint64_t offset, in qcow2_subcluster_zeroize() argument
2160 BDRVQcow2State *s = bs->opaque; in qcow2_subcluster_zeroize()
2169 if (data_file_is_raw(bs)) { in qcow2_subcluster_zeroize()
2170 assert(has_data_file(bs)); in qcow2_subcluster_zeroize()
2180 end_offset >= bs->total_sectors << BDRV_SECTOR_BITS); in qcow2_subcluster_zeroize()
2187 if (!bs->backing) { in qcow2_subcluster_zeroize()
2188 return qcow2_cluster_discard(bs, offset, bytes, in qcow2_subcluster_zeroize()
2197 tail = (end_offset >= bs->total_sectors << BDRV_SECTOR_BITS) ? 0 : in qcow2_subcluster_zeroize()
2204 ret = zero_l2_subclusters(bs, offset - head, in qcow2_subcluster_zeroize()
2215 cleared = zero_in_l2_slice(bs, offset, nb_clusters, flags); in qcow2_subcluster_zeroize()
2226 ret = zero_l2_subclusters(bs, end_offset, size_to_subclusters(s, tail)); in qcow2_subcluster_zeroize()
2235 qcow2_process_discards(bs, ret); in qcow2_subcluster_zeroize()
2249 expand_zero_clusters_in_l1(BlockDriverState *bs, uint64_t *l1_table, in expand_zero_clusters_in_l1() argument
2255 BDRVQcow2State *s = bs->opaque; in expand_zero_clusters_in_l1()
2271 l2_slice = qemu_try_blockalign(bs->file->bs, slice_size2); in expand_zero_clusters_in_l1()
2285 status_cb(bs, *visited_l1_entries, l1_entries, cb_opaque); in expand_zero_clusters_in_l1()
2291 qcow2_signal_corruption(bs, true, -1, -1, "L2 table offset %#" in expand_zero_clusters_in_l1()
2298 ret = qcow2_get_refcount(bs, l2_offset >> s->cluster_bits, in expand_zero_clusters_in_l1()
2309 ret = qcow2_cache_get(bs, s->l2_table_cache, slice_offset, in expand_zero_clusters_in_l1()
2313 ret = bdrv_pread(bs->file, slice_offset, slice_size2, in expand_zero_clusters_in_l1()
2324 qcow2_get_cluster_type(bs, l2_entry); in expand_zero_clusters_in_l1()
2332 if (!bs->backing) { in expand_zero_clusters_in_l1()
2343 offset = qcow2_alloc_clusters(bs, s->cluster_size); in expand_zero_clusters_in_l1()
2356 bs, offset >> s->cluster_bits, in expand_zero_clusters_in_l1()
2360 qcow2_free_clusters(bs, offset, s->cluster_size, in expand_zero_clusters_in_l1()
2370 bs, true, -1, -1, in expand_zero_clusters_in_l1()
2376 qcow2_free_clusters(bs, offset, s->cluster_size, in expand_zero_clusters_in_l1()
2383 ret = qcow2_pre_write_overlap_check(bs, 0, offset, in expand_zero_clusters_in_l1()
2387 qcow2_free_clusters(bs, offset, s->cluster_size, in expand_zero_clusters_in_l1()
2397 qcow2_free_clusters(bs, offset, s->cluster_size, in expand_zero_clusters_in_l1()
2424 bs, QCOW2_OL_INACTIVE_L2 | QCOW2_OL_ACTIVE_L2, in expand_zero_clusters_in_l1()
2430 ret = bdrv_pwrite(bs->file, slice_offset, slice_size2, in expand_zero_clusters_in_l1()
2441 status_cb(bs, *visited_l1_entries, l1_entries, cb_opaque); in expand_zero_clusters_in_l1()
2464 int qcow2_expand_zero_clusters(BlockDriverState *bs, in qcow2_expand_zero_clusters() argument
2468 BDRVQcow2State *s = bs->opaque; in qcow2_expand_zero_clusters()
2481 ret = expand_zero_clusters_in_l1(bs, s->l1_table, s->l1_size, in qcow2_expand_zero_clusters()
2495 ret = qcow2_cache_empty(bs, s->l2_table_cache); in qcow2_expand_zero_clusters()
2505 ret = qcow2_validate_table(bs, s->snapshots[i].l1_table_offset, in qcow2_expand_zero_clusters()
2524 ret = bdrv_pread(bs->file, s->snapshots[i].l1_table_offset, l1_size2, in qcow2_expand_zero_clusters()
2534 ret = expand_zero_clusters_in_l1(bs, l1_table, s->snapshots[i].l1_size, in qcow2_expand_zero_clusters()
2549 void qcow2_parse_compressed_l2_entry(BlockDriverState *bs, uint64_t l2_entry, in qcow2_parse_compressed_l2_entry() argument
2552 BDRVQcow2State *s = bs->opaque; in qcow2_parse_compressed_l2_entry()
2555 assert(qcow2_get_cluster_type(bs, l2_entry) == QCOW2_CLUSTER_COMPRESSED); in qcow2_parse_compressed_l2_entry()
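All of the functions indexed above follow the same pattern around bs: the qcow2 driver state is recovered from bs->opaque, metadata I/O and block-debug events go through bs->file, and the metadata helpers (cache, refcount, overlap check, corruption reporting) take bs as their first argument. The sketch below only illustrates that shape, using calls that appear in the listing; qcow2_example_ref is a hypothetical name and is not a function in the indexed file.

    /* Hypothetical sketch of the bs usage pattern shared by the functions
     * listed above; not part of the indexed file. */
    #include "qemu/osdep.h"
    #include "qcow2.h"

    static int qcow2_example_ref(BlockDriverState *bs)
    {
        BDRVQcow2State *s = bs->opaque;   /* qcow2 driver-private state */

        /* Debug events and metadata writes are issued against bs->file. */
        BLKDBG_EVENT(bs->file, BLKDBG_L1_UPDATE);

        /* Cache, refcount and table helpers take bs as their first argument. */
        return qcow2_cache_flush(bs, s->l2_table_cache);
    }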