Lines matching full:vi (full-word occurrences of the local struct erofs_inode pointer vi in the EROFS z-mapping code)
29 struct erofs_inode *const vi = EROFS_I(inode); in z_erofs_load_full_lcluster() local
31 vi->inode_isize + vi->xattr_isize) + in z_erofs_load_full_lcluster()
50 m->clusterofs = 1 << vi->z_logical_clusterbits; in z_erofs_load_full_lcluster()
53 if (!(vi->z_advise & (Z_EROFS_ADVISE_BIG_PCLUSTER_1 | in z_erofs_load_full_lcluster()
70 if (m->clusterofs >= 1 << vi->z_logical_clusterbits) { in z_erofs_load_full_lcluster()
121 struct erofs_inode *const vi = EROFS_I(m->inode); in unpack_compacted_index() local
122 const unsigned int lclusterbits = vi->z_logical_clusterbits; in unpack_compacted_index()
138 big_pcluster = vi->z_advise & Z_EROFS_ADVISE_BIG_PCLUSTER_1; in unpack_compacted_index()
229 struct erofs_inode *const vi = EROFS_I(inode); in z_erofs_load_compact_lcluster() local
231 ALIGN(erofs_iloc(inode) + vi->inode_isize + vi->xattr_isize, 8); in z_erofs_load_compact_lcluster()
237 if (lcn >= totalidx || vi->z_logical_clusterbits > 14) in z_erofs_load_compact_lcluster()
246 if ((vi->z_advise & Z_EROFS_ADVISE_COMPACTED_2B) && in z_erofs_load_compact_lcluster()
293 struct erofs_inode *const vi = EROFS_I(m->inode); in z_erofs_extent_lookback() local
294 const unsigned int lclusterbits = vi->z_logical_clusterbits; in z_erofs_extent_lookback()
318 m->type, lcn, vi->nid); in z_erofs_extent_lookback()
325 lookback_distance, m->lcn, vi->nid); in z_erofs_extent_lookback()
334 struct erofs_inode *const vi = EROFS_I(m->inode); in z_erofs_get_extent_compressedlen() local
336 const unsigned int lclusterbits = vi->z_logical_clusterbits; in z_erofs_get_extent_compressedlen()
347 !(vi->z_advise & Z_EROFS_ADVISE_BIG_PCLUSTER_1)) || in z_erofs_get_extent_compressedlen()
349 !(vi->z_advise & Z_EROFS_ADVISE_BIG_PCLUSTER_2))) { in z_erofs_get_extent_compressedlen()
390 vi->nid); in z_erofs_get_extent_compressedlen()
398 erofs_err(sb, "bogus CBLKCNT @ lcn %lu of nid %llu", lcn, vi->nid); in z_erofs_get_extent_compressedlen()
406 struct erofs_inode *vi = EROFS_I(inode); in z_erofs_get_extent_decompressedlen() local
408 unsigned int lclusterbits = vi->z_logical_clusterbits; in z_erofs_get_extent_decompressedlen()
437 m->type, lcn, vi->nid); in z_erofs_get_extent_decompressedlen()
450 struct erofs_inode *const vi = EROFS_I(inode); in z_erofs_do_map_blocks() local
451 bool ztailpacking = vi->z_advise & Z_EROFS_ADVISE_INLINE_PCLUSTER; in z_erofs_do_map_blocks()
452 bool fragment = vi->z_advise & Z_EROFS_ADVISE_FRAGMENT_PCLUSTER; in z_erofs_do_map_blocks()
462 lclusterbits = vi->z_logical_clusterbits; in z_erofs_do_map_blocks()
472 vi->z_idataoff = m.nextpackoff; in z_erofs_do_map_blocks()
497 vi->nid); in z_erofs_do_map_blocks()
514 m.type, ofs, vi->nid); in z_erofs_do_map_blocks()
523 vi->z_tailextent_headlcn = m.lcn; in z_erofs_do_map_blocks()
525 if (fragment && vi->datalayout == EROFS_INODE_COMPRESSED_FULL) in z_erofs_do_map_blocks()
526 vi->z_fragmentoff |= (u64)m.pblk << 32; in z_erofs_do_map_blocks()
528 if (ztailpacking && m.lcn == vi->z_tailextent_headlcn) { in z_erofs_do_map_blocks()
530 map->m_pa = vi->z_idataoff; in z_erofs_do_map_blocks()
531 map->m_plen = vi->z_idata_size; in z_erofs_do_map_blocks()
532 } else if (fragment && m.lcn == vi->z_tailextent_headlcn) { in z_erofs_do_map_blocks()
547 afmt = vi->z_advise & Z_EROFS_ADVISE_INTERLACED_PCLUSTER ? in z_erofs_do_map_blocks()
552 vi->z_algorithmtype[1] : vi->z_algorithmtype[0]; in z_erofs_do_map_blocks()
555 afmt, vi->nid); in z_erofs_do_map_blocks()
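The z_erofs_do_map_blocks() matches above mostly test per-inode feature bits in vi->z_advise (inline/tail-packed pcluster, fragment, interlaced) before deciding how an extent is mapped. A minimal userspace sketch of that style of feature-bit test follows; the flag names come from the matches themselves, but the numeric values below are placeholders for illustration, not the on-disk definitions from fs/erofs/erofs_fs.h.

	#include <stdbool.h>
	#include <stdint.h>
	#include <stdio.h>

	/* Placeholder bit values for illustration only; the real definitions
	 * live in fs/erofs/erofs_fs.h and may differ. */
	#define ADVISE_INLINE_PCLUSTER_SKETCH     0x0008
	#define ADVISE_INTERLACED_PCLUSTER_SKETCH 0x0010
	#define ADVISE_FRAGMENT_PCLUSTER_SKETCH   0x0020

	static void describe_advise(uint16_t z_advise)
	{
		/* Mirrors the boolean tests seen in the matches above. */
		bool ztailpacking = z_advise & ADVISE_INLINE_PCLUSTER_SKETCH;
		bool fragment = z_advise & ADVISE_FRAGMENT_PCLUSTER_SKETCH;
		bool interlaced = z_advise & ADVISE_INTERLACED_PCLUSTER_SKETCH;

		printf("tail-packed inline: %d, fragment: %d, interlaced: %d\n",
		       ztailpacking, fragment, interlaced);
	}

	int main(void)
	{
		describe_advise(0x0028);	/* e.g. inline + fragment set */
		return 0;
	}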
579 struct erofs_inode *const vi = EROFS_I(inode); in z_erofs_fill_inode_lazy() local
587 if (test_bit(EROFS_I_Z_INITED_BIT, &vi->flags)) { in z_erofs_fill_inode_lazy()
596 if (wait_on_bit_lock(&vi->flags, EROFS_I_BL_Z_BIT, TASK_KILLABLE)) in z_erofs_fill_inode_lazy()
600 if (test_bit(EROFS_I_Z_INITED_BIT, &vi->flags)) in z_erofs_fill_inode_lazy()
603 pos = ALIGN(erofs_iloc(inode) + vi->inode_isize + vi->xattr_isize, 8); in z_erofs_fill_inode_lazy()
616 vi->z_advise = Z_EROFS_ADVISE_FRAGMENT_PCLUSTER; in z_erofs_fill_inode_lazy()
617 vi->z_fragmentoff = le64_to_cpu(*(__le64 *)h) ^ (1ULL << 63); in z_erofs_fill_inode_lazy()
618 vi->z_tailextent_headlcn = 0; in z_erofs_fill_inode_lazy()
621 vi->z_advise = le16_to_cpu(h->h_advise); in z_erofs_fill_inode_lazy()
622 vi->z_algorithmtype[0] = h->h_algorithmtype & 15; in z_erofs_fill_inode_lazy()
623 vi->z_algorithmtype[1] = h->h_algorithmtype >> 4; in z_erofs_fill_inode_lazy()
626 if (vi->z_algorithmtype[0] >= Z_EROFS_COMPRESSION_MAX || in z_erofs_fill_inode_lazy()
627 vi->z_algorithmtype[++headnr] >= Z_EROFS_COMPRESSION_MAX) { in z_erofs_fill_inode_lazy()
629 headnr + 1, vi->z_algorithmtype[headnr], vi->nid); in z_erofs_fill_inode_lazy()
634 vi->z_logical_clusterbits = sb->s_blocksize_bits + (h->h_clusterbits & 7); in z_erofs_fill_inode_lazy()
636 vi->z_advise & (Z_EROFS_ADVISE_BIG_PCLUSTER_1 | in z_erofs_fill_inode_lazy()
639 vi->nid); in z_erofs_fill_inode_lazy()
643 if (vi->datalayout == EROFS_INODE_COMPRESSED_COMPACT && in z_erofs_fill_inode_lazy()
644 !(vi->z_advise & Z_EROFS_ADVISE_BIG_PCLUSTER_1) ^ in z_erofs_fill_inode_lazy()
645 !(vi->z_advise & Z_EROFS_ADVISE_BIG_PCLUSTER_2)) { in z_erofs_fill_inode_lazy()
647 vi->nid); in z_erofs_fill_inode_lazy()
652 if (vi->z_advise & Z_EROFS_ADVISE_INLINE_PCLUSTER) { in z_erofs_fill_inode_lazy()
657 vi->z_idata_size = le16_to_cpu(h->h_idata_size); in z_erofs_fill_inode_lazy()
672 if (vi->z_advise & Z_EROFS_ADVISE_FRAGMENT_PCLUSTER && in z_erofs_fill_inode_lazy()
678 vi->z_fragmentoff = le32_to_cpu(h->h_fragmentoff); in z_erofs_fill_inode_lazy()
688 set_bit(EROFS_I_Z_INITED_BIT, &vi->flags); in z_erofs_fill_inode_lazy()
692 clear_and_wake_up_bit(EROFS_I_BL_Z_BIT, &vi->flags); in z_erofs_fill_inode_lazy()
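The z_erofs_fill_inode_lazy() matches above outline a one-time, on-demand initialization guarded by two bits in vi->flags: an INITED bit for the fast path and a bit lock (EROFS_I_BL_Z_BIT) serializing the slow path. Below is a minimal sketch of that wait-bit pattern with placeholder bit numbers and an illustrative function name; only test_bit(), wait_on_bit_lock(), set_bit() and clear_and_wake_up_bit() are the real kernel helpers the matches use.

	#include <linux/bitops.h>
	#include <linux/errno.h>
	#include <linux/sched.h>
	#include <linux/wait_bit.h>

	#define MY_INITED_BIT	0	/* placeholder bit numbers for illustration */
	#define MY_BL_BIT	1

	static int lazy_fill(unsigned long *flags)
	{
		int err = 0;

		if (test_bit(MY_INITED_BIT, flags))	/* fast path: already parsed */
			return 0;

		/* Serialize initializers; TASK_KILLABLE lets a fatal signal abort. */
		if (wait_on_bit_lock(flags, MY_BL_BIT, TASK_KILLABLE))
			return -ERESTARTSYS;

		if (test_bit(MY_INITED_BIT, flags))	/* lost the race: nothing to do */
			goto out_unlock;

		/* ... read and validate the on-disk compression header here ... */

		set_bit(MY_INITED_BIT, flags);		/* publish only once set up */
	out_unlock:
		clear_and_wake_up_bit(MY_BL_BIT, flags);	/* drop the bit lock, wake waiters */
		return err;
	}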
699 struct erofs_inode *const vi = EROFS_I(inode); in z_erofs_map_blocks_iter() local
716 if ((vi->z_advise & Z_EROFS_ADVISE_FRAGMENT_PCLUSTER) && in z_erofs_map_blocks_iter()
717 !vi->z_tailextent_headlcn) { in z_erofs_map_blocks_iter()
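Every match reduces to the same access pattern: EROFS_I() converts a VFS struct inode into the erofs_inode that embeds it, and the z_* members read here (z_advise, z_logical_clusterbits, z_algorithmtype, z_idataoff, z_fragmentoff, ...) carry the per-inode compression metadata filled in by z_erofs_fill_inode_lazy(). A trimmed, illustrative sketch of that container_of-style accessor follows; the authoritative layout and field types are in fs/erofs/internal.h.

	#include <linux/container_of.h>
	#include <linux/fs.h>
	#include <linux/types.h>

	/* Illustrative subset of the fields the matches reference; not the
	 * real definition (see fs/erofs/internal.h). */
	struct erofs_inode_sketch {
		unsigned long flags;		/* EROFS_I_Z_INITED_BIT, EROFS_I_BL_Z_BIT, ... */
		u64 nid;			/* node id printed in the error messages above */
		unsigned char datalayout;	/* EROFS_INODE_COMPRESSED_{FULL,COMPACT}, ... */
		unsigned char inode_isize;
		unsigned int xattr_isize;
		u16 z_advise;			/* Z_EROFS_ADVISE_* feature bits */
		u8 z_algorithmtype[2];		/* per-head compression algorithms */
		u8 z_logical_clusterbits;	/* log2 of the logical cluster size */
		/* ... z_idataoff/z_fragmentoff, z_idata_size, z_tailextent_headlcn ... */
		struct inode vfs_inode;		/* the embedded VFS inode */
	};

	/* Sketch of the EROFS_I() accessor behind every "local" line above. */
	static inline struct erofs_inode_sketch *EROFS_I_SKETCH(struct inode *inode)
	{
		return container_of(inode, struct erofs_inode_sketch, vfs_inode);
	}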