Lines matching refs: vi

29 	struct erofs_inode *const vi = EROFS_I(inode);  in z_erofs_load_full_lcluster()  local
31 vi->inode_isize + vi->xattr_isize) + in z_erofs_load_full_lcluster()
50 m->clusterofs = 1 << vi->z_logical_clusterbits; in z_erofs_load_full_lcluster()
53 if (!(vi->z_advise & (Z_EROFS_ADVISE_BIG_PCLUSTER_1 | in z_erofs_load_full_lcluster()
70 if (m->clusterofs >= 1 << vi->z_logical_clusterbits) { in z_erofs_load_full_lcluster()
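The checks at lines 50 and 70 above bound m->clusterofs by the logical cluster size. A minimal userspace sketch of just that bound, assuming z_logical_clusterbits is the log2 of the logical cluster size (names reconstructed from the lines above, not from the header file):

/*
 * Hedged sketch: the clusterofs bound implied by lines 50/70.
 * A cluster offset must stay below 1 << z_logical_clusterbits.
 */
#include <stdbool.h>
#include <stdio.h>

static bool clusterofs_valid(unsigned int clusterofs,
                             unsigned int z_logical_clusterbits)
{
        /* mirrors "if (m->clusterofs >= 1 << vi->z_logical_clusterbits)" */
        return clusterofs < (1U << z_logical_clusterbits);
}

int main(void)
{
        /* with 4KiB logical clusters (clusterbits == 12) */
        printf("%d %d\n", clusterofs_valid(100, 12), clusterofs_valid(4096, 12));
        return 0;
}
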
121 struct erofs_inode *const vi = EROFS_I(m->inode); in unpack_compacted_index() local
122 const unsigned int lclusterbits = vi->z_logical_clusterbits; in unpack_compacted_index()
138 big_pcluster = vi->z_advise & Z_EROFS_ADVISE_BIG_PCLUSTER_1; in unpack_compacted_index()
229 struct erofs_inode *const vi = EROFS_I(inode); in z_erofs_load_compact_lcluster() local
231 ALIGN(erofs_iloc(inode) + vi->inode_isize + vi->xattr_isize, 8); in z_erofs_load_compact_lcluster()
246 if ((vi->z_advise & Z_EROFS_ADVISE_COMPACTED_2B) && in z_erofs_load_compact_lcluster()
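Lines 231 and 602 (and the fragment at line 31) round the end of the on-disk inode, base + inode_isize + xattr_isize, up to an 8-byte boundary before reading the compression metadata. A small sketch of that offset computation; erofs_iloc() and the example sizes below are stand-ins, not values from the source:

/*
 * Hedged sketch of the "ALIGN(erofs_iloc(inode) + inode_isize +
 * xattr_isize, 8)" pattern at lines 231 and 602.
 */
#include <stdint.h>
#include <stdio.h>

#define ALIGN_UP(x, a)  (((x) + (a) - 1) & ~(uint64_t)((a) - 1))

int main(void)
{
        uint64_t iloc = 8192;           /* assumed on-disk inode location */
        uint64_t inode_isize = 64;      /* assumed on-disk inode size */
        uint64_t xattr_isize = 12;      /* xattr area, not 8-byte aligned */

        printf("metadata base: %llu\n",
               (unsigned long long)ALIGN_UP(iloc + inode_isize + xattr_isize, 8));
        return 0;
}
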
293 struct erofs_inode *const vi = EROFS_I(m->inode); in z_erofs_extent_lookback() local
294 const unsigned int lclusterbits = vi->z_logical_clusterbits; in z_erofs_extent_lookback()
318 m->type, lcn, vi->nid); in z_erofs_extent_lookback()
325 lookback_distance, m->lcn, vi->nid); in z_erofs_extent_lookback()
334 struct erofs_inode *const vi = EROFS_I(m->inode); in z_erofs_get_extent_compressedlen() local
336 const unsigned int lclusterbits = vi->z_logical_clusterbits; in z_erofs_get_extent_compressedlen()
347 !(vi->z_advise & Z_EROFS_ADVISE_BIG_PCLUSTER_1)) || in z_erofs_get_extent_compressedlen()
349 !(vi->z_advise & Z_EROFS_ADVISE_BIG_PCLUSTER_2))) { in z_erofs_get_extent_compressedlen()
390 vi->nid); in z_erofs_get_extent_compressedlen()
398 erofs_err(sb, "bogus CBLKCNT @ lcn %lu of nid %llu", lcn, vi->nid); in z_erofs_get_extent_compressedlen()
406 struct erofs_inode *vi = EROFS_I(inode); in z_erofs_get_extent_decompressedlen() local
408 unsigned int lclusterbits = vi->z_logical_clusterbits; in z_erofs_get_extent_decompressedlen()
435 m->type, lcn, vi->nid); in z_erofs_get_extent_decompressedlen()
449 struct erofs_inode *const vi = EROFS_I(inode); in z_erofs_do_map_blocks() local
450 bool ztailpacking = vi->z_advise & Z_EROFS_ADVISE_INLINE_PCLUSTER; in z_erofs_do_map_blocks()
451 bool fragment = vi->z_advise & Z_EROFS_ADVISE_FRAGMENT_PCLUSTER; in z_erofs_do_map_blocks()
461 lclusterbits = vi->z_logical_clusterbits; in z_erofs_do_map_blocks()
471 vi->z_idataoff = m.nextpackoff; in z_erofs_do_map_blocks()
496 vi->nid); in z_erofs_do_map_blocks()
513 m.type, ofs, vi->nid); in z_erofs_do_map_blocks()
522 vi->z_tailextent_headlcn = m.lcn; in z_erofs_do_map_blocks()
524 if (fragment && vi->datalayout == EROFS_INODE_COMPRESSED_FULL) in z_erofs_do_map_blocks()
525 vi->z_fragmentoff |= (u64)m.pblk << 32; in z_erofs_do_map_blocks()
527 if (ztailpacking && m.lcn == vi->z_tailextent_headlcn) { in z_erofs_do_map_blocks()
529 map->m_pa = vi->z_idataoff; in z_erofs_do_map_blocks()
530 map->m_plen = vi->z_idata_size; in z_erofs_do_map_blocks()
531 } else if (fragment && m.lcn == vi->z_tailextent_headlcn) { in z_erofs_do_map_blocks()
546 afmt = vi->z_advise & Z_EROFS_ADVISE_INTERLACED_PCLUSTER ? in z_erofs_do_map_blocks()
551 vi->z_algorithmtype[1] : vi->z_algorithmtype[0]; in z_erofs_do_map_blocks()
554 afmt, vi->nid); in z_erofs_do_map_blocks()
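Lines 525 and 677 suggest the fragment offset is assembled in two halves: the low 32 bits from the header's h_fragmentoff and the high 32 bits from a physical block number found while mapping. A hedged reconstruction from just those two lines, not the full logic:

/*
 * Hedged sketch: line 677 stores a 32-bit h_fragmentoff, and line 525
 * later ORs a physical block number into the upper 32 bits.
 */
#include <stdint.h>
#include <stdio.h>

int main(void)
{
        uint32_t h_fragmentoff = 0x1000;        /* low 32 bits, from the header */
        uint64_t pblk = 0x2a;                   /* example physical block */
        uint64_t z_fragmentoff = h_fragmentoff; /* mirrors line 677 */

        z_fragmentoff |= pblk << 32;            /* mirrors line 525 */
        printf("z_fragmentoff = 0x%llx\n", (unsigned long long)z_fragmentoff);
        return 0;
}
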
578 struct erofs_inode *const vi = EROFS_I(inode); in z_erofs_fill_inode_lazy() local
586 if (test_bit(EROFS_I_Z_INITED_BIT, &vi->flags)) { in z_erofs_fill_inode_lazy()
595 if (wait_on_bit_lock(&vi->flags, EROFS_I_BL_Z_BIT, TASK_KILLABLE)) in z_erofs_fill_inode_lazy()
599 if (test_bit(EROFS_I_Z_INITED_BIT, &vi->flags)) in z_erofs_fill_inode_lazy()
602 pos = ALIGN(erofs_iloc(inode) + vi->inode_isize + vi->xattr_isize, 8); in z_erofs_fill_inode_lazy()
615 vi->z_advise = Z_EROFS_ADVISE_FRAGMENT_PCLUSTER; in z_erofs_fill_inode_lazy()
616 vi->z_fragmentoff = le64_to_cpu(*(__le64 *)h) ^ (1ULL << 63); in z_erofs_fill_inode_lazy()
617 vi->z_tailextent_headlcn = 0; in z_erofs_fill_inode_lazy()
620 vi->z_advise = le16_to_cpu(h->h_advise); in z_erofs_fill_inode_lazy()
621 vi->z_algorithmtype[0] = h->h_algorithmtype & 15; in z_erofs_fill_inode_lazy()
622 vi->z_algorithmtype[1] = h->h_algorithmtype >> 4; in z_erofs_fill_inode_lazy()
625 if (vi->z_algorithmtype[0] >= Z_EROFS_COMPRESSION_MAX || in z_erofs_fill_inode_lazy()
626 vi->z_algorithmtype[++headnr] >= Z_EROFS_COMPRESSION_MAX) { in z_erofs_fill_inode_lazy()
628 headnr + 1, vi->z_algorithmtype[headnr], vi->nid); in z_erofs_fill_inode_lazy()
633 vi->z_logical_clusterbits = sb->s_blocksize_bits + (h->h_clusterbits & 7); in z_erofs_fill_inode_lazy()
635 vi->z_advise & (Z_EROFS_ADVISE_BIG_PCLUSTER_1 | in z_erofs_fill_inode_lazy()
638 vi->nid); in z_erofs_fill_inode_lazy()
642 if (vi->datalayout == EROFS_INODE_COMPRESSED_COMPACT && in z_erofs_fill_inode_lazy()
643 !(vi->z_advise & Z_EROFS_ADVISE_BIG_PCLUSTER_1) ^ in z_erofs_fill_inode_lazy()
644 !(vi->z_advise & Z_EROFS_ADVISE_BIG_PCLUSTER_2)) { in z_erofs_fill_inode_lazy()
646 vi->nid); in z_erofs_fill_inode_lazy()
651 if (vi->z_advise & Z_EROFS_ADVISE_INLINE_PCLUSTER) { in z_erofs_fill_inode_lazy()
656 vi->z_idata_size = le16_to_cpu(h->h_idata_size); in z_erofs_fill_inode_lazy()
671 if (vi->z_advise & Z_EROFS_ADVISE_FRAGMENT_PCLUSTER && in z_erofs_fill_inode_lazy()
677 vi->z_fragmentoff = le32_to_cpu(h->h_fragmentoff); in z_erofs_fill_inode_lazy()
687 set_bit(EROFS_I_Z_INITED_BIT, &vi->flags); in z_erofs_fill_inode_lazy()
691 clear_and_wake_up_bit(EROFS_I_BL_Z_BIT, &vi->flags); in z_erofs_fill_inode_lazy()
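Lines 621-622 and 633 decode the per-inode compression header: h_algorithmtype packs two 4-bit algorithm ids, and the logical cluster size is the filesystem block size shifted by the low three bits of h_clusterbits. A small sketch of that decoding with made-up example values:

/*
 * Hedged sketch of the header decoding at lines 621-622 and 633;
 * the input values are illustrative only.
 */
#include <stdint.h>
#include <stdio.h>

int main(void)
{
        uint8_t h_algorithmtype = 0x10;         /* example packed value */
        uint8_t h_clusterbits = 2;              /* lcluster = 4 * block size */
        unsigned int s_blocksize_bits = 12;     /* 4KiB blocks */

        unsigned int algo[2] = {
                h_algorithmtype & 15,           /* mirrors line 621 */
                h_algorithmtype >> 4,           /* mirrors line 622 */
        };
        unsigned int z_logical_clusterbits =
                s_blocksize_bits + (h_clusterbits & 7); /* mirrors line 633 */

        printf("algo0=%u algo1=%u lclustersize=%u\n",
               algo[0], algo[1], 1U << z_logical_clusterbits);
        return 0;
}
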
698 struct erofs_inode *const vi = EROFS_I(inode); in z_erofs_map_blocks_iter() local
715 if ((vi->z_advise & Z_EROFS_ADVISE_FRAGMENT_PCLUSTER) && in z_erofs_map_blocks_iter()
716 !vi->z_tailextent_headlcn) { in z_erofs_map_blocks_iter()
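
Nearly every hit above starts from vi = EROFS_I(inode). A standalone sketch of that conversion, assuming EROFS_I() is the usual container_of() back-pointer from the embedded VFS inode; only a few fields are shown and their types are guesses:

/*
 * Hedged sketch of the recurring "vi = EROFS_I(inode)" pattern:
 * struct erofs_inode is assumed to embed the VFS inode, with EROFS_I()
 * recovering the containing structure via container_of().
 */
#include <stddef.h>
#include <stdio.h>

struct inode { unsigned long i_ino; };          /* stand-in for the VFS inode */

struct erofs_inode {
        unsigned short z_advise;
        unsigned int z_algorithmtype[2];
        unsigned int z_logical_clusterbits;
        struct inode vfs_inode;                 /* embedded VFS inode */
};

#define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))
#define EROFS_I(inode)  container_of(inode, struct erofs_inode, vfs_inode)

int main(void)
{
        struct erofs_inode ei = { .z_logical_clusterbits = 12,
                                  .vfs_inode = { .i_ino = 42 } };
        struct inode *inode = &ei.vfs_inode;    /* what the VFS hands around */
        struct erofs_inode *vi = EROFS_I(inode);

        printf("ino=%lu lclusterbits=%u\n", inode->i_ino,
               vi->z_logical_clusterbits);
        return 0;
}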