Lines matching refs: sc
(Each entry shows the source line number, the matching code line, and the enclosing function; "argument" or "local" notes how sc appears on that line.)

33 struct xfs_scrub *sc) in xchk_setup_ag_iallocbt() argument
35 if (xchk_need_intent_drain(sc)) in xchk_setup_ag_iallocbt()
36 xchk_fsgates_enable(sc, XCHK_FSGATES_DRAIN); in xchk_setup_ag_iallocbt()
37 return xchk_setup_ag_btree(sc, sc->flags & XCHK_TRY_HARDER); in xchk_setup_ag_iallocbt()
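
The four fragments above are essentially the whole setup routine: enable the intent-drain fsgate when scrub might collide with chained deferred operations, then do the common per-AG btree setup. A sketch assembled from those lines (the return type, braces, and comments are filled in by hand and are not part of the listing):

/* Prepare to scrub an AG inode btree; sketch assembled from the lines above. */
int
xchk_setup_ag_iallocbt(
	struct xfs_scrub	*sc)
{
	/* Take the intent-drain fsgate if scrub could race with deferred ops. */
	if (xchk_need_intent_drain(sc))
		xchk_fsgates_enable(sc, XCHK_FSGATES_DRAIN);

	/* Common per-AG btree setup; the flag marks a XCHK_TRY_HARDER retry. */
	return xchk_setup_ag_btree(sc, sc->flags & XCHK_TRY_HARDER);
}
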
66 struct xfs_scrub *sc, in xchk_inobt_xref_finobt() argument
73 struct xfs_btree_cur *cur = sc->sa.fino_cur; in xchk_inobt_xref_finobt()
101 xchk_btree_xref_set_corrupt(sc, cur, 0); in xchk_inobt_xref_finobt()
103 xchk_btree_xref_set_corrupt(sc, cur, 0); in xchk_inobt_xref_finobt()
123 xchk_btree_xref_set_corrupt(sc, cur, 0); in xchk_inobt_xref_finobt()
133 struct xfs_scrub *sc, in xchk_inobt_chunk_xref_finobt() argument
142 ASSERT(sc->sm->sm_type == XFS_SCRUB_TYPE_INOBT); in xchk_inobt_chunk_xref_finobt()
144 if (!sc->sa.fino_cur || xchk_skip_xref(sc->sm)) in xchk_inobt_chunk_xref_finobt()
157 error = xchk_inobt_xref_finobt(sc, irec, i, free, hole); in xchk_inobt_chunk_xref_finobt()
158 if (!xchk_should_check_xref(sc, &error, &sc->sa.fino_cur)) in xchk_inobt_chunk_xref_finobt()
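
Lines 133-158 outline xchk_inobt_chunk_xref_finobt(): it only runs for XFS_SCRUB_TYPE_INOBT scrubs, bails out when there is no finobt cursor or cross-referencing is being skipped, and otherwise walks every inode in the chunk, handing its free and hole state to xchk_inobt_xref_finobt() until xchk_should_check_xref() reports that the finobt cursor is no longer usable. A sketch of that loop; the derivation of free and hole from ir_free/ir_holemask is my reconstruction, not part of the listing:

/* Cross-reference every inode of an inobt chunk against the finobt (sketch). */
STATIC void
xchk_inobt_chunk_xref_finobt(
	struct xfs_scrub		*sc,
	const struct xfs_inobt_rec_incore *irec,
	xfs_agino_t			agino,
	unsigned int			nr_inodes)
{
	xfs_agino_t			i;
	unsigned int			rec_idx;
	int				error;

	ASSERT(sc->sm->sm_type == XFS_SCRUB_TYPE_INOBT);

	/* Nothing to do without a finobt cursor, or if xref is disabled. */
	if (!sc->sa.fino_cur || xchk_skip_xref(sc->sm))
		return;

	for (i = agino, rec_idx = agino - irec->ir_startino;
	     i < agino + nr_inodes;
	     i++, rec_idx++) {
		bool			free, hole;

		/* Assumed derivation of the per-inode free and hole bits. */
		free = irec->ir_free & (1ULL << rec_idx);
		hole = irec->ir_holemask &
				(1U << (rec_idx / XFS_INODES_PER_HOLEMASK_BIT));

		error = xchk_inobt_xref_finobt(sc, irec, i, free, hole);
		if (!xchk_should_check_xref(sc, &error, &sc->sa.fino_cur))
			return;
	}
}
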
169 struct xfs_scrub *sc, in xchk_finobt_xref_inobt() argument
176 struct xfs_btree_cur *cur = sc->sa.ino_cur; in xchk_finobt_xref_inobt()
204 xchk_btree_xref_set_corrupt(sc, cur, 0); in xchk_finobt_xref_inobt()
206 xchk_btree_xref_set_corrupt(sc, cur, 0); in xchk_finobt_xref_inobt()
211 xchk_btree_xref_set_corrupt(sc, cur, 0); in xchk_finobt_xref_inobt()
221 struct xfs_scrub *sc, in xchk_finobt_chunk_xref_inobt() argument
230 ASSERT(sc->sm->sm_type == XFS_SCRUB_TYPE_FINOBT); in xchk_finobt_chunk_xref_inobt()
232 if (!sc->sa.ino_cur || xchk_skip_xref(sc->sm)) in xchk_finobt_chunk_xref_inobt()
245 error = xchk_finobt_xref_inobt(sc, frec, i, ffree, fhole); in xchk_finobt_chunk_xref_inobt()
246 if (!xchk_should_check_xref(sc, &error, &sc->sa.ino_cur)) in xchk_finobt_chunk_xref_inobt()
259 struct xfs_scrub *sc = bs->sc; in xchk_iallocbt_chunk() local
269 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_chunk()
271 if (bs->sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT) in xchk_iallocbt_chunk()
274 xchk_xref_is_used_space(sc, agbno, len); in xchk_iallocbt_chunk()
275 if (sc->sm->sm_type == XFS_SCRUB_TYPE_INOBT) in xchk_iallocbt_chunk()
276 xchk_inobt_chunk_xref_finobt(sc, irec, agino, nr_inodes); in xchk_iallocbt_chunk()
278 xchk_finobt_chunk_xref_inobt(sc, irec, agino, nr_inodes); in xchk_iallocbt_chunk()
279 xchk_xref_is_only_owned_by(sc, agbno, len, &XFS_RMAP_OINFO_INODES); in xchk_iallocbt_chunk()
280 xchk_xref_is_not_shared(sc, agbno, len); in xchk_iallocbt_chunk()
281 xchk_xref_is_not_cow_staging(sc, agbno, len); in xchk_iallocbt_chunk()
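
Lines 259-281 show the heart of xchk_iallocbt_chunk(): once the chunk's AG extent has been checked (and as long as no corruption has already been flagged), it fans the extent out to every other piece of AG metadata - used space, the opposite inode btree, rmap ownership, reflink sharing, and CoW staging. A sketch of that fan-out; the conversion of agino/nr_inodes into agbno/len and the xfs_verify_agbext() bounds check are assumptions filled in around the listed calls:

/* Cross-reference one inode chunk record with the other AG metadata (sketch). */
STATIC bool
xchk_iallocbt_chunk(
	struct xchk_btree		*bs,
	struct xfs_inobt_rec_incore	*irec,
	xfs_agino_t			agino,
	unsigned int			nr_inodes)
{
	struct xfs_scrub		*sc = bs->sc;
	struct xfs_mount		*mp = bs->cur->bc_mp;
	xfs_agblock_t			agbno;
	xfs_extlen_t			len;

	/* Assumed: turn the chunk into an AG extent and bounds-check it. */
	agbno = XFS_AGINO_TO_AGBNO(mp, agino);
	len = XFS_B_TO_FSB(mp, nr_inodes * mp->m_sb.sb_inodesize);
	if (!xfs_verify_agbext(bs->cur->bc_ag.pag, agbno, len))
		xchk_btree_set_corrupt(bs->sc, bs->cur, 0);

	if (bs->sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
		return false;

	/* The chunk must be accounted as used space... */
	xchk_xref_is_used_space(sc, agbno, len);
	/* ...agree with the other inode btree... */
	if (sc->sm->sm_type == XFS_SCRUB_TYPE_INOBT)
		xchk_inobt_chunk_xref_finobt(sc, irec, agino, nr_inodes);
	else
		xchk_finobt_chunk_xref_inobt(sc, irec, agino, nr_inodes);
	/* ...be owned only by inodes, unshared, and not CoW staging space. */
	xchk_xref_is_only_owned_by(sc, agbno, len, &XFS_RMAP_OINFO_INODES);
	xchk_xref_is_not_shared(sc, agbno, len);
	xchk_xref_is_not_cow_staging(sc, agbno, len);

	return true;
}
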
314 if (xchk_should_terminate(bs->sc, &error)) in xchk_iallocbt_check_cluster_ifree()
327 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_check_cluster_ifree()
331 error = xchk_inode_is_allocated(bs->sc, agino, &ino_inuse); in xchk_iallocbt_check_cluster_ifree()
335 if (!(bs->sc->flags & XCHK_TRY_HARDER) && !freemask_ok) in xchk_iallocbt_check_cluster_ifree()
349 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_check_cluster_ifree()
406 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_check_cluster()
418 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_check_cluster()
424 xchk_xref_is_not_owned_by(bs->sc, agbno, in xchk_iallocbt_check_cluster()
430 xchk_xref_is_only_owned_by(bs->sc, agbno, M_IGEO(mp)->blocks_per_cluster, in xchk_iallocbt_check_cluster()
435 if (!xchk_btree_xref_process_error(bs->sc, bs->cur, 0, &error)) in xchk_iallocbt_check_cluster()
443 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_check_cluster()
481 cluster_base += M_IGEO(bs->sc->mp)->inodes_per_cluster) { in xchk_iallocbt_check_clusters()
501 struct xfs_mount *mp = bs->sc->mp; in xchk_iallocbt_rec_alignment()
523 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_rec_alignment()
534 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_rec_alignment()
550 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_rec_alignment()
555 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_rec_alignment()
589 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_rec()
596 if (bs->sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT) in xchk_iallocbt_rec()
604 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_rec()
618 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_rec()
632 xchk_btree_set_corrupt(bs->sc, bs->cur, 0); in xchk_iallocbt_rec()
635 if (bs->sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT) in xchk_iallocbt_rec()
652 struct xfs_scrub *sc, in xchk_iallocbt_xref_rmap_btreeblks() argument
660 if (!sc->sa.ino_cur || !sc->sa.rmap_cur || in xchk_iallocbt_xref_rmap_btreeblks()
661 (xfs_has_finobt(sc->mp) && !sc->sa.fino_cur) || in xchk_iallocbt_xref_rmap_btreeblks()
662 xchk_skip_xref(sc->sm)) in xchk_iallocbt_xref_rmap_btreeblks()
666 error = xfs_btree_count_blocks(sc->sa.ino_cur, &inobt_blocks); in xchk_iallocbt_xref_rmap_btreeblks()
667 if (!xchk_process_error(sc, 0, 0, &error)) in xchk_iallocbt_xref_rmap_btreeblks()
670 if (sc->sa.fino_cur) { in xchk_iallocbt_xref_rmap_btreeblks()
671 error = xfs_btree_count_blocks(sc->sa.fino_cur, &finobt_blocks); in xchk_iallocbt_xref_rmap_btreeblks()
672 if (!xchk_process_error(sc, 0, 0, &error)) in xchk_iallocbt_xref_rmap_btreeblks()
676 error = xchk_count_rmap_ownedby_ag(sc, sc->sa.rmap_cur, in xchk_iallocbt_xref_rmap_btreeblks()
678 if (!xchk_should_check_xref(sc, &error, &sc->sa.rmap_cur)) in xchk_iallocbt_xref_rmap_btreeblks()
681 xchk_btree_set_corrupt(sc, sc->sa.ino_cur, 0); in xchk_iallocbt_xref_rmap_btreeblks()
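
Lines 652-681 give most of xchk_iallocbt_xref_rmap_btreeblks(): count the blocks in the inobt (and in the finobt, when the filesystem has one), ask the rmap how many blocks it attributes to XFS_RMAP_OINFO_INOBT owners, and flag corruption if the totals disagree. A sketch; the local variable names and the final comparison are filled in by hand:

/* Compare inobt+finobt block counts against the rmap's view (sketch). */
STATIC void
xchk_iallocbt_xref_rmap_btreeblks(
	struct xfs_scrub	*sc,
	int			which)
{
	xfs_filblks_t		blocks;
	xfs_extlen_t		inobt_blocks = 0;
	xfs_extlen_t		finobt_blocks = 0;
	int			error;

	if (!sc->sa.ino_cur || !sc->sa.rmap_cur ||
	    (xfs_has_finobt(sc->mp) && !sc->sa.fino_cur) ||
	    xchk_skip_xref(sc->sm))
		return;

	/* Count the blocks in each inode btree. */
	error = xfs_btree_count_blocks(sc->sa.ino_cur, &inobt_blocks);
	if (!xchk_process_error(sc, 0, 0, &error))
		return;

	if (sc->sa.fino_cur) {
		error = xfs_btree_count_blocks(sc->sa.fino_cur, &finobt_blocks);
		if (!xchk_process_error(sc, 0, 0, &error))
			return;
	}

	/* The rmap's total for inobt-owned blocks must match. */
	error = xchk_count_rmap_ownedby_ag(sc, sc->sa.rmap_cur,
			&XFS_RMAP_OINFO_INOBT, &blocks);
	if (!xchk_should_check_xref(sc, &error, &sc->sa.rmap_cur))
		return;
	if (blocks != inobt_blocks + finobt_blocks)
		xchk_btree_set_corrupt(sc, sc->sa.ino_cur, 0);
}
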
690 struct xfs_scrub *sc, in xchk_iallocbt_xref_rmap_inodes() argument
698 if (!sc->sa.rmap_cur || xchk_skip_xref(sc->sm)) in xchk_iallocbt_xref_rmap_inodes()
702 error = xchk_count_rmap_ownedby_ag(sc, sc->sa.rmap_cur, in xchk_iallocbt_xref_rmap_inodes()
704 if (!xchk_should_check_xref(sc, &error, &sc->sa.rmap_cur)) in xchk_iallocbt_xref_rmap_inodes()
706 inode_blocks = XFS_B_TO_FSB(sc->mp, inodes * sc->mp->m_sb.sb_inodesize); in xchk_iallocbt_xref_rmap_inodes()
708 xchk_btree_xref_set_corrupt(sc, sc->sa.rmap_cur, 0); in xchk_iallocbt_xref_rmap_inodes()
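
Lines 690-708 do the same kind of accounting for inode chunk space: the number of inodes counted while walking the records is converted to filesystem blocks and compared against the blocks the rmap attributes to XFS_RMAP_OINFO_INODES. A sketch with the locals and the comparison filled in:

/* Compare the counted inode space against rmap's inode-owned total (sketch). */
STATIC void
xchk_iallocbt_xref_rmap_inodes(
	struct xfs_scrub	*sc,
	int			which,
	unsigned long long	inodes)
{
	xfs_filblks_t		blocks;
	xfs_filblks_t		inode_blocks;
	int			error;

	if (!sc->sa.rmap_cur || xchk_skip_xref(sc->sm))
		return;

	/* How many blocks does the rmap say inodes own in this AG? */
	error = xchk_count_rmap_ownedby_ag(sc, sc->sa.rmap_cur,
			&XFS_RMAP_OINFO_INODES, &blocks);
	if (!xchk_should_check_xref(sc, &error, &sc->sa.rmap_cur))
		return;

	/* Convert the inode count we accumulated into blocks and compare. */
	inode_blocks = XFS_B_TO_FSB(sc->mp, inodes * sc->mp->m_sb.sb_inodesize);
	if (blocks != inode_blocks)
		xchk_btree_xref_set_corrupt(sc, sc->sa.rmap_cur, 0);
}
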
714 struct xfs_scrub *sc, in xchk_iallocbt() argument
725 cur = which == XFS_BTNUM_INO ? sc->sa.ino_cur : sc->sa.fino_cur; in xchk_iallocbt()
726 error = xchk_btree(sc, cur, xchk_iallocbt_rec, &XFS_RMAP_OINFO_INOBT, in xchk_iallocbt()
731 xchk_iallocbt_xref_rmap_btreeblks(sc, which); in xchk_iallocbt()
741 xchk_iallocbt_xref_rmap_inodes(sc, which, iabt.inodes); in xchk_iallocbt()
748 struct xfs_scrub *sc) in xchk_inobt() argument
750 return xchk_iallocbt(sc, XFS_BTNUM_INO); in xchk_inobt()
755 struct xfs_scrub *sc) in xchk_finobt() argument
757 return xchk_iallocbt(sc, XFS_BTNUM_FINO); in xchk_finobt()
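
Lines 714-757 show how the two scrubber entry points share one walker: xchk_iallocbt() picks the inobt or finobt cursor, runs xchk_btree() with xchk_iallocbt_rec() as the per-record callback, and then performs the rmap cross-checks - the per-inode space check only for the inobt, since the finobt only covers chunks with free inodes. A sketch; the xchk_iallocbt private struct carrying the running inode count is inferred from the iabt.inodes argument in the listing:

/* Scrub one of the inode btrees and cross-reference it (sketch). */
STATIC int
xchk_iallocbt(
	struct xfs_scrub	*sc,
	xfs_btnum_t		which)
{
	struct xfs_btree_cur	*cur;
	struct xchk_iallocbt	iabt = { .inodes = 0 };	/* inferred */
	int			error;

	cur = which == XFS_BTNUM_INO ? sc->sa.ino_cur : sc->sa.fino_cur;
	error = xchk_btree(sc, cur, xchk_iallocbt_rec, &XFS_RMAP_OINFO_INOBT,
			&iabt);
	if (error)
		return error;

	xchk_iallocbt_xref_rmap_btreeblks(sc, which);

	/* Only the inobt covers every chunk, so only it can be compared. */
	if (which == XFS_BTNUM_INO)
		xchk_iallocbt_xref_rmap_inodes(sc, which, iabt.inodes);

	return error;
}

int
xchk_inobt(
	struct xfs_scrub	*sc)
{
	return xchk_iallocbt(sc, XFS_BTNUM_INO);
}

int
xchk_finobt(
	struct xfs_scrub	*sc)
{
	return xchk_iallocbt(sc, XFS_BTNUM_FINO);
}
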
763 struct xfs_scrub *sc, in xchk_xref_inode_check() argument
772 if (!(*icur) || xchk_skip_xref(sc->sm)) in xchk_xref_inode_check()
776 if (!xchk_should_check_xref(sc, &error, icur)) in xchk_xref_inode_check()
779 xchk_btree_xref_set_corrupt(sc, *icur, 0); in xchk_xref_inode_check()
785 struct xfs_scrub *sc, in xchk_xref_is_not_inode_chunk() argument
789 xchk_xref_inode_check(sc, agbno, len, &sc->sa.ino_cur, in xchk_xref_is_not_inode_chunk()
791 xchk_xref_inode_check(sc, agbno, len, &sc->sa.fino_cur, in xchk_xref_is_not_inode_chunk()
798 struct xfs_scrub *sc, in xchk_xref_is_inode_chunk() argument
802 xchk_xref_inode_check(sc, agbno, len, &sc->sa.ino_cur, in xchk_xref_is_inode_chunk()
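
The last fragments (763-802) are the helpers other scrubbers use to ask whether an AG extent overlaps inode chunks: xchk_xref_inode_check() queries one inode btree cursor and sets the cross-reference corruption flag when the result is not what the caller expected, and the two wrappers ask for "no inodes here" (against both btrees) or "fully covered by inodes" (against the inobt only). A sketch; the btree query in the middle - xfs_ialloc_has_inodes_at_extent() and the xbtree_recpacking expectation - is my assumption about what sits between the listed lines:

/* Does an inode btree think this AG extent holds inode chunks? (sketch) */
STATIC void
xchk_xref_inode_check(
	struct xfs_scrub	*sc,
	xfs_agblock_t		agbno,
	xfs_extlen_t		len,
	struct xfs_btree_cur	**icur,
	enum xbtree_recpacking	expect)	/* assumed expectation type */
{
	enum xbtree_recpacking	outcome;
	int			error;

	if (!(*icur) || xchk_skip_xref(sc->sm))
		return;

	/* Assumed query helper between the listed lines. */
	error = xfs_ialloc_has_inodes_at_extent(*icur, agbno, len, &outcome);
	if (!xchk_should_check_xref(sc, &error, icur))
		return;
	if (outcome != expect)
		xchk_btree_xref_set_corrupt(sc, *icur, 0);
}

/* The extent must not be covered by inode chunks in either btree. */
void
xchk_xref_is_not_inode_chunk(
	struct xfs_scrub	*sc,
	xfs_agblock_t		agbno,
	xfs_extlen_t		len)
{
	xchk_xref_inode_check(sc, agbno, len, &sc->sa.ino_cur,
			XBTREE_RECPACKING_EMPTY);
	xchk_xref_inode_check(sc, agbno, len, &sc->sa.fino_cur,
			XBTREE_RECPACKING_EMPTY);
}

/* The extent must be fully covered by inode chunks in the inobt. */
void
xchk_xref_is_inode_chunk(
	struct xfs_scrub	*sc,
	xfs_agblock_t		agbno,
	xfs_extlen_t		len)
{
	xchk_xref_inode_check(sc, agbno, len, &sc->sa.ino_cur,
			XBTREE_RECPACKING_FULL);
}
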