Lines matching refs:sector_size (drivers/md/dm-crypt.c)

179 unsigned short sector_size;  member (struct crypt_config)
432 if (cc->sector_size != (1 << SECTOR_SHIFT)) { in crypt_iv_lmk_ctr()
590 if (cc->sector_size != (1 << SECTOR_SHIFT)) { in crypt_iv_tcw_ctr()
769 *(__le64 *)buf = cpu_to_le64(dmreq->iv_sector * cc->sector_size); in crypt_iv_eboiv_gen()
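
Both crypt_iv_eboiv_gen() above and the crypt_iv_elephant() seed just below derive the per-sector IV material from the byte offset of the crypto sector (iv_sector * sector_size), stored little-endian. A minimal standalone sketch of that computation, assuming htole64() from <endian.h> stands in for the kernel's cpu_to_le64():

#include <endian.h>
#include <stdint.h>
#include <string.h>

/* Seed an IV buffer from the crypto-sector number: the value written is the
 * sector's byte offset (iv_sector * sector_size), little-endian, zero-padded. */
static void iv_from_sector(uint8_t *iv, size_t iv_size,
			   uint64_t iv_sector, unsigned short sector_size)
{
	uint64_t le = htole64(iv_sector * (uint64_t)sector_size);

	memset(iv, 0, iv_size);
	memcpy(iv, &le, iv_size < sizeof(le) ? iv_size : sizeof(le));
}
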
971 *(__le64 *)es = cpu_to_le64(dmreq->iv_sector * cc->sector_size); in crypt_iv_elephant()
997 memcpy(data_offset, data2 + sg2->offset, cc->sector_size); in crypt_iv_elephant()
1002 diffuser_disk_to_cpu((u32 *)data_offset, cc->sector_size / sizeof(u32)); in crypt_iv_elephant()
1003 diffuser_b_decrypt((u32 *)data_offset, cc->sector_size / sizeof(u32)); in crypt_iv_elephant()
1004 diffuser_a_decrypt((u32 *)data_offset, cc->sector_size / sizeof(u32)); in crypt_iv_elephant()
1005 diffuser_cpu_to_disk((__le32 *)data_offset, cc->sector_size / sizeof(u32)); in crypt_iv_elephant()
1008 for (i = 0; i < (cc->sector_size / 32); i++) in crypt_iv_elephant()
1012 diffuser_disk_to_cpu((u32 *)data_offset, cc->sector_size / sizeof(u32)); in crypt_iv_elephant()
1013 diffuser_a_encrypt((u32 *)data_offset, cc->sector_size / sizeof(u32)); in crypt_iv_elephant()
1014 diffuser_b_encrypt((u32 *)data_offset, cc->sector_size / sizeof(u32)); in crypt_iv_elephant()
1015 diffuser_cpu_to_disk((__le32 *)data_offset, cc->sector_size / sizeof(u32)); in crypt_iv_elephant()
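
The crypt_iv_elephant() lines above run the diffuser passes over the whole crypto sector, treated as sector_size / 4 32-bit words: B then A in the undo direction, A then B in the apply direction, with an endianness conversion on either side. A sketch of just that ordering; the diffuser and byte-order helpers are hypothetical stand-ins for the real routines:

#include <stdint.h>
#include <stddef.h>

/* Hypothetical stand-ins for the real diffuser and endianness helpers. */
void diffuser_a_decrypt(uint32_t *d, size_t n);
void diffuser_b_decrypt(uint32_t *d, size_t n);
void diffuser_a_encrypt(uint32_t *d, size_t n);
void diffuser_b_encrypt(uint32_t *d, size_t n);
void disk_to_cpu(uint32_t *d, size_t n);
void cpu_to_disk(uint32_t *d, size_t n);

/* One pass undoing the diffusers over the sector: B then A. */
static void elephant_undo_diffusers(uint32_t *data, unsigned short sector_size)
{
	size_t n = sector_size / sizeof(uint32_t);

	disk_to_cpu(data, n);
	diffuser_b_decrypt(data, n);
	diffuser_a_decrypt(data, n);
	cpu_to_disk(data, n);
}

/* One pass applying the diffusers over the sector: A then B. */
static void elephant_apply_diffusers(uint32_t *data, unsigned short sector_size)
{
	size_t n = sector_size / sizeof(uint32_t);

	disk_to_cpu(data, n);
	diffuser_a_encrypt(data, n);
	diffuser_b_encrypt(data, n);
	cpu_to_disk(data, n);
}
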
1193 if (1 << bi->interval_exp != cc->sector_size) { in crypt_integrity_ctr()
1314 if (unlikely(bv_in.bv_len & (cc->sector_size - 1))) in crypt_convert_block_aead()
1341 sg_set_page(&dmreq->sg_in[2], bv_in.bv_page, cc->sector_size, bv_in.bv_offset); in crypt_convert_block_aead()
1347 sg_set_page(&dmreq->sg_out[2], bv_out.bv_page, cc->sector_size, bv_out.bv_offset); in crypt_convert_block_aead()
1369 cc->sector_size, iv); in crypt_convert_block_aead()
1376 cc->sector_size + cc->integrity_tag_size, iv); in crypt_convert_block_aead()
1395 bio_advance_iter(ctx->bio_in, &ctx->iter_in, cc->sector_size); in crypt_convert_block_aead()
1396 bio_advance_iter(ctx->bio_out, &ctx->iter_out, cc->sector_size); in crypt_convert_block_aead()
1415 if (unlikely(bv_in.bv_len & (cc->sector_size - 1))) in crypt_convert_block_skcipher()
1438 sg_set_page(sg_in, bv_in.bv_page, cc->sector_size, bv_in.bv_offset); in crypt_convert_block_skcipher()
1441 sg_set_page(sg_out, bv_out.bv_page, cc->sector_size, bv_out.bv_offset); in crypt_convert_block_skcipher()
1462 skcipher_request_set_crypt(req, sg_in, sg_out, cc->sector_size, iv); in crypt_convert_block_skcipher()
1472 bio_advance_iter(ctx->bio_in, &ctx->iter_in, cc->sector_size); in crypt_convert_block_skcipher()
1473 bio_advance_iter(ctx->bio_out, &ctx->iter_out, cc->sector_size); in crypt_convert_block_skcipher()
1568 unsigned int sector_step = cc->sector_size >> SECTOR_SHIFT; in crypt_convert()
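
crypt_convert() advances through the request in units of one crypto sector, i.e. sector_size >> SECTOR_SHIFT 512-byte sectors per processed block. A sketch of that stepping, with process_one_sector() as a hypothetical stand-in for the per-block conversion:

#include <stdint.h>

#define SECTOR_SHIFT 9	/* 512-byte units, as in the kernel */

/* Hypothetical stand-in for encrypting/decrypting one crypto sector. */
void process_one_sector(uint64_t sector);

/* Walk an I/O range one crypto sector at a time; each step consumes
 * sector_step 512-byte sectors of the bio. */
static void convert_range(uint64_t first_sector, uint64_t nr_sectors,
			  unsigned short sector_size)
{
	unsigned int sector_step = sector_size >> SECTOR_SHIFT;
	uint64_t s;

	for (s = first_sector; s < first_sector + nr_sectors; s += sector_step)
		process_one_sector(s);
}
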
3191 } else if (sscanf(opt_string, "sector_size:%hu%c", &cc->sector_size, &dummy) == 1) { in crypt_ctr_optional()
3192 if (cc->sector_size < (1 << SECTOR_SHIFT) || in crypt_ctr_optional()
3193 cc->sector_size > 4096 || in crypt_ctr_optional()
3194 (cc->sector_size & (cc->sector_size - 1))) { in crypt_ctr_optional()
3198 if (ti->len & ((cc->sector_size >> SECTOR_SHIFT) - 1)) { in crypt_ctr_optional()
3202 cc->sector_shift = __ffs(cc->sector_size) - SECTOR_SHIFT; in crypt_ctr_optional()
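
The crypt_ctr_optional() lines above accept only a sector_size that is a power of two between 512 and 4096 bytes, require the target length to be a multiple of it, and cache the ratio to 512-byte sectors as sector_shift. A standalone sketch of those rules, assuming __builtin_ctz() in place of the kernel's __ffs():

#include <stdbool.h>
#include <stdio.h>

#define SECTOR_SHIFT 9

/* Accept 512..4096 bytes, power of two; derive the shift from 512-byte sectors. */
static bool sector_size_valid(unsigned short sector_size, unsigned int *sector_shift)
{
	if (sector_size < (1 << SECTOR_SHIFT) ||
	    sector_size > 4096 ||
	    (sector_size & (sector_size - 1)))
		return false;

	*sector_shift = __builtin_ctz(sector_size) - SECTOR_SHIFT;
	return true;
}

int main(void)
{
	unsigned int shift, s;

	/* 256 and 8192 are rejected; 512..4096 map to shifts 0..3. */
	for (s = 256; s <= 8192; s <<= 1)
		if (sector_size_valid(s, &shift))
			printf("sector_size=%u -> sector_shift=%u\n", s, shift);
	return 0;
}
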
3260 cc->sector_size = (1 << SECTOR_SHIFT); in crypt_ctr()
3342 (tmpll & ((cc->sector_size >> SECTOR_SHIFT) - 1))) { in crypt_ctr()
3477 if (unlikely((bio->bi_iter.bi_sector & ((cc->sector_size >> SECTOR_SHIFT) - 1)) != 0)) in crypt_map()
3480 if (unlikely(bio->bi_iter.bi_size & (cc->sector_size - 1))) in crypt_map()
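
crypt_map() rejects any bio that does not start on a crypto-sector boundary or whose size is not a whole number of crypto sectors. The same two checks as standalone predicates (assumption: the start sector and size are passed as plain integers rather than a struct bio):

#include <stdbool.h>
#include <stdint.h>

#define SECTOR_SHIFT 9

/* True if the I/O starts on a crypto-sector boundary and covers whole crypto
 * sectors; both masks rely on sector_size being a power of two. */
static bool bio_aligned(uint64_t bi_sector, uint32_t bi_size,
			unsigned short sector_size)
{
	if (bi_sector & ((sector_size >> SECTOR_SHIFT) - 1))
		return false;
	if (bi_size & (sector_size - 1))
		return false;
	return true;
}
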
3556 num_feature_args += cc->sector_size != (1 << SECTOR_SHIFT); in crypt_status()
3574 if (cc->sector_size != (1 << SECTOR_SHIFT)) in crypt_status()
3575 DMEMIT(" sector_size:%d", cc->sector_size); in crypt_status()
3597 if (cc->sector_size != (1 << SECTOR_SHIFT)) in crypt_status()
3598 DMEMIT(",sector_size=%d", cc->sector_size); in crypt_status()
3703 max_t(unsigned int, limits->logical_block_size, cc->sector_size); in crypt_io_hints()
3705 max_t(unsigned int, limits->physical_block_size, cc->sector_size); in crypt_io_hints()
3706 limits->io_min = max_t(unsigned int, limits->io_min, cc->sector_size); in crypt_io_hints()
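
Finally, crypt_io_hints() raises every exposed block-size limit to at least the crypto sector size, so the block layer never issues I/O smaller or less aligned than one crypto sector. A sketch using a simplified structure in place of the kernel's struct queue_limits:

/* Simplified stand-in for the kernel's struct queue_limits. */
struct limits_sketch {
	unsigned int logical_block_size;
	unsigned int physical_block_size;
	unsigned int io_min;
};

static unsigned int max_uint(unsigned int a, unsigned int b)
{
	return a > b ? a : b;
}

/* Clamp each limit up to the crypto sector size, mirroring crypt_io_hints(). */
static void apply_sector_size_limits(struct limits_sketch *l, unsigned short sector_size)
{
	l->logical_block_size  = max_uint(l->logical_block_size, sector_size);
	l->physical_block_size = max_uint(l->physical_block_size, sector_size);
	l->io_min              = max_uint(l->io_min, sector_size);
}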