dm-crypt.c (5ef26e966d3fd105ad9a7e8e8f6d12c7fbd4c03d) | dm-crypt.c (45fe93dff2fb58b22de04c729f8447ba0f773d93) |
---|---|
1/* 2 * Copyright (C) 2003 Jana Saout <jana@saout.de> 3 * Copyright (C) 2004 Clemens Fruhwirth <clemens@endorphin.org> 4 * Copyright (C) 2006-2017 Red Hat, Inc. All rights reserved. 5 * Copyright (C) 2013-2017 Milan Broz <gmazyland@gmail.com> 6 * 7 * This file is released under the GPL. 8 */ --- 744 unchanged lines hidden (view full) --- 753{ 754 struct iv_tcw_private *tcw = &cc->iv_gen_private.tcw; 755 __le64 sector = cpu_to_le64(dmreq->iv_sector); 756 u8 buf[TCW_WHITENING_SIZE]; 757 SHASH_DESC_ON_STACK(desc, tcw->crc32_tfm); 758 int i, r; 759 760 /* xor whitening with sector number */ | 1/* 2 * Copyright (C) 2003 Jana Saout <jana@saout.de> 3 * Copyright (C) 2004 Clemens Fruhwirth <clemens@endorphin.org> 4 * Copyright (C) 2006-2017 Red Hat, Inc. All rights reserved. 5 * Copyright (C) 2013-2017 Milan Broz <gmazyland@gmail.com> 6 * 7 * This file is released under the GPL. 8 */ --- 744 unchanged lines hidden (view full) --- 753{ 754 struct iv_tcw_private *tcw = &cc->iv_gen_private.tcw; 755 __le64 sector = cpu_to_le64(dmreq->iv_sector); 756 u8 buf[TCW_WHITENING_SIZE]; 757 SHASH_DESC_ON_STACK(desc, tcw->crc32_tfm); 758 int i, r; 759 760 /* xor whitening with sector number */ |
761 memcpy(buf, tcw->whitening, TCW_WHITENING_SIZE); 762 crypto_xor(buf, (u8 *)&sector, 8); 763 crypto_xor(&buf[8], (u8 *)&sector, 8); | 761 crypto_xor_cpy(buf, tcw->whitening, (u8 *)&sector, 8); 762 crypto_xor_cpy(&buf[8], tcw->whitening + 8, (u8 *)&sector, 8); |
764 765 /* calculate crc32 for every 32bit part and xor it */ 766 desc->tfm = tcw->crc32_tfm; 767 desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP; 768 for (i = 0; i < 4; i++) { 769 r = crypto_shash_init(desc); 770 if (r) 771 goto out; --- 28 unchanged lines hidden (view full) --- 800 if (bio_data_dir(dmreq->ctx->bio_in) != WRITE) { 801 sg = crypt_get_sg_data(cc, dmreq->sg_in); 802 src = kmap_atomic(sg_page(sg)); 803 r = crypt_iv_tcw_whitening(cc, dmreq, src + sg->offset); 804 kunmap_atomic(src); 805 } 806 807 /* Calculate IV */ | 763 764 /* calculate crc32 for every 32bit part and xor it */ 765 desc->tfm = tcw->crc32_tfm; 766 desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP; 767 for (i = 0; i < 4; i++) { 768 r = crypto_shash_init(desc); 769 if (r) 770 goto out; --- 28 unchanged lines hidden (view full) --- 799 if (bio_data_dir(dmreq->ctx->bio_in) != WRITE) { 800 sg = crypt_get_sg_data(cc, dmreq->sg_in); 801 src = kmap_atomic(sg_page(sg)); 802 r = crypt_iv_tcw_whitening(cc, dmreq, src + sg->offset); 803 kunmap_atomic(src); 804 } 805 806 /* Calculate IV */ |
808 memcpy(iv, tcw->iv_seed, cc->iv_size); 809 crypto_xor(iv, (u8 *)&sector, 8); | 807 crypto_xor_cpy(iv, tcw->iv_seed, (u8 *)&sector, 8); |
810 if (cc->iv_size > 8) | 808 if (cc->iv_size > 8) |
811 crypto_xor(&iv[8], (u8 *)&sector, cc->iv_size - 8); | 809 crypto_xor_cpy(&iv[8], tcw->iv_seed + 8, (u8 *)&sector, 810 cc->iv_size - 8); |
812 813 return r; 814} 815 816static int crypt_iv_tcw_post(struct crypt_config *cc, u8 *iv, 817 struct dm_crypt_request *dmreq) 818{ 819 struct scatterlist *sg; --- 2225 unchanged lines hidden --- | 811 812 return r; 813} 814 815static int crypt_iv_tcw_post(struct crypt_config *cc, u8 *iv, 816 struct dm_crypt_request *dmreq) 817{ 818 struct scatterlist *sg; --- 2225 unchanged lines hidden --- |