Lines matching refs: AES_BLOCK_SIZE — identifier cross-reference output. The leading number on each hit is the line in the source file; the trailing "in foo()" names the enclosing function. The function names and line numbers are consistent with the arm64 AES glue code (arch/arm64/crypto/aes-glue.c).

134 u8 dg[AES_BLOCK_SIZE];
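This lone struct-field hit (no enclosing function) is the running MAC digest buffer used by the mac_* hits further down. A sketch of the per-request context it plausibly lives in — the field set is inferred from the mac_init()/mac_update() hits below; the exact layout is an assumption, not a quote of the source:

    #define AES_BLOCK_SIZE 16
    typedef unsigned char u8;

    struct mac_desc_ctx {
            unsigned int len;       /* bytes currently buffered in dg[] */
            u8 dg[AES_BLOCK_SIZE];  /* running digest, doubling as the partial block */
    };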
189 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_encrypt()
194 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_encrypt()
209 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_decrypt()
214 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_decrypt()
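The ecb_encrypt()/ecb_decrypt() hits show the standard skcipher walk idiom: carve the chunk the walker handed out into whole blocks, process them, and return the sub-block remainder through skcipher_walk_done() so it is resupplied with the next chunk. A standalone sketch of just that arithmetic; process_blocks() is a hypothetical stand-in for the NEON ECB helper, not a kernel function:

    #include <stdio.h>

    #define AES_BLOCK_SIZE 16

    /* Hypothetical stand-in for the NEON ECB core. */
    static void process_blocks(unsigned int blocks) { (void)blocks; }

    /* Shape of the loop in ecb_encrypt()/ecb_decrypt(). */
    static unsigned int ecb_walk_step(unsigned int nbytes)
    {
            unsigned int blocks = nbytes / AES_BLOCK_SIZE;

            if (blocks)
                    process_blocks(blocks);
            /* remainder goes back to skcipher_walk_done() */
            return nbytes % AES_BLOCK_SIZE;
    }

    int main(void)
    {
            printf("%u\n", ecb_walk_step(100)); /* 6 blocks processed, 4 bytes back */
            return 0;
    }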
227 while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) { in cbc_encrypt_walk()
232 err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE); in cbc_encrypt_walk()
256 while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) { in cbc_decrypt_walk()
261 err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE); in cbc_decrypt_walk()
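cbc_encrypt_walk()/cbc_decrypt_walk() use the same whole-blocks-plus-remainder split; the difference from ECB is the chaining state carried in walk->iv. A sketch of one encryption pass over the whole blocks of a chunk — aes_encrypt_block() is a hypothetical one-block primitive, declared but deliberately not implemented here:

    #include <stdint.h>
    #include <string.h>

    #define AES_BLOCK_SIZE 16

    /* Hypothetical one-block AES primitive (not implemented here). */
    void aes_encrypt_block(uint8_t dst[AES_BLOCK_SIZE],
                           const uint8_t src[AES_BLOCK_SIZE]);

    /* One CBC pass over the whole blocks of a walk chunk; iv is updated
     * in place, as walk->iv is in cbc_encrypt_walk(). */
    static unsigned int cbc_encrypt_step(uint8_t *dst, const uint8_t *src,
                                         unsigned int nbytes,
                                         uint8_t iv[AES_BLOCK_SIZE])
    {
            unsigned int blocks = nbytes / AES_BLOCK_SIZE;

            while (blocks--) {
                    uint8_t tmp[AES_BLOCK_SIZE];

                    for (int i = 0; i < AES_BLOCK_SIZE; i++)
                            tmp[i] = src[i] ^ iv[i];  /* chain previous block */
                    aes_encrypt_block(dst, tmp);
                    memcpy(iv, dst, AES_BLOCK_SIZE);
                    src += AES_BLOCK_SIZE;
                    dst += AES_BLOCK_SIZE;
            }
            /* remainder for skcipher_walk_done() */
            return nbytes % AES_BLOCK_SIZE;
    }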
282 int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2; in cts_cbc_encrypt()
292 if (req->cryptlen <= AES_BLOCK_SIZE) { in cts_cbc_encrypt()
293 if (req->cryptlen < AES_BLOCK_SIZE) in cts_cbc_encrypt()
300 cbc_blocks * AES_BLOCK_SIZE, in cts_cbc_encrypt()
308 if (req->cryptlen == AES_BLOCK_SIZE) in cts_cbc_encrypt()
319 req->cryptlen - cbc_blocks * AES_BLOCK_SIZE, in cts_cbc_encrypt()
339 int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2; in cts_cbc_decrypt()
349 if (req->cryptlen <= AES_BLOCK_SIZE) { in cts_cbc_decrypt()
350 if (req->cryptlen < AES_BLOCK_SIZE) in cts_cbc_decrypt()
357 cbc_blocks * AES_BLOCK_SIZE, in cts_cbc_decrypt()
365 if (req->cryptlen == AES_BLOCK_SIZE) in cts_cbc_decrypt()
376 req->cryptlen - cbc_blocks * AES_BLOCK_SIZE, in cts_cbc_decrypt()
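Both cts_cbc_* hits pivot on the same expression: cbc_blocks = DIV_ROUND_UP(cryptlen, AES_BLOCK_SIZE) - 2. Everything except the final two (possibly partial) blocks is handled as ordinary CBC; the last AES_BLOCK_SIZE + tail bytes get the ciphertext-stealing pass, and the exactly-one-block case (lines 308/365) degenerates to plain CBC. A runnable check of the split:

    #include <stdio.h>

    #define AES_BLOCK_SIZE 16
    #define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

    /* cbc_blocks <= 0 means there is nothing to pre-process as CBC. */
    static void cts_split(unsigned int cryptlen)
    {
            int cbc_blocks = DIV_ROUND_UP(cryptlen, AES_BLOCK_SIZE) - 2;
            unsigned int cbc_bytes = cbc_blocks > 0 ? cbc_blocks * AES_BLOCK_SIZE : 0;

            printf("cryptlen=%u: %u bytes plain CBC, %u bytes CTS pass\n",
                   cryptlen, cbc_bytes, cryptlen - cbc_bytes);
    }

    int main(void)
    {
            cts_split(16);   /* exactly one block: plain CBC, no stealing */
            cts_split(31);   /* one full + one partial block: all CTS     */
            cts_split(100);  /* 5 blocks (80 bytes) CBC, 20-byte CTS pass */
            return 0;
    }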
417 blocks = walk.nbytes / AES_BLOCK_SIZE; in essiv_cbc_encrypt()
424 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in essiv_cbc_encrypt()
439 blocks = walk.nbytes / AES_BLOCK_SIZE; in essiv_cbc_decrypt()
446 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in essiv_cbc_decrypt()
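The essiv_cbc_* hits walk exactly like plain CBC; what makes the mode ESSIV is only how the IV is formed before the walk: the sector IV is encrypted once with a second key derived by hashing the data key. A sketch of that one extra step — aes_encrypt_block_keyed() is a hypothetical one-block primitive, and the hash-derived second key is an assumption following the general ESSIV construction:

    #include <stdint.h>

    #define AES_BLOCK_SIZE 16

    /* Hypothetical one-block primitive taking an expanded key. */
    void aes_encrypt_block_keyed(uint8_t dst[AES_BLOCK_SIZE],
                                 const uint8_t src[AES_BLOCK_SIZE],
                                 const void *key);

    /* ESSIV's one addition over plain CBC: encrypt the sector IV with a
     * second key (derived by hashing the data key) before the CBC walk. */
    static void essiv_prepare_iv(uint8_t iv[AES_BLOCK_SIZE], const void *key2)
    {
            aes_encrypt_block_keyed(iv, iv, key2);
            /* ...then proceed exactly as the cbc_*_walk() helpers do. */
    }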
465 u8 buf[AES_BLOCK_SIZE]; in xctr_encrypt()
475 if (unlikely(nbytes < AES_BLOCK_SIZE)) in xctr_encrypt()
479 nbytes &= ~(AES_BLOCK_SIZE - 1); in xctr_encrypt()
486 if (unlikely(nbytes < AES_BLOCK_SIZE)) in xctr_encrypt()
510 u8 buf[AES_BLOCK_SIZE]; in ctr_encrypt()
520 if (unlikely(nbytes < AES_BLOCK_SIZE)) in ctr_encrypt()
524 nbytes &= ~(AES_BLOCK_SIZE - 1); in ctr_encrypt()
531 if (unlikely(nbytes < AES_BLOCK_SIZE)) in ctr_encrypt()
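ctr_encrypt() and xctr_encrypt() share a tail policy: while more data follows, the chunk is rounded down to whole blocks with nbytes &= ~(AES_BLOCK_SIZE - 1); only a final short chunk is crypted through the one-block stack buffer (u8 buf[AES_BLOCK_SIZE]) seen at lines 465/510. A runnable sketch of the decision:

    #include <stdio.h>

    #define AES_BLOCK_SIZE 16

    /* Tail policy shared by ctr_encrypt()/xctr_encrypt(): intermediate
     * chunks are rounded down to whole blocks; only the final short
     * chunk is crypted via the one-block stack buffer. */
    static unsigned int ctr_usable_bytes(unsigned int nbytes, int is_final_chunk)
    {
            if (nbytes < AES_BLOCK_SIZE && is_final_chunk)
                    return nbytes;                   /* partial tail via buf[] */
            return nbytes & ~(AES_BLOCK_SIZE - 1u);  /* round down to blocks   */
    }

    int main(void)
    {
            printf("%u\n", ctr_usable_bytes(100, 0)); /* 96: whole blocks only */
            printf("%u\n", ctr_usable_bytes(4, 1));   /* 4: handled via buf[]  */
            return 0;
    }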
546 int tail = req->cryptlen % AES_BLOCK_SIZE; in xts_encrypt()
552 if (req->cryptlen < AES_BLOCK_SIZE) in xts_encrypt()
559 AES_BLOCK_SIZE) - 2; in xts_encrypt()
568 xts_blocks * AES_BLOCK_SIZE, in xts_encrypt()
576 for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) { in xts_encrypt()
580 nbytes &= ~(AES_BLOCK_SIZE - 1); in xts_encrypt()
597 skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail, in xts_encrypt()
618 int tail = req->cryptlen % AES_BLOCK_SIZE; in xts_decrypt()
624 if (req->cryptlen < AES_BLOCK_SIZE) in xts_decrypt()
631 AES_BLOCK_SIZE) - 2; in xts_decrypt()
640 xts_blocks * AES_BLOCK_SIZE, in xts_decrypt()
648 for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) { in xts_decrypt()
652 nbytes &= ~(AES_BLOCK_SIZE - 1); in xts_decrypt()
669 skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail, in xts_decrypt()
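xts_encrypt()/xts_decrypt() mirror the CTS logic: if cryptlen is not a block multiple (tail != 0), all but the last two blocks go through the bulk loop (lines 576/648), then one final pass of AES_BLOCK_SIZE + tail bytes (lines 597/669) performs the ciphertext stealing. A runnable check of that split:

    #include <stdio.h>

    #define AES_BLOCK_SIZE 16
    #define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

    /* cryptlen < AES_BLOCK_SIZE is rejected upstream (lines 552/624). */
    static void xts_split(unsigned int cryptlen)
    {
            int tail = cryptlen % AES_BLOCK_SIZE;
            unsigned int bulk = cryptlen;

            if (tail) {
                    int xts_blocks = DIV_ROUND_UP(cryptlen, AES_BLOCK_SIZE) - 2;

                    bulk = xts_blocks * AES_BLOCK_SIZE;
            }
            printf("cryptlen=%u: bulk=%u, stealing pass=%u\n",
                   cryptlen, bulk, cryptlen - bulk);
    }

    int main(void)
    {
            xts_split(64);   /* aligned: one bulk pass, no stealing    */
            xts_split(100);  /* 80 bytes bulk, then a 20-byte CTS pass */
            return 0;
    }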
692 .cra_blocksize = AES_BLOCK_SIZE,
706 .cra_blocksize = AES_BLOCK_SIZE,
712 .ivsize = AES_BLOCK_SIZE,
727 .ivsize = AES_BLOCK_SIZE,
728 .chunksize = AES_BLOCK_SIZE,
743 .ivsize = AES_BLOCK_SIZE,
744 .chunksize = AES_BLOCK_SIZE,
753 .cra_blocksize = AES_BLOCK_SIZE,
759 .ivsize = AES_BLOCK_SIZE,
760 .walksize = 2 * AES_BLOCK_SIZE,
770 .cra_blocksize = AES_BLOCK_SIZE,
776 .ivsize = AES_BLOCK_SIZE,
777 .walksize = 2 * AES_BLOCK_SIZE,
786 .cra_blocksize = AES_BLOCK_SIZE,
792 .ivsize = AES_BLOCK_SIZE,
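In the algorithm definitions above, the sizes do different jobs: cra_blocksize is the cipher block, ivsize the per-request IV, chunksize the granularity by which the CTR/XCTR entries advance their counter (their cra_blocksize is 1, which is presumably why it does not appear among these hits), and walksize = 2 * AES_BLOCK_SIZE makes the walker keep the last two blocks of an XTS request in one chunk for ciphertext stealing. A fragment showing the shape of such an entry (abridged, field subset only, not the complete initializer):

    .base.cra_blocksize = AES_BLOCK_SIZE,     /* one cipher block          */
    .ivsize             = AES_BLOCK_SIZE,     /* full-block IV / tweak     */
    .walksize           = 2 * AES_BLOCK_SIZE, /* last two blocks stay in
                                               * one walk chunk for CTS    */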
831 aes_ecb_encrypt(ctx->consts, (u8[AES_BLOCK_SIZE]){}, ctx->key.key_enc, in cmac_setkey()
844 static u8 const ks[3][AES_BLOCK_SIZE] = { in xcbc_setkey()
845 { [0 ... AES_BLOCK_SIZE - 1] = 0x1 }, in xcbc_setkey()
846 { [0 ... AES_BLOCK_SIZE - 1] = 0x2 }, in xcbc_setkey()
847 { [0 ... AES_BLOCK_SIZE - 1] = 0x3 }, in xcbc_setkey()
852 u8 key[AES_BLOCK_SIZE]; in xcbc_setkey()
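The two setkey hits derive the MAC constants. cmac_setkey() (line 831) encrypts an all-zero block, (u8[AES_BLOCK_SIZE]){}, then doubles the result in GF(2^128) to obtain K1 and K2; xcbc_setkey() instead encrypts the three fixed blocks 0x01.., 0x02.., 0x03.. (the RFC 3566 construction). A sketch of the XCBC derivation — aes_encrypt_block_keyed() is a hypothetical one-block primitive; the range initializer is the same GCC extension the source itself uses:

    #include <stdint.h>

    #define AES_BLOCK_SIZE 16

    /* Hypothetical one-block primitive taking an expanded key. */
    void aes_encrypt_block_keyed(uint8_t dst[AES_BLOCK_SIZE],
                                 const uint8_t src[AES_BLOCK_SIZE],
                                 const void *key);

    /* XCBC subkey derivation: K1 keys the CBC-MAC, K2/K3 are XORed into
     * the last block (complete vs. padded). */
    static void xcbc_derive(uint8_t k1[AES_BLOCK_SIZE],
                            uint8_t k2[AES_BLOCK_SIZE],
                            uint8_t k3[AES_BLOCK_SIZE], const void *user_key)
    {
            static const uint8_t ks[3][AES_BLOCK_SIZE] = {
                    { [0 ... AES_BLOCK_SIZE - 1] = 0x1 },
                    { [0 ... AES_BLOCK_SIZE - 1] = 0x2 },
                    { [0 ... AES_BLOCK_SIZE - 1] = 0x3 },
            };

            aes_encrypt_block_keyed(k1, ks[0], user_key);
            aes_encrypt_block_keyed(k2, ks[1], user_key);
            aes_encrypt_block_keyed(k3, ks[2], user_key);
    }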
871 memset(ctx->dg, 0, AES_BLOCK_SIZE); in mac_init()
890 in += (blocks - rem) * AES_BLOCK_SIZE; in mac_do_update()
899 crypto_xor(dg, in, AES_BLOCK_SIZE); in mac_do_update()
900 in += AES_BLOCK_SIZE; in mac_do_update()
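The mac_do_update() hits are the CBC-MAC core: XOR the next message block into the running digest, then encrypt the digest. The (blocks - rem) * AES_BLOCK_SIZE advance at line 890 is the NEON bulk path doing the same thing many blocks at a time; lines 899-900 are the scalar fallback. A sketch of that fallback loop (aes_encrypt_block_keyed() hypothetical, as before):

    #include <stdint.h>

    #define AES_BLOCK_SIZE 16

    void aes_encrypt_block_keyed(uint8_t dst[AES_BLOCK_SIZE],
                                 const uint8_t src[AES_BLOCK_SIZE],
                                 const void *key);

    /* Scalar fallback of mac_do_update(): fold each block into the
     * running digest with XOR, then encrypt the digest (CBC-MAC). */
    static void cbcmac_blocks(uint8_t dg[AES_BLOCK_SIZE], const uint8_t *in,
                              int blocks, const void *key)
    {
            while (blocks--) {
                    for (int i = 0; i < AES_BLOCK_SIZE; i++)
                            dg[i] ^= in[i];     /* crypto_xor(dg, in, 16) */
                    aes_encrypt_block_keyed(dg, dg, key);
                    in += AES_BLOCK_SIZE;       /* line 900 */
            }
    }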
916 if ((ctx->len % AES_BLOCK_SIZE) == 0 && in mac_update()
917 (ctx->len + len) > AES_BLOCK_SIZE) { in mac_update()
919 int blocks = len / AES_BLOCK_SIZE; in mac_update()
921 len %= AES_BLOCK_SIZE; in mac_update()
926 p += blocks * AES_BLOCK_SIZE; in mac_update()
929 ctx->len = AES_BLOCK_SIZE; in mac_update()
935 l = min(len, AES_BLOCK_SIZE - ctx->len); in mac_update()
937 if (l <= AES_BLOCK_SIZE) { in mac_update()
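mac_update() buffers at most one block in dg[], with one subtlety visible at line 929: when the data ends exactly on a block boundary, ctx->len is set to AES_BLOCK_SIZE rather than 0, so finalization can tell "complete last block" apart from "nothing buffered" (CMAC keys off that distinction, below). A runnable model of the buffer length this bookkeeping produces:

    #include <stdio.h>

    #define AES_BLOCK_SIZE 16

    /* Invariant of mac_update()'s buffering: after absorbing 'add' bytes
     * on top of 'cur' buffered bytes, the buffer holds the sub-block
     * remainder, except that a block-aligned, non-empty total leaves a
     * full block in flight (len = AES_BLOCK_SIZE, not 0). */
    static unsigned int mac_buffer_len(unsigned int cur, unsigned int add)
    {
            unsigned int total = cur + add;
            unsigned int rem = total % AES_BLOCK_SIZE;

            if (total == 0)
                    return 0;
            return rem ? rem : AES_BLOCK_SIZE;
    }

    int main(void)
    {
            printf("%u\n", mac_buffer_len(0, 32)); /* 16: last block kept whole */
            printf("%u\n", mac_buffer_len(0, 20)); /* 4: partial tail buffered  */
            return 0;
    }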
955 memcpy(out, ctx->dg, AES_BLOCK_SIZE); in cbcmac_final()
966 if (ctx->len != AES_BLOCK_SIZE) { in cmac_final()
968 consts += AES_BLOCK_SIZE; in cmac_final()
973 memcpy(out, ctx->dg, AES_BLOCK_SIZE); in cmac_final()
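cbcmac_final() (line 955) just copies the digest out; cmac_final() first folds in one of the two derived constants: K1 for a complete final block, or, after 0x80 padding, K2 at consts + AES_BLOCK_SIZE for an incomplete one (lines 966-968). A sketch of that finalization — aes_encrypt_block_keyed() hypothetical as before, with consts assumed to hold K1 || K2 back to back:

    #include <stdint.h>
    #include <string.h>

    #define AES_BLOCK_SIZE 16

    void aes_encrypt_block_keyed(uint8_t dst[AES_BLOCK_SIZE],
                                 const uint8_t src[AES_BLOCK_SIZE],
                                 const void *key);

    /* Finalization shape of cmac_final(): complete last block -> K1;
     * incomplete -> pad with 0x80 and use K2. */
    static void cmac_finish(uint8_t dg[AES_BLOCK_SIZE], unsigned int len,
                            const uint8_t consts[2 * AES_BLOCK_SIZE],
                            const void *key, uint8_t out[AES_BLOCK_SIZE])
    {
            const uint8_t *k = consts;

            if (len != AES_BLOCK_SIZE) {
                    dg[len] ^= 0x80;          /* 10* padding              */
                    k += AES_BLOCK_SIZE;      /* switch to K2 (line 968)  */
            }
            for (int i = 0; i < AES_BLOCK_SIZE; i++)
                    dg[i] ^= k[i];
            aes_encrypt_block_keyed(dg, dg, key);
            memcpy(out, dg, AES_BLOCK_SIZE);  /* line 973 */
    }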
982 .base.cra_blocksize = AES_BLOCK_SIZE,
984 2 * AES_BLOCK_SIZE,
987 .digestsize = AES_BLOCK_SIZE,
997 .base.cra_blocksize = AES_BLOCK_SIZE,
999 2 * AES_BLOCK_SIZE,
1002 .digestsize = AES_BLOCK_SIZE,
1016 .digestsize = AES_BLOCK_SIZE,
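Last, the 2 * AES_BLOCK_SIZE in the cra_ctxsize lines (984/999) is the extra room appended to the transform context for those two derived constants; plain cbcmac (line 1016) reserves none, and all three MACs emit a full-block digest. A sketch of the layout this implies — an assumption inferred from the hits, not a quote of the source:

    #define AES_BLOCK_SIZE 16
    typedef unsigned char u8;

    /* Placeholder for the expanded AES key schedule. */
    struct crypto_aes_ctx { unsigned int rk[60]; };

    struct mac_tfm_ctx {
            struct crypto_aes_ctx key;
            u8 consts[];  /* cra_ctxsize adds 2 * AES_BLOCK_SIZE here for
                           * cmac (K1||K2) and xcbc (K2||K3); 0 for cbcmac */
    };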